
Merge branch 'Branch_RTRefactoring' of https://github.com/Unity-Technologies/ScriptableRenderLoop into Branch_RTRefactoring

Branch: main
Julien Ignace, 7 years ago
Commit edd34849
19 changed files with 96 additions and 84 deletions
  1. ScriptableRenderPipeline/Core/CoreRP/RTHandle.cs (9 changed lines)
  2. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/VolumeRendering.hlsl (7 changed lines)
  3. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (7 changed lines)
  4. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/LightLoopSettingsUI.cs (1 changed line)
  5. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/RenderPipelineSettingsUI.cs (8 changed lines)
  6. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedRenderPipelineSettings.cs (16 changed lines)
  7. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (16 changed lines)
  8. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (2 changed lines)
  9. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (50 changed lines)
  10. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopSettings.cs (2 changed lines)
  11. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute (28 changed lines)
  12. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute (2 changed lines)
  13. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Builtin/BuiltinData.cs (6 changed lines)
  14. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs (2 changed lines)
  15. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs (10 changed lines)
  16. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/RenderPipelineSettings.cs (12 changed lines)
  17. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs (2 changed lines)
  18. /ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs.meta (0 changed lines, renamed)
  19. /ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs (0 changed lines, renamed)

ScriptableRenderPipeline/Core/CoreRP/RTHandle.cs (9 changed lines)


bindTextureMS = false;
}
// MSAA Does not support random read/write.
if (allocForMSAA && enableRandomWrite == true)
{
Debug.LogWarning("RTHandle allocated with MSAA can't be enableRandomWrite.");
enableRandomWrite = false;
}
bool UAV = allocForMSAA ? false : enableRandomWrite; // MSAA Does not support random read/write.
bool UAV = enableRandomWrite;
RTCategory category = allocForMSAA ? RTCategory.MSAA : RTCategory.Regular;
var rt = new RenderTexture(width, height, (int)depthBufferBits, colorFormat, sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear)
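
The guard above reflects a hard GPU constraint: multisampled render targets cannot be bound for random write (UAV access). A minimal standalone sketch of the same guard, assuming a simplified allocation helper with hypothetical names:

    using UnityEngine;

    static class MsaaTargetAllocator
    {
        // Mirrors the RTHandle logic above: if MSAA is requested, random write is
        // disabled (with a warning) before the texture is created.
        public static RenderTexture Alloc(int width, int height, RenderTextureFormat format,
                                          bool allocForMSAA, int sampleCount, bool enableRandomWrite)
        {
            if (allocForMSAA && enableRandomWrite)
            {
                Debug.LogWarning("MSAA render targets can't use random write; disabling it.");
                enableRandomWrite = false;
            }

            var rt = new RenderTexture(width, height, 0, format)
            {
                antiAliasing      = allocForMSAA ? sampleCount : 1, // 1 = no MSAA
                enableRandomWrite = enableRandomWrite               // always false when MSAA is on
            };
            rt.Create();
            return rt;
        }
    }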

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/VolumeRendering.hlsl (7 changed lines)


real x = 1 - exp(-extinction * intervalLength);
// Avoid division by 0.
real rcpExt = (extinction != 0) ? rcp(extinction) : 0;
weight = x * rcpExt;
offset = -log(1 - rndVal * x) * rcpExt;
weight = x * rcp(extinction);
offset = -log(1 - rndVal * x) * rcp(extinction);
}
// Implements equiangular light sampling.
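
The change above guards the analytic transmittance-based sampling of a homogeneous medium against a zero extinction coefficient, which would otherwise produce a division by zero. A C# transcription of the guarded math, as a sketch (HLSL's real/rcp are replaced with plain float operations):

    using UnityEngine;

    static class ExponentialMediumSampling
    {
        // Importance-sample a distance 'offset' along [0, intervalLength] proportionally
        // to transmittance, and return the associated 'weight' (the integral of the
        // transmittance over the interval). Mirrors the guarded HLSL above.
        public static void Sample(float extinction, float intervalLength, float rndVal,
                                  out float offset, out float weight)
        {
            float x = 1.0f - Mathf.Exp(-extinction * intervalLength);
            // Guard: with extinction == 0 (vacuum) both results collapse to 0 instead of NaN/Inf.
            float rcpExt = (extinction != 0.0f) ? 1.0f / extinction : 0.0f;
            weight = x * rcpExt;
            offset = -Mathf.Log(1.0f - rndVal * x) * rcpExt;
        }
    }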

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (7 changed lines)


int m_ActualHeight;
// This is the scale and bias of the camera viewport compared to the reference size of our Render Targets (RTHandle.maxSize)
Vector2 m_CameraScaleBias;
// Current msaa sample
MSAASamples m_msaaSamples;
public MSAASamples msaaSamples { get { return m_msaaSamples; } }
public Matrix4x4 viewProjMatrix
{

}
// Unfortunately sometime (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
MSAASamples msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
RTHandle.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, msaaSamples);
m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
RTHandle.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
int maxWidth = RTHandle.maxWidth;
int maxHeight = RTHandle.maxHeight;
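
The camera now caches the resolved MSAA sample count in m_msaaSamples and exposes it through the msaaSamples property, so later passes (for example the light loop, which uploads it as g_iNumSamplesMSAA further down in this diff) can read it instead of re-deriving it. A sketch of the resolve-and-cache step, with the null fallback the comment above describes (the method name is hypothetical):

    // Resolve the sample count once per frame, tolerating a missing pipeline asset
    // (as can happen in editor tooling that swaps the active pipeline), then hand it
    // to the RTHandle system together with the actual viewport size.
    void UpdateMsaaState(FrameSettings frameSettings)
    {
        m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount
                                                     : MSAASamples.None;
        RTHandle.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
    }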

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/LightLoopSettingsUI.cs (1 changed line)


if (EditorGUILayout.BeginFadeGroup(s.isSectionExpandedEnableTileAndCluster.faded))
{
EditorGUI.indentLevel++;
EditorGUILayout.PropertyField(p.isFptlEnabled, _.GetContent("Enable FPTL"));
EditorGUILayout.PropertyField(p.enableFptlForForwardOpaque, _.GetContent("Enable FPTL For Forward Opaque"));
EditorGUILayout.PropertyField(p.enableBigTilePrepass, _.GetContent("Enable Big Tile Prepass"));
EditorGUILayout.PropertyField(p.enableComputeLightEvaluation, _.GetContent("Enable Compute Light Evaluation"));

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/RenderPipelineSettingsUI.cs (8 changed lines)


EditorGUILayout.PropertyField(d.supportSSR, _.GetContent("Support SSR"));
EditorGUILayout.PropertyField(d.supportSSAO, _.GetContent("Support SSAO"));
EditorGUILayout.PropertyField(d.supportDBuffer, _.GetContent("Support Decal Buffer"));
EditorGUILayout.PropertyField(d.supportMSAAAntiAliasing, _.GetContent("Support MSAA Anti-Aliasing"));
EditorGUILayout.PropertyField(d.supportMSAA, _.GetContent("Support Multi Sampling Anti-Aliasing"));
EditorGUILayout.PropertyField(d.supportsForwardOnly, _.GetContent("Support Forward Only"));
EditorGUILayout.PropertyField(d.supportsMotionVectors, _.GetContent("Support Motion Vectors"));
EditorGUILayout.PropertyField(d.supportsStereo, _.GetContent("Support Stereo Rendering"));
EditorGUILayout.PropertyField(d.supportForwardOnly, _.GetContent("Support Forward Only"));
EditorGUILayout.PropertyField(d.supportMotionVectors, _.GetContent("Support Motion Vectors"));
EditorGUILayout.PropertyField(d.supportStereo, _.GetContent("Support Stereo Rendering"));
--EditorGUI.indentLevel;
}
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedRenderPipelineSettings.cs (16 changed lines)


public SerializedProperty supportSSR;
public SerializedProperty supportSSAO;
public SerializedProperty supportDBuffer;
public SerializedProperty supportMSAAAntiAliasing;
public SerializedProperty supportMSAA;
public SerializedProperty supportsForwardOnly;
public SerializedProperty supportsMotionVectors;
public SerializedProperty supportsStereo;
public SerializedProperty supportForwardOnly;
public SerializedProperty supportMotionVectors;
public SerializedProperty supportStereo;
public SerializedGlobalLightLoopSettings lightLoopSettings;
public SerializedShadowInitParameters shadowInitParams;

supportSSR = root.Find((RenderPipelineSettings s) => s.supportSSR);
supportSSAO = root.Find((RenderPipelineSettings s) => s.supportSSAO);
supportDBuffer = root.Find((RenderPipelineSettings s) => s.supportDBuffer);
supportMSAAAntiAliasing = root.Find((RenderPipelineSettings s) => s.supportMSAAAntiAliasing);
supportMSAA = root.Find((RenderPipelineSettings s) => s.supportMSAA);
supportsForwardOnly = root.Find((RenderPipelineSettings s) => s.supportsForwardOnly);
supportsMotionVectors = root.Find((RenderPipelineSettings s) => s.supportsMotionVectors);
supportsStereo = root.Find((RenderPipelineSettings s) => s.supportsStereo);
supportForwardOnly = root.Find((RenderPipelineSettings s) => s.supportForwardOnly);
supportMotionVectors = root.Find((RenderPipelineSettings s) => s.supportMotionVectors);
supportStereo = root.Find((RenderPipelineSettings s) => s.supportStereo);
lightLoopSettings = new SerializedGlobalLightLoopSettings(root.Find((RenderPipelineSettings s) => s.lightLoopSettings));
shadowInitParams = new SerializedShadowInitParameters(root.Find((RenderPipelineSettings s) => s.shadowInitParams));

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (16 changed lines)


// Initial state of the RTHandle system.
// Tells the system that we will require MSAA or not so that we can avoid wasteful render texture allocation.
// TODO: Might want to initialize to at least the window resolution to avoid un-necessary re-alloc in the player
RTHandle.Initialize(1, 1, m_Asset.renderPipelineSettings.supportMSAAAntiAliasing, m_Asset.renderPipelineSettings.msaaSampleCount);
RTHandle.Initialize(1, 1, m_Asset.renderPipelineSettings.supportMSAA, m_Asset.renderPipelineSettings.msaaSampleCount);
if(m_Asset.renderPipelineSettings.supportDBuffer)
if (m_Asset.renderPipelineSettings.supportDBuffer)
m_DbufferManager.CreateBuffers();
m_BufferPyramid.CreateBuffers();

m_AmbientOcclusionBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Bilinear, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true);
}
if (m_Asset.renderPipelineSettings.supportsMotionVectors)
if (m_Asset.renderPipelineSettings.supportMotionVectors)
m_VelocityBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetVelocityBufferFormat(), sRGB: Builtin.GetVelocityBuffer_sRGBFlag(), enableMSAA: true);
m_VelocityBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetVelocityBufferFormat(), sRGB: Builtin.GetVelocityBufferSRGBFlag(), enableMSAA: true);
m_DistortionBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetDistortionBufferFormat(), sRGB: Builtin.GetDistortionBuffer_sRGBFlag());
m_DistortionBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetDistortionBufferFormat(), sRGB: Builtin.GetDistortionBufferSRGBFlag());
// TODO: For MSAA, we'll need to add a Draw path in order to support MSAA properly
m_DeferredShadowBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, enableRandomWrite: true);

if (m_FrameSettings.enableAsyncCompute)
{
m_LightLoop.BuildGPULightListAsyncEnd(camera, cmd, buildGPULightListsCompleteFence);
m_LightLoop.BuildGPULightListAsyncEnd(hdCamera, cmd, buildGPULightListsCompleteFence);
}
else
{

}
else
{
using (new ProfilingSample(cmd, "Blit to final RT", CustomSamplerId.CopyDepthForSceneView.GetSampler()))
using (new ProfilingSample(cmd, "Blit to final RT", CustomSamplerId.BlitToFinalRT.GetSampler()))
{
// This Blit will flip the screen on anything other than openGL
HDUtils.BlitCameraTexture(cmd, hdCamera, m_CameraColorBuffer, BuiltinRenderTextureType.CameraTarget);

// Make sure RenderDebug does not change the current Render Target
if (camera.cameraType == CameraType.SceneView)
{
using (new ProfilingSample(cmd, "Copy Depth For SceneView", CustomSamplerId.BlitToFinalRT.GetSampler()))
using (new ProfilingSample(cmd, "Copy Depth For SceneView", CustomSamplerId.CopyDepthForSceneView.GetSampler()))
{
m_CopyDepth.SetTexture(HDShaderIDs._InputDepth, m_CameraDepthStencilBuffer);
cmd.Blit(null, BuiltinRenderTextureType.CameraTarget, m_CopyDepth);
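
As the comment near RTHandle.Initialize explains, the RTHandle system is told up front whether MSAA targets will ever be required so it can skip allocating MSAA variants otherwise, and individual targets are then allocated as a fraction of a shared reference size. A condensed sketch of that pattern built from the calls in this file (the color format below is illustrative):

    // Prime the RTHandle system once at pipeline creation. The 1x1 reference size is
    // grown later via RTHandle.SetReferenceSize.
    RTHandle.Initialize(1, 1, m_Asset.renderPipelineSettings.supportMSAA,
                        m_Asset.renderPipelineSettings.msaaSampleCount);

    // Targets are expressed relative to the reference size (Vector2.one = full resolution),
    // so they rescale with it instead of being reallocated per camera.
    var exampleBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Bilinear,
                                       colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false,
                                       enableMSAA: true);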

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (2 changed lines)


public static readonly int g_mScrProjection = Shader.PropertyToID("g_mScrProjection");
public static readonly int g_mInvScrProjection = Shader.PropertyToID("g_mInvScrProjection");
public static readonly int g_iLog2NumClusters = Shader.PropertyToID("g_iLog2NumClusters");
public static readonly int g_screenSize = Shader.PropertyToID("g_screenSize");
public static readonly int g_iNumSamplesMSAA = Shader.PropertyToID("g_iNumSamplesMSAA");
public static readonly int g_fNearPlane = Shader.PropertyToID("g_fNearPlane");
public static readonly int g_fFarPlane = Shader.PropertyToID("g_fFarPlane");
public static readonly int g_fClustScale = Shader.PropertyToID("g_fClustScale");

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (50 changed lines)


}
}
int GetNumTileFtplX(Camera camera)
int GetNumTileFtplX(HDCamera hdCamera)
return (camera.pixelWidth + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
return (hdCamera.actualWidth + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
int GetNumTileFtplY(Camera camera)
int GetNumTileFtplY(HDCamera hdCamera)
return (camera.pixelHeight + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
return (hdCamera.actualHeight + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
int GetNumTileClusteredX(Camera camera)
int GetNumTileClusteredX(HDCamera hdCamera)
return (camera.pixelWidth + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
return (hdCamera.actualWidth + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
int GetNumTileClusteredY(Camera camera)
int GetNumTileClusteredY(HDCamera hdCamera)
return (camera.pixelHeight + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
return (hdCamera.actualHeight + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
}
public bool GetFeatureVariantsEnabled()

return (uint)logVolume << 20 | (uint)lightVolumeType << 17 | listType << 16 | ((uint)probeIndex & 0xFFFF);
}
void VoxelLightListGeneration(CommandBuffer cmd, Camera camera, Matrix4x4 projscr, Matrix4x4 invProjscr, RenderTargetIdentifier cameraDepthBufferRT)
void VoxelLightListGeneration(CommandBuffer cmd, HDCamera hdCamera, Matrix4x4 projscr, Matrix4x4 invProjscr, RenderTargetIdentifier cameraDepthBufferRT)
Camera camera = hdCamera.camera;
// clear atomic offset index
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_ClearVoxelAtomicKernel, HDShaderIDs.g_LayeredSingleIdxBuffer, s_GlobalLightListAtomic);
cmd.DispatchCompute(buildPerVoxelLightListShader, s_ClearVoxelAtomicKernel, 1, 1, 1);

cmd.SetComputeMatrixParam(buildPerVoxelLightListShader, HDShaderIDs.g_mInvScrProjection, invProjscr);
cmd.SetComputeIntParam(buildPerVoxelLightListShader, HDShaderIDs.g_iLog2NumClusters, k_Log2NumClusters);
cmd.SetComputeVectorParam(buildPerVoxelLightListShader, HDShaderIDs.g_screenSize, hdCamera.screenSize);
cmd.SetComputeIntParam(buildPerVoxelLightListShader, HDShaderIDs.g_iNumSamplesMSAA, (int)hdCamera.msaaSamples);
//Vector4 v2_near = invProjscr * new Vector4(0.0f, 0.0f, 0.0f, 1.0f);
//Vector4 v2_far = invProjscr * new Vector4(0.0f, 0.0f, 1.0f, 1.0f);

cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs._LightVolumeData, s_LightVolumeDataBuffer);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
var numTilesX = GetNumTileClusteredX(camera);
var numTilesY = GetNumTileClusteredY(camera);
var numTilesX = GetNumTileClusteredX(hdCamera);
var numTilesY = GetNumTileClusteredY(hdCamera);
cmd.DispatchCompute(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, numTilesX, numTilesY, 1);
}

cmd.DispatchCompute(buildPerBigTileLightListShader, s_GenListPerBigTileKernel, numBigTilesX, numBigTilesY, 1);
}
var numTilesX = GetNumTileFtplX(camera);
var numTilesY = GetNumTileFtplY(camera);
var numTilesX = GetNumTileFtplX(hdCamera);
var numTilesY = GetNumTileFtplY(hdCamera);
var numTiles = numTilesX * numTilesY;
bool enableFeatureVariants = GetFeatureVariantsEnabled();

}
// Cluster
VoxelLightListGeneration(cmd, camera, projscr, invProjscr, cameraDepthBufferRT);
VoxelLightListGeneration(cmd, hdCamera, projscr, invProjscr, cameraDepthBufferRT);
if (enableFeatureVariants)
{

cmd.SetRenderTarget(BuiltinRenderTextureType.None);
BuildGPULightListsCommon(hdCamera, cmd, cameraDepthBufferRT, stencilTextureRT, skyEnabled);
PushGlobalParams(hdCamera.camera, cmd);
PushGlobalParams(hdCamera, cmd);
}
public GPUFence BuildGPULightListsAsyncBegin(HDCamera hdCamera, ScriptableRenderContext renderContext, RenderTargetIdentifier cameraDepthBufferRT, RenderTargetIdentifier stencilTextureRT, GPUFence startFence, bool skyEnabled)

return completeFence;
}
public void BuildGPULightListAsyncEnd(Camera camera, CommandBuffer cmd, GPUFence doneFence)
public void BuildGPULightListAsyncEnd(HDCamera hdCamera, CommandBuffer cmd, GPUFence doneFence)
PushGlobalParams(camera, cmd);
PushGlobalParams(hdCamera, cmd);
}
private void UpdateDataBuffers()

return add;
}
void PushGlobalParams(Camera camera, CommandBuffer cmd)
void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd)
Camera camera = hdCamera.camera;
// Shadows
m_ShadowMgr.SyncData();
m_ShadowMgr.BindResources(cmd, null, 0);

cmd.SetGlobalInt(HDShaderIDs._EnvLightCount, m_lightList.envLights.Count);
cmd.SetGlobalBuffer(HDShaderIDs._ShadowDatas, s_shadowDatas);
cmd.SetGlobalInt(HDShaderIDs._NumTileFtplX, GetNumTileFtplX(camera));
cmd.SetGlobalInt(HDShaderIDs._NumTileFtplY, GetNumTileFtplY(camera));
cmd.SetGlobalInt(HDShaderIDs._NumTileFtplX, GetNumTileFtplX(hdCamera));
cmd.SetGlobalInt(HDShaderIDs._NumTileFtplY, GetNumTileFtplY(hdCamera));
cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredX, GetNumTileClusteredX(camera));
cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredY, GetNumTileClusteredY(camera));
cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredX, GetNumTileClusteredX(hdCamera));
cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredY, GetNumTileClusteredY(hdCamera));
if (m_FrameSettings.lightLoopSettings.enableBigTilePrepass)
cmd.SetGlobalBuffer(HDShaderIDs.g_vBigTileLightList, s_BigTileLightList);

cmd.SetGlobalFloat(HDShaderIDs.g_fNearPlane, camera.nearClipPlane);
cmd.SetGlobalFloat(HDShaderIDs.g_fFarPlane, camera.farClipPlane);
cmd.SetGlobalInt(HDShaderIDs.g_iLog2NumClusters, k_Log2NumClusters);
cmd.SetGlobalInt(HDShaderIDs.g_isLogBaseBufferEnabled, k_UseDepthBuffer ? 1 : 0);
cmd.SetGlobalBuffer(HDShaderIDs.g_vLayeredOffsetsBuffer, s_PerVoxelOffset);
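
The tile-count helpers above switched from camera.pixelWidth/pixelHeight to hdCamera.actualWidth/actualHeight, since with the RTHandle refactoring the actual rendering resolution can differ from the camera's nominal pixel size. The arithmetic itself is an integer ceiling division; a minimal standalone sketch:

    static class TileMath
    {
        // ceil(pixels / tileSize) in integer math, as used by GetNumTileFtplX/Y and
        // GetNumTileClusteredX/Y above.
        public static int NumTiles(int pixels, int tileSize)
        {
            return (pixels + (tileSize - 1)) / tileSize;
        }
    }

    // Example: a 1000-pixel-wide viewport with 16-pixel tiles needs (1000 + 15) / 16 = 63 tiles.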

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopSettings.cs (2 changed lines)


[Serializable]
public class LightLoopSettings
{
public static string kEnableFptlForForwardOpaque = "Enable Fptl for Forward Opaque";
public static string kEnableTileCluster = "Enable Tile/Cluster";
public static string kEnableBigTile = "Enable Big Tile";
public static string kEnableComputeLighting = "Enable Compute Lighting";

static public void RegisterDebug(String menuName, LightLoopSettings lightLoopSettings)
{
DebugMenuManager.instance.AddDebugItem<bool>(menuName, kEnableFptlForForwardOpaque, () => lightLoopSettings.enableFptlForForwardOpaque, (value) => lightLoopSettings.enableFptlForForwardOpaque = (bool)value);
DebugMenuManager.instance.AddDebugItem<bool>(menuName, kEnableTileCluster, () => lightLoopSettings.enableTileAndCluster, (value) => lightLoopSettings.enableTileAndCluster = (bool)value);
DebugMenuManager.instance.AddDebugItem<bool>(menuName, kEnableBigTile, () => lightLoopSettings.enableBigTilePrepass, (value) => lightLoopSettings.enableBigTilePrepass = (bool)value);
DebugMenuManager.instance.AddDebugItem<bool>(menuName, kEnableComputeLighting, () => lightLoopSettings.enableComputeLightEvaluation, (value) => lightLoopSettings.enableComputeLightEvaluation = (bool)value);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute (28 changed lines)


float g_fNearPlane;
float g_fFarPlane;
int g_iLog2NumClusters; // numClusters = (1<<g_iLog2NumClusters)
float4 g_screenSize;
int g_iNumSamplesMSAA;
CBUFFER_END
#include "ClusteredUtils.hlsl"

uint2 tileIDX = u3GroupID.xy;
uint t=threadID;
uint iWidth;
uint iHeight;
#ifdef MSAA_ENABLED
uint iNumSamplesMSAA;
g_depth_tex.GetDimensions(iWidth, iHeight, iNumSamplesMSAA);
#else
g_depth_tex.GetDimensions(iWidth, iHeight);
#endif
uint nrTilesX = (iWidth+(TILE_SIZE_CLUSTERED-1))>>log2TileSize;
uint nrTilesY = (iHeight+(TILE_SIZE_CLUSTERED-1))>>log2TileSize;
uint nrTilesX = ((uint)g_screenSize.x +(TILE_SIZE_CLUSTERED-1))>>log2TileSize;
uint nrTilesY = ((uint)g_screenSize.y +(TILE_SIZE_CLUSTERED-1))>>log2TileSize;
uint2 viTilUR = min( viTilLL+uint2(TILE_SIZE_CLUSTERED,TILE_SIZE_CLUSTERED), uint2(iWidth, iHeight) ); // not width and height minus 1 since viTilUR represents the end of the tile corner.
uint2 viTilUR = min( viTilLL+uint2(TILE_SIZE_CLUSTERED,TILE_SIZE_CLUSTERED), uint2(g_screenSize.x, g_screenSize.y) ); // not width and height minus 1 since viTilUR represents the end of the tile corner.
if(t==0)
{

for(int idx=t; idx<(TILE_SIZE_CLUSTERED*TILE_SIZE_CLUSTERED); idx+=NR_THREADS)
{
uint2 uPixCrd = min( uint2(viTilLL.x+(idx&(TILE_SIZE_CLUSTERED-1)), viTilLL.y+(idx>>log2TileSize)), uint2(iWidth-1, iHeight-1) );
uint2 uPixCrd = min( uint2(viTilLL.x+(idx&(TILE_SIZE_CLUSTERED-1)), viTilLL.y+(idx>>log2TileSize)), uint2(g_screenSize.x-1, g_screenSize.y-1) );
for(uint i=0; i<iNumSamplesMSAA; i++)
for(uint i=0; i<g_iNumSamplesMSAA; i++)
{
const float fDpth = FetchDepthMSAA(g_depth_tex, uPixCrd, i);
#else

if(dpt_ma<=0.0) dpt_ma = VIEWPORT_SCALE_Z; // assume sky pixel
#endif
float2 vTileLL = float2(viTilLL.x/(float) iWidth, viTilLL.y/(float) iHeight);
float2 vTileUR = float2(viTilUR.x/(float) iWidth, viTilUR.y/(float) iHeight);
float2 vTileLL = float2(viTilLL.x/g_screenSize.x, viTilLL.y/g_screenSize.y);
float2 vTileUR = float2(viTilUR.x/g_screenSize.x, viTilUR.y/g_screenSize.y);
// build coarse list using AABB
#ifdef USE_TWO_PASS_TILED_LIGHTING

int iNrCoarseLights = min(lightOffs,MAX_NR_COARSE_ENTRIES);
#ifdef PERFORM_SPHERICAL_INTERSECTION_TESTS
iNrCoarseLights = SphericalIntersectionTests( t, iNrCoarseLights, float2(min(viTilLL.xy+uint2(TILE_SIZE_CLUSTERED/2,TILE_SIZE_CLUSTERED/2), uint2(iWidth-1, iHeight-1))) );
iNrCoarseLights = SphericalIntersectionTests( t, iNrCoarseLights, float2(min(viTilLL.xy+uint2(TILE_SIZE_CLUSTERED/2,TILE_SIZE_CLUSTERED/2), uint2(g_screenSize.x-1, g_screenSize.y-1))) );
#endif
#ifdef ENABLE_DEPTH_TEXTURE_BACKPLANE
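
The kernel above stops querying the depth texture's dimensions and instead uses the explicitly uploaded g_screenSize (and g_iNumSamplesMSAA), because an RTHandle-backed depth texture can be larger than the region actually being rendered, so GetDimensions() is no longer a reliable source for the viewport size. The CPU side of that contract appears in the LightLoop.cs hunk earlier in this diff; condensed here as a sketch:

    // Upload the actual viewport size and MSAA sample count so the compute kernel does
    // not rely on GetDimensions() of a potentially oversized shared render target.
    cmd.SetComputeVectorParam(buildPerVoxelLightListShader, HDShaderIDs.g_screenSize, hdCamera.screenSize);
    cmd.SetComputeIntParam(buildPerVoxelLightListShader, HDShaderIDs.g_iNumSamplesMSAA, (int)hdCamera.msaaSamples);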

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute (2 changed lines)


// We consider it to be constant along the interval [t0, t1] (within the voxel).
// TODO: piecewise linear.
float3 scattering = _GlobalFog_Scattering;
float extinction = _GlobalFog_Extinction;
float extinction = max(_GlobalFog_Extinction, FLT_MIN); // Avoid NaNs
float asymmetry = _GlobalFog_Asymmetry;
// TODO: define a function ComputeGlobalFogCoefficients(float3 centerWS),
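
Clamping the extinction to FLT_MIN keeps later divisions and rcp() calls finite without branching, complementing the explicit zero check added to VolumeRendering.hlsl above. An equivalent guard in C#, with the constant written out (the helper name is illustrative):

    // Clamp a raw extinction coefficient away from zero so subsequent divisions stay finite.
    static float SafeExtinction(float rawExtinction)
    {
        const float FLT_MIN = 1.17549435e-38f; // smallest normalized 32-bit float
        return Mathf.Max(rawExtinction, FLT_MIN);
    }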

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Builtin/BuiltinData.cs (6 changed lines)


return RenderTextureFormat.ARGB32;
}
public static bool GetShadowMask_sRGBFlag()
public static bool GetShadowMaskSRGBFlag()
{
return false;
}

return RenderTextureFormat.RGHalf; // TODO: We should use 16bit normalized instead, better precision // RGInt
}
public static bool GetVelocityBuffer_sRGBFlag()
public static bool GetVelocityBufferSRGBFlag()
{
return false;
}

return RenderTextureFormat.ARGBHalf;
}
public static bool GetDistortionBuffer_sRGBFlag()
public static bool GetDistortionBufferSRGBFlag()
{
return false;
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs (2 changed lines)


public void InitSSSBuffers(GBufferManager gbufferManager, RenderPipelineSettings settings)
{
// TODO: For MSAA, at least initially, we can only support Jimenez, because we can't create MSAA + UAV render targets.
if (settings.supportsForwardOnly)
if (settings.supportForwardOnly)
{
// In case of full forward we must allocate the render target for forward SSS (or reuse one already existing)
// TODO: Provide a way to reuse a render target

ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs (10 changed lines)


// We have to fall back to forward-only rendering when scene view is using wireframe rendering mode
// as rendering everything in wireframe + deferred do not play well together
aggregate.enableForwardRenderingOnly = srcFrameSettings.enableForwardRenderingOnly || GL.wireframe || renderPipelineSettings.supportsForwardOnly;
aggregate.enableForwardRenderingOnly = srcFrameSettings.enableForwardRenderingOnly || GL.wireframe || renderPipelineSettings.supportForwardOnly;
aggregate.enableMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableMotionVectors && renderPipelineSettings.supportsMotionVectors;
aggregate.enableObjectMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportsMotionVectors;
aggregate.enableMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableMotionVectors && renderPipelineSettings.supportMotionVectors;
aggregate.enableObjectMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportMotionVectors;
aggregate.enableDBuffer = srcFrameSettings.enableDBuffer && renderPipelineSettings.supportDBuffer;
aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
aggregate.enableRoughRefraction = srcFrameSettings.enableRoughRefraction;

// Planar and real time cubemap doesn't need post process and render in FP16
aggregate.enablePostprocess = camera.cameraType != CameraType.Reflection && srcFrameSettings.enablePostprocess;
aggregate.enableStereo = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableStereo && XRSettings.isDeviceActive && (camera.stereoTargetEye == StereoTargetEyeMask.Both) && renderPipelineSettings.supportsStereo;
aggregate.enableStereo = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableStereo && XRSettings.isDeviceActive && (camera.stereoTargetEye == StereoTargetEyeMask.Both) && renderPipelineSettings.supportStereo;
// Force forward if we request stereo. TODO: We should not enforce that, users should be able to chose deferred
aggregate.enableForwardRenderingOnly = aggregate.enableForwardRenderingOnly || aggregate.enableStereo;

aggregate.enableTransparentObjects = srcFrameSettings.enableTransparentObjects;
aggregate.enableMSAA = srcFrameSettings.enableMSAA && renderPipelineSettings.supportMSAAAntiAliasing;
aggregate.enableMSAA = srcFrameSettings.enableMSAA && renderPipelineSettings.supportMSAA;
aggregate.ConfigureMSAADependentSettings();
aggregate.enableShadowMask = srcFrameSettings.enableShadowMask && renderPipelineSettings.supportShadowMask;
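
The aggregation above follows one rule: an effective feature flag is the per-camera request AND-ed with what the pipeline asset declares it supports, with a few camera-type and wireframe overrides layered on top. A reduced sketch of that rule (the helper is hypothetical):

    // A feature is effectively enabled only if the frame asked for it AND the pipeline
    // asset was configured to support it (which governs up-front resource allocation).
    static bool Effective(bool requestedByFrame, bool supportedByPipeline)
    {
        return requestedByFrame && supportedByPipeline;
    }

    // e.g. aggregate.enableMSAA = Effective(srcFrameSettings.enableMSAA, renderPipelineSettings.supportMSAA);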

ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/RenderPipelineSettings.cs (12 changed lines)


public bool supportSSR = true;
public bool supportSSAO = true;
public bool supportSubsurfaceScattering = true;
public bool supportsForwardOnly = false;
public bool supportForwardOnly = false;
public bool supportDBuffer = false;
public bool supportMSAAAntiAliasing = false;
public MSAASamples msaaSampleCount = MSAASamples.None;
public bool supportsMotionVectors = true;
public bool supportsStereo = false;
public bool supportDBuffer = false;
public bool supportMSAA = false;
public MSAASamples msaaSampleCount = MSAASamples.None;
public bool supportMotionVectors = true;
public bool supportStereo = false;
public GlobalLightLoopSettings lightLoopSettings = new GlobalLightLoopSettings();
public ShadowInitParameters shadowInitParams = new ShadowInitParameters();

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs (2 changed lines)


if (m_EnableShadowMask)
{
m_RTs[m_GBufferCount] = RTHandle.Alloc(Vector2.one, colorFormat: Builtin.GetShadowMaskBufferFormat(), sRGB: Builtin.GetShadowMask_sRGBFlag(), filterMode: FilterMode.Point);
m_RTs[m_GBufferCount] = RTHandle.Alloc(Vector2.one, colorFormat: Builtin.GetShadowMaskBufferFormat(), sRGB: Builtin.GetShadowMaskSRGBFlag(), filterMode: FilterMode.Point);
m_RTIDs[m_GBufferCount] = new RenderTargetIdentifier(m_RTs[m_GBufferCount]);
m_TextureShaderIDs[m_GBufferCount] = HDShaderIDs._ShadowMaskTexture;
}

/ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/GBufferManager.cs.meta → /ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs.meta

/ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/GBufferManager.cs → /ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs
