Merge branch 'master' into OnTileDeferred

Filip Iliescu · 7 years ago
Current commit: 4adbf45e
125 files changed, with 4692 insertions and 2712 deletions
  1. Assets/GraphicsTests/Framework/Editor/TestFramework.cs | 2
  2. Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Assets/2D/Common/box.prefab | 3
  3. Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Scenes/2D/SpriteDemo.unity | 208
  4. Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Scenes/MultiplePointLights.unity | 938
  5. Assets/ScriptableRenderPipeline/Core/Debugging/DebugActionManager.cs | 3
  6. Assets/ScriptableRenderPipeline/Core/Shadow/Shadow.cs | 125
  7. Assets/ScriptableRenderPipeline/Core/Shadow/ShadowBase.cs | 34
  8. Assets/ScriptableRenderPipeline/Core/Shadow/ShadowBase.cs.hlsl | 2
  9. Assets/ScriptableRenderPipeline/Core/TextureCache.cs | 67
  10. Assets/ScriptableRenderPipeline/Fptl/FptlLighting.cs | 74
  11. Assets/ScriptableRenderPipeline/Fptl/LightDefinitions.cs.hlsl | 2
  12. Assets/ScriptableRenderPipeline/Fptl/LightingTemplate.hlsl | 16
  13. Assets/ScriptableRenderPipeline/Fptl/ShadowDispatch.hlsl | 4
  14. Assets/ScriptableRenderPipeline/HDRenderPipeline/AdditionalData/HDAdditionalLightData.cs | 5
  15. Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.cs | 47
  16. Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.cs.hlsl | 11
  17. Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.hlsl | 39
  18. Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugFullScreen.shader | 124
  19. Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewTiles.shader | 40
  20. Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/LightingDebugPanel.cs | 2
  21. Assets/ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.cs | 18
  22. Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs | 655
  23. Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset | 12
  24. Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.cs | 2
  25. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/ScreenSpaceAmbientOcclusion.cs | 66
  26. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs | 66
  27. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs.hlsl | 180
  28. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/ShadowDispatch.hlsl | 4
  29. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs | 702
  30. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs.hlsl | 22
  31. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.hlsl | 2
  32. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl | 73
  33. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/builddispatchindirect.compute | 14
  34. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/lightlistbuild.compute | 4
  35. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/materialflags.compute | 7
  36. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/shadeopaque.compute | 68
  37. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.cs | 33
  38. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.cs.hlsl | 9
  39. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl | 702
  40. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitDataInternal.hlsl | 9
  41. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitTessellation.hlsl | 39
  42. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CombineSubsurfaceScattering.shader | 2
  43. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/SubsurfaceScatteringProfile.cs | 20
  44. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/RenderPipelineMaterial.cs | 2
  45. Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader | 43
  46. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderConfig.cs | 6
  47. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderConfig.cs.hlsl | 1
  48. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForward.hlsl | 1
  49. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassLightTransport.hlsl | 37
  50. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassVelocity.hlsl | 2
  51. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/VertMesh.hlsl | 2
  52. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderVariables.hlsl | 4
  53. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderVariablesFunctions.hlsl | 47
  54. Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/HDRISky/HDRISkyRenderer.cs | 9
  55. Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/ProceduralSky/ProceduralSkyRenderer.cs | 8
  56. Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/RuntimeFilterIBL.cs | 35
  57. Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/SkyManager.cs | 114
  58. Assets/ScriptableRenderPipeline/HDRenderPipeline/Utilities.cs | 162
  59. Assets/ScriptableRenderPipeline/LightweightPipeline/Editor/LightweightAssetInspector.cs | 44
  60. Assets/ScriptableRenderPipeline/LightweightPipeline/LightweightPipeline.cs | 170
  61. Assets/ScriptableRenderPipeline/LightweightPipeline/LightweightPipelineAsset.asset | 6
  62. Assets/ScriptableRenderPipeline/LightweightPipeline/LightweightPipelineAsset.cs | 26
  63. Assets/ScriptableRenderPipeline/LightweightPipeline/Materials/Lightweight-Default.mat | 4
  64. Assets/ScriptableRenderPipeline/LightweightPipeline/Materials/Lightweight-DefaultSprite.mat | 4
  65. Assets/ScriptableRenderPipeline/LightweightPipeline/Shaders/LightweightPipeline.shader | 19
  66. Assets/ScriptableRenderPipeline/LightweightPipeline/Shaders/LightweightPipelineCore.cginc | 2
  67. Assets/ScriptableRenderPipeline/LightweightPipeline/TestScenes/LDRenderPipelineBasicScene.unity | 50
  68. Assets/ScriptableRenderPipeline/LightweightPipeline/TestScenes/Materials/LDRenderPipeMaterials/MobilePlane.mat | 12
  69. Assets/ScriptableRenderPipeline/LightweightPipeline/TestScenes/Textures/154.JPG.meta | 17
  70. Assets/ScriptableRenderPipeline/ShaderLibrary/AreaLighting.hlsl | 3
  71. Assets/ScriptableRenderPipeline/ShaderLibrary/Common.hlsl | 28
  72. Assets/ScriptableRenderPipeline/ShaderLibrary/CommonMaterial.hlsl | 6
  73. Assets/ScriptableRenderPipeline/ShaderLibrary/Packing.hlsl | 16
  74. Assets/ScriptableRenderPipeline/ShaderLibrary/Shadow/Shadow.hlsl | 12
  75. Assets/ScriptableRenderPipeline/ShaderLibrary/Shadow/ShadowAlgorithms.hlsl | 78
  76. Assets/ScriptableRenderPipeline/ShaderLibrary/Shadow/ShadowAlgorithmsCustom.hlsl | 10
  77. Assets/ScriptableRenderPipeline/ShaderLibrary/Shadow/ShadowMoments.hlsl | 2
  78. Assets/ScriptableRenderPipeline/ShaderLibrary/Tessellation.hlsl | 12
  79. Assets/TestScenes/HDTest/BasicProfiling.unity | 195
  80. Assets/TestScenes/HDTest/CascadedShadowsTest.unity | 287
  81. Assets/TestScenes/HDTest/HDRenderLoopTest.unity | 593
  82. Assets/TestScenes/HDTest/Material/StandardShaderMaterials/Gray.mat | 3
  83. Assets/TestScenes/HDTest/Material/StandardShaderMaterials/Green.mat | 3
  84. Assets/TestScenes/HDTest/Material/StandardShaderMaterials/Red.mat | 3
  85. Assets/TestScenes/HDTest/Material/StandardShaderMaterials/Std_Blue_Emissive.mat | 3
  86. Assets/Textures/Batman.png | 404
  87. ProjectSettings/ProjectVersion.txt | 2
  88. README.md | 7
  89. Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Assets/Scripts.meta | 10
  90. Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Scenes/Materials.meta | 10
  91. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitReference.hlsl | 248
  92. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitReference.hlsl.meta | 10
  93. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader | 58
  94. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader.meta | 10
  95. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/SSSProfile/Resources.meta | 10
  96. Assets/ScriptableRenderPipeline/HDRenderPipeline/Resources.meta | 10
  97. Assets/ScriptableRenderPipeline/LightweightPipeline/Materials/Lightweight-DefaultETC1.mat | 79
  98. Assets/ScriptableRenderPipeline/LightweightPipeline/Materials/Lightweight-DefaultETC1.mat.meta | 10

Assets/GraphicsTests/Framework/Editor/TestFramework.cs | 2


yield return null;
}
while (Lightmapping.isRunning)
while (UnityEditor.Lightmapping.isRunning)
{
yield return null;
}

Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Assets/2D/Common/box.prefab | 3


m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10754, guid: 0000000000000000f000000000000000, type: 0}
- {fileID: 2100000, guid: e3ef893926d86c448a80512fe05b8a37, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0

m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 0
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Scenes/2D/SpriteDemo.unity | 208
File diff is too large to display.

Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Scenes/MultiplePointLights.unity | 938
File diff is too large to display.

Assets/ScriptableRenderPipeline/Core/Debugging/DebugActionManager.cs | 3


{
#if UNITY_EDITOR
// Grab reference to input manager
var currentSelection = UnityEditor.Selection.activeObject;
UnityEditor.EditorApplication.ExecuteMenuItem("Edit/Project Settings/Input");
var inputManager = UnityEditor.Selection.activeObject;

// Commit
soInputManager.ApplyModifiedProperties();
UnityEditor.Selection.activeObject = currentSelection;
#endif
}
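The middle of the method is elided in this hunk. As a rough, hypothetical sketch only (the class name, the AddAxis helper and the serialized property names m_Axes / m_Name are my assumptions about Unity's InputManager layout, not code from this commit), the usual way to edit the asset grabbed via Selection is to wrap it in a SerializedObject and commit with ApplyModifiedProperties(), which is presumably what soInputManager above refers to:

    #if UNITY_EDITOR
    using UnityEditor;

    public static class InputManagerEditingSketch
    {
        public static void AddAxis(string axisName)
        {
            // Same trick as the hunk above: executing the menu item selects the InputManager asset.
            var currentSelection = Selection.activeObject;
            EditorApplication.ExecuteMenuItem("Edit/Project Settings/Input");
            var inputManager = Selection.activeObject;

            // Hypothetical middle part: wrap the asset and append an entry to its serialized axes array.
            var soInputManager = new SerializedObject(inputManager);
            SerializedProperty axes = soInputManager.FindProperty("m_Axes");
            axes.arraySize++;
            SerializedProperty newAxis = axes.GetArrayElementAtIndex(axes.arraySize - 1);
            newAxis.FindPropertyRelative("m_Name").stringValue = axisName;

            // Commit and restore the user's selection, as the diff does.
            soInputManager.ApplyModifiedProperties();
            Selection.activeObject = currentSelection;
        }
    }
    #endif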

Assets/ScriptableRenderPipeline/Core/Shadow/Shadow.cs | 125


Object.DestroyImmediate(m_DebugMaterial);
}
override public bool Reserve( FrameId frameId, ref ShadowData shadowData, ShadowRequest sr, uint width, uint height, ref VectorArray<ShadowData> entries, ref VectorArray<ShadowPayload> payload, List<VisibleLight> lights)
override public bool Reserve( FrameId frameId, Camera camera, bool cameraRelativeRendering, ref ShadowData shadowData, ShadowRequest sr, uint width, uint height, ref VectorArray<ShadowData> entries, ref VectorArray<ShadowPayload> payload, List<VisibleLight> lights)
{
for( uint i = 0, cnt = sr.facecount; i < cnt; ++i )
{

return Reserve( frameId, ref shadowData, sr, m_TmpWidths, m_TmpHeights, ref entries, ref payload, lights );
return Reserve( frameId, camera, cameraRelativeRendering, ref shadowData, sr, m_TmpWidths, m_TmpHeights, ref entries, ref payload, lights );
override public bool Reserve( FrameId frameId, ref ShadowData shadowData, ShadowRequest sr, uint[] widths, uint[] heights, ref VectorArray<ShadowData> entries, ref VectorArray<ShadowPayload> payload, List<VisibleLight> lights)
override public bool Reserve( FrameId frameId, Camera camera, bool cameraRelativeRendering, ref ShadowData shadowData, ShadowRequest sr, uint[] widths, uint[] heights, ref VectorArray<ShadowData> entries, ref VectorArray<ShadowPayload> payload, List<VisibleLight> lights)
{
if( m_FrameId.frameCount != frameId.frameCount )
m_ActiveEntriesCount = 0;

// For lights with multiple faces, the first shadow data contains
// per light information, so not all fields contain valid data.
// Shader code must make sure to read per face data from per face entries.
sd.texelSizeRcp = new Vector2( m_WidthRcp, m_HeightRcp );
sd.texelSizeRcp = new Vector4( m_WidthRcp, m_HeightRcp, 1.0f / widths[0], 1.0f / heights[0] );
sd.PackShadowType( sr.shadowType, sanitizedAlgo );
sd.payloadOffset = payload.Count();
entries.AddUnchecked( sd );

}
else
vp = Matrix4x4.identity; // should never happen, though
if (cameraRelativeRendering)
{
Vector3 camPosWS = camera.transform.position;
Matrix4x4 translation = Matrix4x4.Translate(camPosWS);
ce.current.view *= translation;
vp *= translation;
if (sr.shadowType == GPUShadowType.Directional)
{
m_TmpSplits[key.faceIdx].x -= camPosWS.x;
m_TmpSplits[key.faceIdx].y -= camPosWS.y;
m_TmpSplits[key.faceIdx].z -= camPosWS.z;
}
}
// write :(
ce.current.shadowAlgo = shadowAlgo;
m_EntryCache[ceIdx] = ce;

cb.ClearRenderTarget( true, !IsNativeDepth(), m_ClearColor );
}
override public void Update( FrameId frameId, ScriptableRenderContext renderContext, CullResults cullResults, List<VisibleLight> lights)
override public void Update( FrameId frameId, ScriptableRenderContext renderContext, CommandBuffer cmd, CullResults cullResults, List<VisibleLight> lights)
var profilingSample = new HDPipeline.Utilities.ProfilingSample(string.Format("Shadowmap{0}",m_TexSlot), renderContext);
var profilingSample = new HDPipeline.Utilities.ProfilingSample(string.Format("Shadowmap{0}",m_TexSlot), cmd);
string cbName = "";
var cb = CommandBufferPool.Get();
cb.name = "Shadowmap.EnableShadowKeyword";
cb.EnableShaderKeyword(m_ShaderKeyword);
renderContext.ExecuteCommandBuffer( cb );
CommandBufferPool.Release(cb);
cbName = "Shadowmap.EnableShadowKeyword";
cmd.BeginSample(cbName);
cmd.EnableShaderKeyword(m_ShaderKeyword);
cmd.EndSample(cbName);
}
// loop for generating each individual shadowmap

if( !cullResults.GetShadowCasterBounds( m_EntryCache[i].key.visibleIdx, out bounds ) )
continue;
var cb = CommandBufferPool.Get();
cb.name = string.Format("Shadowmap.Update.Slice{0}", entrySlice);
cbName = string.Format("Shadowmap.Update.Slice{0}", entrySlice);
cmd.BeginSample(cbName);
PostUpdate( frameId, cb, curSlice, lights );
PostUpdate( frameId, cmd, curSlice, lights );
PreUpdate( frameId, cb, curSlice );
PreUpdate( frameId, cmd, curSlice );
cmd.EndSample(cbName);
cb.name = string.Format("Shadowmap.Update - slice: {0}, vp.x: {1}, vp.y: {2}, vp.w: {3}, vp.h: {4}", curSlice, m_EntryCache[i].current.viewport.x, m_EntryCache[i].current.viewport.y, m_EntryCache[i].current.viewport.width, m_EntryCache[i].current.viewport.height);
cb.SetViewport( m_EntryCache[i].current.viewport );
cb.SetViewProjectionMatrices( m_EntryCache[i].current.view, m_EntryCache[i].current.proj );
cb.SetGlobalVector( "g_vLightDirWs", m_EntryCache[i].current.lightDir );
renderContext.ExecuteCommandBuffer( cb );
CommandBufferPool.Release(cb);
cbName = string.Format("Shadowmap.Update - slice: {0}, vp.x: {1}, vp.y: {2}, vp.w: {3}, vp.h: {4}", curSlice, m_EntryCache[i].current.viewport.x, m_EntryCache[i].current.viewport.y, m_EntryCache[i].current.viewport.width, m_EntryCache[i].current.viewport.height);
cmd.BeginSample(cbName);
cmd.SetViewport( m_EntryCache[i].current.viewport );
cmd.SetViewProjectionMatrices( m_EntryCache[i].current.view, m_EntryCache[i].current.proj );
cmd.SetGlobalVector( "g_vLightDirWs", m_EntryCache[i].current.lightDir );
cmd.EndSample(cbName);
// This is done here because the DrawRenderers API lives outside command buffers, so we need to make sure to call this before doing any DrawRenderers
renderContext.ExecuteCommandBuffer(cmd);
cmd.Clear();
var cblast = CommandBufferPool.Get();
PostUpdate( frameId, cblast, curSlice, lights );
PostUpdate( frameId, cmd, curSlice, lights );
cblast.name = "Shadowmap.DisableShaderKeyword";
cblast.DisableShaderKeyword( m_ShaderKeyword );
cmd.BeginSample("Shadowmap.DisableShaderKeyword");
cmd.DisableShaderKeyword( m_ShaderKeyword );
cmd.EndSample("Shadowmap.DisableShaderKeyword");
renderContext.ExecuteCommandBuffer( cblast );
CommandBufferPool.Release(cblast);
m_ActiveEntriesCount = 0;

// Nothing to do for this implementation here, as the atlas is reconstructed each frame, instead of keeping state across frames
}
override public void DisplayShadowMap(ScriptableRenderContext renderContext, Vector4 scaleBias, uint slice, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue)
override public void DisplayShadowMap(CommandBuffer debugCB, Vector4 scaleBias, uint slice, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue)
CommandBuffer debugCB = CommandBufferPool.Get();
debugCB.name = "";
Vector4 validRange = new Vector4(minValue, 1.0f / (maxValue - minValue));
MaterialPropertyBlock propertyBlock = new MaterialPropertyBlock();

propertyBlock.SetVector("_ValidRange", validRange);
debugCB.SetViewport(new Rect(screenX, screenY, screenSizeX, screenSizeY));
debugCB.DrawProcedural(Matrix4x4.identity, m_DebugMaterial, m_DebugMaterial.FindPass("REGULARSHADOW"), MeshTopology.Triangles, 3, 1, propertyBlock);
renderContext.ExecuteCommandBuffer(debugCB);
CommandBufferPool.Release(debugCB);
}
}

protected override void PostUpdate( FrameId frameId, CommandBuffer cb, uint rendertargetSlice, List<VisibleLight> lights)
{
cb.name = "VSM conversion";
if ( rendertargetSlice == uint.MaxValue )
{
base.PostUpdate( frameId, cb, rendertargetSlice, lights );

if( i >= cnt || m_EntryCache[i].current.slice > rendertargetSlice )
return;
cb.BeginSample("VSM conversion");
int kernelIdx = 2;
int currentKernel = 0;

i++;
}
base.PostUpdate( frameId, cb, rendertargetSlice, lights );
cb.EndSample("VSM conversion");
override public void DisplayShadowMap(ScriptableRenderContext renderContext, Vector4 scaleBias, uint slice, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue)
override public void DisplayShadowMap(CommandBuffer debugCB, Vector4 scaleBias, uint slice, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue)
CommandBuffer debugCB = CommandBufferPool.Get();
debugCB.name = "";
Vector4 validRange = new Vector4(minValue, 1.0f / (maxValue - minValue));
MaterialPropertyBlock propertyBlock = new MaterialPropertyBlock();

propertyBlock.SetVector("_ValidRange", validRange);
debugCB.SetViewport(new Rect(screenX, screenY, screenSizeX, screenSizeY));
debugCB.DrawProcedural(Matrix4x4.identity, m_DebugMaterial, m_DebugMaterial.FindPass("VARIANCESHADOW"), MeshTopology.Triangles, 3, 1, propertyBlock);
renderContext.ExecuteCommandBuffer(debugCB);
CommandBufferPool.Release(debugCB);
}
}
// -------------------------------------------------------------------------------------------------------------------------------------------------

cullingParams.shadowDistance = Mathf.Min( m_ShadowSettings.maxShadowDistance, cullingParams.shadowDistance );
}
public override void ProcessShadowRequests( FrameId frameId, CullResults cullResults, Camera camera, List<VisibleLight> lights, ref uint shadowRequestsCount, int[] shadowRequests, out int[] shadowDataIndices )
public override void ProcessShadowRequests( FrameId frameId, CullResults cullResults, Camera camera, bool cameraRelativeRendering, List<VisibleLight> lights, ref uint shadowRequestsCount, int[] shadowRequests, out int[] shadowDataIndices )
{
shadowDataIndices = null;

ShadowDataVector shadowVector = m_ShadowCtxt.shadowDatas;
ShadowPayloadVector payloadVector = m_ShadowCtxt.payloads;
m_ShadowIndices.Reset( m_TmpRequests.Count() );
AllocateShadows( frameId, lights, totalGranted, ref m_TmpRequests, ref m_ShadowIndices, ref shadowVector, ref payloadVector );
AllocateShadows( frameId, camera, cameraRelativeRendering, lights, totalGranted, ref m_TmpRequests, ref m_ShadowIndices, ref shadowVector, ref payloadVector );
Debug.Assert( m_TmpRequests.Count() == m_ShadowIndices.Count() );
m_ShadowCtxt.shadowDatas = shadowVector;
m_ShadowCtxt.payloads = payloadVector;

m_TmpSortKeys.ExtractTo( ref shadowRequests, (long idx) => { return (int) idx; } );
}
protected override void AllocateShadows( FrameId frameId, List<VisibleLight> lights, uint totalGranted, ref ShadowRequestVector grantedRequests, ref ShadowIndicesVector shadowIndices, ref ShadowDataVector shadowDatas, ref ShadowPayloadVector shadowmapPayload )
protected override void AllocateShadows( FrameId frameId, Camera camera, bool cameraRelativeRendering, List<VisibleLight> lights, uint totalGranted, ref ShadowRequestVector grantedRequests, ref ShadowIndicesVector shadowIndices, ref ShadowDataVector shadowDatas, ref ShadowPayloadVector shadowmapPayload )
{
ShadowData sd = new ShadowData();
shadowDatas.Reserve( totalGranted );

int smidx = 0;
while( smidx < k_MaxShadowmapPerType )
{
if( m_ShadowmapsPerType[(int)shadowtype,smidx] != null && m_ShadowmapsPerType[(int)shadowtype,smidx].Reserve( frameId, ref sd, grantedRequests[i], (uint) asd.shadowResolution, (uint) asd.shadowResolution, ref shadowDatas, ref shadowmapPayload, lights ) )
if( m_ShadowmapsPerType[(int)shadowtype,smidx] != null && m_ShadowmapsPerType[(int)shadowtype,smidx].Reserve( frameId, camera, cameraRelativeRendering, ref sd, grantedRequests[i], (uint) asd.shadowResolution, (uint) asd.shadowResolution, ref shadowDatas, ref shadowmapPayload, lights ) )
break;
smidx++;
}

}
}
public override void RenderShadows( FrameId frameId, ScriptableRenderContext renderContext, CullResults cullResults, List<VisibleLight> lights)
public override void RenderShadows( FrameId frameId, ScriptableRenderContext renderContext, CommandBuffer cmd, CullResults cullResults, List<VisibleLight> lights)
using (new HDPipeline.Utilities.ProfilingSample("Render Shadows Exp", renderContext))
using (new HDPipeline.Utilities.ProfilingSample("Render Shadows Exp", cmd))
sm.Update( frameId, renderContext, cullResults, lights );
sm.Update( frameId, renderContext, cmd, cullResults, lights );
public override void DisplayShadow(ScriptableRenderContext renderContext, int shadowRequestIndex, uint faceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue)
public override void DisplayShadow(CommandBuffer cmd, int shadowRequestIndex, uint faceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue)
{
if (m_ShadowIndices.Count() == 0)
return;

ShadowData faceData = shadowDatas[(uint)(m_ShadowIndices[index] + offset + faceIndex)];
uint texID, samplerID, slice;
faceData.UnpackShadowmapId(out texID, out samplerID, out slice);
m_Shadowmaps[texID].DisplayShadowMap(renderContext, faceData.scaleOffset, slice, screenX, screenY, screenSizeX, screenSizeY, minValue, maxValue);
m_Shadowmaps[texID].DisplayShadowMap(cmd, faceData.scaleOffset, slice, screenX, screenY, screenSizeX, screenSizeY, minValue, maxValue);
public override void DisplayShadowMap(ScriptableRenderContext renderContext, uint shadowMapIndex, uint sliceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue)
public override void DisplayShadowMap(CommandBuffer cmd, uint shadowMapIndex, uint sliceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue)
m_Shadowmaps[index].DisplayShadowMap(renderContext, new Vector4(1.0f, 1.0f, 0.0f, 0.0f), sliceIndex, screenX, screenY, screenSizeX, screenSizeY, minValue, maxValue);
m_Shadowmaps[index].DisplayShadowMap(cmd, new Vector4(1.0f, 1.0f, 0.0f, 0.0f), sliceIndex, screenX, screenY, screenSizeX, screenSizeY, minValue, maxValue);
}
public override void SyncData()

public override void BindResources(ScriptableRenderContext renderContext)
public override void BindResources(CommandBuffer cmd, ComputeShader computeShader, int computeKernel)
CommandBuffer cb = CommandBufferPool.Get(); // <- can we just keep this around or does this have to be newed every frame?
cb.name = "Bind resources to GPU";
m_ShadowCtxt.BindResources(cb);
renderContext.ExecuteCommandBuffer(cb);
CommandBufferPool.Release(cb);
cmd.BeginSample("Bind resources to GPU");
m_ShadowCtxt.BindResources(cmd, computeShader, computeKernel);
cmd.EndSample("Bind resources to GPU");
}
// resets the shadow slot counters and returns the sum of all slots
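For readers skimming the hunk above, here is a minimal standalone sketch of the camera-relative rebasing it introduces (the helper name and signature are mine, not part of the shadow API). With camera-relative rendering, shaders see world positions with the camera position subtracted, so the shadow view and view-projection matrices absorb a Translate(cameraPosition), and directional cascade split-sphere centers are shifted by -cameraPosition:

    using UnityEngine;

    static class CameraRelativeShadowSketch
    {
        // Rebase one shadow entry for camera-relative rendering (names are illustrative).
        public static void Rebase(ref Matrix4x4 view, ref Matrix4x4 viewProj, ref Vector4 splitSphere, Camera camera)
        {
            Vector3 camPosWS = camera.transform.position;
            Matrix4x4 translation = Matrix4x4.Translate(camPosWS);

            // Shaders supply positions as (true world position - camPosWS), so post-multiplying
            // the matrices by Translate(camPosWS) keeps the shadow lookups consistent.
            view *= translation;
            viewProj *= translation;

            // Directional cascade split spheres store a world-space center in xyz (radius in w),
            // so only the center is shifted.
            splitSphere.x -= camPosWS.x;
            splitSphere.y -= camPosWS.y;
            splitSphere.z -= camPosWS.z;
        }
    }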

Assets/ScriptableRenderPipeline/Core/Shadow/ShadowBase.cs | 34


public class ShadowContext : ShadowContextStorage
{
public delegate void SyncDel( ShadowContext sc );
public delegate void BindDel( ShadowContext sc, CommandBuffer cb );
public delegate void BindDel( ShadowContext sc, CommandBuffer cb, ComputeShader computeShader, int computeKernel);
public struct CtxtInit
{
public Init storage;

// delegate that takes care of syncing data to the GPU
public void SyncData() { m_DataSyncerDel( this ); }
// delegate that takes care of binding textures, buffers and samplers to shaders just before rendering
public void BindResources( CommandBuffer cb ) { m_ResourceBinderDel( this, cb ); }
public void BindResources( CommandBuffer cb, ComputeShader computeShader, int computeKernel) { m_ResourceBinderDel( this, cb, computeShader, computeKernel); }
// the following functions are to be used by the bind and sync delegates
public void GetShadowDatas( out ShadowData[] shadowDatas, out uint offset, out uint count ) { shadowDatas = m_ShadowDatas.AsArray( out offset, out count ); }

public ShadowSupport QueryShadowSupport() { return m_ShadowSupport; }
public uint GetMaxPayload() { return m_MaxPayloadCount; }
public void Assign( CullResults cullResults ) { m_CullResults = cullResults; } // TODO: Remove when m_CullResults is removed again
abstract public bool Reserve( FrameId frameId, ref ShadowData shadowData, ShadowRequest sr, uint width, uint height, ref VectorArray<ShadowData> entries, ref VectorArray<ShadowPayload> payloads, List<VisibleLight> lights);
abstract public bool Reserve( FrameId frameId, ref ShadowData shadowData, ShadowRequest sr, uint[] widths, uint[] heights, ref VectorArray<ShadowData> entries, ref VectorArray<ShadowPayload> payloads, List<VisibleLight> lights);
abstract public bool Reserve( FrameId frameId, Camera camera, bool cameraRelativeRendering, ref ShadowData shadowData, ShadowRequest sr, uint width, uint height, ref VectorArray<ShadowData> entries, ref VectorArray<ShadowPayload> payloads, List<VisibleLight> lights);
abstract public bool Reserve( FrameId frameId, Camera camera, bool cameraRelativeRendering, ref ShadowData shadowData, ShadowRequest sr, uint[] widths, uint[] heights, ref VectorArray<ShadowData> entries, ref VectorArray<ShadowPayload> payloads, List<VisibleLight> lights);
abstract public void Update( FrameId frameId, ScriptableRenderContext renderContext, CullResults cullResults, List<VisibleLight> lights);
abstract public void Update( FrameId frameId, ScriptableRenderContext renderContext, CommandBuffer cmd, CullResults cullResults, List<VisibleLight> lights);
abstract public void DisplayShadowMap(ScriptableRenderContext renderContext, Vector4 scaleBias, uint slice, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
abstract public void DisplayShadowMap(CommandBuffer cmd, Vector4 scaleBias, uint slice, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
}
interface IShadowManager

// shadowPayloads contains implementation specific data that is accessed from the shader by indexing into an Buffer<int> using ShadowData.ShadowmapData.payloadOffset.
// This is the equivalent of a void pointer in the shader and there needs to be loader code that knows how to interpret the data.
// If there are no valid shadow casters all output arrays will be null, otherwise they will contain valid data that can be passed to shaders.
void ProcessShadowRequests( FrameId frameId, CullResults cullResults, Camera camera, List<VisibleLight> lights, ref uint shadowRequestsCount, int[] shadowRequests, out int[] shadowDataIndices );
void ProcessShadowRequests( FrameId frameId, CullResults cullResults, Camera camera, bool cameraRelativeRendering, List<VisibleLight> lights, ref uint shadowRequestsCount, int[] shadowRequests, out int[] shadowDataIndices );
void RenderShadows( FrameId frameId, ScriptableRenderContext renderContext, CullResults cullResults, List<VisibleLight> lights);
void RenderShadows( FrameId frameId, ScriptableRenderContext renderContext, CommandBuffer cmd, CullResults cullResults, List<VisibleLight> lights);
void DisplayShadow(ScriptableRenderContext renderContext, int shadowIndex, uint faceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
void DisplayShadowMap(ScriptableRenderContext renderContext, uint shadowMapIndex, uint sliceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
void DisplayShadow(CommandBuffer cmd, int shadowIndex, uint faceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
void DisplayShadowMap(CommandBuffer cmd, uint shadowMapIndex, uint sliceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
void BindResources( ScriptableRenderContext renderContext );
void BindResources( CommandBuffer cmd, ComputeShader computeShader, int computeKernel);
// Fixes up some parameters within the cullResults
void UpdateCullingParameters( ref ScriptableCullingParameters cullingParams );

abstract public class ShadowManagerBase : ShadowRegistry, IShadowManager
{
public abstract void ProcessShadowRequests( FrameId frameId, CullResults cullResults, Camera camera, List<VisibleLight> lights, ref uint shadowRequestsCount, int[] shadowRequests, out int[] shadowDataIndices );
public abstract void RenderShadows( FrameId frameId, ScriptableRenderContext renderContext, CullResults cullResults, List<VisibleLight> lights);
public abstract void DisplayShadow(ScriptableRenderContext renderContext, int shadowIndex, uint faceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
public abstract void DisplayShadowMap(ScriptableRenderContext renderContext, uint shadowMapIndex, uint sliceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
public abstract void ProcessShadowRequests( FrameId frameId, CullResults cullResults, Camera camera, bool cameraRelativeRendering, List<VisibleLight> lights, ref uint shadowRequestsCount, int[] shadowRequests, out int[] shadowDataIndices );
public abstract void RenderShadows( FrameId frameId, ScriptableRenderContext renderContext, CommandBuffer cmd, CullResults cullResults, List<VisibleLight> lights);
public abstract void DisplayShadow(CommandBuffer cmd, int shadowIndex, uint faceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
public abstract void DisplayShadowMap(CommandBuffer cmd, uint shadowMapIndex, uint sliceIndex, float screenX, float screenY, float screenSizeX, float screenSizeY, float minValue, float maxValue);
public abstract void BindResources( ScriptableRenderContext renderContext );
public abstract void BindResources( CommandBuffer cmd, ComputeShader computeShader, int computeKernel);
public abstract void UpdateCullingParameters( ref ScriptableCullingParameters cullingParams );
// sort the shadow requests in descending priority - may only modify shadowRequests
protected abstract void PrioritizeShadowCasters( Camera camera, List<VisibleLight> lights, uint shadowRequestsCount, int[] shadowRequests );

protected abstract void AllocateShadows( FrameId frameId, List<VisibleLight> lights, uint totalGranted, ref VectorArray<ShadowmapBase.ShadowRequest> grantedRequests, ref VectorArray<int> shadowIndices, ref VectorArray<ShadowData> shadowmapDatas, ref VectorArray<ShadowPayload> shadowmapPayload );
protected abstract void AllocateShadows( FrameId frameId, Camera camera, bool cameraRelativeRendering, List<VisibleLight> lights, uint totalGranted, ref VectorArray<ShadowmapBase.ShadowRequest> grantedRequests, ref VectorArray<int> shadowIndices, ref VectorArray<ShadowData> shadowmapDatas, ref VectorArray<ShadowPayload> shadowmapPayload );
public abstract uint GetShadowMapCount();
public abstract uint GetShadowMapSliceCount(uint shadowMapIndex);
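To make the signature changes above concrete, here is a hypothetical caller sketch (class and field names are illustrative; the real call sites are in FptlLighting.cs and HDRenderPipeline.cs in this same commit). Shadow rendering and resource binding now record into a caller-owned CommandBuffer that the pipeline executes once, instead of each method executing its own pooled buffer against the ScriptableRenderContext:

    // Namespaces as used by the 2017-era SRP code in this repository.
    using UnityEngine;
    using UnityEngine.Rendering;
    using UnityEngine.Experimental.Rendering;

    class ShadowCallerSketch
    {
        IShadowManager m_ShadowMgr;   // assumed fields, mirroring FptlLighting.cs
        FrameId m_FrameId;

        void RenderAndBindShadows(ScriptableRenderContext renderContext, CullResults cullResults,
                                  ComputeShader deferredCS, int kernel)
        {
            var cmd = CommandBufferPool.Get();
            cmd.name = "Shadows";

            // Both calls now record into the shared command buffer instead of executing their own.
            m_ShadowMgr.RenderShadows(m_FrameId, renderContext, cmd, cullResults, cullResults.visibleLights);
            m_ShadowMgr.BindResources(cmd, deferredCS, kernel);

            renderContext.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
    }

One practical effect of this shape: samples recorded with BeginSample/EndSample on the shared buffer nest correctly in the profiler, and the number of ExecuteCommandBuffer round trips per frame drops.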

Assets/ScriptableRenderPipeline/Core/Shadow/ShadowBase.cs.hlsl | 2


//
// This file was automatically generated from Assets/ScriptableRenderPipeline/core/Shadow/ShadowBase.cs. Please don't edit by hand.
// This file was automatically generated from Assets/ScriptableRenderPipeline/Core/Shadow/ShadowBase.cs. Please don't edit by hand.
//
#ifndef SHADOWBASE_CS_HLSL

Assets/ScriptableRenderPipeline/Core/TextureCache.cs | 67


{
protected int m_NumMipLevels;
static int s_GlobalTextureCacheVersion = 0;
int m_TextureCacheVersion = 0;
#if UNITY_EDITOR
static int s_TextureCacheIdGenerator = 0;
int m_TextureCacheId = 0;
static bool s_ForceReinjectGlobalFirst = false;
static bool s_ForceReinjectGlobalSecond = false;
static int s_GlobalSecondSetByTexCacheID = -1;
#if UNITY_EDITOR
// here we receive the in-editor updated textures. These must be reinjected into
// any texture cache which has a stale copy of it. However, we don't have a this-pointer to the texture cache
// so instead we defer this to NewFrame() where we force reinject.
// Ideally we'd build up a list here of textures which are to be reinjected but unfortunately the texture we receive
// is an intermediate one and not the final compressed one. So instead we will have to reinject all in NewFrame().
s_GlobalTextureCacheVersion++;
s_ForceReinjectGlobalFirst = true;
s_ForceReinjectGlobalSecond = false;
s_GlobalSecondSetByTexCacheID = -1;
#endif
#endif
public static bool isMobileBuildTarget
{

{
sliceIndex = m_LocatorInSliceArray[texId];
bFoundAvailOrExistingSlice = true;
#if UNITY_EDITOR
if(m_TextureCacheVersion!=s_GlobalTextureCacheVersion)
{
m_TextureCacheVersion++;
Debug.Assert(m_TextureCacheVersion <= s_GlobalTextureCacheVersion);
bSwapSlice = true; // force a reinject.
}
#endif
//assert(m_SliceArray[sliceIndex].TexID==TexID);
}

//for(int q=1; q<m_numTextures; q++)
// assert(m_SliceArray[m_SortedIdxArray[q-1]].CountLRU>=m_SliceArray[m_SortedIdxArray[q]].CountLRU);
#if UNITY_EDITOR
// one or more textures got updated in editor. Unfortunately we do not know exactly which since
// OnPostprocessTexture() receives intermediate uncompressed textures. So we will have to reinject all slices to force an update.
if(s_ForceReinjectGlobalSecond && s_GlobalSecondSetByTexCacheID==m_TextureCacheId)
{
s_ForceReinjectGlobalSecond = false;
s_GlobalSecondSetByTexCacheID = -1;
}
if(s_ForceReinjectGlobalFirst)
{
s_ForceReinjectGlobalSecond = true;
s_GlobalSecondSetByTexCacheID = m_TextureCacheId;
s_ForceReinjectGlobalFirst = false;
}
if(s_ForceReinjectGlobalSecond)
{
// all texture caches must loop through and force a reinject on all entries when this is true.
for(int i = 0; i < m_NumTextures; i++)
{
var texID = m_SliceArray[i].texId;
if(texID!=g_InvalidTexID)
{
Texture texture = (Texture) EditorUtility.InstanceIDToObject((int) texID);
if(texture!=null) TransferToSlice(i, texture);
}
}
}
#endif
}
protected TextureCache()

#if UNITY_EDITOR
m_TextureCacheId = s_TextureCacheIdGenerator; // assign an ID so we can tell the caches apart
++s_TextureCacheIdGenerator; // static/global
#endif
}
public virtual void TransferToSlice(int sliceIndex, Texture texture)
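A hypothetical illustration of what would drive the editor-only flags above (this AssetPostprocessor is not part of the diff, and the entry-point name in the comment is assumed): OnPostprocessTexture only delivers an intermediate, uncompressed texture, which is exactly why the cache defers the actual reinjection to NewFrame() and merely arms the global flags when an import happens:

    #if UNITY_EDITOR
    using UnityEditor;
    using UnityEngine;

    class TextureReimportNotifier : AssetPostprocessor
    {
        void OnPostprocessTexture(Texture2D texture)
        {
            // The texture received here is an intermediate, uncompressed version, so the cache
            // cannot reinject it directly; it only records that a global reinject pass is needed,
            // e.g. by bumping s_GlobalTextureCacheVersion and setting s_ForceReinjectGlobalFirst.
            // TextureCache.NotifyTextureUpdated(texture);   // assumed entry point, name is illustrative
        }
    }
    #endif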

Assets/ScriptableRenderPipeline/Fptl/FptlLighting.cs | 74


};
// binding code. This needs to be in sync with ShadowContext.hlsl
ShadowContext.BindDel binder = (ShadowContext sc, CommandBuffer cb) =>
ShadowContext.BindDel binder = (ShadowContext sc, CommandBuffer cb, ComputeShader computeShader, int computeKernel) =>
{
// bind buffers
cb.SetGlobalBuffer("_ShadowDatasExp", s_ShadowDataBuffer);

var invProjscr = projscr.inverse;
cmd.SetComputeIntParam(deferredComputeShader, "g_iNrVisibLights", numLights);
SetMatrixCS(cmd, deferredComputeShader, "g_mScrProjection", projscr);
SetMatrixCS(cmd, deferredComputeShader, "g_mInvScrProjection", invProjscr);
SetMatrixCS(cmd, deferredComputeShader, "g_mViewToWorld", camera.cameraToWorldMatrix);
cmd.SetComputeMatrixParam(deferredComputeShader, "g_mScrProjection", projscr);
cmd.SetComputeMatrixParam(deferredComputeShader, "g_mInvScrProjection", invProjscr);
cmd.SetComputeMatrixParam(deferredComputeShader, "g_mViewToWorld", camera.cameraToWorldMatrix);
if (bUseClusteredForDeferred)

cmd.SetComputeBufferParam(deferredComputeShader, kernel, "g_dirLightData", s_DirLightList);
cmd.SetComputeTextureParam(deferredComputeShader, kernel, "uavOutput", new RenderTargetIdentifier(s_CameraTarget));
SetMatrixArrayCS(cmd, deferredComputeShader, "g_matWorldToShadow", m_MatWorldToShadow);
SetVectorArrayCS(cmd, deferredComputeShader, "g_vDirShadowSplitSpheres", m_DirShadowSplitSpheres);
cmd.SetComputeMatrixArrayParam(deferredComputeShader, "g_matWorldToShadow", m_MatWorldToShadow);
cmd.SetComputeVectorArrayParam(deferredComputeShader, "g_vDirShadowSplitSpheres", m_DirShadowSplitSpheres);
cmd.SetComputeVectorParam(deferredComputeShader, "g_vShadow3x3PCFTerms0", m_Shadow3X3PCFTerms[0]);
cmd.SetComputeVectorParam(deferredComputeShader, "g_vShadow3x3PCFTerms1", m_Shadow3X3PCFTerms[1]);
cmd.SetComputeVectorParam(deferredComputeShader, "g_vShadow3x3PCFTerms2", m_Shadow3X3PCFTerms[2]);

CommandBufferPool.Release(cmd);
}
private static void SetMatrixCS(CommandBuffer cmd, ComputeShader shadercs, string name, Matrix4x4 mat)
{
var data = new float[16];
for (int c = 0; c < 4; c++)
for (int r = 0; r < 4; r++)
data[4 * c + r] = mat[r, c];
cmd.SetComputeFloatParams(shadercs, name, data);
}
private static void SetMatrixArrayCS(CommandBuffer cmd, ComputeShader shadercs, string name, Matrix4x4[] matArray)
{
int numMatrices = matArray.Length;
var data = new float[numMatrices * 16];
for (int n = 0; n < numMatrices; n++)
for (int c = 0; c < 4; c++)
for (int r = 0; r < 4; r++)
data[16 * n + 4 * c + r] = matArray[n][r, c];
cmd.SetComputeFloatParams(shadercs, name, data);
}
private static void SetVectorArrayCS(CommandBuffer cmd, ComputeShader shadercs, string name, Vector4[] vecArray)
{
int numVectors = vecArray.Length;
var data = new float[numVectors * 4];
for (int n = 0; n < numVectors; n++)
for (int i = 0; i < 4; i++)
data[4 * n + i] = vecArray[n][i];
cmd.SetComputeFloatParams(shadercs, name, data);
}
static Matrix4x4 GetFlipMatrix()
{
Matrix4x4 flip = Matrix4x4.identity;

uint shadowRequestCount = (uint)m_ShadowRequests.Count;
int[] shadowRequests = m_ShadowRequests.ToArray();
int[] shadowDataIndices;
m_ShadowMgr.ProcessShadowRequests(m_FrameId, inputs, camera, inputs.visibleLights,
m_ShadowMgr.ProcessShadowRequests(m_FrameId, inputs, camera, false, inputs.visibleLights,
ref shadowRequestCount, shadowRequests, out shadowDataIndices);
// update the visibleLights with the shadow information

var numLights = GenerateSourceLightBuffers(camera, cullResults);
BuildPerTileLightLists(camera, loop, numLights, projscr, invProjscr);
m_ShadowMgr.RenderShadows( m_FrameId, loop, cullResults, cullResults.visibleLights );
CommandBuffer cmdShadow = CommandBufferPool.Get();
m_ShadowMgr.RenderShadows( m_FrameId, loop, cmdShadow, cullResults, cullResults.visibleLights );
m_ShadowMgr.BindResources( loop );
m_ShadowMgr.BindResources( cmdShadow, null, 0 );
loop.ExecuteCommandBuffer(cmdShadow);
CommandBufferPool.Release(cmdShadow);
// Push all global params
var numDirLights = UpdateDirectionalLights(camera, cullResults.visibleLights, m_ShadowIndices);

cmd.DispatchCompute(buildPerVoxelLightListShader, s_ClearVoxelAtomicKernel, 1, 1, 1);
cmd.SetComputeIntParam(buildPerVoxelLightListShader, "g_iNrVisibLights", numLights);
SetMatrixCS(cmd, buildPerVoxelLightListShader, "g_mScrProjection", projscr);
SetMatrixCS(cmd, buildPerVoxelLightListShader, "g_mInvScrProjection", invProjscr);
cmd.SetComputeMatrixParam(buildPerVoxelLightListShader, "g_mScrProjection", projscr);
cmd.SetComputeMatrixParam(buildPerVoxelLightListShader, "g_mInvScrProjection", invProjscr);
cmd.SetComputeIntParam(buildPerVoxelLightListShader, "g_iLog2NumClusters", k_Log2NumClusters);

var invProjh = projh.inverse;
cmd.SetComputeIntParam(buildScreenAABBShader, "g_iNrVisibLights", numLights);
SetMatrixCS(cmd, buildScreenAABBShader, "g_mProjection", projh);
SetMatrixCS(cmd, buildScreenAABBShader, "g_mInvProjection", invProjh);
cmd.SetComputeMatrixParam(buildScreenAABBShader, "g_mProjection", projh);
cmd.SetComputeMatrixParam(buildScreenAABBShader, "g_mInvProjection", invProjh);
cmd.SetComputeBufferParam(buildScreenAABBShader, s_GenAABBKernel, "g_vBoundsBuffer", s_AABBBoundsBuffer);
cmd.DispatchCompute(buildScreenAABBShader, s_GenAABBKernel, (numLights + 7) / 8, 1, 1);
}

{
cmd.SetComputeIntParams(buildPerBigTileLightListShader, "g_viDimensions", new int[2] { w, h });
cmd.SetComputeIntParam(buildPerBigTileLightListShader, "g_iNrVisibLights", numLights);
SetMatrixCS(cmd, buildPerBigTileLightListShader, "g_mScrProjection", projscr);
SetMatrixCS(cmd, buildPerBigTileLightListShader, "g_mInvScrProjection", invProjscr);
cmd.SetComputeMatrixParam(buildPerBigTileLightListShader, "g_mScrProjection", projscr);
cmd.SetComputeMatrixParam(buildPerBigTileLightListShader, "g_mInvScrProjection", invProjscr);
cmd.SetComputeFloatParam(buildPerBigTileLightListShader, "g_fNearPlane", camera.nearClipPlane);
cmd.SetComputeFloatParam(buildPerBigTileLightListShader, "g_fFarPlane", camera.farClipPlane);
cmd.SetComputeBufferParam(buildPerBigTileLightListShader, s_GenListPerBigTileKernel, "g_vLightList", s_BigTileLightList);

{
cmd.SetComputeIntParams(buildPerTileLightListShader, "g_viDimensions", new int[2] { w, h });
cmd.SetComputeIntParam(buildPerTileLightListShader, "g_iNrVisibLights", numLights);
SetMatrixCS(cmd, buildPerTileLightListShader, "g_mScrProjection", projscr);
SetMatrixCS(cmd, buildPerTileLightListShader, "g_mInvScrProjection", invProjscr);
cmd.SetComputeMatrixParam(buildPerTileLightListShader, "g_mScrProjection", projscr);
cmd.SetComputeMatrixParam(buildPerTileLightListShader, "g_mInvScrProjection", invProjscr);
cmd.SetComputeTextureParam(buildPerTileLightListShader, s_GenListPerTileKernel, "g_depth_tex", new RenderTargetIdentifier(s_CameraDepthTexture));
cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, "g_vLightList", s_LightList);
if (enableBigTilePrepass) cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, "g_vBigTileLightList", s_BigTileLightList);
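A small before/after sketch of the helper removal in this file (the wrapper class below is mine; g_mScrProjection is one of the constants set in the hunk above): the hand-rolled SetMatrixCS flattened each matrix into a column-major float[16] for SetComputeFloatParams, whereas the CommandBuffer API now accepts the matrix directly:

    using UnityEngine;
    using UnityEngine.Rendering;

    static class ComputeParamSketch
    {
        public static void PushProjection(CommandBuffer cmd, ComputeShader cs, Matrix4x4 projscr)
        {
            // Old path: flatten into a column-major float[16] and upload via SetComputeFloatParams.
            var data = new float[16];
            for (int c = 0; c < 4; c++)
                for (int r = 0; r < 4; r++)
                    data[4 * c + r] = projscr[r, c];
            cmd.SetComputeFloatParams(cs, "g_mScrProjection", data);

            // New path: the CommandBuffer API accepts the matrix directly.
            cmd.SetComputeMatrixParam(cs, "g_mScrProjection", projscr);
        }
    }

Besides being shorter, the direct call avoids the per-call float[] allocation that the old helpers performed every frame.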

Assets/ScriptableRenderPipeline/Fptl/LightDefinitions.cs.hlsl | 2


//
// This file was automatically generated from Assets/ScriptableRenderPipeline/fptl/LightDefinitions.cs. Please don't edit by hand.
// This file was automatically generated from Assets/ScriptableRenderPipeline/Fptl/LightDefinitions.cs. Please don't edit by hand.
//
#ifndef LIGHTDEFINITIONS_CS_HLSL

Assets/ScriptableRenderPipeline/Fptl/LightingTemplate.hlsl | 16


DirectionalLight lightData = g_dirLightData[i];
float atten = 1;
UnityLight light;
light.dir.xyz = mul((float3x3) g_mViewToWorld, -lightData.lightAxisZ).xyz;
float shadow = GetDirectionalShadowAttenuation(shadowContext, vPw, 0.0.xxx, shadowIdx, 0.0.xxx);
float shadow = GetDirectionalShadowAttenuation(shadowContext, vPw, 0.0.xxx, shadowIdx, normalize(light.dir.xyz));
UnityLight light;
light.dir.xyz = mul((float3x3) g_mViewToWorld, -lightData.lightAxisZ).xyz;
ints += EvalMaterial(light, ind);
}

}
atten *= angularAtt.w*(fProjVec>0.0); // finally apply this to the dist att.
UnityLight light;
light.dir.xyz = mul((float3x3) g_mViewToWorld, vL).xyz; //unity_CameraToWorld
float shadow = GetPunctualShadowAttenuation(shadowContext, vPw, 0.0.xxx, shadowIdx, 0.0.xxx);
float shadow = GetPunctualShadowAttenuation(shadowContext, vPw, 0.0.xxx, shadowIdx, float4(normalize(light.dir.xyz), dist));
UnityLight light;
light.dir.xyz = mul((float3x3) g_mViewToWorld, vL).xyz; //unity_CameraToWorld
ints += EvalMaterial(light, ind);

[branch]
if (shadowIdx >= 0)
{
float shadow = GetPunctualShadowAttenuation(shadowContext, vPw, 0.0.xxx, shadowIdx, vLw);
float shadow = GetPunctualShadowAttenuation(shadowContext, vPw, 0.0.xxx, shadowIdx, float4(vLw, dist));
atten *= shadow;
}

Assets/ScriptableRenderPipeline/Fptl/ShadowDispatch.hlsl | 4


// example of overriding punctual lights
#ifdef SHADOW_DISPATCH_USE_CUSTOM_PUNCTUAL
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L )
{
// example for choosing the same algo
Texture2DArray tex = shadowContext.tex2DArray[0];

}
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
{
return GetPunctualShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

Assets/ScriptableRenderPipeline/HDRenderPipeline/AdditionalData/HDAdditionalLightData.cs | 5


namespace UnityEngine.Experimental.Rendering
{
public enum LightArchetype { Punctual, Area, Projector };
public enum LightArchetype { Punctual, Area };
public enum SpotLightShape { Cone, Pyramid, Box };
//@TODO: We should continuously move these values
// into the engine when we can see them being generally useful

public bool affectSpecular = true;
public LightArchetype archetype = LightArchetype.Punctual;
public SpotLightShape spotLightShape = SpotLightShape.Cone; // Note: Only for Spotlight, should be hide for other light
[Range(0.0f, 20.0f)]
public float lightLength = 0.0f; // Area & projector lights

Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.cs | 47


public static string kFullScreenDebugMode = "Fullscreen Debug Mode";
public static string kDisplaySkyReflectionDebug = "Display Sky Reflection";
public static string kSkyReflectionMipmapDebug = "Sky Reflection Mipmap";
public static string kTileDebug = "Tile Debug By Category";
public float debugOverlayRatio = 0.33f;

public static int[] debugViewEngineValues = null;
public static GUIContent[] debugViewMaterialVaryingStrings = null;
public static int[] debugViewMaterialVaryingValues = null;
public static GUIContent[] debugViewMaterialPropertiesStrings = null;
public static int[] debugViewMaterialPropertiesValues = null;
public static GUIContent[] debugViewMaterialGBufferStrings = null;
public static int[] debugViewMaterialGBufferValues = null;

materialDebugSettings.SetDebugViewVarying(value);
}
public void SetDebugViewProperties(Attributes.DebugViewProperties value)
{
if (value != 0)
lightingDebugSettings.debugLightingMode = DebugLightingMode.None;
materialDebugSettings.SetDebugViewProperties(value);
}
public void SetDebugViewGBuffer(int value)
{
if (value != 0)

DebugMenuManager.instance.AddDebugItem<int>("Material", "Material",() => materialDebugSettings.debugViewMaterial, (value) => SetDebugViewMaterial((int)value), DebugItemFlag.None, new DebugItemHandlerIntEnum(DebugDisplaySettings.debugViewMaterialStrings, DebugDisplaySettings.debugViewMaterialValues));
DebugMenuManager.instance.AddDebugItem<int>("Material", "Engine",() => materialDebugSettings.debugViewEngine, (value) => SetDebugViewEngine((int)value), DebugItemFlag.None, new DebugItemHandlerIntEnum(DebugDisplaySettings.debugViewEngineStrings, DebugDisplaySettings.debugViewEngineValues));
DebugMenuManager.instance.AddDebugItem<Attributes.DebugViewVarying>("Material", "Attributes",() => materialDebugSettings.debugViewVarying, (value) => SetDebugViewVarying((Attributes.DebugViewVarying)value));
DebugMenuManager.instance.AddDebugItem<Attributes.DebugViewProperties>("Material", "Properties", () => materialDebugSettings.debugViewProperties, (value) => SetDebugViewProperties((Attributes.DebugViewProperties)value));
DebugMenuManager.instance.AddDebugItem<int>("Material", "GBuffer",() => materialDebugSettings.debugViewGBuffer, (value) => SetDebugViewGBuffer((int)value), DebugItemFlag.None, new DebugItemHandlerIntEnum(DebugDisplaySettings.debugViewMaterialGBufferStrings, DebugDisplaySettings.debugViewMaterialGBufferValues));
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, bool>(kEnableShadowDebug, () => lightingDebugSettings.enableShadows, (value) => lightingDebugSettings.enableShadows = (bool)value);

DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, Color>(kDebugLightingAlbedo, () => lightingDebugSettings.debugLightingAlbedo, (value) => lightingDebugSettings.debugLightingAlbedo = (Color)value);
DebugMenuManager.instance.AddDebugItem<bool>("Lighting", kDisplaySkyReflectionDebug, () => lightingDebugSettings.displaySkyReflection, (value) => lightingDebugSettings.displaySkyReflection = (bool)value);
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, float>(kSkyReflectionMipmapDebug, () => lightingDebugSettings.skyReflectionMipmap, (value) => lightingDebugSettings.skyReflectionMipmap = (float)value, DebugItemFlag.None, new DebugItemHandlerFloatMinMax(0.0f, 1.0f));
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, TilePass.TileSettings.TileDebug>(kTileDebug,() => lightingDebugSettings.tileDebugByCategory, (value) => lightingDebugSettings.tileDebugByCategory = (TilePass.TileSettings.TileDebug)value);
DebugMenuManager.instance.AddDebugItem<bool>("Rendering", "Display Opaque",() => renderingDebugSettings.displayOpaqueObjects, (value) => renderingDebugSettings.displayOpaqueObjects = (bool)value);
DebugMenuManager.instance.AddDebugItem<bool>("Rendering", "Display Transparency",() => renderingDebugSettings.displayTransparentObjects, (value) => renderingDebugSettings.displayTransparentObjects = (bool)value);

index = 0;
FillWithPropertiesEnum(typeof(Attributes.DebugViewVarying), debugViewMaterialVaryingStrings, debugViewMaterialVaryingValues, "", ref index);
// Properties debug
var propertiesNames = Enum.GetNames(typeof(Attributes.DebugViewProperties));
debugViewMaterialPropertiesStrings = new GUIContent[propertiesNames.Length];
debugViewMaterialPropertiesValues = new int[propertiesNames.Length];
index = 0;
FillWithPropertiesEnum(typeof(Attributes.DebugViewProperties), debugViewMaterialPropertiesStrings, debugViewMaterialPropertiesValues, "", ref index);
// Gbuffer debug
var gbufferNames = Enum.GetNames(typeof(Attributes.DebugViewGbuffer));
debugViewMaterialGBufferStrings = new GUIContent[gbufferNames.Length + bsdfDataDeferredType.GetFields().Length];

Depth = DebugViewVarying.VertexColorAlpha + 1,
BakeDiffuseLightingWithAlbedoPlusEmissive,
}
// Number must be contiguous
[GenerateHLSL]
public enum DebugViewProperties
{
None = 0,
Tessellation = DebugViewGbuffer.BakeDiffuseLightingWithAlbedoPlusEmissive + 1,
PerPixelDisplacement,
DepthOffset,
Lightmap,
}
}
[Serializable]

public int debugViewEngine { get { return m_DebugViewEngine; } }
public Attributes.DebugViewVarying debugViewVarying { get { return m_DebugViewVarying; } }
public Attributes.DebugViewProperties debugViewProperties { get { return m_DebugViewProperties; } }
Attributes.DebugViewProperties m_DebugViewProperties = Attributes.DebugViewProperties.None;
int m_DebugViewGBuffer = 0; // Can't use GBuffer enum here because the values are actually split between this enum and values from Lit.BSDFData
public int GetDebugMaterialIndex()

// They are all mutually exclusive so return the sum will return the right index.
return m_DebugViewGBuffer + m_DebugViewMaterial + m_DebugViewEngine + (int)m_DebugViewVarying;
return m_DebugViewGBuffer + m_DebugViewMaterial + m_DebugViewEngine + (int)m_DebugViewVarying + (int)m_DebugViewProperties;
}
public void DisableMaterialDebug()

m_DebugViewVarying = Attributes.DebugViewVarying.None;
m_DebugViewProperties = Attributes.DebugViewProperties.None;
m_DebugViewGBuffer = 0;
}

DisableMaterialDebug();
m_DebugViewVarying = value;
}
public void SetDebugViewProperties(Attributes.DebugViewProperties value)
{
if (value != 0)
DisableMaterialDebug();
m_DebugViewProperties = value;
}
public void SetDebugViewGBuffer(int value)
{

public bool IsDebugDisplayEnabled()
{
return (m_DebugViewEngine != 0 || m_DebugViewMaterial != 0 || m_DebugViewVarying != Attributes.DebugViewVarying.None || m_DebugViewGBuffer != 0);
return (m_DebugViewEngine != 0 || m_DebugViewMaterial != 0 || m_DebugViewVarying != Attributes.DebugViewVarying.None || m_DebugViewProperties != Attributes.DebugViewProperties.None || m_DebugViewGBuffer != 0);
}
}

None,
SSAO,
SSAOBeforeFiltering,
MotionVectors,
NanTracker
}
[Serializable]

public bool displaySkyReflection = false;
public float skyReflectionMipmap = 0.0f;
public TilePass.TileSettings.TileDebug tileDebugByCategory = TilePass.TileSettings.TileDebug.None;
public void OnValidate()
{

Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.cs.hlsl | 11


#define DEBUGVIEWGBUFFER_BAKE_DIFFUSE_LIGHTING_WITH_ALBEDO_PLUS_EMISSIVE (11)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Attributes.DebugViewProperties: static fields
//
#define DEBUGVIEWPROPERTIES_NONE (0)
#define DEBUGVIEWPROPERTIES_TESSELLATION (12)
#define DEBUGVIEWPROPERTIES_PER_PIXEL_DISPLACEMENT (13)
#define DEBUGVIEWPROPERTIES_DEPTH_OFFSET (14)
#define DEBUGVIEWPROPERTIES_LIGHTMAP (15)
//
#define FULLSCREENDEBUGMODE_MOTION_VECTORS (3)
#define FULLSCREENDEBUGMODE_NAN_TRACKER (4)
#endif

Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.hlsl | 39


float4 _DebugLightingAlbedo; // xyz = albedo for diffuse, w unused
float4 _DebugLightingSmoothness; // x == bool override, y == override value
void GetPropertiesDataDebug(uint paramId, inout float3 result, inout bool needLinearToSRGB)
{
switch (paramId)
{
case DEBUGVIEWPROPERTIES_TESSELLATION:
#ifdef TESSELLATION_ON
result = float3(1.0, 0.0, 0.0);
#else
result = float3(0.0, 0.0, 0.0);
#endif
break;
case DEBUGVIEWPROPERTIES_PER_PIXEL_DISPLACEMENT:
#ifdef _PER_PIXEL_DISPLACEMENT // Caution: This define is related to a shader feature (but it may become a standard feature for HD)
result = float3(1.0, 0.0, 0.0);
#else
result = float3(0.0, 0.0, 0.0);
#endif
break;
case DEBUGVIEWPROPERTIES_DEPTH_OFFSET:
#ifdef _DEPTHOFFSET_ON // Caution: This define is related to a shader feature (but it may become a standard feature for HD)
result = float3(1.0, 0.0, 0.0);
#else
result = float3(0.0, 0.0, 0.0);
#endif
break;
case DEBUGVIEWPROPERTIES_LIGHTMAP:
#if defined(LIGHTMAP_ON) || defined (DIRLIGHTMAP_COMBINED) || defined(DYNAMICLIGHTMAP_ON)
result = float3(1.0, 0.0, 0.0);
#else
result = float3(0.0, 0.0, 0.0);
#endif
break;
}
}
#endif

Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugFullScreen.shader | 124


#include "../../ShaderLibrary/Common.hlsl"
#include "../Debug/DebugDisplay.cs.hlsl"
#include "../ShaderVariables.hlsl"
TEXTURE2D(_DebugFullScreenTexture);
SAMPLER2D(sampler_DebugFullScreenTexture);

return output;
}
// Motion vector debug utilities
// >>>
float DistanceToLine(float2 p, float2 p1, float2 p2)
{
float2 center = (p1 + p2) * 0.5;
float len = length(p2 - p1);
float2 dir = (p2 - p1) / len;
float2 rel_p = p - center;
return dot(rel_p, float2(dir.y, -dir.x));
}
float DistanceToSegment(float2 p, float2 p1, float2 p2)
{
float2 center = (p1 + p2) * 0.5;
float len = length(p2 - p1);
float2 dir = (p2 - p1) / len;
float2 rel_p = p - center;
float dist1 = abs(dot(rel_p, float2(dir.y, -dir.x)));
float dist2 = abs(dot(rel_p, dir)) - 0.5 * len;
return max(dist1, dist2);
}
float DrawArrow(float2 texcoord, float body, float head, float height, float linewidth, float antialias)
{
float w = linewidth / 2.0 + antialias;
float2 start = -float2(body / 2.0, 0.0);
float2 end = float2(body / 2.0, 0.0);
// Head: 3 lines
float d1 = DistanceToLine(texcoord, end, end - head * float2(1.0, -height));
float d2 = DistanceToLine(texcoord, end - head * float2(1.0, height), end);
float d3 = texcoord.x - end.x + head;
// Body: 1 segment
float d4 = DistanceToSegment(texcoord, start, end - float2(linewidth, 0.0));
float d = min(max(max(d1, d2), -d3), d4);
return d;
}
float2 SampleMotionVectors(float2 coords)
{
#if UNITY_UV_STARTS_AT_TOP
coords.y = 1.0 - coords.y;
#endif
float2 mv = SAMPLE_TEXTURE2D(_DebugFullScreenTexture, sampler_DebugFullScreenTexture, coords).xy;
#if UNITY_UV_STARTS_AT_TOP
mv.y *= -1.0;
#endif
return mv;
}
// <<<
float4 Frag(Varyings input) : SV_Target
{
// SSAO

if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_SSAOBEFORE_FILTERING)
{
return 1.0f - SAMPLE_TEXTURE2D(_DebugFullScreenTexture, sampler_DebugFullScreenTexture, input.texcoord).xxxx;
}
if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_NAN_TRACKER)
{
#if UNITY_UV_STARTS_AT_TOP
input.texcoord.y = 1.0 - input.texcoord.y;
#endif
float4 color = SAMPLE_TEXTURE2D(_DebugFullScreenTexture, sampler_DebugFullScreenTexture, input.texcoord);
if (any(isnan(color)) || any(isinf(color)))
{
color = float4(1.0, 0.0, 1.0, 1.0);
}
else
{
// Dim the color buffer so we can see NaNs & Infs better
color.rgb *= 0.25;
}
return color;
}
if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_MOTION_VECTORS)
{
float2 mv = SampleMotionVectors(input.texcoord);
// Background color intensity - keep this low unless you want to make your eyes bleed
const float kIntensity = 0.15;
// Map motion vector direction to color wheel (hue between 0 and 360deg)
float phi = atan2(mv.x, mv.y);
float hue = (phi / PI + 1.0) * 0.5;
float r = abs(hue * 6.0 - 3.0) - 1.0;
float g = 2.0 - abs(hue * 6.0 - 2.0);
float b = 2.0 - abs(hue * 6.0 - 4.0);
float3 color = saturate(float3(r, g, b) * kIntensity);
// Grid subdivisions - should be dynamic
const float kGrid = 64.0;
// Arrow grid (aspect ratio is kept)
float rows = floor(kGrid * _ScreenParams.y / _ScreenParams.x);
float cols = kGrid;
float2 size = _ScreenParams.xy / float2(cols, rows);
float body = min(size.x, size.y) / 1.4142135623730951; // sqrt(2)
float2 texcoord = input.positionCS.xy;
float2 center = (floor(texcoord / size) + 0.5) * size;
texcoord -= center;
// Sample the center of the cell to get the current arrow vector
float2 arrow_coord = center / _ScreenParams.xy;
float2 mv_arrow = SampleMotionVectors(arrow_coord);
// Skip empty motion
float d = 0.0;
if (any(mv_arrow))
{
// Rotate the arrow according to the direction
mv_arrow = normalize(mv_arrow);
float2x2 rot = float2x2(mv_arrow.x, -mv_arrow.y, mv_arrow.y, mv_arrow.x);
texcoord = mul(rot, texcoord);
d = DrawArrow(texcoord, body, 0.25 * body, 0.5, 2.0, 1.0);
d = 1.0 - saturate(d);
}
return float4(color + d.xxx, 1.0);
}
return float4(0.0, 0.0, 0.0, 0.0);
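The direction-to-color mapping above is a standard hue-wheel conversion; as a sanity check, the same math can be written as a small standalone C# helper (the class and method names here are illustrative, not part of the pipeline):

    using System;

    static class MotionVectorDebugColor
    {
        static float Saturate(float x) => Math.Min(Math.Max(x, 0f), 1f);

        // Maps a 2D motion vector direction to an RGB color on a hue wheel,
        // mirroring the shader code above (intensity keeps the background dim).
        public static (float r, float g, float b) FromMotionVector(float mvX, float mvY, float intensity = 0.15f)
        {
            float phi = (float)Math.Atan2(mvX, mvY);
            float hue = (phi / (float)Math.PI + 1f) * 0.5f; // [0, 1] around the wheel
            float r = Math.Abs(hue * 6f - 3f) - 1f;
            float g = 2f - Math.Abs(hue * 6f - 2f);
            float b = 2f - Math.Abs(hue * 6f - 4f);
            return (Saturate(r * intensity), Saturate(g * intensity), Saturate(b * intensity));
        }
    }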

40
Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewTiles.shader


StructuredBuffer<uint> g_TileList;
Buffer<uint> g_DispatchIndirectBuffer;
struct VSOut
struct Attributes
{
uint vertexID : SV_VertexID;
};
struct Varyings
float4 Pos : SV_POSITION;
int Variant : TEXCOORD0;
float4 positionCS : SV_POSITION;
int variant : TEXCOORD0;
VSOut Vert(uint vertexID : SV_VertexID)
Varyings Vert(uint vertexID : SV_VertexID)
{
uint quadIndex = vertexID / 6;
uint quadVertex = vertexID - quadIndex * 6;

float2 clipCoord = (pixelCoord / _ScreenParams.xy) * 2.0 - 1.0;
clipCoord.y *= -1;
VSOut Out;
Out.Pos = float4(clipCoord, 0, 1.0);
Out.Variant = variant;
return Out;
Varyings output;
output.positionCS = float4(clipCoord, 0, 1.0);
output.variant = variant;
return output;
VSOut Vert(float3 positionOS : POSITION)
Varyings Vert(Attributes input)
VSOut Out;
Out.Pos = TransformWorldToHClip(TransformObjectToWorld(positionOS));
Out.Variant = 0;
return Out;
Varyings output;
output.positionCS = GetFullScreenTriangleVertexPosition(input.vertexID);
output.variant = 0; // unused
return output;
}
#endif

return color;
}
float4 Frag(float4 positionCS : SV_POSITION, int Variant : TEXCOORD0) : SV_Target
float4 Frag(Varyings input) : SV_Target
PositionInputs posInput = GetPositionInput(positionCS.xy, _ScreenSize.zw, uint2(positionCS.xy) / GetTileSize());
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, uint2(input.positionCS.xy) / GetTileSize());
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
UpdatePositionInput(depth, _InvViewProjMatrix, _ViewProjMatrix, posInput);

n += count;
}
}
if(n == 0) n = -1;
if (n == 0)
n = -1;
n = Variant;
n = input.variant;
#endif
float4 result = float4(0.0, 0.0, 0.0, 0.0);
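The per-tile quad placement above boils down to mapping a pixel coordinate into clip space and flipping Y; a tiny standalone C# sketch of that mapping, with hypothetical names:

    static class TileDebugMath
    {
        // Converts a pixel coordinate to clip space the same way the vertex shader above does:
        // scale to [0, 1], remap to [-1, 1], then flip Y.
        public static (float x, float y) PixelToClip(float pixelX, float pixelY, float screenWidth, float screenHeight)
        {
            float clipX = (pixelX / screenWidth) * 2f - 1f;
            float clipY = ((pixelY / screenHeight) * 2f - 1f) * -1f;
            return (clipX, clipY);
        }
    }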

2
Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/LightingDebugPanel.cs


m_DebugPanel.GetDebugItem(DebugDisplaySettings.kFullScreenDebugMode).handler.OnEditorGUI();
m_DebugPanel.GetDebugItem(DebugDisplaySettings.kTileDebug).handler.OnEditorGUI();
DebugItem displaySkyReflecItem = m_DebugPanel.GetDebugItem(DebugDisplaySettings.kDisplaySkyReflectionDebug);
displaySkyReflecItem.handler.OnEditorGUI();
if ((bool)displaySkyReflecItem.GetValue())

18
Assets/ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.cs


// Tile pass Settings
public readonly GUIContent tileLightLoopSettings = new GUIContent("Tile Light Loop Settings");
public readonly GUIContent enableTileAndCluster = new GUIContent("Enable tile/clustered", "Toggle");
public readonly GUIContent enableSplitLightEvaluation = new GUIContent("Split light and reflection evaluation", "Toggle");
public readonly GUIContent enableComputeLightEvaluation = new GUIContent("Enable Compute Light Evaluation", "Toggle");
public readonly GUIContent enableComputeLightVariants = new GUIContent("Enable Compute Light Variants", "Toggle");
public readonly GUIContent enableComputeMaterialVariants = new GUIContent("Enable Compute Material Variants", "Toggle");

SerializedProperty m_enableClustered;
SerializedProperty m_enableFptlForOpaqueWhenClustered;
SerializedProperty m_enableBigTilePrepass;
SerializedProperty m_tileDebugByCategory;
// Rendering Settings
SerializedProperty m_RenderingUseForwardOnly = null;

// Tile settings
m_enableTileAndCluster = FindProperty(x => x.tileSettings.enableTileAndCluster);
m_enableSplitLightEvaluation = FindProperty(x => x.tileSettings.enableSplitLightEvaluation);
m_enableComputeLightEvaluation = FindProperty(x => x.tileSettings.enableComputeLightEvaluation);
m_enableComputeLightVariants = FindProperty(x => x.tileSettings.enableComputeLightVariants);
m_enableComputeMaterialVariants = FindProperty(x => x.tileSettings.enableComputeMaterialVariants);

m_tileDebugByCategory = FindProperty(x => x.tileSettings.tileDebugByCategory);
// Shadow settings
m_ShadowAtlasWidth = FindProperty(x => x.shadowInitParams.shadowAtlasWidth);

EditorGUI.BeginChangeCheck();
EditorGUILayout.PropertyField(m_enableTileAndCluster, styles.enableTileAndCluster);
EditorGUILayout.PropertyField(m_enableSplitLightEvaluation, styles.enableSplitLightEvaluation);
EditorGUILayout.PropertyField(m_enableComputeLightEvaluation, styles.enableComputeLightEvaluation);
EditorGUILayout.PropertyField(m_enableComputeLightVariants, styles.enableComputeLightVariants);
EditorGUILayout.PropertyField(m_enableComputeMaterialVariants, styles.enableComputeMaterialVariants);

EditorGUILayout.PropertyField(m_tileDebugByCategory, styles.tileDebugByCategory);
if (EditorGUI.EndChangeCheck())
{

EditorGUILayout.Space();
EditorGUILayout.LabelField(styles.renderingSettingsLabel);
EditorGUI.indentLevel++;
EditorGUI.BeginChangeCheck();
if (EditorGUI.EndChangeCheck())
{
if (m_RenderingUseForwardOnly.boolValue && !m_RenderingUseDepthPrepass.boolValue)
{
// Force depth prepass for forward-only rendering (for FPTL, etc).
m_RenderingUseDepthPrepass.boolValue = true;
HackSetDirty(renderContext); // Repaint
}
}
EditorGUI.indentLevel--;
}
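The forced depth-prepass logic above is the usual SerializedProperty change-check pattern; a minimal, self-contained sketch of the same idea on a hypothetical component (not the HD inspector itself):

    using UnityEditor;
    using UnityEngine;

    // Hypothetical example component and inspector, for illustration only.
    public class ForwardOnlyExample : MonoBehaviour
    {
        public bool useForwardOnly;
        public bool useDepthPrepass;
    }

    [CustomEditor(typeof(ForwardOnlyExample))]
    public class ForwardOnlyExampleEditor : Editor
    {
        public override void OnInspectorGUI()
        {
            serializedObject.Update();
            SerializedProperty forwardOnly = serializedObject.FindProperty("useForwardOnly");
            SerializedProperty depthPrepass = serializedObject.FindProperty("useDepthPrepass");

            EditorGUI.BeginChangeCheck();
            EditorGUILayout.PropertyField(forwardOnly);
            EditorGUILayout.PropertyField(depthPrepass);
            if (EditorGUI.EndChangeCheck())
            {
                // Keep the two settings consistent, as the HD inspector does for forward-only + FPTL.
                if (forwardOnly.boolValue && !depthPrepass.boolValue)
                    depthPrepass.boolValue = true;
            }
            serializedObject.ApplyModifiedProperties();
        }
    }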

655
Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs


using UnityEngine.Rendering;
using System;
using System.Linq;
using UnityEngine.Experimental.PostProcessing;
using UnityEngine.Rendering.PostProcessing;
using UnityEngine.Experimental.Rendering.HDPipeline.TilePass;
#if UNITY_EDITOR

{
public Matrix4x4 viewMatrix;
public Matrix4x4 projMatrix;
public Matrix4x4 nonJitteredProjMatrix;
public Vector4[] frustumPlaneEquations;
public Camera camera;
public Matrix4x4 viewProjMatrix

public Matrix4x4 nonJitteredViewProjMatrix
{
get { return nonJitteredProjMatrix * viewMatrix; }
}
public Vector4 invProjParam
{
// Ref: An Efficient Depth Linearization Method for Oblique View Frustums, Eq. 6.

// View-projection matrix from the previous frame.
public Matrix4x4 prevViewProjMatrix;
// We need to keep track of these when camera relative rendering is enabled so we can take
// camera translation into account when generating camera motion vectors
public Vector3 cameraPos;
public Vector3 prevCameraPos;
// The only way to reliably keep track of a frame change right now is to compare the frame
// count Unity gives us. We need this as a single camera could be rendered several times per
// frame and some matrices only have to be computed once. Realistically this shouldn't

// avoid one-frame jumps/hiccups with temporal effects (motion blur, TAA...)
bool m_FirstFrame;
public HDCamera(Camera camera)
public HDCamera(Camera cam)
this.camera = camera;
camera = cam;
frustumPlaneEquations = new Vector4[6];
public void Update()
public void Update(PostProcessLayer postProcessLayer)
// If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
// non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
bool taaEnabled = camera.cameraType == CameraType.Game
&& Utilities.IsTemporalAntialiasingActive(postProcessLayer);
Matrix4x4 nonJitteredCameraProj = camera.projectionMatrix;
Matrix4x4 cameraProj = taaEnabled
? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
: nonJitteredCameraProj;
var gpuProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, true); // Had to change this from 'false'
var gpuVP = gpuProj * camera.worldToCameraMatrix;
Matrix4x4 gpuProj = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
Matrix4x4 gpuView = camera.worldToCameraMatrix;
Matrix4x4 gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);
// A camera could be rendered multiple times per frame; only update the previous view-projection matrix if needed
Vector3 pos = camera.transform.position;
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
// Zero out the translation component.
gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
}
Matrix4x4 gpuVP = gpuNonJitteredProj * gpuView;
// A camera could be rendered multiple times per frame; only update the previous view-projection matrix & position if needed
prevViewProjMatrix = !m_FirstFrame
? viewProjMatrix
: gpuVP;
if (m_FirstFrame)
{
prevCameraPos = pos;
prevViewProjMatrix = gpuVP;
}
else
{
prevCameraPos = cameraPos;
prevViewProjMatrix = nonJitteredViewProjMatrix;
}
viewMatrix = camera.worldToCameraMatrix;
viewMatrix = gpuView;
nonJitteredProjMatrix = gpuNonJitteredProj;
cameraPos = pos;
Plane[] planes = GeometryUtility.CalculateFrustumPlanes(viewProjMatrix);
for (int i = 0; i < 6; i++)
{
frustumPlaneEquations[i] = new Vector4(planes[i].normal.x, planes[i].normal.y, planes[i].normal.z, planes[i].distance);
}
m_LastFrameActive = Time.frameCount;
}
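The first-frame handling above reduces to "seed the previous matrices/position with the current values, otherwise hand over what was stored last frame". A reduced C# sketch of that bookkeeping, with hypothetical names and UnityEngine types assumed:

    using UnityEngine;

    // Minimal sketch of the previous-frame bookkeeping pattern used above (illustrative only).
    class PreviousFrameState
    {
        public Matrix4x4 prevViewProj;   // what shaders would see as the previous VP
        public Vector3 prevCameraPos;

        Matrix4x4 m_CurrentViewProj;     // value stored on the last Update, i.e. "this frame" until the next one
        Vector3 m_CurrentCameraPos;
        bool m_FirstFrame = true;

        public void Update(Matrix4x4 nonJitteredViewProj, Vector3 cameraPos)
        {
            if (m_FirstFrame)
            {
                // Seed with the current values so temporal effects do not see a one-frame jump.
                prevViewProj = nonJitteredViewProj;
                prevCameraPos = cameraPos;
                m_FirstFrame = false;
            }
            else
            {
                // Previous frame = whatever was current before this Update.
                prevViewProj = m_CurrentViewProj;
                prevCameraPos = m_CurrentCameraPos;
            }

            // Store the non-jittered VP so TAA jitter does not leak into motion vectors.
            m_CurrentViewProj = nonJitteredViewProj;
            m_CurrentCameraPos = cameraPos;
        }
    }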

static List<Camera> m_Cleanup = new List<Camera>(); // Recycled to reduce GC pressure
// Grab the HDCamera tied to a given Camera and update it.
public static HDCamera Get(Camera camera)
public static HDCamera Get(Camera camera, PostProcessLayer postProcessLayer)
{
HDCamera hdcam;

m_Cameras.Add(camera, hdcam);
}
hdcam.Update();
hdcam.Update(postProcessLayer);
return hdcam;
}

public void SetupGlobalParams(CommandBuffer cmd)
{
cmd.SetGlobalMatrix("_ViewMatrix", viewMatrix);
cmd.SetGlobalMatrix("_InvViewMatrix", viewMatrix.inverse);
cmd.SetGlobalMatrix("_ProjMatrix", projMatrix);
cmd.SetGlobalMatrix("_InvProjMatrix", projMatrix.inverse);
cmd.SetGlobalMatrix("_ViewProjMatrix", viewProjMatrix);
cmd.SetGlobalMatrix("_InvViewProjMatrix", viewProjMatrix.inverse);
cmd.SetGlobalVector("_InvProjParam", invProjParam);
cmd.SetGlobalVector("_ScreenSize", screenSize);
cmd.SetGlobalMatrix("_PrevViewProjMatrix", prevViewProjMatrix);
cmd.SetGlobalMatrix("_ViewMatrix", viewMatrix);
cmd.SetGlobalMatrix("_InvViewMatrix", viewMatrix.inverse);
cmd.SetGlobalMatrix("_ProjMatrix", projMatrix);
cmd.SetGlobalMatrix("_InvProjMatrix", projMatrix.inverse);
cmd.SetGlobalMatrix("_NonJitteredViewProjMatrix", nonJitteredViewProjMatrix);
cmd.SetGlobalMatrix("_ViewProjMatrix", viewProjMatrix);
cmd.SetGlobalMatrix("_InvViewProjMatrix", viewProjMatrix.inverse);
cmd.SetGlobalVector("_InvProjParam", invProjParam);
cmd.SetGlobalVector("_ScreenSize", screenSize);
cmd.SetGlobalMatrix("_PrevViewProjMatrix", prevViewProjMatrix);
cmd.SetGlobalVectorArray("_FrustumPlanes", frustumPlaneEquations);
material.SetMatrix("_ViewMatrix", viewMatrix);
material.SetMatrix("_InvViewMatrix", viewMatrix.inverse);
material.SetMatrix("_ProjMatrix", projMatrix);
material.SetMatrix("_InvProjMatrix", projMatrix.inverse);
material.SetMatrix("_ViewProjMatrix", viewProjMatrix);
material.SetMatrix("_InvViewProjMatrix", viewProjMatrix.inverse);
material.SetVector("_InvProjParam", invProjParam);
material.SetVector("_ScreenSize", screenSize);
material.SetMatrix("_PrevViewProjMatrix", prevViewProjMatrix);
material.SetMatrix("_ViewMatrix", viewMatrix);
material.SetMatrix("_InvViewMatrix", viewMatrix.inverse);
material.SetMatrix("_ProjMatrix", projMatrix);
material.SetMatrix("_InvProjMatrix", projMatrix.inverse);
material.SetMatrix("_NonJitteredViewProjMatrix", nonJitteredViewProjMatrix);
material.SetMatrix("_ViewProjMatrix", viewProjMatrix);
material.SetMatrix("_InvViewProjMatrix", viewProjMatrix.inverse);
material.SetVector("_InvProjParam", invProjParam);
material.SetVector("_ScreenSize", screenSize);
material.SetMatrix("_PrevViewProjMatrix", prevViewProjMatrix);
material.SetVectorArray("_FrustumPlanes", frustumPlaneEquations);
Utilities.SetMatrixCS(cmd, cs, "_ViewMatrix", viewMatrix);
Utilities.SetMatrixCS(cmd, cs, "_InvViewMatrix", viewMatrix.inverse);
Utilities.SetMatrixCS(cmd, cs, "_ProjMatrix", projMatrix);
Utilities.SetMatrixCS(cmd, cs, "_InvProjMatrix", projMatrix.inverse);
Utilities.SetMatrixCS(cmd, cs, "_ViewProjMatrix", viewProjMatrix);
Utilities.SetMatrixCS(cmd, cs, "_InvViewProjMatrix", viewProjMatrix.inverse);
cmd.SetComputeVectorParam( cs, "_InvProjParam", invProjParam);
cmd.SetComputeVectorParam( cs, "_ScreenSize", screenSize);
Utilities.SetMatrixCS(cmd, cs, "_PrevViewProjMatrix", prevViewProjMatrix);
cmd.SetComputeMatrixParam(cs, "_ViewMatrix", viewMatrix);
cmd.SetComputeMatrixParam(cs, "_InvViewMatrix", viewMatrix.inverse);
cmd.SetComputeMatrixParam(cs, "_ProjMatrix", projMatrix);
cmd.SetComputeMatrixParam(cs, "_InvProjMatrix", projMatrix.inverse);
cmd.SetComputeMatrixParam(cs, "_NonJitteredViewProjMatrix", nonJitteredViewProjMatrix);
cmd.SetComputeMatrixParam(cs, "_ViewProjMatrix", viewProjMatrix);
cmd.SetComputeMatrixParam(cs, "_InvViewProjMatrix", viewProjMatrix.inverse);
cmd.SetComputeVectorParam(cs, "_InvProjParam", invProjParam);
cmd.SetComputeVectorParam(cs, "_ScreenSize", screenSize);
cmd.SetComputeMatrixParam(cs, "_PrevViewProjMatrix", prevViewProjMatrix);
cmd.SetComputeVectorArrayParam(cs, "_FrustumPlanes", frustumPlaneEquations);
}
}

readonly GBufferManager m_gbufferManager = new GBufferManager();
Material m_CopyStencilBuffer;
// Various sets of materials used in the render loop
Material m_FilterAndCombineSubsurfaceScattering;
// Old SSS Model >>>

readonly RenderTargetIdentifier m_DistortionBufferRT;
private RenderTexture m_CameraDepthStencilBuffer = null;
private RenderTexture m_CameraDepthStencilBufferCopy = null;
private RenderTexture m_CameraDepthBufferCopy = null;
private RenderTexture m_CameraStencilBufferCopy = null; // Currently, it's manually copied using a pixel shader, and optimized to only contain the SSS bit
private RenderTargetIdentifier m_CameraDepthStencilBufferCopyRT;
private RenderTargetIdentifier m_CameraDepthBufferCopyRT;
private RenderTargetIdentifier m_CameraStencilBufferCopyRT;
// Post-processing context and screen-space effects (recycled on every frame to avoid GC alloc)
readonly PostProcessRenderContext m_PostProcessContext;

// Currently we use only 2 bits to identify the kind of lighting that is expected from the render pipeline
// Usage is defined in LightDefinitions.cs
[Flags]
public enum StencilBits
public enum StencilBitMask
Lighting = 3, // 0
All = 255 // 0xFF
Clear = 0, // 0x0
Lighting = 3, // 0x3 - 2 bit
All = 255 // 0xFF - 8 bit
}
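With only the two low bits reserved for the lighting classification, reading and writing that field is a plain mask operation; a standalone C# sketch (the helper names are hypothetical):

    using System;

    [Flags]
    enum StencilBitMask
    {
        Clear    = 0,   // 0x00
        Lighting = 3,   // 0x03 - low 2 bits
        All      = 255  // 0xFF
    }

    static class StencilExample
    {
        // Extracts the 2-bit lighting classification from a stencil value.
        public static int GetLightingBits(int stencil)
        {
            return stencil & (int)StencilBitMask.Lighting;
        }

        // Writes a 2-bit lighting classification, leaving the other 6 bits untouched.
        public static int SetLightingBits(int stencil, int lightingValue)
        {
            return (stencil & ~(int)StencilBitMask.Lighting) | (lightingValue & (int)StencilBitMask.Lighting);
        }
    }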
// Detect when the window size is changing

CreateSssMaterials(sssSettings.useDisneySSS);
// <<< Old SSS Model
m_CopyStencilBuffer = Utilities.CreateEngineMaterial("Hidden/HDRenderPipeline/CopyStencilBuffer");
m_CameraMotionVectorsMaterial = Utilities.CreateEngineMaterial("Hidden/HDRenderPipeline/CameraMotionVectors");
InitializeDebugMaterials();

};
#endif
void CreateDepthBuffer(Camera camera)
void CreateDepthStencilBuffer(Camera camera)
{
if (m_CameraDepthStencilBuffer != null)
{

if (NeedDepthBufferCopy())
{
if (m_CameraDepthStencilBufferCopy != null)
if (m_CameraDepthBufferCopy != null)
m_CameraDepthStencilBufferCopy.Release();
m_CameraDepthBufferCopy.Release();
m_CameraDepthStencilBufferCopy = new RenderTexture(camera.pixelWidth, camera.pixelHeight, 24, RenderTextureFormat.Depth);
m_CameraDepthStencilBufferCopy.filterMode = FilterMode.Point;
m_CameraDepthStencilBufferCopy.Create();
m_CameraDepthStencilBufferCopyRT = new RenderTargetIdentifier(m_CameraDepthStencilBufferCopy);
m_CameraDepthBufferCopy = new RenderTexture(camera.pixelWidth, camera.pixelHeight, 24, RenderTextureFormat.Depth);
m_CameraDepthBufferCopy.filterMode = FilterMode.Point;
m_CameraDepthBufferCopy.Create();
m_CameraDepthBufferCopyRT = new RenderTargetIdentifier(m_CameraDepthBufferCopy);
}
if (NeedStencilBufferCopy())
{
if (m_CameraStencilBufferCopy != null)
{
m_CameraStencilBufferCopy.Release();
}
m_CameraStencilBufferCopy = new RenderTexture(camera.pixelWidth, camera.pixelHeight, 0, RenderTextureFormat.R8);
m_CameraStencilBufferCopy.filterMode = FilterMode.Point;
m_CameraStencilBufferCopy.Create();
m_CameraStencilBufferCopyRT = new RenderTargetIdentifier(m_CameraStencilBufferCopy);
}
}

if (resolutionChanged || m_CameraDepthStencilBuffer == null)
{
CreateDepthBuffer(camera);
CreateDepthStencilBuffer(camera);
}
if (resolutionChanged || m_LightLoop.NeedResize())

m_CurrentHeight = camera.pixelHeight;
}
public void PushGlobalParams(HDCamera hdCamera, ScriptableRenderContext renderContext, SubsurfaceScatteringSettings sssParameters)
public void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd, SubsurfaceScatteringSettings sssParameters)
var cmd = CommandBufferPool.Get("Push Global Parameters");
using (new Utilities.ProfilingSample("Push Global Parameters", cmd))
{
hdCamera.SetupGlobalParams(cmd);
hdCamera.SetupGlobalParams(cmd);
// TODO: cmd.SetGlobalInt() does not exist, so we are forced to use Shader.SetGlobalInt() instead.
// TODO: cmd.SetGlobalInt() does not exist, so we are forced to use Shader.SetGlobalInt() instead.
if (m_SkyManager.IsSkyValid())
{
m_SkyManager.SetGlobalSkyTexture();
Shader.SetGlobalInt("_EnvLightSkyEnabled", 1);
}
else
{
Shader.SetGlobalInt("_EnvLightSkyEnabled", 0);
}
if (m_SkyManager.IsSkyValid())
{
m_SkyManager.SetGlobalSkyTexture();
Shader.SetGlobalInt("_EnvLightSkyEnabled", 1);
}
else
{
Shader.SetGlobalInt("_EnvLightSkyEnabled", 0);
// Broadcast SSS parameters to all shaders.
Shader.SetGlobalInt( "_EnableSSSAndTransmission", m_DebugDisplaySettings.renderingDebugSettings.enableSSSAndTransmission ? 1 : 0);
Shader.SetGlobalInt( "_TexturingModeFlags", (int)sssParameters.texturingModeFlags);
Shader.SetGlobalInt( "_TransmissionFlags", (int)sssParameters.transmissionFlags);
cmd.SetGlobalVectorArray( "_ThicknessRemaps", sssParameters.thicknessRemaps);
// We currently support two different SSS modes: Jimenez (with a 2-Gaussian profile) and Disney.
// We have added the ability to switch between them for subsurface scattering, but for transmittance this is trickier, as it would require
// extra shader variants for the forward, GBuffer and deferred shaders, which we want to avoid.
// So for transmittance we use the Disney profile formulation (which we know is more correct) in both cases, and in the Jimenez case we hack the parameters with 2-Gaussian parameters (ideally we should fit them, but we haven't found a good fit) so it approximately matches.
// Note: Jimenez SSS works in cm whereas Disney works in mm, which makes it inconsistent to compare the two models side by side.
cmd.SetGlobalVectorArray("_ShapeParams", sssParameters.useDisneySSS ? sssParameters.shapeParams : sssParameters.halfRcpWeightedVariances);
cmd.SetGlobalVectorArray("_TransmissionTints", sssParameters.transmissionTints);
// Broadcast SSS parameters to all shaders.
Shader.SetGlobalInt( "_EnableSSSAndTransmission", m_DebugDisplaySettings.renderingDebugSettings.enableSSSAndTransmission ? 1 : 0);
Shader.SetGlobalInt( "_TexturingModeFlags", (int)sssParameters.texturingModeFlags);
Shader.SetGlobalInt( "_TransmissionFlags", (int)sssParameters.transmissionFlags);
cmd.SetGlobalFloatArray( "_ThicknessRemaps", sssParameters.thicknessRemaps);
// We currently support two different SSS modes: Jimenez (with a 2-Gaussian profile) and Disney.
// We have added the ability to switch between them for subsurface scattering, but for transmittance this is trickier, as it would require
// extra shader variants for the forward, GBuffer and deferred shaders, which we want to avoid.
// So for transmittance we use the Disney profile formulation (which we know is more correct) in both cases, and in the Jimenez case we hack the parameters with 2-Gaussian parameters (ideally we should fit them, but we haven't found a good fit) so it approximately matches.
// Note: Jimenez SSS works in cm whereas Disney works in mm, which makes it inconsistent to compare the two models side by side.
cmd.SetGlobalVectorArray("_ShapeParams", sssParameters.useDisneySSS ? sssParameters.shapeParams : sssParameters.halfRcpWeightedVariances);
cmd.SetGlobalVectorArray("_TransmissionTints", sssParameters.transmissionTints);
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
bool NeedDepthBufferCopy()

}
Texture GetDepthTexture()
bool NeedStencilBufferCopy()
if (NeedDepthBufferCopy())
return m_CameraDepthStencilBufferCopy;
else
return m_CameraDepthStencilBuffer;
// Currently, Unity does not offer a way to bind the stencil buffer as a texture in a compute shader.
// Therefore, it's manually copied using a pixel shader, and optimized to only contain the SSS bit.
return m_DebugDisplaySettings.renderingDebugSettings.enableSSSAndTransmission;
private void CopyDepthBufferIfNeeded(ScriptableRenderContext renderContext)
RenderTargetIdentifier GetDepthTexture()
var cmd = CommandBufferPool.Get(NeedDepthBufferCopy() ? "Copy DepthBuffer" : "Set DepthBuffer");
return NeedDepthBufferCopy() ? m_CameraDepthBufferCopy : m_CameraDepthStencilBuffer;
}
if (NeedDepthBufferCopy())
RenderTargetIdentifier GetStencilTexture()
{
return NeedStencilBufferCopy() ? m_CameraStencilBufferCopyRT : m_CameraDepthStencilBufferRT;
}
private void CopyDepthBufferIfNeeded(CommandBuffer cmd)
{
using (new Utilities.ProfilingSample(NeedDepthBufferCopy() ? "Copy DepthBuffer" : "Set DepthBuffer", cmd))
{
if (NeedDepthBufferCopy())
{
using (new Utilities.ProfilingSample("Copy depth-stencil buffer", cmd))
{
cmd.CopyTexture(m_CameraDepthStencilBufferRT, m_CameraDepthBufferCopyRT);
}
}
cmd.SetGlobalTexture("_MainDepthTexture", GetDepthTexture());
}
}
private void PrepareAndBindStencilTexture(CommandBuffer cmd)
{
if (NeedStencilBufferCopy())
using (new Utilities.ProfilingSample("Copy depth-stencil buffer", renderContext))
using (new Utilities.ProfilingSample("Copy StencilBuffer", cmd))
cmd.CopyTexture(m_CameraDepthStencilBufferRT, m_CameraDepthStencilBufferCopyRT);
Utilities.DrawFullScreen(cmd, m_CopyStencilBuffer, m_CameraStencilBufferCopyRT, m_CameraDepthStencilBufferRT);
cmd.SetGlobalTexture("_MainDepthTexture", GetDepthTexture());
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
cmd.SetGlobalTexture("_StencilTexture", GetStencilTexture());
}
public void UpdateCommonSettings()

GraphicsSettings.lightsUseLinearIntensity = true;
GraphicsSettings.lightsUseColorTemperature = true;
m_MaterialList.ForEach(material => material.RenderInit(renderContext));
// This is the main command buffer used for the frame.
CommandBuffer cmd = CommandBufferPool.Get("");
m_MaterialList.ForEach(material => material.RenderInit(cmd));
// Do anything we need to do upon a new frame.
m_LightLoop.NewFrame();

renderContext.SetupCameraProperties(camera);
var hdCamera = HDCamera.Get(camera);
var postProcessLayer = camera.GetComponent<PostProcessLayer>();
HDCamera hdCamera = HDCamera.Get(camera, postProcessLayer);
PushGlobalParams(hdCamera, cmd, m_Asset.sssSettings);
// TODO: Find a correct place to bind these material textures
// We have to bind the material-specific global parameters in this mode

// TODO: Add another path dedicated to planar reflection / real time cubemap that implement simpler lighting
string passName = "Forward"; // It is up to the users to only send unlit object for this camera path
using (new Utilities.ProfilingSample(passName, renderContext))
using (new Utilities.ProfilingSample(passName, cmd))
Utilities.SetRenderTarget(renderContext, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.ClearColor | ClearFlag.ClearDepth);
RenderOpaqueRenderList(m_CullResults, camera, renderContext, passName);
RenderTransparentRenderList(m_CullResults, camera, renderContext, passName);
Utilities.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.ClearColor | ClearFlag.ClearDepth);
RenderOpaqueRenderList(m_CullResults, camera, renderContext, cmd, passName);
RenderTransparentRenderList(m_CullResults, camera, renderContext, cmd, passName);
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
InitAndClearBuffer(camera, renderContext);
PushGlobalParams(hdCamera, renderContext, m_Asset.sssSettings);
InitAndClearBuffer(camera, cmd);
RenderDepthPrepass(m_CullResults, camera, renderContext);
RenderDepthPrepass(m_CullResults, camera, renderContext, cmd);
RenderForwardOnlyOpaqueDepthPrepass(m_CullResults, camera, renderContext);
RenderGBuffer(m_CullResults, camera, renderContext);
RenderForwardOnlyOpaqueDepthPrepass(m_CullResults, camera, renderContext, cmd);
RenderGBuffer(m_CullResults, camera, renderContext, cmd);
CopyDepthBufferIfNeeded(renderContext);
CopyDepthBufferIfNeeded(cmd);
// Required for the SSS pass.
PrepareAndBindStencilTexture(cmd);
RenderDebugViewMaterial(m_CullResults, hdCamera, renderContext);
RenderDebugViewMaterial(m_CullResults, hdCamera, renderContext, cmd);
using (new Utilities.ProfilingSample("Build Light list and render shadows", renderContext))
using (new Utilities.ProfilingSample("Build Light list and render shadows", cmd))
m_SsaoEffect.Render(ssaoSettingsToUse, this, hdCamera, renderContext, m_Asset.renderingSettings.useForwardRenderingOnly);
m_SsaoEffect.Render(ssaoSettingsToUse, this, hdCamera, renderContext, cmd, m_Asset.renderingSettings.useForwardRenderingOnly);
m_LightLoop.RenderShadows(renderContext, m_CullResults);
m_LightLoop.RenderShadows(renderContext, cmd, m_CullResults);
m_LightLoop.BuildGPULightLists(camera, renderContext, m_CameraDepthStencilBufferRT);
m_LightLoop.BuildGPULightLists(camera, cmd, m_CameraDepthStencilBufferRT);
UpdateSkyEnvironment(hdCamera, renderContext);
UpdateSkyEnvironment(hdCamera, cmd);
RenderDeferredLighting(hdCamera, renderContext);
RenderDeferredLighting(hdCamera, cmd);
CombineSubsurfaceScattering(hdCamera, renderContext, m_Asset.sssSettings);
CombineSubsurfaceScattering(hdCamera, cmd, m_Asset.sssSettings);
RenderForward(m_CullResults, camera, renderContext, true); // Render deferred or forward opaque
RenderForwardOnlyOpaque(m_CullResults, camera, renderContext);
RenderForward(m_CullResults, camera, renderContext, cmd, true); // Render deferred or forward opaque
RenderForwardOnlyOpaque(m_CullResults, camera, renderContext, cmd);
RenderLightingDebug(hdCamera, renderContext, m_CameraColorBufferRT);
RenderLightingDebug(hdCamera, cmd, m_CameraColorBufferRT, m_DebugDisplaySettings);
CopyDepthBufferIfNeeded(renderContext);
CopyDepthBufferIfNeeded(cmd);
RenderSky(hdCamera, renderContext);
RenderSky(hdCamera, cmd);
RenderForward(m_CullResults, camera, renderContext, false);
RenderForward(m_CullResults, camera, renderContext, cmd, false);
PushFullScreenDebugTexture(cmd, m_CameraColorBuffer, camera, renderContext, FullScreenDebugMode.NanTracker);
// Simple blit
var cmd = CommandBufferPool.Get("Blit to final RT" );
cmd.Blit(m_CameraColorBufferRT, BuiltinRenderTextureType.CameraTarget);
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
using (new Utilities.ProfilingSample("Blit to final RT", cmd))
{
// Simple blit
cmd.Blit(m_CameraColorBufferRT, BuiltinRenderTextureType.CameraTarget);
}
RenderVelocity(m_CullResults, hdCamera, renderContext); // Note: we may have to render velocity earlier if we do temporal AO, temporal volumetrics, etc. That would mean forward opaque objects are not taken into account in the deferred rendering case.
RenderVelocity(m_CullResults, hdCamera, renderContext, cmd); // Note: we may have to render velocity earlier if we do temporal AO, temporal volumetrics, etc. That would mean forward opaque objects are not taken into account in the deferred rendering case.
RenderDistortion(m_CullResults, camera, renderContext);
RenderDistortion(m_CullResults, camera, renderContext, cmd);
RenderPostProcesses(camera, renderContext);
RenderPostProcesses(camera, cmd, postProcessLayer);
RenderDebug(hdCamera, renderContext);
RenderDebug(hdCamera, cmd);
var cmd = CommandBufferPool.Get();
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
void RenderOpaqueRenderList(CullResults cull, Camera camera, ScriptableRenderContext renderContext, string passName, RendererConfiguration rendererConfiguration = 0)
void RenderOpaqueRenderList(CullResults cull, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd, string passName, RendererConfiguration rendererConfiguration = 0)
// This is done here because the DrawRenderers API lives outside command buffers, so we need to execute the command buffer before doing any DrawRenderers call
renderContext.ExecuteCommandBuffer(cmd);
cmd.Clear();
var settings = new DrawRendererSettings(cull, camera, new ShaderPassName(passName))
{

renderContext.DrawRenderers(ref settings);
}
void RenderTransparentRenderList(CullResults cull, Camera camera, ScriptableRenderContext renderContext, string passName, RendererConfiguration rendererConfiguration = 0)
void RenderTransparentRenderList(CullResults cull, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd, string passName, RendererConfiguration rendererConfiguration = 0)
// This is done here because the DrawRenderers API lives outside command buffers, so we need to execute the command buffer before doing any DrawRenderers call
renderContext.ExecuteCommandBuffer(cmd);
cmd.Clear();
var settings = new DrawRendererSettings(cull, camera, new ShaderPassName(passName))
{

renderContext.DrawRenderers(ref settings);
}
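The comment above is the one constraint to remember when mixing command buffers with DrawRenderers: the buffer has to be executed on the context (and cleared) before the draw, or its state changes land after the renderers. A reduced sketch of that pattern, assuming the experimental SRP namespaces used by this branch and a caller-supplied draw callback:

    using UnityEngine.Rendering;
    using UnityEngine.Experimental.Rendering;

    static class RenderListHelper
    {
        // Flush any pending command-buffer state into the context before issuing draws that
        // live outside command buffers (DrawRenderers), then clear the buffer for reuse.
        public static void FlushAndDraw(ScriptableRenderContext renderContext, CommandBuffer cmd, System.Action drawRenderers)
        {
            renderContext.ExecuteCommandBuffer(cmd); // render targets / globals set on cmd are applied here
            cmd.Clear();                             // cmd is reused afterwards, so drop what was just executed
            drawRenderers();                         // e.g. the DrawRenderers call for the opaque or transparent list
        }
    }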
void RenderDepthPrepass(CullResults cull, Camera camera, ScriptableRenderContext renderContext)
void RenderDepthPrepass(CullResults cull, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd)
{
// If we are forward only we will do a depth prepass
// TODO: The depth prepass should be enabled based on the light loop settings; the LightLoop defines whether it needs a depth prepass + forward only...

using (new Utilities.ProfilingSample("Depth Prepass", renderContext))
using (new Utilities.ProfilingSample("Depth Prepass", cmd))
Utilities.SetRenderTarget(renderContext, m_CameraDepthStencilBufferRT);
RenderOpaqueRenderList(cull, camera, renderContext, "DepthOnly");
Utilities.SetRenderTarget(cmd, m_CameraDepthStencilBufferRT);
RenderOpaqueRenderList(cull, camera, renderContext, cmd, "DepthOnly");
void RenderGBuffer(CullResults cull, Camera camera, ScriptableRenderContext renderContext)
void RenderGBuffer(CullResults cull, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd)
{
if (m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())
{

string passName = m_DebugDisplaySettings.IsDebugDisplayEnabled() ? "GBufferDebugDisplay" : "GBuffer";
using (new Utilities.ProfilingSample(passName, renderContext))
using (new Utilities.ProfilingSample(passName, cmd))
Utilities.SetRenderTarget(renderContext, m_gbufferManager.GetGBuffers(), m_CameraDepthStencilBufferRT);
Utilities.SetRenderTarget(cmd, m_gbufferManager.GetGBuffers(), m_CameraDepthStencilBufferRT);
RenderOpaqueRenderList(cull, camera, renderContext, passName, Utilities.kRendererConfigurationBakedLighting);
RenderOpaqueRenderList(cull, camera, renderContext, cmd, passName, Utilities.kRendererConfigurationBakedLighting);
void RenderForwardOnlyOpaqueDepthPrepass(CullResults cull, Camera camera, ScriptableRenderContext renderContext)
void RenderForwardOnlyOpaqueDepthPrepass(CullResults cull, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd)
{
// If we are forward only we don't need to render ForwardOnlyOpaqueDepthOnly objects,
// but if a prepass is requested we render them

using (new Utilities.ProfilingSample("Forward opaque depth", renderContext))
using (new Utilities.ProfilingSample("Forward opaque depth", cmd))
Utilities.SetRenderTarget(renderContext, m_CameraDepthStencilBufferRT);
RenderOpaqueRenderList(cull, camera, renderContext, "ForwardOnlyOpaqueDepthOnly");
Utilities.SetRenderTarget(cmd, m_CameraDepthStencilBufferRT);
RenderOpaqueRenderList(cull, camera, renderContext, cmd, "ForwardOnlyOpaqueDepthOnly");
void RenderDebugViewMaterial(CullResults cull, HDCamera hdCamera, ScriptableRenderContext renderContext)
void RenderDebugViewMaterial(CullResults cull, HDCamera hdCamera, ScriptableRenderContext renderContext, CommandBuffer cmd)
using (new Utilities.ProfilingSample("DisplayDebug ViewMaterial", renderContext))
using (new Utilities.ProfilingSample("DisplayDebug ViewMaterial", cmd))
Utilities.SetRenderTarget(renderContext, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, Utilities.kClearAll, Color.black);
RenderOpaqueRenderList(cull, hdCamera.camera, renderContext, "ForwardDisplayDebug", Utilities.kRendererConfigurationBakedLighting);
Utilities.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, Utilities.kClearAll, Color.black);
RenderOpaqueRenderList(cull, hdCamera.camera, renderContext, cmd, "ForwardDisplayDebug", Utilities.kRendererConfigurationBakedLighting);
// TODO: Bind depth textures
var cmd = CommandBufferPool.Get("DebugViewMaterialGBuffer" );
cmd.Blit(null, m_CameraColorBufferRT, m_DebugViewMaterialGBuffer, 0);
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
using (new Utilities.ProfilingSample("DebugViewMaterialGBuffer", cmd))
{
// TODO: Bind depth textures
cmd.Blit(null, m_CameraColorBufferRT, m_DebugViewMaterialGBuffer, 0);
}
RenderTransparentRenderList(cull, hdCamera.camera, renderContext, "ForwardDisplayDebug", Utilities.kRendererConfigurationBakedLighting);
RenderTransparentRenderList(cull, hdCamera.camera, renderContext, cmd, "ForwardDisplayDebug", Utilities.kRendererConfigurationBakedLighting);
var cmd = CommandBufferPool.Get("Blit DebugView Material Debug");
cmd.Blit(m_CameraColorBufferRT, BuiltinRenderTextureType.CameraTarget);
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
using (new Utilities.ProfilingSample("Blit DebugView Material Debug", cmd))
{
cmd.Blit(m_CameraColorBufferRT, BuiltinRenderTextureType.CameraTarget);
}
void RenderDeferredLighting(HDCamera hdCamera, ScriptableRenderContext renderContext)
void RenderDeferredLighting(HDCamera hdCamera, CommandBuffer cmd)
{
if (m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())
{

RenderTargetIdentifier[] colorRTs = { m_CameraColorBufferRT, m_CameraSubsurfaceBufferRT };
RenderTargetIdentifier[] colorRTs = { m_CameraColorBufferRT, m_CameraSubsurfaceBufferRT };
RenderTargetIdentifier depthTexture = GetDepthTexture();
m_LightLoop.RenderDeferredLighting(hdCamera, renderContext, m_DebugDisplaySettings, colorRTs, m_CameraDepthStencilBufferRT, new RenderTargetIdentifier(GetDepthTexture()), true);
m_LightLoop.RenderDeferredLighting(hdCamera, cmd, m_DebugDisplaySettings, colorRTs, m_CameraDepthStencilBufferRT, depthTexture, true);
m_LightLoop.RenderDeferredLighting(hdCamera, renderContext, m_DebugDisplaySettings, colorRTs, m_CameraDepthStencilBufferRT, new RenderTargetIdentifier(GetDepthTexture()), false);
m_LightLoop.RenderDeferredLighting(hdCamera, cmd, m_DebugDisplaySettings, colorRTs, m_CameraDepthStencilBufferRT, depthTexture, false);
void CombineSubsurfaceScattering(HDCamera hdCamera, ScriptableRenderContext context, SubsurfaceScatteringSettings sssParameters)
void CombineSubsurfaceScattering(HDCamera hdCamera, CommandBuffer cmd, SubsurfaceScatteringSettings sssParameters)
var cmd = CommandBufferPool.Get("Subsurface Scattering");
if (sssSettings.useDisneySSS)
using (new Utilities.ProfilingSample("Subsurface Scattering", cmd))
cmd.SetGlobalTexture("_IrradianceSource", m_CameraSubsurfaceBufferRT); // Cannot set a RT on a material
m_FilterAndCombineSubsurfaceScattering.SetFloatArray("_WorldScales", sssParameters.worldScales);
m_FilterAndCombineSubsurfaceScattering.SetFloatArray("_FilterKernelsNearField", sssParameters.filterKernelsNearField);
m_FilterAndCombineSubsurfaceScattering.SetFloatArray("_FilterKernelsFarField", sssParameters.filterKernelsFarField);
if (sssSettings.useDisneySSS)
{
cmd.SetGlobalTexture("_IrradianceSource", m_CameraSubsurfaceBufferRT); // Cannot set a RT on a material
m_FilterAndCombineSubsurfaceScattering.SetFloatArray("_WorldScales", sssParameters.worldScales);
m_FilterAndCombineSubsurfaceScattering.SetFloatArray("_FilterKernelsNearField", sssParameters.filterKernelsNearField);
m_FilterAndCombineSubsurfaceScattering.SetFloatArray("_FilterKernelsFarField", sssParameters.filterKernelsFarField);
Utilities.DrawFullScreen(cmd, m_FilterAndCombineSubsurfaceScattering, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT);
}
else
{
// Perform the vertical SSS filtering pass.
cmd.SetGlobalTexture("_IrradianceSource", m_CameraSubsurfaceBufferRT); // Cannot set a RT on a material
m_FilterSubsurfaceScattering.SetFloatArray("_WorldScales", sssParameters.worldScales);
m_FilterSubsurfaceScattering.SetVectorArray("_FilterKernelsBasic", sssParameters.filterKernelsBasic);
m_FilterSubsurfaceScattering.SetVectorArray("_HalfRcpWeightedVariances", sssParameters.halfRcpWeightedVariances);
Utilities.DrawFullScreen(cmd, m_FilterSubsurfaceScattering, m_CameraFilteringBufferRT, m_CameraDepthStencilBufferRT);
Utilities.DrawFullScreen(cmd, m_FilterAndCombineSubsurfaceScattering, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT);
}
else
{
// Perform the vertical SSS filtering pass.
cmd.SetGlobalTexture("_IrradianceSource", m_CameraSubsurfaceBufferRT); // Cannot set a RT on a material
m_FilterSubsurfaceScattering.SetFloatArray("_WorldScales", sssParameters.worldScales);
m_FilterSubsurfaceScattering.SetVectorArray("_FilterKernelsBasic", sssParameters.filterKernelsBasic);
m_FilterSubsurfaceScattering.SetVectorArray("_HalfRcpWeightedVariances", sssParameters.halfRcpWeightedVariances);
Utilities.DrawFullScreen(cmd, m_FilterSubsurfaceScattering, m_CameraFilteringBufferRT, m_CameraDepthStencilBufferRT);
// Perform the horizontal SSS filtering pass, and combine diffuse and specular lighting.
cmd.SetGlobalTexture("_IrradianceSource", m_CameraFilteringBufferRT); // Cannot set a RT on a material
m_FilterAndCombineSubsurfaceScattering.SetFloatArray("_WorldScales", sssParameters.worldScales);
m_FilterAndCombineSubsurfaceScattering.SetVectorArray("_FilterKernelsBasic", sssParameters.filterKernelsBasic);
m_FilterAndCombineSubsurfaceScattering.SetVectorArray("_HalfRcpWeightedVariances", sssParameters.halfRcpWeightedVariances);
Utilities.DrawFullScreen(cmd, m_FilterAndCombineSubsurfaceScattering, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT);
// Perform the horizontal SSS filtering pass, and combine diffuse and specular lighting.
cmd.SetGlobalTexture("_IrradianceSource", m_CameraFilteringBufferRT); // Cannot set a RT on a material
m_FilterAndCombineSubsurfaceScattering.SetFloatArray("_WorldScales", sssParameters.worldScales);
m_FilterAndCombineSubsurfaceScattering.SetVectorArray("_FilterKernelsBasic", sssParameters.filterKernelsBasic);
m_FilterAndCombineSubsurfaceScattering.SetVectorArray("_HalfRcpWeightedVariances", sssParameters.halfRcpWeightedVariances);
Utilities.DrawFullScreen(cmd, m_FilterAndCombineSubsurfaceScattering, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT);
}
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
void UpdateSkyEnvironment(HDCamera hdCamera, ScriptableRenderContext renderContext)
void UpdateSkyEnvironment(HDCamera hdCamera, CommandBuffer cmd)
m_SkyManager.UpdateEnvironment(hdCamera,m_LightLoop.GetCurrentSunLight(), renderContext);
m_SkyManager.UpdateEnvironment(hdCamera,m_LightLoop.GetCurrentSunLight(), cmd);
void RenderSky(HDCamera hdCamera, ScriptableRenderContext renderContext)
void RenderSky(HDCamera hdCamera, CommandBuffer cmd)
m_SkyManager.RenderSky(hdCamera, m_LightLoop.GetCurrentSunLight(), m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, renderContext);
m_SkyManager.RenderSky(hdCamera, m_LightLoop.GetCurrentSunLight(), m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, cmd);
}
public Texture2D ExportSkyToTexture()

void RenderLightingDebug(HDCamera camera, ScriptableRenderContext renderContext, RenderTargetIdentifier colorBuffer)
void RenderLightingDebug(HDCamera camera, CommandBuffer cmd, RenderTargetIdentifier colorBuffer, DebugDisplaySettings debugDisplaySettings)
m_LightLoop.RenderLightingDebug(camera, renderContext, colorBuffer);
m_LightLoop.RenderLightingDebug(camera, cmd, colorBuffer, debugDisplaySettings);
void RenderForward(CullResults cullResults, Camera camera, ScriptableRenderContext renderContext, bool renderOpaque)
void RenderForward(CullResults cullResults, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd, bool renderOpaque)
{
// TODO: Currently we can't render opaque objects with forward when deferred is enabled
// (the option is missing)

string passName = m_DebugDisplaySettings.IsDebugDisplayEnabled() ? "ForwardDisplayDebug" : "Forward";
using (new Utilities.ProfilingSample(passName, renderContext))
using (new Utilities.ProfilingSample(passName, cmd))
Utilities.SetRenderTarget(renderContext, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT);
Utilities.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT);
m_LightLoop.RenderForward(camera, renderContext, renderOpaque);
m_LightLoop.RenderForward(camera, cmd, renderOpaque);
RenderOpaqueRenderList(cullResults, camera, renderContext, passName, Utilities.kRendererConfigurationBakedLighting);
RenderOpaqueRenderList(cullResults, camera, renderContext, cmd, passName, Utilities.kRendererConfigurationBakedLighting);
RenderTransparentRenderList(cullResults, camera, renderContext, passName, Utilities.kRendererConfigurationBakedLighting);
RenderTransparentRenderList(cullResults, camera, renderContext, cmd, passName, Utilities.kRendererConfigurationBakedLighting);
void RenderForwardOnlyOpaque(CullResults cullResults, Camera camera, ScriptableRenderContext renderContext)
void RenderForwardOnlyOpaque(CullResults cullResults, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd)
using (new Utilities.ProfilingSample(passName, renderContext))
using (new Utilities.ProfilingSample(passName, cmd))
Utilities.SetRenderTarget(renderContext, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT);
Utilities.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT);
m_LightLoop.RenderForward(camera, renderContext, true);
m_LightLoop.RenderForward(camera, cmd, true);
RenderOpaqueRenderList(cullResults, camera, renderContext, passName, Utilities.kRendererConfigurationBakedLighting);
RenderOpaqueRenderList(cullResults, camera, renderContext, cmd, passName, Utilities.kRendererConfigurationBakedLighting);
void RenderVelocity(CullResults cullResults, HDCamera hdcam, ScriptableRenderContext renderContext)
void RenderVelocity(CullResults cullResults, HDCamera hdcam, ScriptableRenderContext renderContext, CommandBuffer cmd)
using (new Utilities.ProfilingSample("Velocity", renderContext))
using (new Utilities.ProfilingSample("Velocity", cmd))
{
// If opaque velocity has already been rendered during the GBuffer pass, there is no need to render it here
if ((ShaderConfig.s_VelocityInGbuffer == 1) || m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())

int w = (int)hdcam.screenSize.x;
int h = (int)hdcam.screenSize.y;
var cmd = CommandBufferPool.Get("");
m_CameraMotionVectorsMaterial.SetVector("_CameraPosDiff", hdcam.prevCameraPos - hdcam.cameraPos);
cmd.Blit(BuiltinRenderTextureType.None, m_VelocityBufferRT, m_CameraMotionVectorsMaterial, 0);
Utilities.DrawFullScreen(cmd, m_CameraMotionVectorsMaterial, m_VelocityBufferRT, null, 0);
renderContext.ExecuteCommandBuffer(cmd);
RenderOpaqueRenderList(cullResults, hdcam.camera, renderContext, cmd, "MotionVectors", RendererConfiguration.PerObjectMotionVectors);
RenderOpaqueRenderList(cullResults, hdcam.camera, renderContext, "MotionVectors", RendererConfiguration.PerObjectMotionVectors);
PushFullScreenDebugTexture(cmd, m_VelocityBuffer, hdcam.camera, renderContext, FullScreenDebugMode.MotionVectors);
void RenderDistortion(CullResults cullResults, Camera camera, ScriptableRenderContext renderContext)
void RenderDistortion(CullResults cullResults, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd)
using (new Utilities.ProfilingSample("Distortion", renderContext))
using (new Utilities.ProfilingSample("Distortion", cmd))
var cmd = CommandBufferPool.Get("");
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
RenderTransparentRenderList(cullResults, camera, renderContext, "DistortionVectors");
RenderTransparentRenderList(cullResults, camera, renderContext, cmd, "DistortionVectors");
void RenderPostProcesses(Camera camera, ScriptableRenderContext renderContext)
void RenderPostProcesses(Camera camera, CommandBuffer cmd, PostProcessLayer layer)
using (new Utilities.ProfilingSample("Post-processing", renderContext))
using (new Utilities.ProfilingSample("Post-processing", cmd))
var postProcessLayer = camera.GetComponent<PostProcessLayer>();
var cmd = CommandBufferPool.Get("");
if (postProcessLayer != null && postProcessLayer.enabled)
if (Utilities.IsPostProcessingActive(layer))
{
cmd.SetGlobalTexture("_CameraDepthTexture", GetDepthTexture());
cmd.SetGlobalTexture("_CameraMotionVectorsTexture", m_VelocityBufferRT);

context.sourceFormat = RenderTextureFormat.ARGBHalf;
context.flip = true;
postProcessLayer.Render(context);
layer.Render(context);
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
}

}
MaterialPropertyBlock m_SharedPropertyBlock = new MaterialPropertyBlock();
void RenderDebug(HDCamera camera, ScriptableRenderContext renderContext)
void RenderDebug(HDCamera camera, CommandBuffer cmd)
// We make sure the depth buffer is bound because we need it to write depth at the near plane for overlays; otherwise the editor grid ends up visible in them.
Utilities.SetRenderTarget(renderContext, BuiltinRenderTextureType.CameraTarget, m_CameraDepthStencilBufferRT);
using (new Utilities.ProfilingSample("Render Debug", cmd))
{
// We make sure the depth buffer is bound because we need it to write depth at the near plane for overlays; otherwise the editor grid ends up visible in them.
Utilities.SetRenderTarget(cmd, BuiltinRenderTextureType.CameraTarget, m_CameraDepthStencilBufferRT);
CommandBuffer debugCB = CommandBufferPool.Get();
debugCB.name = "Render Debug";
// First render full screen debug texture
if(m_DebugDisplaySettings.lightingDebugSettings.fullScreenDebugMode != FullScreenDebugMode.None && m_FullScreenDebugPushed)
{
m_FullScreenDebugPushed = false;
cmd.SetGlobalTexture("_DebugFullScreenTexture", m_DebugFullScreenTempRT);
m_DebugFullScreen.SetFloat("_FullScreenDebugMode", (float)m_DebugDisplaySettings.lightingDebugSettings.fullScreenDebugMode);
Utilities.DrawFullScreen(cmd, m_DebugFullScreen, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget);
}
// First render full screen debug texture
if(m_DebugDisplaySettings.lightingDebugSettings.fullScreenDebugMode != FullScreenDebugMode.None && m_FullScreenDebugPushed)
{
m_FullScreenDebugPushed = false;
debugCB.SetGlobalTexture("_DebugFullScreenTexture", m_DebugFullScreenTempRT);
m_DebugFullScreen.SetFloat("_FullScreenDebugMode", (float)m_DebugDisplaySettings.lightingDebugSettings.fullScreenDebugMode);
Utilities.DrawFullScreen(debugCB, m_DebugFullScreen, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget);
}
// Then overlays
float x = 0;
float overlayRatio = m_DebugDisplaySettings.debugOverlayRatio;
float overlaySize = Math.Min(camera.camera.pixelHeight, camera.camera.pixelWidth) * overlayRatio;
float y = camera.camera.pixelHeight - overlaySize;
// Then overlays
float x = 0;
float overlayRatio = m_DebugDisplaySettings.debugOverlayRatio;
float overlaySize = Math.Min(camera.camera.pixelHeight, camera.camera.pixelWidth) * overlayRatio;
float y = camera.camera.pixelHeight - overlaySize;
LightingDebugSettings lightingDebug = m_DebugDisplaySettings.lightingDebugSettings;
LightingDebugSettings lightingDebug = m_DebugDisplaySettings.lightingDebugSettings;
if (lightingDebug.displaySkyReflection)
{
Texture skyReflection = m_SkyManager.skyReflection;
m_SharedPropertyBlock.SetTexture("_InputCubemap", skyReflection);
m_SharedPropertyBlock.SetFloat("_Mipmap", lightingDebug.skyReflectionMipmap);
cmd.SetViewport(new Rect(x, y, overlaySize, overlaySize));
cmd.DrawProcedural(Matrix4x4.identity, m_DebugDisplayLatlong, 0, MeshTopology.Triangles, 3, 1, m_SharedPropertyBlock);
Utilities.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, camera.camera.pixelWidth);
}
if (lightingDebug.displaySkyReflection)
{
Texture skyReflection = m_SkyManager.skyReflection;
m_SharedPropertyBlock.SetTexture("_InputCubemap", skyReflection);
m_SharedPropertyBlock.SetFloat("_Mipmap", lightingDebug.skyReflectionMipmap);
debugCB.SetViewport(new Rect(x, y, overlaySize, overlaySize));
debugCB.DrawProcedural(Matrix4x4.identity, m_DebugDisplayLatlong, 0, MeshTopology.Triangles, 3, 1, m_SharedPropertyBlock);
Utilities.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, camera.camera.pixelWidth);
m_LightLoop.RenderDebugOverlay(camera.camera, cmd, m_DebugDisplaySettings, ref x, ref y, overlaySize, camera.camera.pixelWidth);
renderContext.ExecuteCommandBuffer(debugCB);
CommandBufferPool.Release(debugCB);
m_LightLoop.RenderDebugOverlay(camera.camera, renderContext, m_DebugDisplaySettings, ref x, ref y, overlaySize, camera.camera.pixelWidth);
void InitAndClearBuffer(Camera camera, ScriptableRenderContext renderContext)
void InitAndClearBuffer(Camera camera, CommandBuffer cmd)
using (new Utilities.ProfilingSample("InitAndClearBuffer", renderContext))
using (new Utilities.ProfilingSample("InitAndClearBuffer", cmd))
using (new Utilities.ProfilingSample("InitGBuffers and clear Depth/Stencil", renderContext))
using (new Utilities.ProfilingSample("InitGBuffers and clear Depth/Stencil", cmd))
var cmd = CommandBufferPool.Get();
cmd.name = "";
// Init buffer
// With the scriptable render loop we must allocate the depth and color buffers ourselves (we must be independent of the backbuffer for now; we hope to fix that later).
// We also manage the HDR format ourselves, allocating fp16 directly here.

m_gbufferManager.InitGBuffers(w, h, cmd);
}
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
Utilities.SetRenderTarget(renderContext, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.ClearDepth);
Utilities.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.ClearDepth);
using (new Utilities.ProfilingSample("Clear SSS diffuse target", renderContext))
using (new Utilities.ProfilingSample("Clear SSS diffuse target", cmd))
Utilities.SetRenderTarget(renderContext, m_CameraSubsurfaceBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.ClearColor, Color.black);
Utilities.SetRenderTarget(cmd, m_CameraSubsurfaceBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.ClearColor, Color.black);
using (new Utilities.ProfilingSample("Clear SSS filtering target", renderContext))
using (new Utilities.ProfilingSample("Clear SSS filtering target", cmd))
Utilities.SetRenderTarget(renderContext, m_CameraFilteringBuffer, m_CameraDepthStencilBufferRT, ClearFlag.ClearColor, Color.black);
Utilities.SetRenderTarget(cmd, m_CameraFilteringBuffer, m_CameraDepthStencilBufferRT, ClearFlag.ClearColor, Color.black);
}
// <<< Old SSS Model

using (new Utilities.ProfilingSample("Clear HDR target", renderContext))
using (new Utilities.ProfilingSample("Clear HDR target", cmd))
Utilities.SetRenderTarget(renderContext, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.ClearColor, Color.black);
Utilities.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.ClearColor, Color.black);
using (new Utilities.ProfilingSample("Clear GBuffer", renderContext))
using (new Utilities.ProfilingSample("Clear GBuffer", cmd))
Utilities.SetRenderTarget(renderContext, m_gbufferManager.GetGBuffers(), m_CameraDepthStencilBufferRT, ClearFlag.ClearColor, Color.black);
Utilities.SetRenderTarget(cmd, m_gbufferManager.GetGBuffers(), m_CameraDepthStencilBufferRT, ClearFlag.ClearColor, Color.black);
}
}
// END TEMP

12
Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset


tileSettings:
enableTileAndCluster: 1
enableSplitLightEvaluation: 1
enableComputeLightEvaluation: 0
enableComputeLightVariants: 0
enableComputeMaterialVariants: 0
enableComputeLightEvaluation: 1
enableComputeLightVariants: 1
enableComputeMaterialVariants: 1
tileDebugByCategory: 0
shadowInitParams:
shadowAtlasWidth: 4096
shadowAtlasHeight: 4096

reflectionCubemapSize: 128
m_DefaultDiffuseMaterial: {fileID: 0}
m_DefaultShader: {fileID: 0}
m_DefaultDiffuseMaterial: {fileID: 2100000, guid: 73c176f402d2c2f4d929aa5da7585d17,
type: 2}
m_DefaultShader: {fileID: 4800000, guid: 6e4ae4064600d784cac1e41a9e6f2e59, type: 3}

2
Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.cs


using UnityEngine.Rendering;
using System;
using System.Linq;
using UnityEngine.Experimental.PostProcessing;
using UnityEngine.Rendering.PostProcessing;
using UnityEngine.Experimental.Rendering.HDPipeline.TilePass;
#if UNITY_EDITOR

66
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/ScreenSpaceAmbientOcclusion.cs


}
Material m_Material;
CommandBuffer m_Command;
// For the AO buffer, use R8 or RHalf if available.
static RenderTextureFormat GetAOBufferFormat()

m_Material.hideFlags = HideFlags.DontSave;
}
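The body of GetAOBufferFormat is elided in this hunk; per the comment above it is a capability check, which might look roughly like the following sketch (an assumption for illustration, not the actual implementation):

    using UnityEngine;

    static class AOBufferFormatExample
    {
        // Hypothetical sketch: prefer the cheapest single-channel format the platform supports.
        static RenderTextureFormat GetAOBufferFormat()
        {
            if (SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.R8))
                return RenderTextureFormat.R8;
            if (SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RHalf))
                return RenderTextureFormat.RHalf;
            return RenderTextureFormat.ARGB32; // conservative fallback
        }
    }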
public void Render(ScreenSpaceAmbientOcclusionSettings.Settings settings, HDRenderPipeline hdRP, HDCamera hdCamera, ScriptableRenderContext renderContext, bool isForward)
public void Render(ScreenSpaceAmbientOcclusionSettings.Settings settings, HDRenderPipeline hdRP, HDCamera hdCamera, ScriptableRenderContext renderContext, CommandBuffer cmd, bool isForward)
if (m_Command == null)
{
m_Command = new CommandBuffer();
m_Command.name = "Ambient Occlusion";
}
else
{
m_Command.Clear();
}
m_Command.SetGlobalTexture(Uniforms._AOBuffer, PostProcessing.RuntimeUtilities.blackTexture); // Neutral is black, see the comment in the shaders
renderContext.ExecuteCommandBuffer(m_Command);
cmd.SetGlobalTexture(Uniforms._AOBuffer, UnityEngine.Rendering.PostProcessing.RuntimeUtilities.blackTexture); // Neutral is black, see the comment in the shaders
return;
}

m_Material.SetFloat(Uniforms._Downsample, 1.0f / downsize);
m_Material.SetFloat(Uniforms._SampleCount, settings.sampleCount);
// AO estimation.
m_Command.GetTemporaryRT(Uniforms._TempTex1, width / downsize, height / downsize, 0, kFilter, kTempFormat, kRWMode);
Utilities.DrawFullScreen(m_Command, m_Material, Uniforms._TempTex1, null, 0);
hdRP.PushFullScreenDebugTexture(m_Command, Uniforms._TempTex1, hdCamera.camera, renderContext, FullScreenDebugMode.SSAOBeforeFiltering);
// Denoising (horizontal pass).
m_Command.GetTemporaryRT(Uniforms._TempTex2, width, height, 0, kFilter, kTempFormat, kRWMode);
m_Command.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex1);
Utilities.DrawFullScreen(m_Command, m_Material, Uniforms._TempTex2, null, 1);
m_Command.ReleaseTemporaryRT(Uniforms._TempTex1);
using (new Utilities.ProfilingSample("Screenspace ambient occlusion", cmd))
{
// AO estimation.
cmd.GetTemporaryRT(Uniforms._TempTex1, width / downsize, height / downsize, 0, kFilter, kTempFormat, kRWMode);
Utilities.DrawFullScreen(cmd, m_Material, Uniforms._TempTex1, null, 0);
hdRP.PushFullScreenDebugTexture(cmd, Uniforms._TempTex1, hdCamera.camera, renderContext, FullScreenDebugMode.SSAOBeforeFiltering);
// Denoising (vertical pass).
m_Command.GetTemporaryRT(Uniforms._TempTex1, width, height, 0, kFilter, kTempFormat, kRWMode);
m_Command.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex2);
Utilities.DrawFullScreen(m_Command, m_Material, Uniforms._TempTex1, null, 2);
m_Command.ReleaseTemporaryRT(Uniforms._TempTex2);
// Denoising (horizontal pass).
cmd.GetTemporaryRT(Uniforms._TempTex2, width, height, 0, kFilter, kTempFormat, kRWMode);
cmd.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex1);
Utilities.DrawFullScreen(cmd, m_Material, Uniforms._TempTex2, null, 1);
cmd.ReleaseTemporaryRT(Uniforms._TempTex1);
// Final filtering
m_Command.GetTemporaryRT(Uniforms._AOBuffer, width, height, 0, kFilter, GetAOBufferFormat(), kRWMode);
m_Command.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex1);
Utilities.DrawFullScreen(m_Command, m_Material, Uniforms._AOBuffer, null, 3);
m_Command.ReleaseTemporaryRT(Uniforms._TempTex1);
// Denoising (vertical pass).
cmd.GetTemporaryRT(Uniforms._TempTex1, width, height, 0, kFilter, kTempFormat, kRWMode);
cmd.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex2);
Utilities.DrawFullScreen(cmd, m_Material, Uniforms._TempTex1, null, 2);
cmd.ReleaseTemporaryRT(Uniforms._TempTex2);
// Setup texture for lighting pass (automagic of unity)
m_Command.SetGlobalTexture("_AmbientOcclusionTexture", Uniforms._AOBuffer);
hdRP.PushFullScreenDebugTexture(m_Command, Uniforms._AOBuffer, hdCamera.camera, renderContext, FullScreenDebugMode.SSAO);
// Final filtering
cmd.GetTemporaryRT(Uniforms._AOBuffer, width, height, 0, kFilter, GetAOBufferFormat(), kRWMode);
cmd.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex1);
Utilities.DrawFullScreen(cmd, m_Material, Uniforms._AOBuffer, null, 3);
cmd.ReleaseTemporaryRT(Uniforms._TempTex1);
// Register the command buffer and release it.
renderContext.ExecuteCommandBuffer(m_Command);
// Setup texture for lighting pass (automagic of unity)
cmd.SetGlobalTexture("_AmbientOcclusionTexture", Uniforms._AOBuffer);
hdRP.PushFullScreenDebugTexture(cmd, Uniforms._AOBuffer, hdCamera.camera, renderContext, FullScreenDebugMode.SSAO);
}
}
public void Cleanup()
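The hunk above replaces the pass's privately owned m_Command buffer (allocated lazily and flushed via renderContext.ExecuteCommandBuffer) with a CommandBuffer handed in by HDRenderPipeline plus a Utilities.ProfilingSample scope. A minimal C# sketch of the new calling convention, using only the calls shown in this diff; DoAOPass, its parameters and the Utilities namespace are assumptions, not the actual pass:

    using UnityEngine;
    using UnityEngine.Rendering;
    using UnityEngine.Experimental.Rendering.HDPipeline; // Utilities and other HDRP helpers (namespace assumed)

    static class SsaoRefactorSketch
    {
        static readonly int _TempTex1 = Shader.PropertyToID("_TempTex1");
        static readonly int _AOBuffer = Shader.PropertyToID("_AOBuffer");

        // Illustrative entry point: the caller (HDRenderPipeline) owns and executes 'cmd'.
        public static void DoAOPass(CommandBuffer cmd, Material material, int width, int height)
        {
            // Before the change the pass lazily created its own m_Command buffer and called
            // renderContext.ExecuteCommandBuffer itself; now it only records into the shared buffer.
            using (new Utilities.ProfilingSample("Screenspace ambient occlusion", cmd))
            {
                cmd.GetTemporaryRT(_TempTex1, width, height, 0);
                Utilities.DrawFullScreen(cmd, material, _TempTex1, null, 0);   // AO estimation pass

                cmd.GetTemporaryRT(_AOBuffer, width, height, 0);
                cmd.SetGlobalTexture("_MainTex", _TempTex1);
                Utilities.DrawFullScreen(cmd, material, _AOBuffer, null, 3);   // final filtering pass
                cmd.ReleaseTemporaryRT(_TempTex1);

                cmd.SetGlobalTexture("_AmbientOcclusionTexture", _AOBuffer);   // consumed by the lighting pass
            }
        }
    }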

66
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs


public enum GPULightType
{
Directional,
ProjectorBox,
ProjectorOrtho,
Line, // Keep Line lights before Rectangle. This is needed because of a compiler bug (see LightLoop.hlsl)
Line,
// Currently not supported in real time (just used for reference)
Sphere,
Disk,

// These structures are shared between C# and hlsl and need to be aligned on float4, so we pad them.
[GenerateHLSL]
public struct LightData
public struct DirectionalLightData
public float invSqrAttenuationRadius;
public bool tileCookie;
public float angleScale; // Spot light
public int shadowIndex; // -1 if unused
public float angleOffset; // Spot light
public Vector3 up;
public float diffuseScale;
public int cookieIndex; // -1 if unused
public Vector3 right;
public Vector3 right; // Rescaled by (2 / lightLength)
public float shadowDimmer;
// indices are -1 if not used
public int shadowIndex;
public int IESIndex;
public int cookieIndex;
public Vector2 size; // Used by area, projector and spot lights; x = cot(outerHalfAngle) for spot lights
public GPULightType lightType;
public float unused;
public Vector3 up; // Rescaled by (2 / lightWidth)
public float diffuseScale;
public struct DirectionalLightData
public struct LightData
public Vector3 forward;
public float diffuseScale;
public Vector3 up;
public float invScaleY;
public Vector3 right;
public float invScaleX;
// DirectionalLightData >>>
public bool tileCookie;
public float invSqrAttenuationRadius;
public float specularScale;
public int shadowIndex; // -1 if unused
// Sun disc size
public float cosAngle; // Distance to the disk
public float sinAngle; // Disk radius
public int shadowIndex; // -1 if unused
public Vector3 forward;
public Vector3 right; // If spot: rescaled by cot(outerHalfAngle); if projector: rescaled by (2 / lightLength)
public float specularScale;
public Vector3 up; // If spot: rescaled by cot(outerHalfAngle); if projector: rescaled by (2 / lightWidth)
public float diffuseScale;
// <<< DirectionalLightData
public float angleScale; // Spot light
public float angleOffset; // Spot light
public float shadowDimmer;
public int IESIndex; // -1 if unused
public Vector2 size; // Used by area, frustum projector and spot lights (x = cot(outerHalfAngle))
public GPULightType lightType;
public float unused;
[GenerateHLSL]
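As the comment at the top of this hunk says, these structs are mirrored into HLSL and therefore packed on float4 boundaries by hand. A hypothetical C# struct (not the shipped LightData/DirectionalLightData layout) showing the padding pattern:

    using UnityEngine;

    // In LightDefinition.cs the real structs carry [GenerateHLSL] so the matching .cs.hlsl file is
    // generated. This illustrative struct only shows the packing rule: every 16-byte (float4) row is
    // filled explicitly, with an 'unused' field where needed, so the C# layout matches the HLSL one.
    public struct ExampleGpuLightData
    {
        public Vector3 positionWS;              // 12 bytes
        public float   invSqrAttenuationRadius; //  4 bytes -> row 0

        public Vector3 color;                   // 12 bytes
        public float   diffuseScale;            //  4 bytes -> row 1

        public Vector2 size;                    //  8 bytes
        public int     shadowIndex;             //  4 bytes
        public float   unused;                  //  4 bytes -> row 2 (explicit pad)
    }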

180
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs.hlsl


// UnityEngine.Experimental.Rendering.HDPipeline.GPULightType: static fields
//
#define GPULIGHTTYPE_DIRECTIONAL (0)
#define GPULIGHTTYPE_SPOT (1)
#define GPULIGHTTYPE_POINT (2)
#define GPULIGHTTYPE_PROJECTOR_ORTHO (3)
#define GPULIGHTTYPE_PROJECTOR_BOX (1)
#define GPULIGHTTYPE_SPOT (2)
#define GPULIGHTTYPE_POINT (3)
#define GPULIGHTTYPE_RECTANGLE (5)
#define GPULIGHTTYPE_LINE (6)
#define GPULIGHTTYPE_LINE (5)
#define GPULIGHTTYPE_RECTANGLE (6)
#define GPULIGHTTYPE_SPHERE (7)
#define GPULIGHTTYPE_DISK (8)
#define GPULIGHTTYPE_HEMISPHERE (9)

#define STENCILLIGHTINGUSAGE_SPLIT_LIGHTING (1)
#define STENCILLIGHTINGUSAGE_REGULAR_LIGHTING (2)
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.LightData
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.DirectionalLightData
struct LightData
struct DirectionalLightData
float invSqrAttenuationRadius;
bool tileCookie;
float angleScale;
int shadowIndex;
float angleOffset;
int cookieIndex;
float3 right;
float specularScale;
float3 right;
float specularScale;
float shadowDimmer;
int shadowIndex;
int IESIndex;
int cookieIndex;
float2 size;
int lightType;
float unused;
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.DirectionalLightData
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.LightData
struct DirectionalLightData
struct LightData
float3 forward;
float diffuseScale;
float3 up;
float invScaleY;
float3 right;
float invScaleX;
bool tileCookie;
float invSqrAttenuationRadius;
float specularScale;
float cosAngle;
float sinAngle;
float3 forward;
float3 right;
float specularScale;
float3 up;
float diffuseScale;
float angleScale;
float angleOffset;
float shadowDimmer;
int IESIndex;
float2 size;
int lightType;
float unused;
};
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.EnvLightData

};
//
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.LightData
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.DirectionalLightData
float3 GetPositionWS(LightData value)
float3 GetPositionWS(DirectionalLightData value)
float GetInvSqrAttenuationRadius(LightData value)
bool GetTileCookie(DirectionalLightData value)
return value.invSqrAttenuationRadius;
return value.tileCookie;
float3 GetColor(LightData value)
float3 GetColor(DirectionalLightData value)
float GetAngleScale(LightData value)
int GetShadowIndex(DirectionalLightData value)
return value.angleScale;
return value.shadowIndex;
float3 GetForward(LightData value)
float3 GetForward(DirectionalLightData value)
float GetAngleOffset(LightData value)
{
return value.angleOffset;
}
float3 GetUp(LightData value)
int GetCookieIndex(DirectionalLightData value)
return value.up;
return value.cookieIndex;
float GetDiffuseScale(LightData value)
{
return value.diffuseScale;
}
float3 GetRight(LightData value)
float3 GetRight(DirectionalLightData value)
float GetSpecularScale(LightData value)
float GetSpecularScale(DirectionalLightData value)
float GetShadowDimmer(LightData value)
float3 GetUp(DirectionalLightData value)
return value.shadowDimmer;
return value.up;
int GetShadowIndex(LightData value)
float GetDiffuseScale(DirectionalLightData value)
return value.shadowIndex;
return value.diffuseScale;
int GetIESIndex(LightData value)
//
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.LightData
//
float3 GetPositionWS(LightData value)
return value.IESIndex;
return value.positionWS;
int GetCookieIndex(LightData value)
float GetInvSqrAttenuationRadius(LightData value)
return value.cookieIndex;
return value.invSqrAttenuationRadius;
float2 GetSize(LightData value)
float3 GetColor(LightData value)
return value.size;
return value.color;
int GetLightType(LightData value)
int GetShadowIndex(LightData value)
return value.lightType;
return value.shadowIndex;
float GetUnused(LightData value)
{
return value.unused;
}
//
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.DirectionalLightData
//
float3 GetForward(DirectionalLightData value)
float3 GetForward(LightData value)
float GetDiffuseScale(DirectionalLightData value)
int GetCookieIndex(LightData value)
return value.diffuseScale;
}
float3 GetUp(DirectionalLightData value)
{
return value.up;
return value.cookieIndex;
float GetInvScaleY(DirectionalLightData value)
float3 GetRight(LightData value)
return value.invScaleY;
return value.right;
float3 GetRight(DirectionalLightData value)
float GetSpecularScale(LightData value)
return value.right;
return value.specularScale;
float GetInvScaleX(DirectionalLightData value)
float3 GetUp(LightData value)
return value.invScaleX;
return value.up;
float3 GetPositionWS(DirectionalLightData value)
float GetDiffuseScale(LightData value)
return value.positionWS;
return value.diffuseScale;
bool GetTileCookie(DirectionalLightData value)
float GetAngleScale(LightData value)
return value.tileCookie;
return value.angleScale;
float3 GetColor(DirectionalLightData value)
float GetAngleOffset(LightData value)
return value.color;
return value.angleOffset;
float GetSpecularScale(DirectionalLightData value)
float GetShadowDimmer(LightData value)
return value.specularScale;
return value.shadowDimmer;
float GetCosAngle(DirectionalLightData value)
int GetIESIndex(LightData value)
return value.cosAngle;
return value.IESIndex;
float GetSinAngle(DirectionalLightData value)
float2 GetSize(LightData value)
return value.sinAngle;
return value.size;
int GetShadowIndex(DirectionalLightData value)
int GetLightType(LightData value)
return value.shadowIndex;
return value.lightType;
int GetCookieIndex(DirectionalLightData value)
float GetUnused(LightData value)
return value.cookieIndex;
return value.unused;
}
//

4
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/ShadowDispatch.hlsl


// example of overriding punctual lights
#ifdef SHADOW_DISPATCH_USE_CUSTOM_PUNCTUAL
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L )
{
#ifdef SHADOW_DISPATCH_USE_SEPARATE_PUNC_ALGOS
// example for choosing different algos for point and spot lights

return EvalShadow_PunctualDepth( shadowContext, algo, tex, compSamp, positionWS, normalWS, shadowDataIndex, L );
#endif
}
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
{
return GetPunctualShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

702
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs
The file diff is too large to display.

22
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs.hlsl


//
#define LIGHTCATEGORY_PUNCTUAL (0)
#define LIGHTCATEGORY_AREA (1)
#define LIGHTCATEGORY_PROJECTOR (2)
#define LIGHTCATEGORY_ENV (3)
#define LIGHTCATEGORY_COUNT (4)
#define LIGHTCATEGORY_ENV (2)
#define LIGHTCATEGORY_COUNT (3)
#define LIGHTFEATUREFLAGS_PUNCTUAL (1)
#define LIGHTFEATUREFLAGS_AREA (2)
#define LIGHTFEATUREFLAGS_DIRECTIONAL (4)
#define LIGHTFEATUREFLAGS_PROJECTOR (8)
#define LIGHTFEATUREFLAGS_ENV (16)
#define LIGHTFEATUREFLAGS_SKY (32)
#define LIGHTFEATUREFLAGS_PUNCTUAL (256)
#define LIGHTFEATUREFLAGS_AREA (512)
#define LIGHTFEATUREFLAGS_DIRECTIONAL (1024)
#define LIGHTFEATUREFLAGS_ENV (2048)
#define LIGHTFEATUREFLAGS_SKY (4096)
//
// UnityEngine.Experimental.Rendering.HDPipeline.TilePass.LightDefinitions: static fields

#define USE_LEFT_HAND_CAMERA_SPACE (1)
#define TILE_SIZE_FPTL (16)
#define TILE_SIZE_CLUSTERED (32)
#define NUM_FEATURE_VARIANTS (16)
#define LIGHTFEATUREFLAGS_MASK (4095)
#define MATERIALFEATUREFLAGS_MASK (61440)
#define NUM_FEATURE_VARIANTS (26)
#define LIGHT_FEATURE_MASK_FLAGS (65280)
#define MATERIAL_FEATURE_MASK_FLAGS (255)
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.TilePass.SFiniteLightBound
// PackingRules = Exact
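The regenerated defines move the light feature flags from the low bits into bits 8 and up, and replace the old LIGHTFEATUREFLAGS_MASK/MATERIALFEATUREFLAGS_MASK pair with LIGHT_FEATURE_MASK_FLAGS (0xFF00) and MATERIAL_FEATURE_MASK_FLAGS (0x00FF): the low byte of a tile's feature word now holds material features and the high byte holds light features. A small C# sketch of the split, with constants copied from the defines above (FeatureMaskSketch itself is illustrative):

    static class FeatureMaskSketch
    {
        const uint LIGHTFEATUREFLAGS_PUNCTUAL  = 1u << 8;  // 256
        const uint LIGHTFEATUREFLAGS_AREA      = 1u << 9;  // 512
        const uint LIGHT_FEATURE_MASK_FLAGS    = 0xFF00;   // 65280
        const uint MATERIAL_FEATURE_MASK_FLAGS = 0x00FF;   // 255

        // Splits a tile's combined feature word into its light and material halves.
        public static void Split(uint featureFlags, out uint lightBits, out uint materialBits)
        {
            lightBits    = featureFlags & LIGHT_FEATURE_MASK_FLAGS;
            materialBits = featureFlags & MATERIAL_FEATURE_MASK_FLAGS;
        }
    }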

2
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.hlsl


#define PROCESS_DIRECTIONAL_LIGHT
#define PROCESS_PUNCTUAL_LIGHT
#define PROCESS_AREA_LIGHT
#define PROCESS_PROJECTOR_LIGHT
#endif
#if defined (LIGHTLOOP_TILE_INDIRECT) || defined(LIGHTLOOP_TILE_ALL)

uint _DirectionalLightCount;
uint _PunctualLightCount;
uint _AreaLightCount;
uint _ProjectorLightCount;
uint _EnvLightCount;
float4 _DirShadowSplitSpheres[4]; // TODO: share this max between C# and hlsl

73
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl


{
float3 localDiffuseLighting, localSpecularLighting;
EvaluateBSDF_Directional( context, V, posInput, prelightData, _DirectionalLightDatas[i], bsdfData,
localDiffuseLighting, localSpecularLighting);
EvaluateBSDF_Directional(context, V, posInput, prelightData,
_DirectionalLightDatas[i], bsdfData,
localDiffuseLighting, localSpecularLighting);
diffuseLighting += localDiffuseLighting;
specularLighting += localSpecularLighting;

#ifdef PROCESS_AREA_LIGHT
if(featureFlags & LIGHTFEATUREFLAGS_AREA)
{
// TODO: Convert the for loop below to a while on each type as we know we are sorted!
for(i = 0; i < areaLightCount; ++i)
{
float3 localDiffuseLighting, localSpecularLighting;
uint areaIndex = FetchIndex(areaLightStart, i);
float3 localDiffuseLighting = float3(0.0, 0.0, 0.0);
float3 localSpecularLighting = float3(0.0, 0.0, 0.0);
if(_LightDatas[areaIndex].lightType == GPULIGHTTYPE_LINE)
{
EvaluateBSDF_Line( context, V, posInput, prelightData, _LightDatas[areaIndex], bsdfData,
localDiffuseLighting, localSpecularLighting);
}
else
{
EvaluateBSDF_Area( context, V, posInput, prelightData, _LightDatas[areaIndex], bsdfData,
localDiffuseLighting, localSpecularLighting);
}
// !!!!!!!!!!!!!!
// COMPILER BEHAVIOR WARNING
// If rectangle lights are before line lights, the compiler will duplicate light matrices in VGPR because they are used differently between the two types of lights.
// By keeping line lights first we avoid this behavior and save substantial register pressure.
i = 0;
uint areaIndex = FetchIndex(areaLightStart, i);
while ( i < areaLightCount && _LightDatas[areaIndex].lightType == GPULIGHTTYPE_LINE)
{
areaIndex = FetchIndex(areaLightStart, i);
EvaluateBSDF_Line( context, V, posInput, prelightData, _LightDatas[areaIndex], bsdfData,
localDiffuseLighting, localSpecularLighting);
i++;
}
#endif
#ifdef PROCESS_PROJECTOR_LIGHT
if(featureFlags & LIGHTFEATUREFLAGS_PROJECTOR)
{
// TODO: Convert the for loop below to a while on each type as we know we are sorted!
uint projectorLightStart;
uint projectorLightCount;
GetCountAndStart(posInput, LIGHTCATEGORY_PROJECTOR, projectorLightStart, projectorLightCount);
for(i = 0; i < projectorLightCount; ++i)
while (i < areaLightCount) // Rectangle lights are the last area lights so no need to check type
float3 localDiffuseLighting, localSpecularLighting;
uint projectorIndex = FetchIndex(projectorLightStart, i);
EvaluateBSDF_Projector(context, V, posInput, prelightData, _LightDatas[projectorIndex], bsdfData,
localDiffuseLighting, localSpecularLighting);
areaIndex = FetchIndex(areaLightStart, i);
EvaluateBSDF_Area( context, V, posInput, prelightData, _LightDatas[areaIndex], bsdfData,
localDiffuseLighting, localSpecularLighting);
i++;
}
}
#endif

{
float3 localDiffuseLighting, localSpecularLighting;
EvaluateBSDF_Directional( context, V, posInput, prelightData, _DirectionalLightDatas[i], bsdfData,
localDiffuseLighting, localSpecularLighting);
EvaluateBSDF_Directional(context, V, posInput, prelightData,
_DirectionalLightDatas[i], bsdfData,
localDiffuseLighting, localSpecularLighting);
diffuseLighting += localDiffuseLighting;
specularLighting += localSpecularLighting;

EvaluateBSDF_Area( context, V, posInput, prelightData, _LightDatas[i], bsdfData,
localDiffuseLighting, localSpecularLighting);
}
diffuseLighting += localDiffuseLighting;
specularLighting += localSpecularLighting;
}
for (; i < _PunctualLightCount + _AreaLightCount + _ProjectorLightCount; ++i)
{
float3 localDiffuseLighting, localSpecularLighting;
EvaluateBSDF_Projector( context, V, posInput, prelightData, _LightDatas[i], bsdfData,
localDiffuseLighting, localSpecularLighting);
diffuseLighting += localDiffuseLighting;
specularLighting += localSpecularLighting;
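The loop rewrite above depends on the area-light list being sorted so that line lights precede rectangle lights: the first while loop consumes the line lights and the second consumes the remainder as rectangles, avoiding the per-iteration type branch that caused the VGPR duplication described in the compiler behavior warning. A CPU-side C# sketch of the same two-phase walk (the enum and the evaluateLine/evaluateRect callbacks stand in for the shader's light list and BSDF calls):

    using System;

    enum AreaLightTypeSketch { Line, Rectangle }

    static class SortedAreaLoopSketch
    {
        // 'lights' must be sorted with all Line entries first, mirroring the GPU light list.
        public static void Shade(AreaLightTypeSketch[] lights, Action<int> evaluateLine, Action<int> evaluateRect)
        {
            int i = 0;
            while (i < lights.Length && lights[i] == AreaLightTypeSketch.Line)
            {
                evaluateLine(i);   // EvaluateBSDF_Line in the shader
                i++;
            }
            while (i < lights.Length)  // rectangles are last, so no type check is needed
            {
                evaluateRect(i);   // EvaluateBSDF_Area in the shader
                i++;
            }
        }
    }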

14
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/builddispatchindirect.compute


#define UNITY_MATERIAL_LIT // Needs to be defined before including Material.hlsl
#include "../../Material/Material.hlsl" // This includes Material.hlsl
#include "FeatureFlags.hlsl"
RWBuffer<uint> g_DispatchIndirectBuffer : register( u0 ); // Indirect arguments have to be in a _buffer_, not a structured buffer
RWStructuredBuffer<uint> g_TileList;
StructuredBuffer<uint> g_TileFeatureFlags;

uint tileY = (dispatchThreadId + 0.5f) / (float)g_NumTilesX; // Integer division is extremely expensive, so we better avoid it
uint tileX = dispatchThreadId - tileY * g_NumTilesX;
uint variant = FeatureFlagsToTileVariant(featureFlags);
uint offset;
InterlockedAdd(g_DispatchIndirectBuffer[variant * 3 + 0], 1, offset);
g_TileList[variant * g_NumTiles + offset] = (tileY << 16) | tileX;
// Check if there is no light or no material (meaning this is a sky/background pixel). Both are tested because light and material classification can each be enabled/disabled.
if ((featureFlags & LIGHT_FEATURE_MASK_FLAGS) != 0 && (featureFlags & MATERIAL_FEATURE_MASK_FLAGS) != 0)
{
uint variant = FeatureFlagsToTileVariant(featureFlags);
uint offset;
InterlockedAdd(g_DispatchIndirectBuffer[variant * 3 + 0], 1, offset);
g_TileList[variant * g_NumTiles + offset] = (tileY << 16) | tileX;
}
}
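builddispatchindirect now rejects tiles whose feature word has no light bits or no material bits (sky/background) and otherwise appends the tile to its variant's list while incrementing that variant's indirect argument. A CPU-side C# sketch of the same bucketing, with plain arrays standing in for the GPU buffers and a caller-supplied FeatureFlagsToTileVariant; the sketch is illustrative, not the compute kernel:

    static class DispatchIndirectSketch
    {
        const uint LIGHT_FEATURE_MASK_FLAGS    = 0xFF00; // values from TilePass.cs.hlsl above
        const uint MATERIAL_FEATURE_MASK_FLAGS = 0x00FF;

        // dispatchArgs holds 3 uints per variant (thread groups X, Y, Z); tileList holds numTiles slots per variant.
        public static void AddTile(uint featureFlags, int tileIndex, int numTilesX, int numTiles,
                                   uint[] dispatchArgs, uint[] tileList,
                                   System.Func<uint, int> featureFlagsToTileVariant)
        {
            // Sky/background tiles carry no light or no material features: skip them entirely.
            if ((featureFlags & LIGHT_FEATURE_MASK_FLAGS) == 0 || (featureFlags & MATERIAL_FEATURE_MASK_FLAGS) == 0)
                return;

            int tileY = tileIndex / numTilesX;             // the shader avoids this integer division
            int tileX = tileIndex - tileY * numTilesX;
            int variant = featureFlagsToTileVariant(featureFlags);

            uint offset = dispatchArgs[variant * 3 + 0]++; // InterlockedAdd on the GPU
            tileList[variant * numTiles + (int)offset] = (uint)((tileY << 16) | tileX);
        }
    }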

4
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/lightlistbuild.compute


if(t == 0)
{
uint featureFlags = ldsFeatureFlags | g_BaseFeatureFlags;
// In case of background
featureFlags &= ~(LIGHTFEATUREFLAGS_PUNCTUAL | LIGHTFEATUREFLAGS_AREA | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_ENV | 0xFFFFF); // list of features that are not enabled on background
// There is no stencil usage with the compute path; featureFlags set to 0 is used for fast rejection of the tile in this case. It will still execute but will do nothing.
featureFlags = 0;
}
g_TileFeatureFlags[tileIDX.y * nrTilesX + tileIDX.x] = featureFlags;

7
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/materialflags.compute


#define UNITY_MATERIAL_LIT // Needs to be defined before including Material.hlsl
#include "../../Material/Material.hlsl" // This includes Material.hlsl
#include "FeatureFlags.hlsl"
#define USE_MATERIAL_FEATURE_FLAGS
#define NR_THREADS 64

}
GroupMemoryBarrierWithGroupSync();
uint materialFeatureFlags = g_BaseFeatureFlags;
uint materialFeatureFlags = g_BaseFeatureFlags; // Contains all light features or 0 (depending on whether light classification is enabled)
[unroll]
for(int i = 0; i < 4; i++)
{

if (FetchDepth(g_depth_tex, uCrd) < VIEWPORT_SCALE_Z)
if (FetchDepth(g_depth_tex, uCrd) < VIEWPORT_SCALE_Z) // This tests whether we are the sky/background or not
{
PositionInputs posInput = GetPositionInput(uCrd, invScreenSize);
FETCH_GBUFFER(gbuffer, _GBufferTexture, posInput.unPositionSS);

{
#ifdef USE_OR
g_TileFeatureFlags[tileIDX.y * nrTilesX + tileIDX.x] |= ldsFeatureFlags;
#else
#else // Used in case light classification is disabled
g_TileFeatureFlags[tileIDX.y * nrTilesX + tileIDX.x] = ldsFeatureFlags;
#endif
}

68
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/shadeopaque.compute


#pragma kernel ShadeOpaque_Indirect_Fptl_Variant13 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant13 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=13
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant14 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant14 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=14
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant15 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant15 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=15
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant16 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant16 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=16
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant17 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant17 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=17
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant18 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant18 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=18
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant19 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant19 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=19
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant20 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant20 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=20
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant21 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant21 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=21
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant22 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant22 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=22
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant23 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant23 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=23
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant24 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant24 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=24
#pragma kernel ShadeOpaque_Indirect_Fptl_Variant25 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Fptl_Variant25 USE_FPTL_LIGHTLIST USE_INDIRECT VARIANT=25
// TODO: We should remove these variants for cluster opaque and only keep tile opaque.
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant0 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant0 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=0
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant1 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant1 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=1
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant2 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant2 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=2

#pragma kernel ShadeOpaque_Indirect_Clustered_Variant13 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant13 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=13
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant14 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant14 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=14
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant15 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant15 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=15
//#pragma #pragma enable_d3d11_debug_symbols
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant16 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant16 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=16
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant17 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant17 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=17
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant18 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant18 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=18
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant19 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant19 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=19
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant20 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant20 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=20
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant21 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant21 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=21
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant22 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant22 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=22
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant23 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant23 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=23
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant24 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant24 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=24
#pragma kernel ShadeOpaque_Indirect_Clustered_Variant25 SHADE_OPAQUE_ENTRY=ShadeOpaque_Indirect_Clustered_Variant25 USE_CLUSTERED_LIGHTLIST USE_INDIRECT VARIANT=25
// Split lighting is required for the SSS pass.
// Not currently possible since we need to access the stencil buffer from the compute shader.
// #pragma multi_compile _ OUTPUT_SPLIT_LIGHTING
#define LIGHTLOOP_TILE_PASS 1
#define LIGHTLOOP_TILE_DIRECT 1

//#pragma enable_d3d11_debug_symbols
//-------------------------------------------------------------------------------------
// Include
//-------------------------------------------------------------------------------------

#include "../../ShaderConfig.cs.hlsl"
#include "../../ShaderVariables.hlsl"
#include "../../Lighting/Lighting.hlsl" // This include Material.hlsl
#include "../../Lighting/TilePass/FeatureFlags.hlsl"
//-------------------------------------------------------------------------------------
// variable declaration
//-------------------------------------------------------------------------------------

#ifdef OUTPUT_SPLIT_LIGHTING
RWTexture2D<float4> specularLightingUAV;
RWTexture2D<float3> diffuseLightingUAV;
#else
RWTexture2D<float4> combinedLightingUAV;
#endif
RWTexture2D<float3> diffuseLightingUAV;
RWTexture2D<float4> specularLightingUAV;
CBUFFER_START(UnityShadeOpaque)
uint g_TileListOffset;

#endif
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
// For the indirect case: a tile can still overlap the sky/background, so reject those pixels
// Can't rely on the stencil as we are in a compute shader
// TODO: if we had a depth bounds test we could remove this check
if (depth == UNITY_RAW_FAR_CLIP_VALUE)
{
return;
}
UpdatePositionInput(depth, _InvViewProjMatrix, _ViewProjMatrix, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(posInput.positionWS);

float3 specularLighting;
LightLoop(V, posInput, preLightData, bsdfData, bakeDiffuseLighting, featureFlags, diffuseLighting, specularLighting);
#ifdef OUTPUT_SPLIT_LIGHTING
specularLightingUAV[pixelCoord] = float4(specularLighting, 1.0);
diffuseLightingUAV[pixelCoord] = diffuseLighting;
#else
combinedLightingUAV[pixelCoord] = float4(diffuseLighting + specularLighting, 1.0);
#endif
if (_EnableSSSAndTransmission != 0 && bsdfData.materialId == MATERIALID_LIT_SSS)
{
// When SSSSS is enabled we use split lighting.
// The SSSSS algorithm needs to know which pixels contribute to SSS and which don't. We could use the stencil for that, but it would increase the cost of SSSSS.
// A simpler solution is to add a slight contribution here that isn't visible (we chose the fp16 min, which is also the fp11 and fp10 min).
// The SSSSS algorithm will check if the diffuse lighting is black and discard the pixel if that is the case.
diffuseLighting.r = max(diffuseLighting.r, HFLT_MIN);
specularLightingUAV[pixelCoord] = float4(specularLighting, 1.0);
diffuseLightingUAV[pixelCoord] = diffuseLighting;
}
else
{
specularLightingUAV[pixelCoord] = float4(diffuseLighting + specularLighting, 1.0);
}
}
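The end of the kernel now chooses per pixel between split and combined output: SSS materials write diffuse and specular separately, with the diffuse red channel raised to at least the fp16 minimum so the SSS pass can recognize contributing pixels without a stencil read; everything else writes the combined result into the specular UAV. A hedged C# sketch of that per-pixel decision (the HFLT_MIN value and the write delegates are assumptions):

    using UnityEngine;

    static class SplitLightingSketch
    {
        const float HFLT_MIN = 6.103515625e-05f; // smallest normal fp16 value (assumed to match the shader constant)

        public static void StorePixel(bool sssEnabled, bool isSssMaterial,
                                      Vector3 diffuse, Vector3 specular,
                                      System.Action<Vector3> writeDiffuseUAV,
                                      System.Action<Vector4> writeSpecularUAV)
        {
            if (sssEnabled && isSssMaterial)
            {
                // A non-zero red channel marks the pixel as "participates in SSS" for the screen-space pass.
                diffuse.x = Mathf.Max(diffuse.x, HFLT_MIN);
                writeSpecularUAV(new Vector4(specular.x, specular.y, specular.z, 1.0f));
                writeDiffuseUAV(diffuse);
            }
            else
            {
                Vector3 combined = diffuse + specular;
                writeSpecularUAV(new Vector4(combined.x, combined.y, combined.z, 1.0f));
            }
        }
    }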

33
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.cs


LitStandard = 1,
LitUnused0 = 2,
LitUnused1 = 3,
LitAniso = 4, // Should be last, as it is not set up by users but generated from the anisotropy property
LitSpecular = 5, // Should be last, as it is not set up by users but generated from the anisotropy and specular properties
// We don't store any materialId for aniso; instead we deduce it from LitStandard + the values of the specular and anisotropy parameters.
// The consequence is that querying the materialId alone reads 2 RTs instead of one. This may be a performance hit when only the materialId is desired (as in the material classification pass).
// The alternative is to use a dedicated materialId slot, if any are available.
LitAniso = 4,
// LitSpecular (DiffuseColor/SpecularColor) is an alternate parametrization for LitStandard (BaseColor/Metal/Specular), but it is the same shading model
// We don't want a specific materialId for it; instead we use LitStandard as the materialId. However, for UI purposes we still define this value here.
LitSpecular = 5,
// If this changes, be sure it matches what is done in Lit.hlsl: MaterialFeatureFlagsFromGBuffer
// The material bit mask must match the LightDefinitions.s_MaterialFeatureMaskFlags value
LitSSS = 1 << 12,
LitStandard = 1 << 13,
LitAniso = 1 << 14,
LitSpecular = 1 << 15
}
LitSSS = 1 << MaterialId.LitSSS,
LitStandard = 1 << MaterialId.LitStandard,
LitUnused0 = 1 << MaterialId.LitUnused0,
LitUnused1 = 1 << MaterialId.LitUnused1,
LitAniso = 1 << MaterialId.LitAniso,
};
[GenerateHLSL]
public enum SpecularValue

m_isInit = false;
}
public override void RenderInit(Rendering.ScriptableRenderContext renderContext)
public override void RenderInit(CommandBuffer cmd)
var cmd = CommandBufferPool.Get();
cmd.name = "Init PreFGD";
Utilities.DrawFullScreen(cmd, m_InitPreFGD, new RenderTargetIdentifier(m_PreIntegratedFGD));
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
using (new Utilities.ProfilingSample("Init PreFGD", cmd))
{
Utilities.DrawFullScreen(cmd, m_InitPreFGD, new RenderTargetIdentifier(m_PreIntegratedFGD));
}
m_isInit = true;
}
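Lit.cs now derives each MaterialFeatureFlags bit directly from the corresponding MaterialId value, which keeps the flags in the low byte and automatically in sync with the enum (and with MATERIAL_FEATURE_MASK_FLAGS = 0xFF). An illustrative C# version of the pattern; the cast is shown for clarity, while the enum initializer in Lit.cs omits it:

    // Illustrative only: the shipped values live in Lit.cs.
    public enum MaterialIdSketch
    {
        LitSSS      = 0,
        LitStandard = 1,
        LitUnused0  = 2,
        LitUnused1  = 3,
        LitAniso    = 4,
    }

    [System.Flags]
    public enum MaterialFeatureFlagsSketch
    {
        LitSSS      = 1 << (int)MaterialIdSketch.LitSSS,      // 1
        LitStandard = 1 << (int)MaterialIdSketch.LitStandard, // 2
        LitUnused0  = 1 << (int)MaterialIdSketch.LitUnused0,  // 4
        LitUnused1  = 1 << (int)MaterialIdSketch.LitUnused1,  // 8
        LitAniso    = 1 << (int)MaterialIdSketch.LitAniso,    // 16
    }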

9
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.cs.hlsl


//
// UnityEngine.Experimental.Rendering.HDPipeline.Lit+MaterialFeatureFlags: static fields
//
#define MATERIALFEATUREFLAGS_LIT_SSS (4096)
#define MATERIALFEATUREFLAGS_LIT_STANDARD (8192)
#define MATERIALFEATUREFLAGS_LIT_ANISO (16384)
#define MATERIALFEATUREFLAGS_LIT_SPECULAR (32768)
#define MATERIALFEATUREFLAGS_LIT_SSS (1)
#define MATERIALFEATUREFLAGS_LIT_STANDARD (2)
#define MATERIALFEATUREFLAGS_LIT_UNUSED0 (4)
#define MATERIALFEATUREFLAGS_LIT_UNUSED1 (8)
#define MATERIALFEATUREFLAGS_LIT_ANISO (16)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Lit+SpecularValue: static fields

702
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl


uint _EnableSSSAndTransmission; // Globally toggles subsurface and transmission scattering on/off
uint _TexturingModeFlags; // 1 bit/profile; 0 = PreAndPostScatter, 1 = PostScatter
uint _TransmissionFlags; // 2 bit/profile; 0 = inf. thick, 1 = thin, 2 = regular
float _ThicknessRemaps[SSS_N_PROFILES][2]; // Remap: 0 = start, 1 = end - start
// Use float4 to avoid any packing issue between compute and pixel shaders
float4 _ThicknessRemaps[SSS_N_PROFILES]; // R: start, G = end - start, BA unused
// Light and material classification for the deferred rendering path
// Configure what kind of combination is supported
//-----------------------------------------------------------------------------
// The lighting architecture and the material are supposed to be decoupled files.
// However, as we use material classification, it is hard to keep them fully separate;
// the dependency is defined in this include, which holds the shared defines for material and lighting in the deferred material case.
// If a user builds a lighting architecture without material classification, this can be removed.
#include "../../Lighting/TilePass/TilePass.cs.hlsl"
// Combinations need to be defined in increasing "complexity" order, as defined by FeatureFlagsToTileVariant
static const uint kFeatureVariantFlags[NUM_FEATURE_VARIANTS] =
{
// Standard
/* 0 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | MATERIALFEATUREFLAGS_LIT_STANDARD,
/* 1 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_AREA | MATERIALFEATUREFLAGS_LIT_STANDARD,
/* 2 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_STANDARD,
/* 3 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_STANDARD,
/* 4 */ LIGHT_FEATURE_MASK_FLAGS | MATERIALFEATUREFLAGS_LIT_STANDARD,
// SSS
// SSS is a superset of the standard material. With foliage or crowds, SSS and standard can overlap a lot, so it is better to have a dedicated combination
/* 5 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | MATERIALFEATUREFLAGS_LIT_SSS | MATERIALFEATUREFLAGS_LIT_STANDARD,
/* 6 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_AREA | MATERIALFEATUREFLAGS_LIT_SSS | MATERIALFEATUREFLAGS_LIT_STANDARD,
/* 7 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_SSS | MATERIALFEATUREFLAGS_LIT_STANDARD,
/* 8 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_SSS | MATERIALFEATUREFLAGS_LIT_STANDARD,
/* 9 */ LIGHT_FEATURE_MASK_FLAGS | MATERIALFEATUREFLAGS_LIT_SSS,
// Specular/Aniso
/* 10 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | MATERIALFEATUREFLAGS_LIT_ANISO,
/* 11 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_AREA | MATERIALFEATUREFLAGS_LIT_ANISO,
/* 12 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_ANISO,
/* 13 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_ANISO,
/* 14 */ LIGHT_FEATURE_MASK_FLAGS | MATERIALFEATUREFLAGS_LIT_ANISO,
// Future usage
/* 15 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | MATERIALFEATUREFLAGS_LIT_UNUSED0,
/* 16 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_AREA | MATERIALFEATUREFLAGS_LIT_UNUSED0,
/* 17 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_UNUSED0,
/* 18 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_UNUSED0,
/* 19 */ LIGHT_FEATURE_MASK_FLAGS | MATERIALFEATUREFLAGS_LIT_UNUSED0,
// Future usage
/* 20 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | MATERIALFEATUREFLAGS_LIT_UNUSED1,
/* 21 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_AREA | MATERIALFEATUREFLAGS_LIT_UNUSED1,
/* 22 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_UNUSED1,
/* 23 */ LIGHTFEATUREFLAGS_SKY | LIGHTFEATUREFLAGS_DIRECTIONAL | LIGHTFEATUREFLAGS_PUNCTUAL | LIGHTFEATUREFLAGS_ENV | MATERIALFEATUREFLAGS_LIT_UNUSED1,
/* 24 */ LIGHT_FEATURE_MASK_FLAGS | MATERIALFEATUREFLAGS_LIT_UNUSED1,
/* 25 */ LIGHT_FEATURE_MASK_FLAGS | MATERIAL_FEATURE_MASK_FLAGS, // Catch all case with MATERIAL_FEATURE_MASK_FLAGS is needed in case we disable material classification
};
uint FeatureFlagsToTileVariant(uint featureFlags)
{
for (int i = 0; i < NUM_FEATURE_VARIANTS; i++)
{
if ((featureFlags & kFeatureVariantFlags[i]) == featureFlags)
return i;
}
return NUM_FEATURE_VARIANTS - 1;
}
// This function needs to return a compile-time value, otherwise there is no optimization
uint TileVariantToFeatureFlags(uint variant)
{
return kFeatureVariantFlags[variant];
}
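FeatureFlagsToTileVariant returns the first variant in kFeatureVariantFlags whose flag set is a superset of the tile's feature flags; because the table is ordered by increasing complexity and ends in a catch-all entry (LIGHT_FEATURE_MASK_FLAGS | MATERIAL_FEATURE_MASK_FLAGS), the search always lands on a usable variant. A standalone C# sketch of the same selection over a shortened, illustrative table:

    static class TileVariantSketch
    {
        // Ordered from cheapest to most featureful; the last entry is the catch-all.
        // Illustrative values: 0x1502 = SKY | DIRECTIONAL | PUNCTUAL | LIT_STANDARD,
        // 0xFF02 = all light features | LIT_STANDARD, 0xFFFF = everything.
        static readonly uint[] kFeatureVariantFlags = { 0x1502u, 0xFF02u, 0xFFFFu };

        public static int FeatureFlagsToTileVariant(uint featureFlags)
        {
            for (int i = 0; i < kFeatureVariantFlags.Length; i++)
            {
                // Variant i is usable if it supports every feature present in the tile.
                if ((featureFlags & kFeatureVariantFlags[i]) == featureFlags)
                    return i;
            }
            return kFeatureVariantFlags.Length - 1;
        }
    }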
//-----------------------------------------------------------------------------
// Helper functions/variable specific to this material
//-----------------------------------------------------------------------------

bsdfData.fresnel0 = 0.04; // Should be 0.028 for the skin
bsdfData.subsurfaceProfile = subsurfaceProfile;
bsdfData.subsurfaceRadius = subsurfaceRadius;
bsdfData.thickness = _ThicknessRemaps[subsurfaceProfile][0] +
_ThicknessRemaps[subsurfaceProfile][1] * thickness;
bsdfData.thickness = _ThicknessRemaps[subsurfaceProfile].x +
_ThicknessRemaps[subsurfaceProfile].y * thickness;
uint transmissionMode = BitFieldExtract(_TransmissionFlags, 2u, 2u * subsurfaceProfile);

#if defined(SHADERPASS) && (SHADERPASS == SHADERPASS_LIGHT_TRANSPORT) // In case of GI pass don't modify the diffuseColor
if (0)
#else
if (_EnableSSSAndTransmission > 0) // If we globally disable SSS effect, don't modify diffuseColor
if (_EnableSSSAndTransmission != 0) // If we globally disable SSS effect, don't modify diffuseColor
#endif
{
// We modify the albedo here as this code is used by all lighting (including light maps and GI).

bsdfData.roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
bsdfData.materialId = surfaceData.materialId;
// IMPORTANT: In the case of the forward or gbuffer pass we know what we are; we don't need to check specular or aniso to know the materialId, because we have a static compile-time shader feature for it
// IMPORTANT: In the case of the forward or gbuffer pass we must know what we are statically, so the compiler can do compile-time optimization
FillMaterialIdStandardData(surfaceData.baseColor, surfaceData.specular, surfaceData.metallic, bsdfData);
}
else if (bsdfData.materialId == MATERIALID_LIT_SPECULAR)
{
bsdfData.diffuseColor = surfaceData.baseColor;
bsdfData.fresnel0 = surfaceData.specularColor;
if (surfaceData.specular == SPECULARVALUE_SPECULAR_COLOR)
{
bsdfData.diffuseColor = surfaceData.baseColor;
bsdfData.fresnel0 = surfaceData.specularColor;
}
else
{
FillMaterialIdStandardData(surfaceData.baseColor, surfaceData.specular, surfaceData.metallic, bsdfData);
}
}
else if (bsdfData.materialId == MATERIALID_LIT_ANISO)
{

if (surfaceData.materialId == MATERIALID_LIT_STANDARD)
{
// Encode specular on two bits for the enum
outGBuffer2 = float4(0.0, 0.0, 0.0, PackFloatInt8bit(surfaceData.metallic, surfaceData.specular, 4.0));
}
else if (surfaceData.materialId == MATERIALID_LIT_SPECULAR)
{
outGBuffer1.a = PackMaterialId(MATERIALID_LIT_STANDARD); // We save 1bit in gbuffer1 to store it in gbuffer2 instead
// Encode specular on two bits for the enum, must match the encoding of MATERIALID_LIT_STANDARD
// TODO: the encoding here could be optimized since we know the value of surfaceData.specular => (0.75294)
outGBuffer2 = float4(surfaceData.specularColor, PackFloatInt8bit(0.0, surfaceData.specular, 4.0));
// Note: we encode two parametrizations at the same time, specularColor and metal/specular
if (surfaceData.specular == SPECULARVALUE_SPECULAR_COLOR)
{
outGBuffer2 = float4(surfaceData.specularColor, PackFloatInt8bit(0.0, surfaceData.specular, 4.0)); // As everything is static, the Pack function should produce the result at compile time
}
else
{
// Note: it is important to set the anisotropy field to 0, else the materialId will be anisotropic
outGBuffer2 = float4(float3(0.0, 0.0, 0.0), PackFloatInt8bit(surfaceData.metallic, surfaceData.specular, 4.0));
}
}
else if (surfaceData.materialId == MATERIALID_LIT_ANISO)
{

bsdfData.roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
int supportsStandard = (featureFlags & (MATERIALFEATUREFLAGS_LIT_STANDARD | MATERIALFEATUREFLAGS_LIT_ANISO | MATERIALFEATUREFLAGS_LIT_SPECULAR)) != 0;
// The material feature system for material classification must allow compile-time optimization (i.e. everything should be static)
// Note that as we deduce the materialId for Aniso from the content of RT2, we need to add a few extra conditions.
// The code is also called from MaterialFeatureFlagsFromGBuffer, so it must work fully dynamically when featureFlags is 0xFFFFFFFF
int supportsStandard = (featureFlags & (MATERIALFEATUREFLAGS_LIT_STANDARD | MATERIALFEATUREFLAGS_LIT_ANISO)) != 0;
bsdfData.materialId = UnpackMaterialId(inGBuffer1.a); // only fetch materialid if it is not statically known from feature flags
// only fetch materialid if it is not statically known from feature flags
bsdfData.materialId = UnpackMaterialId(inGBuffer1.a);
}
else
{

bsdfData.materialId = MATERIALID_LIT_SSS;
}
if (supportsStandard && bsdfData.materialId == MATERIALID_LIT_STANDARD)
if (bsdfData.materialId == MATERIALID_LIT_STANDARD)
{
float metallic;
int specular;

if (((featureFlags & MATERIALFEATUREFLAGS_LIT_SPECULAR) && (featureFlags & MATERIALFEATUREFLAGS_LIT_STANDARD) == 0)
|| specular == SPECULARVALUE_SPECULAR_COLOR)
if (featureFlags & (MATERIAL_FEATURE_MASK_FLAGS) == MATERIALFEATUREFLAGS_LIT_STANDARD)
bsdfData.materialId = MATERIALID_LIT_SPECULAR;
bsdfData.diffuseColor = baseColor;
bsdfData.fresnel0 = inGBuffer2.rgb;
if (specular == SPECULARVALUE_SPECULAR_COLOR)
{
bsdfData.diffuseColor = baseColor;
bsdfData.fresnel0 = inGBuffer2.rgb;
}
else
{
FillMaterialIdStandardData(baseColor, specular, metallic, bsdfData);
}
else if ( ((featureFlags & MATERIALFEATUREFLAGS_LIT_ANISO) && (featureFlags & MATERIALFEATUREFLAGS_LIT_STANDARD) == 0)
|| anisotropy > 0)
else if (featureFlags & (MATERIAL_FEATURE_MASK_FLAGS) == MATERIALFEATUREFLAGS_LIT_ANISO)
{
bsdfData.materialId = MATERIALID_LIT_ANISO;
FillMaterialIdStandardData(baseColor, specular, metallic, bsdfData);

else
else // either MATERIAL_FEATURE_MASK_FLAGS or MATERIALFEATUREFLAGS_LIT_STANDARD | MATERIALFEATUREFLAGS_LIT_ANISO
FillMaterialIdStandardData(baseColor, specular, metallic, bsdfData);
if (specular == SPECULARVALUE_SPECULAR_COLOR)
{
bsdfData.diffuseColor = baseColor;
bsdfData.fresnel0 = inGBuffer2.rgb;
}
else if (anisotropy > 0)
{
bsdfData.materialId = MATERIALID_LIT_ANISO;
FillMaterialIdStandardData(baseColor, specular, metallic, bsdfData);
float3 tangentWS = UnpackNormalOctEncode(float2(inGBuffer2.rg * 2.0 - 1.0));
FillMaterialIdAnisoData(bsdfData.roughness, bsdfData.normalWS, tangentWS, anisotropy, bsdfData);
}
else
{
FillMaterialIdStandardData(baseColor, specular, metallic, bsdfData);
}
else // if (supportsSSS && bsdfData.materialId == MATERIALID_LIT_SSS)
else // bsdfData.materialId == MATERIALID_LIT_SSS
{
float subsurfaceRadius = inGBuffer2.x;
float thickness = inGBuffer2.y;

#endif
)
{
BSDFData bsdfData;
float3 unused;
DecodeFromGBuffer(
float4 inGBuffer0, inGBuffer1, inGBuffer2, inGBuffer3;
inGBuffer0 = DecodeGBuffer0(inGBufferU0);
uint packedGBuffer1 = inGBufferU0.z | inGBufferU0.w << 16;
inGBuffer1 = UnpackR10G10B10A2(packedGBuffer1);
inGBuffer2.x = UnpackUIntToFloat(inGBufferU1.x, 8, 0);
inGBuffer2.y = UnpackUIntToFloat(inGBufferU1.x, 8, 8);
inGBuffer2.z = UnpackUIntToFloat(inGBufferU1.y, 8, 0);
inGBuffer2.w = UnpackUIntToFloat(inGBufferU1.y, 8, 8);
uint packedGBuffer3 = inGBufferU1.z | inGBufferU1.w << 16;
inGBuffer3.xyz = UnpackR11G11B10f(packedGBuffer1);
inGBuffer3.w = 0.0;
inGBufferU0, inGBufferU1,
#else
inGBuffer0, inGBuffer1, inGBuffer2, inGBuffer3,
int materialId = UnpackMaterialId(inGBuffer1.a);
0xFFFFFFFF,
bsdfData,
unused
);
uint featureFlags = 0;
if (materialId == MATERIALID_LIT_STANDARD)
{
float metallic;
int specular;
UnpackFloatInt8bit(inGBuffer2.a, 4.0, metallic, specular);
float anisotropy = inGBuffer2.b;
if (specular == SPECULARVALUE_SPECULAR_COLOR)
{
featureFlags |= MATERIALFEATUREFLAGS_LIT_SPECULAR;
}
else if (anisotropy > 0.0)
{
featureFlags |= MATERIALFEATUREFLAGS_LIT_ANISO;
}
else
{
featureFlags |= MATERIALFEATUREFLAGS_LIT_STANDARD;
}
}
else if (materialId == MATERIALID_LIT_SSS)
{
featureFlags |= MATERIALFEATUREFLAGS_LIT_SSS;
}
return featureFlags;
return (1 << bsdfData.materialId); // This match all the MATERIALFEATUREFLAGS_LIT_XXX flag
}

preLightData.ggxLambdaV = GetSmithJointGGXLambdaV(NdotV, bsdfData.roughness);
// GGX aniso
preLightData.TdotV = 0;
preLightData.BdotV = 0;
if (bsdfData.materialId == MATERIALID_LIT_ANISO)
{
preLightData.TdotV = dot(bsdfData.tangentWS, V);

}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Directional
// EvaluateBSDF_Directional (supports directional and box projector lights)
void EvaluateBSDF_Directional( LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput, PreLightData preLightData, DirectionalLightData lightData, BSDFData bsdfData,
out float3 diffuseLighting,
out float3 specularLighting)
void EvaluateBSDF_Directional(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput, PreLightData preLightData,
DirectionalLightData lightData, BSDFData bsdfData,
out float3 diffuseLighting,
out float3 specularLighting)
{
float3 positionWS = posInput.positionWS;

diffuseLighting = float3(0.0, 0.0, 0.0);
specularLighting = float3(0.0, 0.0, 0.0);
float4 cookie = float4(1.0, 1.0, 1.0, 1.0);
diffuseLighting = float3(0, 0, 0); // TODO: check whether using 'out' instead of 'inout' increases the VGPR pressure
specularLighting = float3(0, 0, 0); // TODO: check whether using 'out' instead of 'inout' increases the VGPR pressure
float3 cookie = float3(1, 1, 1);
float shadow = 1;
[branch] if (lightData.shadowIndex >= 0)

[branch] if (lightData.cookieIndex >= 0)
{
float3 lightToSurface = positionWS - lightData.positionWS;
// Compute the NDC position (in [-1, 1]^2) by projecting 'positionWS' onto the near plane.
// 'lightData.right' and 'lightData.up' are pre-scaled on CPU.
float3 lightToSurface = positionWS - lightData.positionWS;
float3x3 lightToWorld = float3x3(lightData.right, lightData.up, lightData.forward);
float3 positionLS = mul(lightToSurface, transpose(lightToWorld));
float2 positionNDC = positionLS.xy;
// Project 'lightToSurface' onto the light's axes.
float2 coord = float2(dot(lightToSurface, lightData.right), dot(lightToSurface, lightData.up));
float clipFactor = 1.0f;
// Compute the NDC coordinates (in [-1, 1]^2).
coord.x *= lightData.invScaleX;
coord.y *= lightData.invScaleY;
// Remap the texture coordinates from [-1, 1]^2 to [0, 1]^2.
float2 coord = positionNDC * 0.5 + 0.5;
if (lightData.tileCookie || (abs(coord.x) <= 1 && abs(coord.y) <= 1))
if (lightData.tileCookie)
// Remap the texture coordinates from [-1, 1]^2 to [0, 1]^2.
coord = coord * 0.5 + 0.5;
if (lightData.tileCookie) { coord = frac(coord); }
cookie = SampleCookie2D(lightLoopContext, coord, lightData.cookieIndex);
coord = frac(coord);
cookie = float4(0, 0, 0, 0);
bool isInBounds = Max3(abs(positionNDC.x), abs(positionNDC.y), 1 - positionLS.z) <= 1;
clipFactor = isInBounds ? 1 : 0;
illuminance *= cookie.a;
// We let the sampler handle tiling or clamping to border.
// Note: tiling (the repeat mode) is not currently supported.
float4 c = SampleCookie2D(lightLoopContext, coord, lightData.cookieIndex);
// Use premultiplied alpha to save 1x VGPR.
cookie = c.rgb * c.a * clipFactor;
}
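The reworked directional cookie branch projects the surface point onto the light's right/up axes, rescales by invScaleX/invScaleY into NDC, then either wraps the coordinate (tileCookie) or zeroes the contribution outside the unit square, and finally premultiplies the cookie alpha into its RGB to save a VGPR. A CPU-side C# sketch of the coordinate math; sampling is abstracted behind a delegate, and the light-space depth term of the shader's bounds test is omitted:

    using UnityEngine;

    static class DirectionalCookieSketch
    {
        // Returns the cookie tint with alpha (and the clip factor) premultiplied into RGB.
        public static Vector3 EvaluateCookie(Vector3 positionWS, Vector3 lightPositionWS,
                                             Vector3 right, Vector3 up,
                                             float invScaleX, float invScaleY,
                                             bool tileCookie,
                                             System.Func<Vector2, Vector4> sampleCookie2D)
        {
            Vector3 lightToSurface = positionWS - lightPositionWS;

            // Project onto the light's axes, then rescale into NDC ([-1, 1]^2).
            Vector2 positionNDC = new Vector2(Vector3.Dot(lightToSurface, right) * invScaleX,
                                              Vector3.Dot(lightToSurface, up) * invScaleY);

            // Remap [-1, 1]^2 to [0, 1]^2 for sampling.
            Vector2 coord = positionNDC * 0.5f + new Vector2(0.5f, 0.5f);

            float clipFactor = 1.0f;
            if (tileCookie)
            {
                coord = new Vector2(coord.x - Mathf.Floor(coord.x), coord.y - Mathf.Floor(coord.y)); // frac()
            }
            else
            {
                clipFactor = (Mathf.Abs(positionNDC.x) <= 1.0f && Mathf.Abs(positionNDC.y) <= 1.0f) ? 1.0f : 0.0f;
            }

            Vector4 c = sampleCookie2D(coord);
            return new Vector3(c.x, c.y, c.z) * (c.w * clipFactor); // premultiplied alpha saves a VGPR
        }
    }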
[branch] if (illuminance > 0.0)

diffuseLighting *= (cookie.rgb * lightData.color) * (illuminance * lightData.diffuseScale);
specularLighting *= (cookie.rgb * lightData.color) * (illuminance * lightData.specularScale);
diffuseLighting *= (cookie * lightData.color) * (illuminance * lightData.diffuseScale);
specularLighting *= (cookie * lightData.color) * (illuminance * lightData.specularScale);
// Use the reversed normal from the front for the back of the object.
illuminance = F_Transm_Schlick(bsdfData.fresnel0.x, saturate(-NdotL)); // Transmission is only valid for dielectric
// Currently, we only model diffuse transmission. Specular transmission is not yet supported.
// We assume that the back side of the object is a uniformly illuminated infinite plane
// (we reuse the illumination) with the reversed normal of the current sample.
// We apply wrapped lighting instead of the regular Lambertian diffuse
// to compensate for these approximations.
illuminance = ComputeWrappedDiffuseLighting(NdotL, SSS_WRAP_LIGHT);
illuminance *= shadow * cookie.a;
illuminance *= shadow;
float3 backLight = (cookie.rgb * lightData.color) * (illuminance * lightData.diffuseScale);
float3 backLight = (cookie * lightData.color) * (Lambert() * illuminance * lightData.diffuseScale);
// TODO: multiplication by 'diffuseColor' and 'transmittance' is the same for each light.
float3 transmittedLight = backLight * (bsdfData.diffuseColor * bsdfData.transmittance);

}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Punctual
// EvaluateBSDF_Punctual (supports spot, point and projector lights)
//-----------------------------------------------------------------------------
void EvaluateBSDF_Punctual( LightLoopContext lightLoopContext,

{
float3 positionWS = posInput.positionWS;
int lightType = lightData.lightType;
// All punctual light types use the same formula; the attenuation is neutral depending on the light type.
// light.positionWS is the normalized light direction in the case of a directional light, and invSqrAttenuationRadius is 0

float3 unL = lightData.positionWS - positionWS;
float3 L = normalize(unL);
float3 lightToSurface = positionWS - lightData.positionWS;
float3 unL = -lightToSurface;
float3 L = (lightType != GPULIGHTTYPE_PROJECTOR_BOX) ? normalize(unL) : -lightData.forward;
float attenuation = GetDistanceAttenuation(unL, lightData.invSqrAttenuationRadius);
// Reminder: lights are ortiented backward (-Z)
float attenuation = (lightType != GPULIGHTTYPE_PROJECTOR_BOX) ? GetDistanceAttenuation(unL, lightData.invSqrAttenuationRadius) : 1;
// Reminder: lights are oriented backward (-Z)
diffuseLighting = float3(0.0, 0.0, 0.0);
specularLighting = float3(0.0, 0.0, 0.0);
float4 cookie = float4(1.0, 1.0, 1.0, 1.0);
diffuseLighting = float3(0, 0, 0); // TODO: check whether using 'out' instead of 'inout' increases the VGPR pressure
specularLighting = float3(0, 0, 0); // TODO: check whether using 'out' instead of 'inout' increases the VGPR pressure
float3 cookie = float3(1, 1, 1);
// TODO: measure the impact of having all these dynamic branches here and the gain (or not) of testing illuminance > 0
//[branch] if (lightData.IESIndex >= 0 && illuminance > 0.0)
//{
// float3x3 lightToWorld = float3x3(lightData.right, lightData.up, lightData.forward);
// float2 sphericalCoord = GetIESTextureCoordinate(lightToWorld, L);
// illuminance *= SampleIES(lightLoopContext, lightData.IESIndex, sphericalCoord, 0).r;
//}
// TODO: make projector lights cast shadows.
shadow = GetPunctualShadowAttenuation(lightLoopContext.shadowContext, positionWS + offset, bsdfData.normalWS, lightData.shadowIndex, L, posInput.unPositionSS);
float4 L_dist = { normalize( L.xyz ), length( unL ) };
shadow = GetPunctualShadowAttenuation(lightLoopContext.shadowContext, positionWS + offset, bsdfData.normalWS, lightData.shadowIndex, L_dist, posInput.unPositionSS);
// Projector lights always have a cookie.
// Translate and rotate 'positionWS' into the light space.
// 'lightData.right' and 'lightData.up' are pre-scaled on CPU.
float3 positionLS = mul(lightToSurface, transpose(lightToWorld));
// Rotate 'L' into the light space.
// We perform the negation because lights are oriented backwards (-Z).
float3 coord = mul(-L, transpose(lightToWorld));
[branch] if (lightData.lightType == GPULIGHTTYPE_SPOT)
[branch] if (lightType == GPULIGHTTYPE_POINT)
// Perform the perspective projection of the hemisphere onto the disk.
coord.xy /= coord.z;
float4 c = SampleCookieCube(lightLoopContext, positionLS, lightData.cookieIndex);
// Rescale the projective coordinates to fit into the [-1, 1]^2 range.
float cotOuterHalfAngle = lightData.size.x;
coord.xy *= cotOuterHalfAngle;
// Remap the texture coordinates from [-1, 1]^2 to [0, 1]^2.
coord.xy = coord.xy * 0.5 + 0.5;
cookie = SampleCookie2D(lightLoopContext, coord.xy, lightData.cookieIndex);
// Use premultiplied alpha to save 1x VGPR.
cookie = c.rgb * c.a;
else // GPULIGHTTYPE_POINT
else
cookie = SampleCookieCube(lightLoopContext, coord, lightData.cookieIndex);
}
illuminance *= cookie.a;
}
[branch] if (illuminance > 0.0)
{
BSDF(V, L, positionWS, preLightData, bsdfData, diffuseLighting, specularLighting);
diffuseLighting *= (cookie.rgb * lightData.color) * (illuminance * lightData.diffuseScale);
specularLighting *= (cookie.rgb * lightData.color) * (illuminance * lightData.specularScale);
}
[branch] if (bsdfData.enableTransmission)
{
// Use the reversed normal from the front for the back of the object.
illuminance = F_Transm_Schlick(bsdfData.fresnel0.x , saturate(-NdotL)) * attenuation; // Transmission is only valid for dielectric
// For low thickness, we can reuse the shadowing status for the back of the object.
shadow = bsdfData.useThinObjectMode ? shadow : 1;
illuminance *= shadow * cookie.a;
float3 backLight = (cookie.rgb * lightData.color) * (illuminance * lightData.diffuseScale);
// TODO: multiplication by 'diffuseColor' and 'transmittance' is the same for each light.
float3 transmittedLight = backLight * (bsdfData.diffuseColor * bsdfData.transmittance);
// We use diffuse lighting for accumulation since it is going to be blurred during the SSS pass.
diffuseLighting += transmittedLight;
}
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Projector
//-----------------------------------------------------------------------------
void EvaluateBSDF_Projector(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput, PreLightData preLightData, LightData lightData, BSDFData bsdfData,
out float3 diffuseLighting,
out float3 specularLighting)
{
float3 positionWS = posInput.positionWS;
// Translate and rotate 'positionWS' into the light space.
float3 positionLS = mul(positionWS - lightData.positionWS,
transpose(float3x3(lightData.right, lightData.up, lightData.forward)));
if (lightData.lightType == GPULIGHTTYPE_PROJECTOR_PYRAMID)
{
// Perform perspective division.
positionLS *= rcp(positionLS.z);
}
else
{
// For orthographic projection, the Z coordinate plays no role.
positionLS.z = 0;
}
// Compute the NDC position (in [-1, 1]^2). TODO: precompute the inverse?
float2 positionNDC = positionLS.xy * rcp(0.5 * lightData.size);
// Perform clipping.
float clipFactor = ((positionLS.z >= 0) && (abs(positionNDC.x) <= 1 && abs(positionNDC.y) <= 1)) ? 1 : 0;
float3 L = -lightData.forward; // Lights are pointing backward in Unity
float NdotL = dot(bsdfData.normalWS, L);
float illuminance = saturate(NdotL * clipFactor);
diffuseLighting = float3(0.0, 0.0, 0.0);
specularLighting = float3(0.0, 0.0, 0.0);
float4 cookie = float4(1.0, 1.0, 1.0, 1.0);
float shadow = 1;
[branch] if (lightData.shadowIndex >= 0)
{
shadow = GetDirectionalShadowAttenuation(lightLoopContext.shadowContext, positionWS, bsdfData.normalWS, lightData.shadowIndex, L, posInput.unPositionSS);
illuminance *= shadow;
}
// Compute the NDC position (in [-1, 1]^2) by projecting 'positionWS' onto the plane at 1m distance.
// Box projector lights require no perspective division.
float perspectiveZ = (lightType != GPULIGHTTYPE_PROJECTOR_BOX) ? positionLS.z : 1;
float2 positionNDC = positionLS.xy / perspectiveZ;
bool isInBounds = Max3(abs(positionNDC.x), abs(positionNDC.y), 1 - positionLS.z) <= 1;
float clipFactor = isInBounds ? 1 : 0;
[branch] if (lightData.cookieIndex >= 0)
{
// Compute the texture coordinates in [0, 1]^2.
float2 coord = positionNDC * 0.5 + 0.5;
// Remap the texture coordinates from [-1, 1]^2 to [0, 1]^2.
float2 coord = positionNDC * 0.5 + 0.5;
cookie = SampleCookie2D(lightLoopContext, coord, lightData.cookieIndex);
// We let the sampler handle clamping to border.
float4 c = SampleCookie2D(lightLoopContext, coord, lightData.cookieIndex);
illuminance *= cookie.a;
// Use premultiplied alpha to save 1x VGPR.
cookie = c.rgb * (c.a * clipFactor);
}
}
[branch] if (illuminance > 0.0)

[branch] if (bsdfData.enableTransmission)
{
// Use the reversed normal from the front for the back of the object.
illuminance = F_Transm_Schlick(bsdfData.fresnel0.x, saturate(-NdotL)) * clipFactor; // Transmission is only valid for dielectric
// Currently, we only model diffuse transmission. Specular transmission is not yet supported.
// We assume that the back side of the object is a uniformly illuminated infinite plane
// (we reuse the illumination) with the reversed normal of the current sample.
// We apply wrapped lighting instead of the regular Lambertian diffuse
// to compensate for these approximations.
illuminance = ComputeWrappedDiffuseLighting(NdotL, SSS_WRAP_LIGHT) * attenuation;
illuminance *= shadow * cookie.a;
illuminance *= shadow;
float3 backLight = (cookie.rgb * lightData.color) * (illuminance * lightData.diffuseScale);
float3 backLight = (cookie.rgb * lightData.color) * (Lambert() * illuminance * lightData.diffuseScale);
// TODO: multiplication by 'diffuseColor' and 'transmittance' is the same for each light.
float3 transmittedLight = backLight * (bsdfData.diffuseColor * bsdfData.transmittance);

}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Line - Reference
//-----------------------------------------------------------------------------
void IntegrateBSDF_LineRef(float3 V, float3 positionWS,
PreLightData preLightData, LightData lightData, BSDFData bsdfData,
out float3 diffuseLighting, out float3 specularLighting,
int sampleCount = 128)
{
diffuseLighting = float3(0.0, 0.0, 0.0);
specularLighting = float3(0.0, 0.0, 0.0);
const float len = lightData.size.x;
const float3 T = lightData.right;
const float3 P1 = lightData.positionWS - T * (0.5 * len);
const float dt = len * rcp(sampleCount);
const float off = 0.5 * dt;
// Uniformly sample the line segment with the Pdf = 1 / len.
const float invPdf = len;
for (int i = 0; i < sampleCount; ++i)
{
// Place the sample in the middle of the interval.
float t = off + i * dt;
float3 sPos = P1 + t * T;
float3 unL = sPos - positionWS;
float dist2 = dot(unL, unL);
float3 L = normalize(unL);
float sinLT = length(cross(L, T));
float NdotL = saturate(dot(bsdfData.normalWS, L));
if (NdotL > 0)
{
float3 lightDiff, lightSpec;
BSDF(V, L, positionWS, preLightData, bsdfData, lightDiff, lightSpec);
diffuseLighting += lightDiff * (sinLT / dist2 * NdotL);
specularLighting += lightSpec * (sinLT / dist2 * NdotL);
}
}
// The factor of 2 is due to the fact: Integral{0, 2 PI}{max(0, cos(x))dx} = 2.
float normFactor = 2.0 * invPdf * rcp(sampleCount);
diffuseLighting *= normFactor * lightData.diffuseScale * lightData.color;
specularLighting *= normFactor * lightData.specularScale * lightData.color;
}
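To make the normalization explicit: the loop is a Monte Carlo estimate of the line integral with uniform sampling, $pdf(t) = 1/len$, so each sample is weighted by $invPdf / N = len / N$. The extra factor of 2 is the comment's cosine integral written out:

$$\int_0^{2\pi} \max(0, \cos x)\, dx = \int_{-\pi/2}^{\pi/2} \cos x\, dx = 2,$$

which is why normFactor = 2.0 * invPdf * rcp(sampleCount).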
#include "LitReference.hlsl"
//-----------------------------------------------------------------------------
// EvaluateBSDF_Line - Approximation with Linearly Transformed Cosines

}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Area - Reference
//-----------------------------------------------------------------------------
void IntegrateBSDF_AreaRef(float3 V, float3 positionWS,
PreLightData preLightData, LightData lightData, BSDFData bsdfData,
out float3 diffuseLighting, out float3 specularLighting,
uint sampleCount = 512)
{
// Add some jittering on Hammersley2d
float2 randNum = InitRandom(V.xy * 0.5 + 0.5);
diffuseLighting = float3(0.0, 0.0, 0.0);
specularLighting = float3(0.0, 0.0, 0.0);
for (uint i = 0; i < sampleCount; ++i)
{
float3 P = float3(0.0, 0.0, 0.0); // Sample light point. Random point on the light shape in local space.
float3 Ns = float3(0.0, 0.0, 0.0); // Unit surface normal at P
float lightPdf = 0.0; // Pdf of the light sample
float2 u = Hammersley2d(i, sampleCount);
u = frac(u + randNum);
// Lights in Unity point backward.
float4x4 localToWorld = float4x4(float4(lightData.right, 0.0), float4(lightData.up, 0.0), float4(-lightData.forward, 0.0), float4(lightData.positionWS, 1.0));
switch (lightData.lightType)
{
case GPULIGHTTYPE_SPHERE:
SampleSphere(u, localToWorld, lightData.size.x, lightPdf, P, Ns);
break;
case GPULIGHTTYPE_HEMISPHERE:
SampleHemisphere(u, localToWorld, lightData.size.x, lightPdf, P, Ns);
break;
case GPULIGHTTYPE_CYLINDER:
SampleCylinder(u, localToWorld, lightData.size.x, lightData.size.y, lightPdf, P, Ns);
break;
case GPULIGHTTYPE_RECTANGLE:
SampleRectangle(u, localToWorld, lightData.size.x, lightData.size.y, lightPdf, P, Ns);
break;
case GPULIGHTTYPE_DISK:
SampleDisk(u, localToWorld, lightData.size.x, lightPdf, P, Ns);
break;
// case GPULIGHTTYPE_LINE: handled by a separate function.
}
// Get distance
float3 unL = P - positionWS;
float sqrDist = dot(unL, unL);
float3 L = normalize(unL);
// Cosine of the angle between the light direction and the normal of the light's surface.
float cosLNs = saturate(dot(-L, Ns));
// We calculate area reference light with the area integral rather than the solid angle one.
float illuminance = cosLNs * saturate(dot(bsdfData.normalWS, L)) / (sqrDist * lightPdf);
float3 localDiffuseLighting = float3(0.0, 0.0, 0.0);
float3 localSpecularLighting = float3(0.0, 0.0, 0.0);
if (illuminance > 0.0)
{
BSDF(V, L, positionWS, preLightData, bsdfData, localDiffuseLighting, localSpecularLighting);
localDiffuseLighting *= lightData.color * illuminance * lightData.diffuseScale;
localSpecularLighting *= lightData.color * illuminance * lightData.specularScale;
}
diffuseLighting += localDiffuseLighting;
specularLighting += localSpecularLighting;
}
diffuseLighting /= float(sampleCount);
specularLighting /= float(sampleCount);
}
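For reference, the per-sample weight in the loop above is the standard change of variables from solid angle to area measure: with a point $P$ drawn on the light shape with density $lightPdf$ (per unit area),

$$E \approx \frac{1}{N}\sum_{i=1}^{N} f_{BSDF}\; L_i\; \frac{\cos\theta_{N_s}\, \cos\theta_N}{d^2 \cdot lightPdf},$$

where $\cos\theta_{N_s} = \operatorname{saturate}(\langle -L, N_s\rangle)$ is the cosine at the light's surface, $\cos\theta_N$ the cosine at the shaded point, and $d^2$ the squared distance; this is exactly the illuminance term computed per sample above.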
//-----------------------------------------------------------------------------
// EvaluateBSDF_Area - Approximation with Linearly Transformed Cosines
//-----------------------------------------------------------------------------

specularLighting = fresnelTerm * lightData.color * ltcValue;
}
#endif // LIT_DISPLAY_REFERENCE_AREA
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Env - Reference
// ----------------------------------------------------------------------------
// Ref: Moving Frostbite to PBR (Appendix A)
float3 IntegrateLambertIBLRef(LightLoopContext lightLoopContext,
float3 V, EnvLightData lightData, BSDFData bsdfData,
uint sampleCount = 4096)
{
float3x3 localToWorld = float3x3(bsdfData.tangentWS, bsdfData.bitangentWS, bsdfData.normalWS);
float3 acc = float3(0.0, 0.0, 0.0);
// Add some jittering on Hammersley2d
float2 randNum = InitRandom(V.xy * 0.5 + 0.5);
for (uint i = 0; i < sampleCount; ++i)
{
float2 u = Hammersley2d(i, sampleCount);
u = frac(u + randNum);
float3 L;
float NdotL;
float weightOverPdf;
ImportanceSampleLambert(u, localToWorld, L, NdotL, weightOverPdf);
if (NdotL > 0.0)
{
float4 val = SampleEnv(lightLoopContext, lightData.envIndex, L, 0);
// The diffuse albedo is applied here, as described in the ImportanceSampleLambert function.
acc += bsdfData.diffuseColor * LambertNoPI() * weightOverPdf * val.rgb;
}
}
return acc / sampleCount;
}
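The reason only the diffuse albedo multiplies each sample: with cosine-weighted sampling, $pdf(\omega) = \cos\theta / \pi$, and the Lambert BRDF is $\rho / \pi$, so

$$\frac{f \cos\theta}{pdf} = \frac{(\rho/\pi)\,\cos\theta}{\cos\theta/\pi} = \rho.$$

The $1/\pi$ and $\cos\theta$ cancel, which is what the comments about ImportanceSampleLambert and LambertNoPI refer to (assuming weightOverPdf folds exactly this cancellation; the helper itself is defined elsewhere).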
float3 IntegrateDisneyDiffuseIBLRef(LightLoopContext lightLoopContext,
float3 V, PreLightData preLightData, EnvLightData lightData, BSDFData bsdfData,
uint sampleCount = 4096)
{
float3x3 localToWorld = float3x3(bsdfData.tangentWS, bsdfData.bitangentWS, bsdfData.normalWS);
float NdotV = max(preLightData.NdotV, MIN_N_DOT_V);
float3 acc = float3(0.0, 0.0, 0.0);
// Add some jittering on Hammersley2d
float2 randNum = InitRandom(V.xy * 0.5 + 0.5);
for (uint i = 0; i < sampleCount; ++i)
{
float2 u = Hammersley2d(i, sampleCount);
u = frac(u + randNum);
float3 L;
float NdotL;
float weightOverPdf;
// For Disney diffuse we still use cosine importance sampling; true Disney importance sampling would require a look-up table.
ImportanceSampleLambert(u, localToWorld, L, NdotL, weightOverPdf);
if (NdotL > 0.0)
{
float3 H = normalize(L + V);
float LdotH = dot(L, H);
// Note: DisneyDiffuse must be multiplied by Albedo / PI. The divide by PI is already taken into account
// in the weightOverPdf returned by the ImportanceSampleLambert call.
float disneyDiffuse = DisneyDiffuse(NdotV, NdotL, LdotH, bsdfData.perceptualRoughness);
// The diffuse albedo is applied here, as described in the ImportanceSampleLambert function.
float4 val = SampleEnv(lightLoopContext, lightData.envIndex, L, 0);
acc += bsdfData.diffuseColor * disneyDiffuse * weightOverPdf * val.rgb;
}
}
return acc / sampleCount;
}
// Ref: Moving Frostbite to PBR (Appendix A)
float3 IntegrateSpecularGGXIBLRef(LightLoopContext lightLoopContext,
float3 V, PreLightData preLightData, EnvLightData lightData, BSDFData bsdfData,
uint sampleCount = 4096)
{
float3x3 localToWorld = float3x3(bsdfData.tangentWS, bsdfData.bitangentWS, bsdfData.normalWS);
float NdotV = max(preLightData.NdotV, MIN_N_DOT_V);
float3 acc = float3(0.0, 0.0, 0.0);
// Add some jittering on Hammersley2d
float2 randNum = InitRandom(V.xy * 0.5 + 0.5);
for (uint i = 0; i < sampleCount; ++i)
{
float2 u = Hammersley2d(i, sampleCount);
u = frac(u + randNum);
float VdotH;
float NdotL;
float3 L;
float weightOverPdf;
// GGX BRDF
if (bsdfData.materialId == MATERIALID_LIT_ANISO)
{
ImportanceSampleAnisoGGX(u, V, localToWorld, bsdfData.roughnessT, bsdfData.roughnessB, NdotV, L, VdotH, NdotL, weightOverPdf);
}
else
{
ImportanceSampleGGX(u, V, localToWorld, bsdfData.roughness, NdotV, L, VdotH, NdotL, weightOverPdf);
}
if (NdotL > 0.0)
{
// The Fresnel component is applied here, as described in the ImportanceSampleGGX function.
float3 FweightOverPdf = F_Schlick(bsdfData.fresnel0, VdotH) * weightOverPdf;
float4 val = SampleEnv(lightLoopContext, lightData.envIndex, L, 0);
acc += FweightOverPdf * val.rgb;
}
}
return acc / sampleCount;
}
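For the GGX case, the usual importance-sampling identity applies (a sketch under the standard convention that half-vectors are drawn proportionally to $D(h)\,\langle N,h\rangle$; the exact weight returned by ImportanceSampleGGX may fold the visibility terms differently):

$$pdf(L) = \frac{D(h)\,\langle N,h\rangle}{4\,\langle V,h\rangle}, \qquad \frac{f\,\langle N,L\rangle}{pdf(L)} = F(\langle V,h\rangle)\,\frac{G\,\langle V,h\rangle}{\langle N,V\rangle\,\langle N,h\rangle},$$

which is why only the Fresnel term F_Schlick(fresnel0, VdotH) is applied explicitly in the loop; the rest is carried by weightOverPdf.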
//-----------------------------------------------------------------------------

9
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitDataInternal.hlsl


float2 uvXZ;
float2 uvXY;
float2 uvZY;
GetTriplanarCoordinate(positionWS * worldScale, uvXZ, uvXY, uvZY);
GetTriplanarCoordinate(GetAbsolutePositionWS(positionWS) * worldScale, uvXZ, uvXY, uvZY);
// Planar is just XZ of triplanar
if (mappingType == UV_MAPPING_PLANAR)

#ifdef _DETAIL_MAP_IDX
#ifdef SURFACE_GRADIENT
normalTS += detailNormalTS;
normalTS += detailNormalTS * detailMask;
#else
normalTS = lerp(normalTS, BlendNormalRNM(normalTS, detailNormalTS), detailMask);
#endif

#elif defined(_MATID_ANISO)
surfaceData.materialId = MATERIALID_LIT_ANISO;
#elif defined(_MATID_SPECULAR)
surfaceData.materialId = MATERIALID_LIT_SPECULAR;
surfaceData.materialId = MATERIALID_LIT_STANDARD; // Specular is not a different BRDF, it is just a different parametrization; don't create a separate matId for it
#else // Default
surfaceData.materialId = MATERIALID_LIT_STANDARD;
#endif

#endif
surfaceData.anisotropy *= ADD_IDX(_Anisotropy);
// This surfaceData.specular must be static to allow the compiler to optimize the code when converting / encoding the values
// To save 1 bit of space in the GBuffer we don't store specular as a materialId but in the enum of the specular value
surfaceData.specular = SPECULARVALUE_SPECULAR_COLOR;
#else
surfaceData.specular = SPECULARVALUE_REGULAR;

39
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitTessellation.hlsl


{
float maxDisplacement = GetMaxDisplacement();
bool frustumCulled = WorldViewFrustumCull(p0, p1, p2, maxDisplacement, (float4[4])unity_CameraWorldClipPlanes);
bool faceCull = false;
// For tessellation we always want to compute the tessellation factor from the point of view of the camera (to be consistent and avoid Z-fighting).
// For the culling part however we want to use the current view (shadow view).
// Thus the following code plays with both.
// We use the position of the primary (scene view) camera in order
// to have identical tessellation levels for both the scene view and
// shadow views. Otherwise, depth comparisons become meaningless!
float3 camPosWS = _WorldSpaceCameraPos;
#if defined(SHADERPASS) && (SHADERPASS != SHADERPASS_SHADOWS)
bool frustumCulledCurrentView = WorldViewFrustumCull(p0, p1, p2, maxDisplacement, (float4[4])_FrustumPlanes); // _FrustumPlanes are primary camera planes
bool frustumCulledMainView = false;
#else
bool frustumCulledCurrentView = WorldViewFrustumCull(p0, p1, p2, maxDisplacement, (float4[4])unity_CameraWorldClipPlanes); // unity_CameraWorldClipPlanes is set by legacy Unity in the case of shadows and contains the shadow view planes
// In the case of shadows, we don't want to tessellate anything that is not seen by the main view frustum. This can result in minor popping of tessellation into a shadow, but we can't afford to tessellate everything anyway.
bool frustumCulledMainView = WorldViewFrustumCull(p0, p1, p2, maxDisplacement, (float4[4])_FrustumPlanes);
#endif
bool faceCull = false;
faceCull = BackFaceCullTriangle(p0, p1, p2, _TessellationBackFaceCullEpsilon, camPosWS);
faceCull = BackFaceCullTriangle(p0, p1, p2, _TessellationBackFaceCullEpsilon, GetCurrentViewPosition()); // Use shadow view
if (frustumCulled || faceCull)
if (frustumCulledCurrentView || faceCull)
}
// See comment above:
// During shadow passes, we decide that anything outside the main view frustum should not be tessellated.
if (frustumCulledMainView)
{
return float4(1.0, 1.0, 1.0, 1.0);
// We use the parameters of the primary (scene view) camera in order
// to have identical tessellation levels for both the scene view and
// shadow views. Otherwise, depth comparisons become meaningless!
// Adaptive screen space tessellation
tessFactor *= GetScreenSpaceTessFactor( p0, p1, p2, GetWorldToHClipMatrix(), _ScreenParams, _TessellationFactorTriangleSize);
tessFactor *= GetScreenSpaceTessFactor( p0, p1, p2, _ViewProjMatrix, _ScreenSize, _TessellationFactorTriangleSize); // Use primary camera view
float3 distFactor = GetDistanceBasedTessFactor(p0, p1, p2, camPosWS, _TessellationFactorMinDistance, _TessellationFactorMaxDistance);
float3 distFactor = GetDistanceBasedTessFactor(p0, p1, p2, GetPrimaryCameraPosition(), _TessellationFactorMinDistance, _TessellationFactorMaxDistance); // Use primary camera view
// We square the distance factor as it allows a better perceptual decrease of vertex density.
tessFactor *= distFactor * distFactor;
}
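Putting the factors together, the final tessellation factor computed above (a summary of the code, not an additional formula) is

$$tess = tess_{base} \times s_{screen}(p_0, p_1, p_2) \times d^2,$$

where $s_{screen}$ is the adaptive screen-space factor evaluated with the primary camera's view-projection matrix and $d \in [0, 1]$ is the distance-based factor; squaring $d$ gives the perceptually smoother falloff mentioned in the comment.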

2
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CombineSubsurfaceScattering.shader


{
Stencil
{
Ref 1 // StencilBits.SSS
Ref 1 // StencilLightingUsage.SplitLighting
Comp Equal
Pass Keep
}

20
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/SubsurfaceScatteringProfile.cs


// PDF(r, s) = s * (Exp[-r * s] + Exp[-r * s / 3]) / 4
// CDF(r, s) = 1 - 1/4 * Exp[-r * s] - 3/4 * Exp[-r * s / 3]
// ------------------------------------------------------------------------------------
// N.b.: computation of normalized weights, and multiplication by the surface albedo
// of the actual geometry is performed at runtime (in the shader).
m_FilterKernelNearField[i].x = r;
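The CDF quoted in the comment is just the integral of the PDF:

$$\int_0^r \frac{s}{4}\left(e^{-ts} + e^{-ts/3}\right) dt = \frac{1}{4}\left(1 - e^{-rs}\right) + \frac{3}{4}\left(1 - e^{-rs/3}\right) = 1 - \frac{1}{4}e^{-rs} - \frac{3}{4}e^{-rs/3},$$

so the kernel radii r stored into m_FilterKernelNearField correspond to samples of this distribution (the inversion of CDF(r, s) = u itself is done elsewhere in this file).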

// Set in BuildKernel().
get { return m_FilterKernelNearField; }
}
public Vector2[] filterKernelFarField
{
// Set in BuildKernel().

// Below are the cached values.
[NonSerialized] public uint texturingModeFlags; // 1 bit/profile; 0 = PreAndPostScatter, 1 = PostScatter
[NonSerialized] public uint transmissionFlags; // 2 bit/profile; 0 = inf. thick, 1 = thin, 2 = regular
[NonSerialized] public float[] thicknessRemaps; // Remap: 0 = start, 1 = end - start
[NonSerialized] public Vector4[] thicknessRemaps; // Remap: 0 = start, 1 = end - start
[NonSerialized] public float[] worldScales; // Size of the world unit in meters
[NonSerialized] public Vector4[] shapeParams; // RGB = S = 1 / D, A = filter radius
[NonSerialized] public Vector4[] transmissionTints; // RGB = color, A = unused

{
texturingModeFlags = transmissionFlags = 0;
const int thicknessRemapsLen = SssConstants.SSS_N_PROFILES * 2;
if (thicknessRemaps == null || thicknessRemaps.Length != thicknessRemapsLen)
if (thicknessRemaps == null || thicknessRemaps.Length != SssConstants.SSS_N_PROFILES)
thicknessRemaps = new float[thicknessRemapsLen];
thicknessRemaps = new Vector4[SssConstants.SSS_N_PROFILES];
}
if (worldScales == null || worldScales.Length != SssConstants.SSS_N_PROFILES)

texturingModeFlags |= (uint)profiles[i].texturingMode << i;
transmissionFlags |= (uint)profiles[i].transmissionMode << i * 2;
thicknessRemaps[2 * i] = profiles[i].thicknessRemap.x;
thicknessRemaps[2 * i + 1] = profiles[i].thicknessRemap.y - profiles[i].thicknessRemap.x;
thicknessRemaps[i] = new Vector4(profiles[i].thicknessRemap.x, profiles[i].thicknessRemap.y - profiles[i].thicknessRemap.x, 0.0f, 0.0f);
worldScales[i] = profiles[i].worldScale;
shapeParams[i] = profiles[i].shapeParameter;
shapeParams[i].w = profiles[i].maxRadius;
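The Vector4 packing follows the "Remap: 0 = start, 1 = end - start" comment, so a normalized thickness value $t \in [0, 1]$ (for example read from a thickness map) would be decoded as (a sketch of the intended decode; the shader-side code is not part of this file):

$$thickness = thicknessRemap.x + thicknessRemap.y \cdot t.$$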

if (useDisneySSS)
{
EditorGUILayout.PropertyField(m_ScatteringDistance, styles.sssProfileScatteringDistance);
GUI.enabled = false;
EditorGUILayout.PropertyField(m_MaxRadius, styles.sssProfileMaxRadius);
GUI.enabled = true;

m_ProfileMaterial.SetFloat("_MaxRadius", rMax);
// <<< Old SSS Model
EditorGUI.DrawPreviewTexture(GUILayoutUtility.GetRect(256, 256), m_ProfileImage, m_ProfileMaterial, ScaleMode.ScaleToFit, 1.0f);
EditorGUILayout.Space();
EditorGUILayout.LabelField(styles.sssTransmittancePreview0, styles.centeredMiniBoldLabel);
EditorGUILayout.LabelField(styles.sssTransmittancePreview1, EditorStyles.centeredGreyMiniLabel);

2
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/RenderPipelineMaterial.cs


public virtual void Cleanup() {}
// The following functions can be used to initialize GPU resources (once or each frame) and bind them
public virtual void RenderInit(Rendering.ScriptableRenderContext renderContext) {}
public virtual void RenderInit(CommandBuffer cmd) {}
public virtual void Bind() {}
}
}

43
Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader


#include "../../ShaderLibrary/Common.hlsl"
#include "../ShaderVariables.hlsl"
#include "../ShaderConfig.cs.hlsl"
PackedVaryingsType Vert(AttributesMesh inputMesh)
float4 _CameraPosDiff;
struct Attributes
VaryingsType varyingsType;
varyingsType.vmesh = VertMesh(inputMesh);
return PackVaryingsType(varyingsType);
uint vertexID : SV_VertexID;
};
struct Varyings
{
float4 positionCS : SV_POSITION;
};
Varyings Vert(Attributes input)
{
Varyings output;
output.positionCS = GetFullScreenTriangleVertexPosition(input.vertexID);
return output;
float4 Frag(PackedVaryingsToPS packedInput) : SV_Target
float4 Frag(Varyings input) : SV_Target
PositionInputs posInput = GetPositionInput(packedInput.vmesh.positionCS.xy, _ScreenSize.zw);
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
float3 vPos = ComputeViewSpacePosition(posInput.positionSS, depth, _InvProjMatrix);
float4 worldPos = mul(unity_CameraToWorld, float4(vPos, 1.0));
UpdatePositionInput(depth, _InvViewProjMatrix, _ViewProjMatrix, posInput);
float4 worldPos = float4(posInput.positionWS, 1.0);
float4 prevPos = worldPos;
#if (SHADEROPTIONS_CAMERA_RELATIVE_RENDERING != 0)
prevPos -= _CameraPosDiff;
#endif
float4 prevClipPos = mul(_PrevViewProjMatrix, worldPos);
float4 curClipPos = mul(_ViewProjMatrix, worldPos);
float4 prevClipPos = mul(_PrevViewProjMatrix, prevPos);
float4 curClipPos = mul(_NonJitteredViewProjMatrix, worldPos);
#if UNITY_UV_STARTS_AT_TOP
previousPositionCS.y = 1.0 - previousPositionCS.y;
positionCS.y = 1.0 - positionCS.y;
#endif
return float4(positionCS - previousPositionCS, 0.0, 1.0);
}
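In other words, the fragment pass reconstructs the world-space position from depth, re-projects it with both the non-jittered current and the previous view-projection matrices (shifting the previous position by _CameraPosDiff when camera-relative rendering is enabled, since the world origin moves with the camera), and outputs the screen-space delta

$$v = \left(\frac{p_{clip}.xy}{p_{clip}.w}\right)_{\!current} - \left(\frac{p_{clip}.xy}{p_{clip}.w}\right)_{\!previous},$$

where positionCS and previousPositionCS are presumably the perspective-divided versions of curClipPos and prevClipPos, with the Y flip applied when UNITY_UV_STARTS_AT_TOP is defined.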

6
Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderConfig.cs


// All this could be fixed with a new Mesh API that is not ready yet. Note that this feature only affects animated meshes (vertex or skin animation), as others use depth reprojection.
VelocityInGBuffer = 0, // Change to 1 to enable the feature, then regenerate hlsl headers.
// TODO: not working yet, waiting for UINT16 RT format support
PackGBufferInU16 = 0
PackGBufferInU16 = 0,
CameraRelativeRendering = 1 // Rendering sets the origin of the world to the position of the primary (scene view) camera
};
// Note: #define can't be used in include files in C#, so we chose this way to configure both C# and hlsl

public const int k_PackgbufferInU16 = (int)ShaderOptions.PackGBufferInU16;
public static int s_PackgbufferInU16 = (int)ShaderOptions.PackGBufferInU16;
public const int k_CameraRelativeRendering = (int)ShaderOptions.CameraRelativeRendering;
public static int s_CameraRelativeRendering = (int)ShaderOptions.CameraRelativeRendering;
}
}

1
Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderConfig.cs.hlsl


//
#define SHADEROPTIONS_VELOCITY_IN_GBUFFER (0)
#define SHADEROPTIONS_PACK_GBUFFER_IN_U16 (0)
#define SHADEROPTIONS_CAMERA_RELATIVE_RENDERING (1)
#endif

1
Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForward.hlsl


float3 result = float3(1.0, 0.0, 1.0);
bool needLinearToSRGB = false;
GetPropertiesDataDebug(_DebugViewMaterial, result, needLinearToSRGB);
GetVaryingsDataDebug(_DebugViewMaterial, input, result, needLinearToSRGB);
GetBuiltinDataDebug(_DebugViewMaterial, builtinData, result, needLinearToSRGB);
GetSurfaceDataDebug(_DebugViewMaterial, surfaceData, result, needLinearToSRGB);

37
Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassLightTransport.hlsl


VaryingsToPS output;
// Output UV coordinate in vertex shader
float2 uv;
inputMesh.positionOS.xy = inputMesh.uv1 * unity_LightmapST.xy + unity_LightmapST.zw;
{
uv = inputMesh.uv1 * unity_LightmapST.xy + unity_LightmapST.zw;
}
else if (unity_MetaVertexControl.y)
{
uv = inputMesh.uv2 * unity_DynamicLightmapST.xy + unity_DynamicLightmapST.zw;
}
if (unity_MetaVertexControl.y)
inputMesh.positionOS.xy = inputMesh.uv2 * unity_DynamicLightmapST.xy + unity_DynamicLightmapST.zw;
// Zero out the Z component. However, OpenGL right now needs to actually use the incoming vertex
// position, so also take this opportunity to create a dependence on it.
inputMesh.positionOS.z = inputMesh.positionOS.z > 0 ? 1.0e-4 : 0.0;
float3 positionWS = TransformObjectToWorld(inputMesh.positionOS);
output.vmesh.positionCS = TransformWorldToHClip(positionWS);
output.vmesh.texCoord0 = inputMesh.uv0;
output.vmesh.texCoord1 = inputMesh.uv1;
// OpenGL right now needs to actually use the incoming vertex position,
// so we create a fake dependency on it here that doesn't have any impact.
output.vmesh.positionCS = float4(uv * 2.0 - 1.0, inputMesh.positionOS.z > 0 ? 1.0e-4 : 0.0, 1.0);
output.vmesh.texCoord0 = inputMesh.uv0;
output.vmesh.texCoord1 = inputMesh.uv1;
#if defined(VARYINGS_NEED_COLOR)
output.vmesh.color = inputMesh.color;

// input.unPositionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
// No position and depth in case of light transport
float3 V = float3(0, 0, 1); // No view vector in case of light transport
float3 V = float3(0.0, 0.0, 1.0); // No view vector in case of light transport
SurfaceData surfaceData;
BuiltinData builtinData;

LightTransportData lightTransportData = GetLightTransportData(surfaceData, builtinData, bsdfData);
// This shader is called two times. Once for getting emissiveColor, the other time to get diffuseColor.
// TODO: There is no if/else in the original Unity code, why? Keep it like the original code, but it should be either diffuse or emissive.
if (unity_MetaFragmentControl.x)
{
// Apply diffuseColor Boost from LightmapSettings.

if (unity_MetaFragmentControl.y)
{
// TODO: THIS LIMIT MUST BE REMOVED, IT IS NOT HDR; change when RGB9e5 is here.
// Do we assume here that emission is [0..1] ?
res = PackEmissiveRGBM(lightTransportData.emissiveColor);
// emissive uses HDR format
res.rgb = lightTransportData.emissiveColor;
}
return res;

2
Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassVelocity.hlsl


// It is not possible to correctly generate motion vectors for tessellated geometry, as tessellation parameters can change
// from one frame to another (adaptive, LOD) and in Unity we only receive information for one non-tessellated vertex.
// So motion vectors will be based on the interpolated previous position at the vertex level instead.
varyingsType.vpass.positionCS = mul(_ViewProjMatrix, mul(unity_ObjectToWorld, float4(inputMesh.positionOS, 1.0)));
varyingsType.vpass.positionCS = mul(_NonJitteredViewProjMatrix, mul(unity_ObjectToWorld, float4(inputMesh.positionOS, 1.0)));
varyingsType.vpass.previousPositionCS = mul(_PrevViewProjMatrix, mul(unity_MatrixPreviousM, unity_MotionVectorsParams.x ? float4(inputPass.previousPositionOS, 1.0) : float4(inputMesh.positionOS, 1.0)));
return PackVaryingsType(varyingsType);

2
Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/VertMesh.hlsl


ApplyWind(positionWS, normalWS, rootWP, _Stiffness, _Drag, _ShiverDrag, _ShiverDirectionality, _InitialBend, vertexColor.a, _Time);
#endif
positionWS = GetCameraRelativePositionWS(positionWS);
#ifdef TESSELLATION_ON
output.positionWS = positionWS;
#ifdef _TESSELLATION_OBJECT_SCALE

4
Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderVariables.hlsl


CBUFFER_START(UnityPerPass)
float4x4 _PrevViewProjMatrix;
float4x4 _ViewProjMatrix;
float4x4 _NonJitteredViewProjMatrix;
float4x4 _ViewMatrix;
float4x4 _ProjMatrix;
float4x4 _InvViewProjMatrix;

float4 _ScreenSize; // (w, h, 1/w, 1/h)
float4 _FrustumPlanes[6]; // (N, -dot(N, P))
CBUFFER_END
#ifdef USE_LEGACY_UNITY_MATRIX_VARIABLES

47
Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderVariablesFunctions.hlsl


return mul(GetWorldToHClipMatrix(), float4(positionWS, 1.0));
}
float3 GetCurrentCameraPosition()
float3 GetAbsolutePositionWS(float3 cameraRelativePositionWS)
{
float3 pos = cameraRelativePositionWS;
#if (SHADEROPTIONS_CAMERA_RELATIVE_RENDERING != 0)
pos += _WorldSpaceCameraPos;
#endif
return pos;
}
float3 GetCameraRelativePositionWS(float3 absolutePositionWS)
{
float3 pos = absolutePositionWS;
#if (SHADEROPTIONS_CAMERA_RELATIVE_RENDERING != 0)
pos -= _WorldSpaceCameraPos;
#endif
return pos;
}
// Note: '_WorldSpaceCameraPos' is set by the legacy Unity code.
float3 GetPrimaryCameraPosition()
{
return GetCameraRelativePositionWS(_WorldSpaceCameraPos);
}
// Could be e.g. the position of a primary camera or a shadow-casting light.
float3 GetCurrentViewPosition()
return _WorldSpaceCameraPos;
return GetPrimaryCameraPosition();
#else
// TEMP: this is rather expensive. Then again, we need '_WorldSpaceCameraPos'
// to represent the position of the primary (scene view) camera in order to

float3 rotCamPos = trViewMat[3].xyz;
return mul((float3x3)trViewMat, -rotCamPos);
// Returns the forward direction of the current camera in the world space.
float3 GetCameraForwardDir()
// Returns the forward (central) direction of the current view in the world space.
float3 GetViewForwardDir()
// Returns 'true' if the current camera performs a perspective projection.
bool IsPerspectiveCamera()
// Returns 'true' if the current view performs a perspective projection.
bool IsPerspectiveProjection()
{
#if defined(SHADERPASS) && (SHADERPASS != SHADERPASS_SHADOWS)
return (unity_OrthoParams.w == 0);

#endif
}
// Computes the world space view direction (pointing towards the camera).
// Computes the world space view direction (pointing towards the viewer).
if (IsPerspectiveCamera())
if (IsPerspectiveProjection())
float3 V = GetCurrentCameraPosition() - positionWS;
float3 V = GetCurrentViewPosition() - positionWS;
return -GetCameraForwardDir();
return -GetViewForwardDir();
}
}

9
Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/HDRISky/HDRISkyRenderer.cs


{
if (builtinParams.depthBuffer == BuiltinSkyParameters.nullRT)
{
Utilities.SetRenderTarget(builtinParams.renderContext, builtinParams.colorBuffer);
Utilities.SetRenderTarget(builtinParams.commandBuffer, builtinParams.colorBuffer);
Utilities.SetRenderTarget(builtinParams.renderContext, builtinParams.colorBuffer, builtinParams.depthBuffer);
Utilities.SetRenderTarget(builtinParams.commandBuffer, builtinParams.colorBuffer, builtinParams.depthBuffer);
}
}

m_SkyHDRIMaterial.SetVector("_SkyParam", new Vector4(m_HdriSkyParams.exposure, m_HdriSkyParams.multiplier, m_HdriSkyParams.rotation, 0.0f));
var cmd = CommandBufferPool.Get("");
cmd.DrawMesh(builtinParams.skyMesh, Matrix4x4.identity, m_SkyHDRIMaterial, 0, renderForCubemap ? 0 : 1);
builtinParams.renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
builtinParams.commandBuffer.DrawMesh(builtinParams.skyMesh, Matrix4x4.identity, m_SkyHDRIMaterial, 0, renderForCubemap ? 0 : 1);
}
public override bool IsSkyValid()

8
Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/ProceduralSky/ProceduralSkyRenderer.cs


{
// We do not bind the depth buffer as a depth-stencil target since it is
// bound as a color texture which is then sampled from within the shader.
Utilities.SetRenderTarget(builtinParams.renderContext, builtinParams.colorBuffer);
Utilities.SetRenderTarget(builtinParams.commandBuffer, builtinParams.colorBuffer);
}
void SetKeywords(BuiltinSkyParameters builtinParams, ProceduralSkySettings param, bool renderForCubemap)

// Set shader constants.
SetUniforms(builtinParams, m_ProceduralSkySettings, renderForCubemap, ref properties);
var cmd = CommandBufferPool.Get("");
cmd.DrawMesh(builtinParams.skyMesh, Matrix4x4.identity, m_ProceduralSkyMaterial, 0, 0, properties);
builtinParams.renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
builtinParams.commandBuffer.DrawMesh(builtinParams.skyMesh, Matrix4x4.identity, m_ProceduralSkyMaterial, 0, 0, properties);
}
}
}

35
Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/RuntimeFilterIBL.cs


using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering.HDPipeline;
using System;

get { return m_SupportMIS; }
}
public void Initialize(ScriptableRenderContext context)
public void Initialize(CommandBuffer cmd)
{
if (!m_ComputeGgxIblSampleDataCS)
{

m_ComputeGgxIblSampleDataCS.SetTexture(m_ComputeGgxIblSampleDataKernel, "output", m_GgxIblSampleData);
var cmd = CommandBufferPool.Get("Compute GGX IBL Sample Data");
using (new Utilities.ProfilingSample("Compute GGX IBL Sample Data", cmd))
{
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
void FilterCubemapCommon(ScriptableRenderContext context,
void FilterCubemapCommon(CommandBuffer cmd,
Texture source, RenderTexture target, int mipCount,
Mesh[] cubemapFaceMesh)
{

for (int mip = 1; mip < ((int)EnvConstants.SpecCubeLodStep + 1); ++mip)
{
string sampleName = String.Format("Filter Cubemap Mip {0}", mip);
cmd.BeginSample(sampleName);
Utilities.SetRenderTarget(context, target, ClearFlag.ClearNone, mip, (CubemapFace)face);
Utilities.SetRenderTarget(cmd, target, ClearFlag.ClearNone, mip, (CubemapFace)face);
var cmd = CommandBufferPool.Get();
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
cmd.EndSample(sampleName);
public void FilterCubemap(ScriptableRenderContext context,
public void FilterCubemap(CommandBuffer cmd,
FilterCubemapCommon(context, source, target, mipCount, cubemapFaceMesh);
FilterCubemapCommon(cmd, source, target, mipCount, cubemapFaceMesh);
public void FilterCubemapMIS(ScriptableRenderContext context,
public void FilterCubemapMIS(CommandBuffer cmd,
Texture source, RenderTexture target, int mipCount,
RenderTexture conditionalCdf, RenderTexture marginalRowCdf,
Mesh[] cubemapFaceMesh)

int numRows = conditionalCdf.height;
var cmd = CommandBufferPool.Get("Build Probability Tables");
using (new Utilities.ProfilingSample("Build Probability Tables", cmd))
{
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
FilterCubemapCommon(context, source, target, mipCount, cubemapFaceMesh);
FilterCubemapCommon(cmd, source, target, mipCount, cubemapFaceMesh);
}
}
}

114
Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/SkyManager.cs


public Vector3 cameraPosWS;
public Vector4 screenSize;
public Mesh skyMesh;
public ScriptableRenderContext renderContext;
public CommandBuffer commandBuffer;
public Light sunLight;
public RenderTargetIdentifier colorBuffer;
public RenderTargetIdentifier depthBuffer;

builtinParams.colorBuffer = target;
builtinParams.depthBuffer = BuiltinSkyParameters.nullRT;
Utilities.SetRenderTarget(builtinParams.renderContext, target, ClearFlag.ClearNone, 0, (CubemapFace)i);
Utilities.SetRenderTarget(builtinParams.commandBuffer, target, ClearFlag.ClearNone, 0, (CubemapFace)i);
private void BlitCubemap(ScriptableRenderContext renderContext, Cubemap source, RenderTexture dest)
private void BlitCubemap(CommandBuffer cmd, Cubemap source, RenderTexture dest)
{
MaterialPropertyBlock propertyBlock = new MaterialPropertyBlock();

Utilities.SetRenderTarget(renderContext, dest, ClearFlag.ClearNone, 0, (CubemapFace)i);
var cmd = CommandBufferPool.Get();
Utilities.SetRenderTarget(cmd, dest, ClearFlag.ClearNone, 0, (CubemapFace)i);
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
private void RenderCubemapGGXConvolution(ScriptableRenderContext renderContext, BuiltinSkyParameters builtinParams, SkySettings skyParams, Texture input, RenderTexture target)
private void RenderCubemapGGXConvolution(CommandBuffer cmd, BuiltinSkyParameters builtinParams, SkySettings skyParams, Texture input, RenderTexture target)
using (new Utilities.ProfilingSample("Sky Pass: GGX Convolution", renderContext))
using (new Utilities.ProfilingSample("Update Env: GGX Convolution", cmd))
{
int mipCount = 1 + (int)Mathf.Log(input.width, 2.0f);
if (mipCount < ((int)EnvConstants.SpecCubeLodStep + 1))

if (!m_iblFilterGgx.IsInitialized())
{
m_iblFilterGgx.Initialize(renderContext);
m_iblFilterGgx.Initialize(cmd);
var cmd = CommandBufferPool.Get();
for (int f = 0; f < 6; f++)
using (new Utilities.ProfilingSample("Copy Original Mip", cmd))
cmd.CopyTexture(input, f, 0, target, f, 0);
for (int f = 0; f < 6; f++)
{
cmd.CopyTexture(input, f, 0, target, f, 0);
}
renderContext.ExecuteCommandBuffer(cmd);
if (m_useMIS && m_iblFilterGgx.SupportMIS)
{
m_iblFilterGgx.FilterCubemapMIS(renderContext, input, target, mipCount, m_SkyboxConditionalCdfRT, m_SkyboxMarginalRowCdfRT, m_CubemapFaceMesh);
}
else
using (new Utilities.ProfilingSample("GGX Convolution", cmd))
m_iblFilterGgx.FilterCubemap(renderContext, input, target, mipCount, m_CubemapFaceMesh);
if (m_useMIS && m_iblFilterGgx.SupportMIS)
{
m_iblFilterGgx.FilterCubemapMIS(cmd, input, target, mipCount, m_SkyboxConditionalCdfRT, m_SkyboxMarginalRowCdfRT, m_CubemapFaceMesh);
}
else
{
m_iblFilterGgx.FilterCubemap(cmd, input, target, mipCount, m_CubemapFaceMesh);
}
CommandBufferPool.Release(cmd);
}
}

}
public void UpdateEnvironment(HDCamera camera, Light sunLight, ScriptableRenderContext renderContext)
public void UpdateEnvironment(HDCamera camera, Light sunLight, CommandBuffer cmd)
using (new Utilities.ProfilingSample("Sky Environment Pass", renderContext))
// We need one frame delay for this update to work since DynamicGI.UpdateEnvironment is executed directly but the renderloop is not (so we need to wait for the sky texture to be rendered first)
if (m_NeedLowLevelUpdateEnvironment)
using (new Utilities.ProfilingSample("DynamicGI.UpdateEnvironment", cmd))
{
// TODO: Properly send the cubemap to Enlighten. Currently workaround is to set the cubemap in a Skybox/cubemap material
m_StandardSkyboxMaterial.SetTexture("_Tex", m_SkyboxCubemapRT);

m_NeedLowLevelUpdateEnvironment = false;
}
}
if (IsSkyValid())
{
m_CurrentUpdateTime += Time.deltaTime;
m_BuiltinParameters.renderContext = renderContext;
m_BuiltinParameters.sunLight = sunLight;
m_BuiltinParameters.commandBuffer = cmd;
m_BuiltinParameters.sunLight = sunLight;
if (
m_UpdatedFramesRequired > 0 ||
(skySettings.updateMode == EnvironementUpdateMode.OnChanged && skySettings.GetHash() != m_SkyParametersHash) ||
(skySettings.updateMode == EnvironementUpdateMode.Realtime && m_CurrentUpdateTime > skySettings.updatePeriod)
)
{
using (new Utilities.ProfilingSample("Sky Environment Pass", cmd))
// Render sky into a cubemap - doesn't happen every frame, can be controlled
// Note that m_SkyboxCubemapRT is created with auto-generated mipmaps, which means our mipmaps are also correctly box-filtered for importance sampling.
if(m_SkySettings.lightingOverride == null)
RenderSkyToCubemap(m_BuiltinParameters, skySettings, m_SkyboxCubemapRT);
// In case the user overrides the lighting, we already have a cubemap ready but we need to blit it anyway for potential resize and so that we can generate proper mipmaps for enlighten.
else
BlitCubemap(renderContext, m_SkySettings.lightingOverride, m_SkyboxCubemapRT);
using (new Utilities.ProfilingSample("Update Env: Generate Lighting Cubemap", cmd))
{
// Render sky into a cubemap - doesn't happen every frame, can be controlled
// Note that m_SkyboxCubemapRT is created with auto-generated mipmaps, which means our mipmaps are also correctly box-filtered for importance sampling.
if(m_SkySettings.lightingOverride == null)
RenderSkyToCubemap(m_BuiltinParameters, skySettings, m_SkyboxCubemapRT);
// In case the user overrides the lighting, we already have a cubemap ready but we need to blit it anyway for potential resize and so that we can generate proper mipmaps for enlighten.
else
BlitCubemap(cmd, m_SkySettings.lightingOverride, m_SkyboxCubemapRT);
}
RenderCubemapGGXConvolution(renderContext, m_BuiltinParameters, skySettings, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);
RenderCubemapGGXConvolution(cmd, m_BuiltinParameters, skySettings, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);
//#if UNITY_EDITOR
// m_SkyboxCubemapRT.imageContentsHash = new Hash128((uint)skySettings.GetHash(), 0, 0, 0);
//#endif
else
}
else
{
if(m_SkyParametersHash != 0)
using (new Utilities.ProfilingSample("Reset Sky Environment", cmd))
Utilities.ClearCubemap(renderContext, m_SkyboxCubemapRT, Color.black);
RenderCubemapGGXConvolution(renderContext, m_BuiltinParameters, skySettings, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);
Utilities.ClearCubemap(cmd, m_SkyboxCubemapRT, Color.black);
RenderCubemapGGXConvolution(cmd, m_BuiltinParameters, skySettings, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);
m_SkyParametersHash = 0;
m_NeedLowLevelUpdateEnvironment = true;

}
public void RenderSky(HDCamera camera, Light sunLight, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, ScriptableRenderContext renderContext)
public void RenderSky(HDCamera camera, Light sunLight, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, CommandBuffer cmd)
using (new Utilities.ProfilingSample("Sky Pass", renderContext))
using (new Utilities.ProfilingSample("Sky Pass", cmd))
m_BuiltinParameters.renderContext = renderContext;
m_BuiltinParameters.commandBuffer = cmd;
m_BuiltinParameters.sunLight = sunLight;
m_BuiltinParameters.invViewProjMatrix = camera.viewProjMatrix.inverse;
m_BuiltinParameters.cameraPosWS = camera.camera.transform.position;

162
Assets/ScriptableRenderPipeline/HDRenderPipeline/Utilities.cs


using UnityEngine.Rendering;
using UnityObject = UnityEngine.Object;
using System.Reflection;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

// Render Target Management.
public const ClearFlag kClearAll = ClearFlag.ClearDepth | ClearFlag.ClearColor;
public static void SetRenderTarget(ScriptableRenderContext renderContext, RenderTargetIdentifier buffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier buffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
var cmd = CommandBufferPool.Get();
cmd.name = "";
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
public static void SetRenderTarget(ScriptableRenderContext renderContext, RenderTargetIdentifier buffer, ClearFlag clearFlag = ClearFlag.ClearNone, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier buffer, ClearFlag clearFlag = ClearFlag.ClearNone, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
SetRenderTarget(renderContext, buffer, clearFlag, Color.black, miplevel, cubemapFace);
SetRenderTarget(cmd, buffer, clearFlag, Color.black, miplevel, cubemapFace);
public static void SetRenderTarget(ScriptableRenderContext renderContext, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
SetRenderTarget(renderContext, colorBuffer, depthBuffer, ClearFlag.ClearNone, Color.black, miplevel, cubemapFace);
SetRenderTarget(cmd, colorBuffer, depthBuffer, ClearFlag.ClearNone, Color.black, miplevel, cubemapFace);
public static void SetRenderTarget(ScriptableRenderContext renderContext, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
SetRenderTarget(renderContext, colorBuffer, depthBuffer, clearFlag, Color.black, miplevel, cubemapFace);
SetRenderTarget(cmd, colorBuffer, depthBuffer, clearFlag, Color.black, miplevel, cubemapFace);
public static void SetRenderTarget(ScriptableRenderContext renderContext, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown)
var cmd = CommandBufferPool.Get();
cmd.name = "";
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
public static void SetRenderTarget(ScriptableRenderContext renderContext, RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthBuffer)
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthBuffer)
SetRenderTarget(renderContext, colorBuffers, depthBuffer, ClearFlag.ClearNone, Color.black);
SetRenderTarget(cmd, colorBuffers, depthBuffer, ClearFlag.ClearNone, Color.black);
public static void SetRenderTarget(ScriptableRenderContext renderContext, RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag = ClearFlag.ClearNone)
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag = ClearFlag.ClearNone)
SetRenderTarget(renderContext, colorBuffers, depthBuffer, clearFlag, Color.black);
SetRenderTarget(cmd, colorBuffers, depthBuffer, clearFlag, Color.black);
public static void SetRenderTarget(ScriptableRenderContext renderContext, RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag, Color clearColor)
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag, Color clearColor)
var cmd = CommandBufferPool.Get();
cmd.name = "";
renderContext.ExecuteCommandBuffer(cmd);
public static void ClearCubemap(ScriptableRenderContext renderContext, RenderTargetIdentifier buffer, Color clearColor)
public static void ClearCubemap(CommandBuffer cmd, RenderTargetIdentifier buffer, Color clearColor)
var cmd = CommandBufferPool.Get();
cmd.name = "";
SetRenderTarget(renderContext, buffer, ClearFlag.ClearColor, Color.black, 0, (CubemapFace)i);
SetRenderTarget(cmd, buffer, ClearFlag.ClearColor, Color.black, 0, (CubemapFace)i);
}
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
// Post-processing misc
public static bool IsPostProcessingActive(PostProcessLayer layer)
{
return layer != null
&& layer.enabled;
}
public static bool IsTemporalAntialiasingActive(PostProcessLayer layer)
{
return IsPostProcessingActive(layer)
&& layer.antialiasingMode == PostProcessLayer.Antialiasing.TemporalAntialiasing
&& layer.temporalAntialiasing.IsSupported();
}
// Miscellaneous

public struct ProfilingSample
: IDisposable
{
bool disposed;
ScriptableRenderContext renderContext;
string name;
bool disposed;
CommandBuffer cmd;
string name;
public ProfilingSample(string _name, ScriptableRenderContext _renderloop)
public ProfilingSample(string _name, CommandBuffer _cmd)
renderContext = _renderloop;
cmd = _cmd;
CommandBuffer cmd = CommandBufferPool.Get();
cmd.name = "";
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
public void Dispose()
{
Dispose(true);

if (disposing)
{
CommandBuffer cmd = CommandBufferPool.Get();
cmd.name = "";
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
disposed = true;

var gpuVP = gpuProj * worldToViewMatrix * Matrix4x4.Scale(new Vector3(1.0f, 1.0f, -1.0f)); // Need to scale -1.0 on Z to match what is being done in the camera.worldToCameraMatrix API.
return gpuVP;
}
// TEMP: These functions should be implemented C++ side, for now do it in C#
static List<float> m_FloatListdata = new List<float>();
public static void SetMatrixCS(CommandBuffer cmd, ComputeShader shadercs, string name, Matrix4x4 mat)
{
m_FloatListdata.Clear();
for (int c = 0; c < 4; c++)
for (int r = 0; r < 4; r++)
m_FloatListdata.Add(mat[r, c]);
cmd.SetComputeFloatParams(shadercs, name, m_FloatListdata);
}
public static void SetMatrixArrayCS(CommandBuffer cmd, ComputeShader shadercs, string name, Matrix4x4[] matArray)
{
int numMatrices = matArray.Length;
m_FloatListdata.Clear();
for (int n = 0; n < numMatrices; n++)
for (int c = 0; c < 4; c++)
for (int r = 0; r < 4; r++)
m_FloatListdata.Add(matArray[n][r, c]);
cmd.SetComputeFloatParams(shadercs, name, m_FloatListdata);
}
public static void SetVectorArrayCS(CommandBuffer cmd, ComputeShader shadercs, string name, Vector4[] vecArray)
{
int numVectors = vecArray.Length;
m_FloatListdata.Clear();
for (int n = 0; n < numVectors; n++)
for (int i = 0; i < 4; i++)
m_FloatListdata.Add(vecArray[n][i]);
cmd.SetComputeFloatParams(shadercs, name, m_FloatListdata);
}
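These three helpers flatten their inputs into a shared float list and upload them with SetComputeFloatParams; note the loop order packs matrices column by column, matching the default column_major layout HLSL uses for a float4x4 in a constant buffer. A minimal usage sketch follows; the method, shader and parameter names (UploadFrameConstants, shader, "_MyViewProj", "_MyPlanes") are placeholders, not part of the pipeline:

// Hypothetical helper showing how the packers above are meant to be called.
// Assumes the same usings as this file (UnityEngine, UnityEngine.Rendering).
static void UploadFrameConstants(CommandBuffer cmd, ComputeShader shader, Matrix4x4 viewProj, Vector4[] frustumPlanes)
{
    // 16 floats per matrix, packed column-major by SetMatrixCS.
    Utilities.SetMatrixCS(cmd, shader, "_MyViewProj", viewProj);
    // 4 floats per vector, in array order.
    Utilities.SetVectorArrayCS(cmd, shader, "_MyPlanes", frustumPlanes);
}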
public static void SetKeyword(Material m, string keyword, bool state)

x = 0;
y -= overlayLineHeight;
overlayLineHeight = -1.0f;
}
}
// Just a sort function that doesn't allocate memory
// Note: Should be replaced by a radix sort for positive integers
static public int Partition(uint[] numbers, int left, int right)
{
uint pivot = numbers[left];
while (true)
{
while (numbers[left] < pivot)
left++;
while (numbers[right] > pivot)
right--;
if (left < right)
{
uint temp = numbers[right];
numbers[right] = numbers[left];
numbers[left] = temp;
}
else
{
return right;
}
}
}
static public void QuickSort(uint[] arr, int left, int right)
{
// For recursion
if (left < right)
{
int pivot = Partition(arr, left, right);
if (pivot > 1)
QuickSort(arr, left, pivot - 1);
if (pivot + 1 < right)
QuickSort(arr, pivot + 1, right);
}
}
}
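The Partition/QuickSort pair above is an in-place quicksort with a Hoare-style partition and, per the comment, no allocations. A small usage sketch, assuming these statics live on the same Utilities class used throughout this file; keys should be distinct (as sort keys carrying encoded indices typically are), since two keys equal to the pivot facing each other would make the swap loop spin:

// Distinct keys, sorted in place with no GC allocation.
uint[] keys = { 42u, 7u, 3u, 19u };
Utilities.QuickSort(keys, 0, keys.Length - 1);
// keys is now { 3, 7, 19, 42 }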

44
Assets/ScriptableRenderPipeline/LightweightPipeline/Editor/LightweightAssetInspector.cs


public static GUIContent shadowLabel = new GUIContent("Shadows");
public static GUIContent defaults = new GUIContent("Defaults");
public static GUIContent maxPixelLights = new GUIContent("Max per-pixel lights supported",
"Amount of dynamic lights processed in fragment shader. More than 1 per-pixel light is not recommended.");
public static GUIContent maxPixelLights = new GUIContent("Per-Object Pixel Lights",
"Max amount of dynamic per-object pixel lights.");
"Enable up to 4 per-vertex dynamic lights.");
"Lightweight pipeline support at most 4 per-object lights between pixel and vertex. If value in pixel lights is set to max this settings has no effect.");
"Only non-directional lightmaps are supported");
"Enabled/Disable support for non-directional lightmaps.");
"Enables/Disabled light probe support.");
"Enables/Disable light probe support.");
public static GUIContent shadowType = new GUIContent("Shadow Type",
"Single directional shadow supported. SOFT_SHADOWS applies shadow filtering.");

"Percentages to split shadow volume");
public static GUIContent defaultDiffuseMaterial = new GUIContent("Default Diffuse Material",
"Material to use when creating objects");
"Material to use when creating 3D objects");
public static GUIContent defaultParticleMaterial = new GUIContent("Default Particle Material",
"Material to use when creating Paticle Systems");
public static GUIContent defaultLineMaterial = new GUIContent("Default Line Material",
"Material to use when creating Line Renderers");
public static GUIContent defaultUIMaterial = new GUIContent("Default UI Material", "Material to use when creating UI Text");
public static GUIContent msaaContent = new GUIContent("Anti Aliasing", "Controls the global anti aliasing quality. When set to disabled, MSAA will not be performed even if the camera allows it.");
}
private SerializedProperty m_MaxPixelLights;

private SerializedProperty m_ShadowTypeProp;
private SerializedProperty m_ShadowNearPlaneOffsetProp;
private SerializedProperty m_ShadowMinNormalBiasProperty;
private SerializedProperty m_ShadowNormalBiasProperty;
private SerializedProperty m_ShadowDistanceProp;
private SerializedProperty m_ShadowAtlasResolutionProp;
private SerializedProperty m_ShadowCascadesProp;

private SerializedProperty m_DefaultParticleMaterial;
private SerializedProperty m_DefaultLineMaterial;
private SerializedProperty m_DefaultUIMaterial;
private SerializedProperty m_MSAA;
void OnEnable()
{

m_EnableAmbientProbeProp = serializedObject.FindProperty("m_EnableAmbientProbe");
m_ShadowTypeProp = serializedObject.FindProperty("m_ShadowType");
m_ShadowNearPlaneOffsetProp = serializedObject.FindProperty("m_ShadowNearPlaneOffset");
m_ShadowMinNormalBiasProperty = serializedObject.FindProperty("m_MinShadowNormalBias");
m_ShadowNormalBiasProperty = serializedObject.FindProperty("m_ShadowNormalBias");
m_ShadowDistanceProp = serializedObject.FindProperty("m_ShadowDistance");
m_ShadowAtlasResolutionProp = serializedObject.FindProperty("m_ShadowAtlasResolution");
m_ShadowCascadesProp = serializedObject.FindProperty("m_ShadowCascades");

m_DefaultParticleMaterial = serializedObject.FindProperty("m_DefaultParticleMaterial");
m_DefaultLineMaterial = serializedObject.FindProperty("m_DefaultLineMaterial");
m_DefaultUIMaterial = serializedObject.FindProperty("m_DefaultUIMaterial");
m_MSAA = serializedObject.FindProperty("m_MSAA");
}
public override void OnInspectorGUI()

EditorGUILayout.Space();
EditorGUILayout.LabelField(Styles.renderingLabel, EditorStyles.boldLabel);
EditorGUI.indentLevel++;
EditorGUILayout.PropertyField(m_MaxPixelLights, Styles.maxPixelLights);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(Styles.maxPixelLights);
m_MaxPixelLights.intValue = EditorGUILayout.IntSlider(m_MaxPixelLights.intValue, 0, 4);
EditorGUILayout.EndHorizontal();
EditorGUILayout.PropertyField(m_MSAA, Styles.msaaContent);
EditorGUI.indentLevel--;
EditorGUILayout.Space();
EditorGUILayout.Space();

EditorGUI.indentLevel--;
EditorGUILayout.Space();
EditorGUILayout.PropertyField(m_DefaultParticleMaterial, Styles.defaultParticleMaterial);
EditorGUILayout.PropertyField(m_DefaultLineMaterial, Styles.defaultLineMaterial);
EditorGUILayout.PropertyField(m_DefaultUIMaterial, Styles.defaultUIMaterial);
EditorGUILayout.PropertyField(m_DefaultShader, Styles.defaultShader);
EditorGUI.indentLevel--;

170
Assets/ScriptableRenderPipeline/LightweightPipeline/LightweightPipeline.cs


// Max amount of visible lights. This controls the lights constant buffers in shader but not the max shaded lights.
// Lights are set per-object and the max shaded lights for each object are controlled by the max pixel lights in pipeline asset and kMaxVertexLights.
private static readonly int kMaxVisibleLights = 16;
private static readonly int kMaxVertexLights = 4;
private static readonly int kMaxPerObjectLights = 4;
private Vector4[] m_LightPositions = new Vector4[kMaxVisibleLights];
private Vector4[] m_LightColors = new Vector4[kMaxVisibleLights];

private static readonly int kMaxCascades = 4;
private int m_ShadowCasterCascadesCount = kMaxCascades;
private int m_ShadowMapProperty;
private int m_CameraRTProperty;
private int m_DepthBufferBits = 16;
private RenderTargetIdentifier m_CameraRTID;
private bool m_RenderToIntermediateTarget = false;
private const int kShadowDepthBufferBits = 16;
private const int kCameraDepthBufferBits = 32;
private Vector4[] m_DirectionalShadowSplitDistances = new Vector4[kMaxCascades];
private ShadowSettings m_ShadowSettings = ShadowSettings.Default;

private static readonly ShaderPassName m_UnlitPassName = new ShaderPassName("SrpDefaultUnlit");
private static readonly ShaderPassName m_UnlitPassName = new ShaderPassName("SRPDefaultUnlit");
public LightweightPipeline(LightweightPipelineAsset asset)
{

m_ShadowMapProperty = Shader.PropertyToID("_ShadowMap");
m_CameraRTProperty = Shader.PropertyToID("_CameraRT");
m_CameraRTID = new RenderTargetIdentifier(m_CameraRTProperty);
// Let engine know we have MSAA on for cases where we support MSAA backbuffer
if (QualitySettings.antiAliasing != m_Asset.MSAASampleCount)
QualitySettings.antiAliasing = m_Asset.MSAASampleCount;
Shader.globalRenderPipeline = "LightweightPipeline";
}

InitializeLightData(visibleLights, out lightData);
// Render Shadow Map
if (lightData.shadowLightIndex > -1)
// Clear RenderTarget to avoid tile initialization on mobile GPUs
// https://community.arm.com/graphics/b/blog/posts/mali-performance-2-how-to-correctly-handle-framebuffers
var cmd = CommandBufferPool.Get("Clear");
cmd.ClearRenderTarget(true, true, camera.backgroundColor);
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
// Setup light and shadow shader constants
SetupShaderLightConstants(visibleLights, ref lightData, ref m_CullResults, ref context);
if (lightData.shadowsRendered)

configuration |= RendererConfiguration.PerObjectLightProbe;
if (!lightData.isSingleDirectionalLight)
configuration |= RendererConfiguration.ProvideLightIndices;
configuration |= RendererConfiguration.PerObjectLightIndices8;
BeginForwardRendering(camera, ref context);
// Render Opaques
var litSettings = new DrawRendererSettings(m_CullResults, camera, m_LitPassName);

var unlitSettings = new DrawRendererSettings(m_CullResults, camera, m_UnlitPassName);
unlitSettings.sorting.flags = SortFlags.CommonOpaque;
unlitSettings.inputFilter.SetQueuesOpaque();
unlitSettings.sorting.flags = SortFlags.CommonTransparent;
unlitSettings.inputFilter.SetQueuesTransparent();
context.DrawRenderers(ref litSettings);

discardRT.ReleaseTemporaryRT(m_CameraRTProperty);
CommandBufferPool.Release(cmd);
context.DrawRenderers(ref unlitSettings);
CommandBufferPool.Release(discardRT);
// TODO: Check skybox shader
context.DrawSkybox(camera);

litSettings.inputFilter.SetQueuesTransparent();
context.DrawRenderers(ref litSettings);
context.DrawRenderers(ref unlitSettings);
unlitSettings.sorting.flags = SortFlags.CommonTransparent;
unlitSettings.inputFilter.SetQueuesTransparent();
context.DrawRenderers(ref unlitSettings);
EndForwardRendering(camera, ref context);
}
context.Submit();

}
int lightsCount = lights.Length;
lightData.pixelLightsCount = Mathf.Min(lightsCount, m_Asset.MaxSupportedPixelLights);
lightData.vertexLightsCount = (m_Asset.SupportsVertexLight) ? Mathf.Min(lightsCount - lightData.pixelLightsCount, kMaxVertexLights) : 0;
int maxPerPixelLights = Math.Min(m_Asset.MaxSupportedPixelLights, kMaxPerObjectLights);
lightData.pixelLightsCount = Math.Min(lightsCount, maxPerPixelLights);
lightData.vertexLightsCount = (m_Asset.SupportsVertexLight) ? Math.Min(lightsCount - lightData.pixelLightsCount, kMaxPerObjectLights) : 0;
private void FillLightIndices(ref CullResults cullResults, int visibleLightsCount)
{
//int visibleRenderersCount = cullResults.GetVisibleRenderersCount();
// TODO: commenting cullResults.GetVisibleRenderersCount() to avoid compiler errors as it is not in the main SRP trunk yet
// For now we set a small amount, enough for the test scenes.
int visibleRenderersCount = 1024;
if (visibleRenderersCount > m_LightIndicesCount)
{
m_LightIndicesCount = visibleRenderersCount * visibleLightsCount;
if (m_LightIndexListBuffer != null)
m_LightIndexListBuffer.Release();
m_LightIndexListBuffer = new ComputeBuffer(m_LightIndicesCount, sizeof(uint));
}
cullResults.FillLightIndices(m_LightIndexListBuffer);
}
if (lightData.isSingleDirectionalLight)
SetupShaderLightListConstants(lights, lightData.pixelLightsCount, ref cullResults, ref context);
SetupShaderLightListConstants(lights, ref lightData, ref context);
CommandBuffer cmd = new CommandBuffer() { name = "SetupLightConstants" };
cmd.SetGlobalVector("_LightPosition0", new Vector4(lightDir.x, lightDir.y, lightDir.z, 0.0f));
cmd.SetGlobalColor("_LightColor0", light.finalColor);

// TODO: Perform tests on lights memory access patterns (SOA vs AOS vs Swizzling)
private void SetupShaderLightListConstants(VisibleLight[] lights, int pixelLightsCount, ref CullResults cullResults, ref ScriptableRenderContext context)
private void SetupShaderLightListConstants(VisibleLight[] lights, ref LightData lightData, ref ScriptableRenderContext context)
FillLightIndices(ref cullResults, lights.Length);
for (int i = 0; i < maxLights; ++i)
if (currLight.lightType == LightType.Directional)
}
else
{
Vector4 pos = currLight.localToWorld.GetColumn (3);
m_LightPositions [i] = new Vector4 (pos.x, pos.y, pos.z, 1.0f);

float rangeSq = currLight.range * currLight.range;
float quadAtten = (currLight.lightType == LightType.Directional) ? 0.0f : 25.0f / rangeSq;
if (currLight.lightType == LightType.Spot)
{
Vector4 dir = currLight.localToWorld.GetColumn (2);
m_LightSpotDirections [i] = new Vector4 (-dir.x, -dir.y, -dir.z, 0.0f);

float angleRange = cosInneAngle - cosOuterAngle;
m_LightAttenuations [i] = new Vector4 (cosOuterAngle,
Mathf.Approximately (angleRange, 0.0f) ? 1.0f : angleRange, quadAtten, rangeSq);
}
}
else
{
m_LightSpotDirections [i] = new Vector4 (0.0f, 0.0f, 1.0f, 0.0f);

// The Lightweight pipeline only uploads kMaxVisibleLights to the shader cbuffer.
// We tell the pipe to disable the remaining lights by setting their indices to -1.
int[] lightIndexMap = m_CullResults.GetLightIndexMap();
for (int i = kMaxVisibleLights; i < lightIndexMap.Length; ++i)
lightIndexMap[i] = -1;
m_CullResults.SetLightIndexMap(lightIndexMap);
cmd.SetGlobalVector("globalLightCount", new Vector4 (pixelLightsCount, 0.0f, 0.0f, 0.0f));
cmd.SetGlobalVector("globalLightCount", new Vector4 (lightData.pixelLightsCount, lightData.vertexLightsCount, 0.0f, 0.0f));
cmd.SetGlobalBuffer("globalLightIndexList", m_LightIndexListBuffer);
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}

var setRenderTargetCommandBuffer = CommandBufferPool.Get();
setRenderTargetCommandBuffer.name = "Render packed shadows";
setRenderTargetCommandBuffer.GetTemporaryRT(m_ShadowMapProperty, m_ShadowSettings.shadowAtlasWidth,
m_ShadowSettings.shadowAtlasHeight, m_DepthBufferBits, FilterMode.Bilinear, RenderTextureFormat.Depth,
RenderTextureReadWrite.Linear);
m_ShadowSettings.shadowAtlasHeight, kShadowDepthBufferBits, FilterMode.Bilinear, RenderTextureFormat.Depth);
setRenderTargetCommandBuffer.SetRenderTarget(m_ShadowMapRTID);
setRenderTargetCommandBuffer.ClearRenderTarget(true, true, Color.black);
context.ExecuteCommandBuffer(setRenderTargetCommandBuffer);

private void InitializeMainShadowLightIndex(VisibleLight[] lights, out int shadowIndex)
{
shadowIndex = -1;
if (m_Asset.CurrShadowType == ShadowType.NO_SHADOW)
return;
float maxIntensity = -1;
for (int i = 0; i < lights.Length; ++i)
{

private bool IsSupportedShadowType(LightType type)
{
return (type == LightType.Directional || type == LightType.Spot);
}
private void BeginForwardRendering(Camera camera, ref ScriptableRenderContext context)
{
m_RenderToIntermediateTarget = GetRenderToIntermediateTarget(camera);
var cmd = CommandBufferPool.Get("SetCameraRenderTarget");
if (m_RenderToIntermediateTarget)
{
if (camera.activeTexture == null)
{
cmd.GetTemporaryRT(m_CameraRTProperty, Screen.width, Screen.height, kCameraDepthBufferBits,
FilterMode.Bilinear, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default, m_Asset.MSAASampleCount);
cmd.SetRenderTarget(m_CameraRTID);
}
else
{
cmd.SetRenderTarget(new RenderTargetIdentifier(camera.activeTexture));
}
}
else
{
cmd.SetRenderTarget(BuiltinRenderTextureType.None);
}
// Clear RenderTarget to avoid tile initialization on mobile GPUs
// https://community.arm.com/graphics/b/blog/posts/mali-performance-2-how-to-correctly-handle-framebuffers
if (camera.clearFlags != CameraClearFlags.Nothing)
cmd.ClearRenderTarget(camera.clearFlags == CameraClearFlags.Color, camera.clearFlags == CameraClearFlags.Color || camera.clearFlags == CameraClearFlags.Depth, camera.backgroundColor);
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
private void EndForwardRendering(Camera camera, ref ScriptableRenderContext context)
{
if (!m_RenderToIntermediateTarget)
return;
var cmd = CommandBufferPool.Get("Blit");
cmd.Blit(BuiltinRenderTextureType.CurrentActive, BuiltinRenderTextureType.CameraTarget);
if (camera.cameraType == CameraType.SceneView)
cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget);
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
private bool GetRenderToIntermediateTarget(Camera camera)
{
bool allowMSAA = camera.allowMSAA && m_Asset.MSAASampleCount > 1 && !PlatformSupportsMSAABackBuffer();
if (camera.cameraType == CameraType.SceneView || allowMSAA || camera.activeTexture != null)
return true;
return false;
}
private bool PlatformSupportsMSAABackBuffer()
{
#if UNITY_ANDROID || UNITY_IPHONE || UNITY_TVOS || UNITY_SAMSUNGTV
return true;
#else
return false;
#endif
}
}
}

6
Assets/ScriptableRenderPipeline/LightweightPipeline/LightweightPipelineAsset.asset


m_SupportsVertexLight: 1
m_EnableLightmaps: 1
m_EnableAmbientProbe: 1
m_MSAA: 4
m_ShadowType: 1
m_ShadowAtlasResolution: 1024
m_ShadowNearPlaneOffset: 2

m_Cascade4Split: {x: 0.067, y: 0.2, z: 0.467}
m_DefaultDiffuseMaterial: {fileID: 2100000, guid: 6a1143ee683302f4aa628c052723efc1,
type: 2}
m_DefaultParticleMaterial: {fileID: 2100000, guid: e823cd5b5d27c0f4b8256e7c12ee3e6d,
type: 2}
m_DefaultLineMaterial: {fileID: 2100000, guid: 541b04d3bf488324f816937313973e15,
type: 2}
m_DefaultUIMaterial: {fileID: 2100000, guid: 786cc499ea3906946b10ab7d24c8d0e7, type: 2}
m_DefaultShader: {fileID: 4800000, guid: 8d2bb70cbf9db8d4da26e15b26e74248, type: 3}

26
Assets/ScriptableRenderPipeline/LightweightPipeline/LightweightPipelineAsset.cs


_2048 = 2048
}
public enum MSAAQuality
{
Disabled = 1,
_2x = 2,
_4x = 4,
_8x = 8
}
public class LightweightPipelineAsset : RenderPipelineAsset
{
private static readonly string m_PipelineFolder = "Assets/ScriptableRenderPipeline/LightweightPipeline";

[SerializeField] private bool m_SupportsVertexLight = true;
[SerializeField] private bool m_EnableLightmaps = true;
[SerializeField] private bool m_EnableAmbientProbe = true;
[SerializeField] private MSAAQuality m_MSAA = MSAAQuality.Disabled;
[SerializeField] private ShadowType m_ShadowType = ShadowType.HARD_SHADOWS;
[SerializeField] private ShadowResolution m_ShadowAtlasResolution = ShadowResolution._1024;
[SerializeField] private float m_ShadowNearPlaneOffset = 2.0f;

[SerializeField] private Vector3 m_Cascade4Split = new Vector3(0.067f, 0.2f, 0.467f);
[SerializeField] private Material m_DefaultDiffuseMaterial;
[SerializeField] private Material m_DefaultParticleMaterial;
[SerializeField] private Material m_DefaultLineMaterial;
[SerializeField] private Material m_DefaultUIMaterial;
[SerializeField] private Shader m_DefaultShader;
public int MaxSupportedPixelLights

private set { m_EnableAmbientProbe = value; }
}
public int MSAASampleCount
{
get { return (int)m_MSAA; }
set { m_MSAA = (MSAAQuality)value; }
}
public ShadowType CurrShadowType
{
get { return m_ShadowType; }

get { return (int)m_ShadowCascades; }
private set { m_ShadowCascades = (ShadowCascades)value; }
}
public float Cascade2Split
{
get { return m_Cascade2Split; }

public override Material GetDefaultParticleMaterial()
{
return m_DefaultDiffuseMaterial;
return m_DefaultParticleMaterial;
return m_DefaultDiffuseMaterial;
return m_DefaultLineMaterial;
}
public override Material GetDefaultTerrainMaterial()

public override Material GetDefaultUIMaterial()
{
return m_DefaultDiffuseMaterial;
return m_DefaultUIMaterial;
}
public override Material GetDefaultUIOverdrawMaterial()

4
Assets/ScriptableRenderPipeline/LightweightPipeline/Materials/Lightweight-Default.mat


m_PrefabInternal: {fileID: 0}
m_Name: Lightweight-Default
m_Shader: {fileID: 4800000, guid: 8d2bb70cbf9db8d4da26e15b26e74248, type: 3}
m_ShaderKeywords:
m_ShaderKeywords: _SPECULAR_COLOR
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0

- _Mode: 0
- _Parallax: 0.02
- _ReflectionSource: 0
- _Shininess: 1
- _Shininess: 0.15
- _SmoothnessTextureChannel: 0
- _SpecSource: 0
- _SpecularHighlights: 1

4
Assets/ScriptableRenderPipeline/LightweightPipeline/Materials/Lightweight-DefaultSprite.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Lightweight-SpritesDefault
m_Name: Lightweight-DefaultSprite
m_ShaderKeywords:
m_ShaderKeywords: ETC1_EXTERNAL_ALPHA
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0

19
Assets/ScriptableRenderPipeline/LightweightPipeline/Shaders/LightweightPipeline.shader


#if defined(_VERTEX_LIGHTS) && !defined(_SINGLE_DIRECTIONAL_LIGHT)
half4 diffuseAndSpecular = half4(1.0, 1.0, 1.0, 1.0);
int vertexLightStart = unity_LightIndicesOffsetAndCount.x + globalLightCount.x;
int vertexLightEnd = vertexLightStart + (unity_LightIndicesOffsetAndCount.y - globalLightCount.x);
// pixel lights shaded = min(pixelLights, perObjectLights)
// vertex lights shaded = min(vertexLights, perObjectLights) - pixel lights shaded
// Therefore vertexStartIndex = pixelLightCount; vertexEndIndex = min(vertexLights, perObjectLights)
int vertexLightStart = min(globalLightCount.x, unity_LightIndicesOffsetAndCount.y);
int vertexLightEnd = min(globalLightCount.y, unity_LightIndicesOffsetAndCount.y);
int lightIndex = globalLightIndexList[lightIter];
int lightIndex = unity_4LightIndices0[lightIter];
LightInput lightInput;
INITIALIZE_LIGHT(lightInput, lightIndex);
o.fogCoord.yzw += EvaluateOneLight(lightInput, diffuseAndSpecular.rgb, diffuseAndSpecular, normal, o.posWS, o.viewDir.xyz);
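
A minimal C# sketch of the pixel/vertex light index split described in the comment above, using hypothetical names (pixelLights, vertexLights, perObjectLights) in place of globalLightCount.x/.y and unity_LightIndicesOffsetAndCount.y:

using System;

static class LightIndexPartitionSketch
{
    // Mirrors the comment above: pixel lights occupy [0, pixelEnd) of the
    // per-object index list, vertex lights occupy [vertexStart, vertexEnd).
    static (int pixelEnd, int vertexStart, int vertexEnd) Partition(int pixelLights, int vertexLights, int perObjectLights)
    {
        int pixelEnd    = Math.Min(pixelLights, perObjectLights);   // pixel lights shaded
        int vertexStart = pixelEnd;                                 // vertex lights start right after the pixel lights
        int vertexEnd   = Math.Min(vertexLights, perObjectLights);  // clamp to the per-object list length
        return (pixelEnd, vertexStart, vertexEnd);
    }

    static void Main()
    {
        var (p, vs, ve) = Partition(pixelLights: 4, vertexLights: 8, perObjectLights: 6);
        Console.WriteLine($"pixel lights: [0,{p})  vertex lights: [{vs},{ve})"); // pixel lights: [0,4)  vertex lights: [4,6)
    }
}
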

#ifdef _SHADOWS
half shadowAttenuation = ComputeShadowAttenuation(i, _ShadowLightDirection.xyz);
#endif
int pixelLightEnd = unity_LightIndicesOffsetAndCount.x + min(globalLightCount.x, unity_LightIndicesOffsetAndCount.y);
for (int lightIter = unity_LightIndicesOffsetAndCount.x; lightIter < pixelLightEnd; ++lightIter)
int pixelLightCount = min(globalLightCount.x, unity_LightIndicesOffsetAndCount.y);
for (int lightIter = 0; lightIter < pixelLightCount; ++lightIter)
int lightIndex = globalLightIndexList[lightIter];
int lightIndex = unity_4LightIndices0[lightIter];
// Multiply by shadowAttenuation to avoid branching.
// step() only evaluates to 1 when lightIndex == _ShadowData.x (shadowLightIndex)
half currLightAttenuation = shadowAttenuation * step(abs(lightIndex - _ShadowData.x), 0);
half currLightAttenuation = max(shadowAttenuation, half(lightIter != _ShadowData.x));
color += EvaluateOneLight(lightData, diffuse, specularGloss, normal, i.posWS, viewDir) * currLightAttenuation;
#else
color += EvaluateOneLight(lightData, diffuse, specularGloss, normal, i.posWS, viewDir);
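
The hunk above replaces the step()-based selection with max(shadowAttenuation, half(lightIter != _ShadowData.x)): the shadow-casting light keeps its attenuation and every other light gets 1.0, without a branch. A small C# sketch of the same selection logic (names hypothetical; the ternary stands in for HLSL's implicit bool-to-half conversion):

using System;

static class BranchlessShadowSelectSketch
{
    // Returns shadowAttenuation only for the shadow-casting light, 1.0 for all others.
    static float CurrLightAttenuation(float shadowAttenuation, int lightIndex, int shadowLightIndex)
    {
        // In HLSL, (lightIndex != shadowLightIndex) converts to 0.0 or 1.0 directly;
        // max() then keeps shadowAttenuation only when that comparison is 0.
        float notShadowLight = (lightIndex != shadowLightIndex) ? 1.0f : 0.0f;
        return Math.Max(shadowAttenuation, notShadowLight);
    }

    static void Main()
    {
        Console.WriteLine(CurrLightAttenuation(0.25f, 2, 2)); // 0.25 -> the shadowed light keeps its attenuation
        Console.WriteLine(CurrLightAttenuation(0.25f, 3, 2)); // 1    -> other lights are unaffected
    }
}
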

2
Assets/ScriptableRenderPipeline/LightweightPipeline/Shaders/LightweightPipelineCore.cginc


// Per object light list data
#ifndef _SINGLE_DIRECTIONAL_LIGHT
half4 unity_LightIndicesOffsetAndCount;
StructuredBuffer<uint> globalLightIndexList;
half4 unity_4LightIndices0;
// The variables are very similar to built-in unity_LightColor, unity_LightPosition,
// unity_LightAtten, unity_SpotDirection as used by the VertexLit shaders, except here

50
Assets/ScriptableRenderPipeline/LightweightPipeline/TestScenes/LDRenderPipelineBasicScene.unity


m_PVRDirectSampleCount: 32
m_PVRSampleCount: 500
m_PVRBounces: 2
m_PVRFiltering: 0
m_PVRFilterTypeDirect: 0
m_PVRFilterTypeIndirect: 0
m_PVRFilterTypeAO: 0
m_PVRFilteringAtrousColorSigma: 1
m_PVRFilteringAtrousNormalSigma: 1
m_PVRFilteringAtrousPositionSigma: 1
m_PVRFilteringAtrousPositionSigmaDirect: 0.5
m_PVRFilteringAtrousPositionSigmaIndirect: 2
m_PVRFilteringAtrousPositionSigmaAO: 1
m_LightingDataAsset: {fileID: 112000002, guid: 740b181ab47c46a47ae28377711d7097,
type: 2}
m_UseShadowmask: 0

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1254169578
Transform:
m_ObjectHideFlags: 0

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1366180536
Transform:
m_ObjectHideFlags: 0

m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1447851825}
m_Enabled: 1
m_ExtensionPropertyValues: []
--- !u!124 &1447851827
Behaviour:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1449522779
Transform:
m_ObjectHideFlags: 0

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1659249726
Transform:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!108 &1677622457
Light:
m_ObjectHideFlags: 0

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1911408598
Transform:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &2025477086
Transform:
m_ObjectHideFlags: 0

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

12
Assets/ScriptableRenderPipeline/LightweightPipeline/TestScenes/Materials/LDRenderPipeMaterials/MobilePlane.mat


m_PrefabInternal: {fileID: 0}
m_Name: MobilePlane
m_Shader: {fileID: 4800000, guid: 8d2bb70cbf9db8d4da26e15b26e74248, type: 3}
m_ShaderKeywords: _EMISSION _GLOSSINESS_FROM_BASE_ALPHA _NORMALMAP _SHARED_SPECULAR_DIFFUSE
m_ShaderKeywords: _GLOSSINESS_FROM_BASE_ALPHA _NORMALMAP _SHARED_SPECULAR_DIFFUSE
_SPECULAR_COLOR
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []

- _BumpMap:
m_Texture: {fileID: 2800000, guid: d63435d934a30fd439a005c25fa542f0, type: 3}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Cube:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:

- _Mode: 0
- _OcclusionStrength: 1
- _Parallax: 0.02
- _ReflectionSource: 0
- _Shininess: 1
- _SpecSource: 1
- _SpecSource: 0
- _SpecularHighlights: 1
- _SpecularStrength: 64.1
- _SrcBlend: 1

17
Assets/ScriptableRenderPipeline/LightweightPipeline/TestScenes/Textures/154.JPG.meta


licenseType: Pro
TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 4
mipmaps:
mipMapMode: 0

fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:

textureFormat: 1
maxTextureSize: 2048
textureSettings:
filterMode: -1
serializedVersion: 2
filterMode: 1
wrapMode: -1
wrapU: -1
wrapV: -1
wrapW: -1
nPOTScale: 1
lightmap: 0
compressionQuality: 50

platformSettings:
- buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50

- buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50

- buildTarget: iPhone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50

- buildTarget: tvOS
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50

- buildTarget: Android
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50

- buildTarget: Windows Store Apps
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50

serializedVersion: 2
sprites: []
outline: []
physicsShape: []
spritePackingTag:
userData:
assetBundleName:

3
Assets/ScriptableRenderPipeline/ShaderLibrary/AreaLighting.hlsl


float a = cosOmega * acos(x) - z; // y*ArcCos[-y*Sqrt[(1/x-1)/(1-y^2)]]-Sqrt[(1-y^2)*(x/(1-x))-y^2]*(1/x-1)
float b = atan(y); // ArcTan[Sqrt[(1-y^2)*(x/(1-x))-y^2]]
// Replacing max() with saturate() results in a 12 cycle SGPR forwarding stall on PS4.
return max(INV_PI * (a * sinSqSigma + b), 0); // (a/Pi)*x+(b/Pi)
return saturate(INV_PI * (a * sinSqSigma + b));
}
#endif
#endif

28
Assets/ScriptableRenderPipeline/ShaderLibrary/Common.hlsl


// headers from ShaderLibrary do not include "common.hlsl", this should be included in the .shader using it (or Material.hlsl)
// Rules: When doing an array of constant buffer variables, we always use float4 to avoid any packing issues, particularly between compute shaders and pixel shaders
// i.e. don't use SetGlobalFloatArray or SetComputeFloatParams
// The array can be aliased in hlsl. Example:
// uniform float4 packedArray[3];
// static float unpackedArray[12] = (float[12])packedArray;
// Include language header
#if defined(SHADER_API_D3D11)
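
A minimal C# sketch of the script-side counterpart to the float4-packing rule above, assuming a hypothetical _PackedArray property: twelve scalars are packed into three Vector4s and uploaded with SetGlobalVectorArray instead of SetGlobalFloatArray, so the HLSL side can alias them back as a float[12].

using UnityEngine;

public static class PackedArrayUploadSketch
{
    // Packs 12 floats into 3 float4s before upload, matching the cbuffer layout rule above.
    public static void Upload(float[] values) // expects values.Length == 12
    {
        var packed = new Vector4[3];
        for (int i = 0; i < packed.Length; ++i)
            packed[i] = new Vector4(values[4 * i + 0], values[4 * i + 1],
                                    values[4 * i + 2], values[4 * i + 3]);

        // SetGlobalVectorArray keeps the float4 packing intact; SetGlobalFloatArray would not.
        Shader.SetGlobalVectorArray("_PackedArray", packed);
    }
}
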

return positionSS;
}
float3 ComputeViewSpacePosition(float2 positionSS, float depthRaw, float4x4 invProjMatrix)
{
float4 positionCS = ComputeClipSpacePosition(positionSS, depthRaw);
float4 positionVS = mul(invProjMatrix, positionCS);
// The view space uses a right-handed coordinate system.
positionVS.z = -positionVS.z;
return positionVS.xyz / positionVS.w;
}
// It may be necessary to flip the Y axis as the origin of the screen-space coordinate system
// of Direct3D is at the top left corner of the screen, with the Y axis pointing downwards.
void UpdatePositionInput(float depthRaw, float4x4 invViewProjMatrix, float4x4 viewProjMatrix, inout PositionInputs posInput)
{
posInput.depthRaw = depthRaw;

// The compiler should optimize this (less expensive than reconstruct depth VS from depth buffer)
posInput.depthVS = mul(viewProjMatrix, float4(posInput.positionWS, 1.0)).w;
}
// It may be necessary to flip the Y axis as the origin of the screen-space coordinate system
// of Direct3D is at the top left corner of the screen, with the Y axis pointing downwards.
float3 ComputeViewSpacePosition(float2 positionSS, float depthRaw, float4x4 invProjMatrix)
{
float4 positionCS = ComputeClipSpacePosition(positionSS, depthRaw);
float4 positionVS = mul(invProjMatrix, positionCS);
// The view space uses a right-handed coordinate system.
positionVS.z = -positionVS.z;
return positionVS.xyz / positionVS.w;
}
// The view direction 'V' points towards the camera.

6
Assets/ScriptableRenderPipeline/ShaderLibrary/CommonMaterial.hlsl


return 0.25 * (expOneThird + 3 * expOneThird * expOneThird * expOneThird) * volumeAlbedo;
}
// Ref: Steve McAuley - Energy-Conserving Wrapped Diffuse
float ComputeWrappedDiffuseLighting(float NdotL, float w)
{
return saturate((-NdotL + w) / ((1 + w) * (1 + w)));
}
// MACRO from legacy Unity
// Transforms 2D UV by scale/bias property
#define TRANSFORM_TEX(tex, name) ((tex.xy) * name##_ST.xy + name##_ST.zw)

16
Assets/ScriptableRenderPipeline/ShaderLibrary/Packing.hlsl


return normalize(n);
}
float2 PackNormalHemiOctEncode(float3 n)
{
float l1norm = dot(abs(n), 1.0);
float2 res = n.xy * (1.0 / l1norm);
return float2(res.x + res.y, res.x - res.y);
}
float3 UnpackNormalHemiOctEncode(float2 f)
{
float2 val = float2(f.x + f.y, f.x - f.y) * 0.5;
float3 n = float3(val, 1.0 - dot(abs(val), 1.0));
return normalize(n);
}
// Tetrahedral encoding - Looks like Tetra encoding 10:10 + 2 is similar to oct 11:11, as oct is cheaper prefer it
// To generate the basisNormal below we use these 4 vertex of a regular tetrahedron
// v0 = float3(1.0, 0.0, -1.0 / sqrt(2.0));

12
Assets/ScriptableRenderPipeline/ShaderLibrary/Shadow/Shadow.hlsl


// shadow sampling prototypes
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L );
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS );
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L );
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS );
// shadow sampling prototypes with screenspace info
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L );
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS );

// default dispatchers for the individual shadow types (with and without screenspace support)
// point/spot light shadows
float GetPunctualShadowAttenuationDefault( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L )
float GetPunctualShadowAttenuationDefault( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L )
float GetPunctualShadowAttenuationDefault( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
float GetPunctualShadowAttenuationDefault( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
{
return GetPunctualShadowAttenuationDefault( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

// if shadow dispatch is empty we'll fall back to default shadow sampling implementations
#ifndef SHADOW_DISPATCH_USE_CUSTOM_PUNCTUAL
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
{
return GetPunctualShadowAttenuationDefault( shadowContext, positionWS, normalWS, shadowDataIndex, L, unPositionSS );
}

78
Assets/ScriptableRenderPipeline/ShaderLibrary/Shadow/ShadowAlgorithms.hlsl


//
// Point shadows
//
float EvalShadow_PointDepth( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int index, float3 L )
float EvalShadow_PointDepth( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int index, float4 L )
ShadowData sd = shadowContext.shadowDatas[index];
float3 biased_posWS = positionWS + EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L.xyz ) ), sd.texelSizeRcp.zw, sd.normalBias );
float3 lpos = positionWS + L.xyz * L.w;
positionWS = biased_posWS;
int faceIndex = EvalShadow_GetCubeFaceID( lpos - biased_posWS ) + 1;
int faceIndex = EvalShadow_GetCubeFaceID( L ) + 1;
ShadowData sd = shadowContext.shadowDatas[index + faceIndex];
sd = shadowContext.shadowDatas[index + faceIndex];
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias );
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L.xyz ) ), sd.texelSizeRcp.zw, sd.normalBias );
// get shadowmap texcoords
float3 posTC = EvalShadow_GetTexcoords( sd, positionWS );
// get the algorithm

}
#define EvalShadow_PointDepth_( _samplerType ) \
float EvalShadow_PointDepth( ShadowContext shadowContext, uint shadowAlgorithm, Texture2DArray tex, _samplerType samp, float3 positionWS, float3 normalWS, int index, float3 L ) \
float EvalShadow_PointDepth( ShadowContext shadowContext, uint shadowAlgorithm, Texture2DArray tex, _samplerType samp, float3 positionWS, float3 normalWS, int index, float4 L ) \
ShadowData sd = shadowContext.shadowDatas[index]; \
float3 biased_posWS = positionWS + EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L.xyz ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
float3 lpos = positionWS + L.xyz * L.w; \
positionWS = biased_posWS; \
int faceIndex = EvalShadow_GetCubeFaceID( lpos - biased_posWS ) + 1; \
int faceIndex = EvalShadow_GetCubeFaceID( L ) + 1; \
ShadowData sd = shadowContext.shadowDatas[index + faceIndex]; \
sd = shadowContext.shadowDatas[index + faceIndex]; \
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L.xyz ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
/* get shadowmap texcoords */ \
float3 posTC = EvalShadow_GetTexcoords( sd, positionWS ); \
/* sample the texture */ \

ShadowData sd = shadowContext.shadowDatas[index]; \
uint payloadOffset = GetPayloadOffset( sd ); \
/* normal based bias */ \
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
/* get shadowmap texcoords */ \
float3 posTC = EvalShadow_GetTexcoords( sd, positionWS ); \
/* sample the texture */ \

//
// Punctual shadows for Point and Spot
//
float EvalShadow_PunctualDepth( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int index, float3 L )
float EvalShadow_PunctualDepth( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int index, float4 L )
ShadowData sd = shadowContext.shadowDatas[index];
UnpackShadowType( shadowContext.shadowDatas[index].shadowType, shadowType );
UnpackShadowType( sd.shadowType, shadowType );
faceIndex = EvalShadow_GetCubeFaceID( L ) + 1;
float3 biased_posWS = positionWS + EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L.xyz ) ), sd.texelSizeRcp.zw, sd.normalBias );
float3 lpos = positionWS + L.xyz * L.w;
positionWS = biased_posWS;
faceIndex = EvalShadow_GetCubeFaceID( lpos - biased_posWS ) + 1;
else
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L.xyz ) ), sd.texelSizeRcp.zw, sd.normalBias );
ShadowData sd = shadowContext.shadowDatas[index + faceIndex];
sd = shadowContext.shadowDatas[index + faceIndex];
// normal based bias
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias );
// get shadowmap texcoords
float3 posTC = EvalShadow_GetTexcoords( sd, positionWS );
// sample the texture according to the given algorithm

UnpackShadowType( sd.shadowType, shadowType, shadowAlgorithm );
return SampleShadow_SelectAlgorithm( shadowContext, sd, payloadOffset, posTC, sd.bias, slice, shadowAlgorithm, texIdx, sampIdx );
}
return SampleShadow_SelectAlgorithm(shadowContext, sd, payloadOffset, posTC, sd.bias, slice, shadowAlgorithm, texIdx, sampIdx);
}
float EvalShadow_PunctualDepth( ShadowContext shadowContext, uint shadowAlgorithm, Texture2DArray tex, _samplerType samp, float3 positionWS, float3 normalWS, int index, float3 L ) \
float EvalShadow_PunctualDepth( ShadowContext shadowContext, uint shadowAlgorithm, Texture2DArray tex, _samplerType samp, float3 positionWS, float3 normalWS, int index, float4 L ) \
ShadowData sd = shadowContext.shadowDatas[index]; \
UnpackShadowType( shadowContext.shadowDatas[index].shadowType, shadowType ); \
UnpackShadowType( sd.shadowType, shadowType ); \
faceIndex = EvalShadow_GetCubeFaceID( L ) + 1; \
float3 biased_posWS = positionWS + EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L.xyz ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
float3 lpos = positionWS + L.xyz * L.w; \
positionWS = biased_posWS; \
faceIndex = EvalShadow_GetCubeFaceID( lpos - biased_posWS ) + 1; \
else \
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L.xyz ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
ShadowData sd = shadowContext.shadowDatas[index + faceIndex]; \
sd = shadowContext.shadowDatas[index + faceIndex]; \
/* normal based bias */ \
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
/* get shadowmap texcoords */ \
float3 posTC = EvalShadow_GetTexcoords( sd, positionWS ); \
/* sample the texture */ \

float EvalShadow_CascadedDepth( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int index, float3 L )
{
ShadowData sd = shadowContext.shadowDatas[index];
// normal based bias
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias );
// load the right shadow data for the current face
float4 dirShadowSplitSpheres[4];
uint payloadOffset = EvalShadow_LoadSplitSpheres( shadowContext, index, dirShadowSplitSpheres );

ShadowData sd = shadowContext.shadowDatas[index + 1 + shadowSplitIndex];
// normal based bias
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias );
sd = shadowContext.shadowDatas[index + 1 + shadowSplitIndex];
// get shadowmap texcoords
float3 posTC = EvalShadow_GetTexcoords( sd, positionWS );

#define EvalShadow_CascadedDepth_( _samplerType ) \
float EvalShadow_CascadedDepth( ShadowContext shadowContext, uint shadowAlgorithm, Texture2DArray tex, _samplerType samp, float3 positionWS, float3 normalWS, int index, float3 L ) \
{ \
ShadowData sd = shadowContext.shadowDatas[index]; \
/* normal based bias */ \
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
\
/* load the right shadow data for the current face */ \
float4 dirShadowSplitSpheres[4]; \
uint payloadOffset = EvalShadow_LoadSplitSpheres( shadowContext, index, dirShadowSplitSpheres ); \

\
ShadowData sd = shadowContext.shadowDatas[index + 1 + shadowSplitIndex]; \
/* normal based bias */ \
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias ); \
sd = shadowContext.shadowDatas[index + 1 + shadowSplitIndex]; \
/* get shadowmap texcoords */ \
float3 posTC = EvalShadow_GetTexcoords( sd, positionWS ); \
/* sample the texture */ \

10
Assets/ScriptableRenderPipeline/ShaderLibrary/Shadow/ShadowAlgorithmsCustom.hlsl


float EvalShadow_CascadedMoment( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L )
{
ShadowData sd = shadowContext.shadowDatas[shadowDataIndex];
// normal based bias
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias );
// load the right shadow data for the current face
float4 dirShadowSplitSpheres[4];
uint payloadOffset = EvalShadow_LoadSplitSpheres( shadowContext, shadowDataIndex, dirShadowSplitSpheres );

ShadowData sd = shadowContext.shadowDatas[shadowDataIndex + 1 + shadowSplitIndex];
// normal based bias
positionWS += EvalShadow_NormalBias( normalWS, saturate( dot( normalWS, L ) ), sd.texelSizeRcp.zw, sd.normalBias );
// get the shadowmap data for the correct cascade
sd = shadowContext.shadowDatas[shadowDataIndex + 1 + shadowSplitIndex];
// get shadowmap texcoords
float3 posTC = EvalShadow_GetTexcoords( sd, positionWS );

2
Assets/ScriptableRenderPipeline/ShaderLibrary/Shadow/ShadowMoments.hlsl


float mD = depth - moments.x;
float p = variance / (variance + mD * mD);
p = saturate( p / (1.0f - lightLeakBias) );
p = saturate( (p - lightLeakBias) / (1.0f - lightLeakBias) );
return max( p, depth <= moments.x );
}

12
Assets/ScriptableRenderPipeline/ShaderLibrary/Tessellation.hlsl


return (dot(V, N) < backFaceCullEpsilon) ? true : false;
}
float2 GetScreenSpacePosition(float3 positionWS, float4x4 viewProjectionMatrix, float4 screenParams)
float2 GetScreenSpacePosition(float3 positionWS, float4x4 viewProjectionMatrix, float4 screenSize)
return (positionSS * 0.5 + 0.5) * float2(screenParams.x, -screenParams.y);
return (positionSS * 0.5 + 0.5) * float2(screenSize.x, -screenSize.y);
float3 GetScreenSpaceTessFactor(float3 p0, float3 p1, float3 p2, float4x4 viewProjectionMatrix, float4 screenParams, float triangleSize)
float3 GetScreenSpaceTessFactor(float3 p0, float3 p1, float3 p2, float4x4 viewProjectionMatrix, float4 screenSize, float triangleSize)
float2 edgeScreenPosition0 = GetScreenSpacePosition(p0, viewProjectionMatrix, screenParams);
float2 edgeScreenPosition1 = GetScreenSpacePosition(p1, viewProjectionMatrix, screenParams);
float2 edgeScreenPosition2 = GetScreenSpacePosition(p2, viewProjectionMatrix, screenParams);
float2 edgeScreenPosition0 = GetScreenSpacePosition(p0, viewProjectionMatrix, screenSize);
float2 edgeScreenPosition1 = GetScreenSpacePosition(p1, viewProjectionMatrix, screenSize);
float2 edgeScreenPosition2 = GetScreenSpacePosition(p2, viewProjectionMatrix, screenSize);
float EdgeScale = 1.0 / triangleSize; // Edge size in reality, but name is simpler
float3 tessFactor;

195
Assets/TestScenes/HDTest/BasicProfiling.unity


affectDiffuse: 1
affectSpecular: 1
archetype: 1
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &31226894

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 1
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &119724878

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 1
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &290893317

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 0
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &578289780

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 1
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &1028064555

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 0
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!1 &1224712456

affectDiffuse: 1
affectSpecular: 1
archetype: 1
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &1369123404

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 1
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &1418455419

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 0
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &1685118384

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 0
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &1736085185

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 0
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &1866152053

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 0
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &1955890979

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

affectDiffuse: 1
affectSpecular: 1
archetype: 0
spotLightShape: 0
lightLength: 0
lightWidth: 0
--- !u!108 &1964619977

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

287
Assets/TestScenes/HDTest/CascadedShadowsTest.unity
The file diff is too large to display.

593
Assets/TestScenes/HDTest/HDRenderLoopTest.unity
The file diff is too large to display.

3
Assets/TestScenes/HDTest/Material/StandardShaderMaterials/Gray.mat


m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Gray
m_Shader: {fileID: 0}
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []

3
Assets/TestScenes/HDTest/Material/StandardShaderMaterials/Green.mat


m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Green
m_Shader: {fileID: 0}
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []

3
Assets/TestScenes/HDTest/Material/StandardShaderMaterials/Red.mat


m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Red
m_Shader: {fileID: 0}
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []

3
Assets/TestScenes/HDTest/Material/StandardShaderMaterials/Std_Blue_Emissive.mat


m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Std_Blue_Emissive
m_Shader: {fileID: 0}
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []

404
Assets/Textures/Batman.png

Width: 512  |  Height: 512  |  Size: 49 KiB

2
ProjectSettings/ProjectVersion.txt


m_EditorVersion: 2017.2.0b1
m_EditorVersion: 2017.3.0a2

7
README.md


# Unity Scriptable Render Loop testbed
# Unity Scriptable Render Pipeline testbed
**NOTE**: this is a testbed for a Unity feature that has not shipped yet! The latest commits in this project do not work
with any public Unity version, and things in it might and will be broken.

Did we mention it's a very WIP, no promises, may or might not ship feature, anything and everything in it can change? It totally is.
## For Unity 2017.1 beta users
SRP depends on the PostProcessing submodule. Follow these instructions to get a working copy of SRP:
* git clone https://github.com/Unity-Technologies/ScriptableRenderLoop
* git checkout unity-2017.1b5 (or the latest tag)
* git submodule update --init --recursive --remote
## For Unity 5.6 beta users

10
Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Assets/Scripts.meta


fileFormatVersion: 2
guid: ac5e33c9370558b4d88db4eca81a4c4f
folderAsset: yes
timeCreated: 1498746585
licenseType: Pro
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

10
Assets/GraphicsTests/RenderPipeline/LightweightPipeline/Scenes/Materials.meta


fileFormatVersion: 2
guid: 53a4327b506073b47b9c4e843d28f306
folderAsset: yes
timeCreated: 1498824718
licenseType: Pro
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

248
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitReference.hlsl


//-----------------------------------------------------------------------------
// EvaluateBSDF_Line - Reference
//-----------------------------------------------------------------------------
void IntegrateBSDF_LineRef(float3 V, float3 positionWS,
PreLightData preLightData, LightData lightData, BSDFData bsdfData,
out float3 diffuseLighting, out float3 specularLighting,
int sampleCount = 128)
{
diffuseLighting = float3(0.0, 0.0, 0.0);
specularLighting = float3(0.0, 0.0, 0.0);
const float len = lightData.size.x;
const float3 T = lightData.right;
const float3 P1 = lightData.positionWS - T * (0.5 * len);
const float dt = len * rcp(sampleCount);
const float off = 0.5 * dt;
// Uniformly sample the line segment with the Pdf = 1 / len.
const float invPdf = len;
for (int i = 0; i < sampleCount; ++i)
{
// Place the sample in the middle of the interval.
float t = off + i * dt;
float3 sPos = P1 + t * T;
float3 unL = sPos - positionWS;
float dist2 = dot(unL, unL);
float3 L = normalize(unL);
float sinLT = length(cross(L, T));
float NdotL = saturate(dot(bsdfData.normalWS, L));
if (NdotL > 0)
{
float3 lightDiff, lightSpec;
BSDF(V, L, positionWS, preLightData, bsdfData, lightDiff, lightSpec);
diffuseLighting += lightDiff * (sinLT / dist2 * NdotL);
specularLighting += lightSpec * (sinLT / dist2 * NdotL);
}
}
// The factor of 2 is due to the fact that Integral{0, 2 PI}{ max(0, cos(x)) dx } = 2.
float normFactor = 2.0 * invPdf * rcp(sampleCount);
diffuseLighting *= normFactor * lightData.diffuseScale * lightData.color;
specularLighting *= normFactor * lightData.specularScale * lightData.color;
}
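The factor of 2 in normFactor above follows from the standard cosine-lobe integral:

\int_0^{2\pi} \max(0, \cos x)\, dx \;=\; \int_{-\pi/2}^{\pi/2} \cos x\, dx \;=\; \big[\sin x\big]_{-\pi/2}^{\pi/2} \;=\; 2

so normFactor = 2 * invPdf / sampleCount is the Monte Carlo estimator's normalization, with invPdf = len for uniform sampling of the segment.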
//-----------------------------------------------------------------------------
// EvaluateBSDF_Area - Reference
//-----------------------------------------------------------------------------
void IntegrateBSDF_AreaRef(float3 V, float3 positionWS,
PreLightData preLightData, LightData lightData, BSDFData bsdfData,
out float3 diffuseLighting, out float3 specularLighting,
uint sampleCount = 512)
{
// Add some jittering on Hammersley2d
float2 randNum = InitRandom(V.xy * 0.5 + 0.5);
diffuseLighting = float3(0.0, 0.0, 0.0);
specularLighting = float3(0.0, 0.0, 0.0);
for (uint i = 0; i < sampleCount; ++i)
{
float3 P = float3(0.0, 0.0, 0.0); // Sample light point. Random point on the light shape in local space.
float3 Ns = float3(0.0, 0.0, 0.0); // Unit surface normal at P
float lightPdf = 0.0; // Pdf of the light sample
float2 u = Hammersley2d(i, sampleCount);
u = frac(u + randNum);
// Lights in Unity point backward.
float4x4 localToWorld = float4x4(float4(lightData.right, 0.0), float4(lightData.up, 0.0), float4(-lightData.forward, 0.0), float4(lightData.positionWS, 1.0));
switch (lightData.lightType)
{
case GPULIGHTTYPE_SPHERE:
SampleSphere(u, localToWorld, lightData.size.x, lightPdf, P, Ns);
break;
case GPULIGHTTYPE_HEMISPHERE:
SampleHemisphere(u, localToWorld, lightData.size.x, lightPdf, P, Ns);
break;
case GPULIGHTTYPE_CYLINDER:
SampleCylinder(u, localToWorld, lightData.size.x, lightData.size.y, lightPdf, P, Ns);
break;
case GPULIGHTTYPE_RECTANGLE:
SampleRectangle(u, localToWorld, lightData.size.x, lightData.size.y, lightPdf, P, Ns);
break;
case GPULIGHTTYPE_DISK:
SampleDisk(u, localToWorld, lightData.size.x, lightPdf, P, Ns);
break;
// case GPULIGHTTYPE_LINE: handled by a separate function.
}
// Get distance
float3 unL = P - positionWS;
float sqrDist = dot(unL, unL);
float3 L = normalize(unL);
// Cosine of the angle between the light direction and the normal of the light's surface.
float cosLNs = saturate(dot(-L, Ns));
// We calculate area reference light with the area integral rather than the solid angle one.
float illuminance = cosLNs * saturate(dot(bsdfData.normalWS, L)) / (sqrDist * lightPdf);
float3 localDiffuseLighting = float3(0.0, 0.0, 0.0);
float3 localSpecularLighting = float3(0.0, 0.0, 0.0);
if (illuminance > 0.0)
{
BSDF(V, L, positionWS, preLightData, bsdfData, localDiffuseLighting, localSpecularLighting);
localDiffuseLighting *= lightData.color * illuminance * lightData.diffuseScale;
localSpecularLighting *= lightData.color * illuminance * lightData.specularScale;
}
diffuseLighting += localDiffuseLighting;
specularLighting += localSpecularLighting;
}
diffuseLighting /= float(sampleCount);
specularLighting /= float(sampleCount);
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Env - Reference
// ----------------------------------------------------------------------------
// Ref: Moving Frostbite to PBR (Appendix A)
float3 IntegrateLambertIBLRef(LightLoopContext lightLoopContext,
float3 V, EnvLightData lightData, BSDFData bsdfData,
uint sampleCount = 4096)
{
float3x3 localToWorld = float3x3(bsdfData.tangentWS, bsdfData.bitangentWS, bsdfData.normalWS);
float3 acc = float3(0.0, 0.0, 0.0);
// Add some jittering on Hammersley2d
float2 randNum = InitRandom(V.xy * 0.5 + 0.5);
for (uint i = 0; i < sampleCount; ++i)
{
float2 u = Hammersley2d(i, sampleCount);
u = frac(u + randNum);
float3 L;
float NdotL;
float weightOverPdf;
ImportanceSampleLambert(u, localToWorld, L, NdotL, weightOverPdf);
if (NdotL > 0.0)
{
float4 val = SampleEnv(lightLoopContext, lightData.envIndex, L, 0);
// diffuse albedo is applied here as described in the ImportanceSampleLambert function
acc += bsdfData.diffuseColor * LambertNoPI() * weightOverPdf * val.rgb;
}
}
return acc / sampleCount;
}
float3 IntegrateDisneyDiffuseIBLRef(LightLoopContext lightLoopContext,
float3 V, PreLightData preLightData, EnvLightData lightData, BSDFData bsdfData,
uint sampleCount = 4096)
{
float3x3 localToWorld = float3x3(bsdfData.tangentWS, bsdfData.bitangentWS, bsdfData.normalWS);
float NdotV = max(preLightData.NdotV, MIN_N_DOT_V);
float3 acc = float3(0.0, 0.0, 0.0);
// Add some jittering on Hammersley2d
float2 randNum = InitRandom(V.xy * 0.5 + 0.5);
for (uint i = 0; i < sampleCount; ++i)
{
float2 u = Hammersley2d(i, sampleCount);
u = frac(u + randNum);
float3 L;
float NdotL;
float weightOverPdf;
// for Disney we still use cosine importance sampling; true Disney importance sampling implies a look-up table
ImportanceSampleLambert(u, localToWorld, L, NdotL, weightOverPdf);
if (NdotL > 0.0)
{
float3 H = normalize(L + V);
float LdotH = dot(L, H);
// Note: we call DisneyDiffuse, which requires multiplying by albedo / PI. The division by PI is already taken into account
// in the weightOverPdf of the ImportanceSampleLambert call.
float disneyDiffuse = DisneyDiffuse(NdotV, NdotL, LdotH, bsdfData.perceptualRoughness);
// diffuse albedo is applied here as described in the ImportanceSampleLambert function
float4 val = SampleEnv(lightLoopContext, lightData.envIndex, L, 0);
acc += bsdfData.diffuseColor * disneyDiffuse * weightOverPdf * val.rgb;
}
}
return acc / sampleCount;
}
// Ref: Moving Frostbite to PBR (Appendix A)
float3 IntegrateSpecularGGXIBLRef(LightLoopContext lightLoopContext,
float3 V, PreLightData preLightData, EnvLightData lightData, BSDFData bsdfData,
uint sampleCount = 4096)
{
float3x3 localToWorld = float3x3(bsdfData.tangentWS, bsdfData.bitangentWS, bsdfData.normalWS);
float NdotV = max(preLightData.NdotV, MIN_N_DOT_V);
float3 acc = float3(0.0, 0.0, 0.0);
// Add some jittering on Hammersley2d
float2 randNum = InitRandom(V.xy * 0.5 + 0.5);
for (uint i = 0; i < sampleCount; ++i)
{
float2 u = Hammersley2d(i, sampleCount);
u = frac(u + randNum);
float VdotH;
float NdotL;
float3 L;
float weightOverPdf;
// GGX BRDF
if (bsdfData.materialId == MATERIALID_LIT_ANISO)
{
ImportanceSampleAnisoGGX(u, V, localToWorld, bsdfData.roughnessT, bsdfData.roughnessB, NdotV, L, VdotH, NdotL, weightOverPdf);
}
else
{
ImportanceSampleGGX(u, V, localToWorld, bsdfData.roughness, NdotV, L, VdotH, NdotL, weightOverPdf);
}
if (NdotL > 0.0)
{
// Fresnel component is applied here as described in the ImportanceSampleGGX function
float3 FweightOverPdf = F_Schlick(bsdfData.fresnel0, VdotH) * weightOverPdf;
float4 val = SampleEnv(lightLoopContext, lightData.envIndex, L, 0);
acc += FweightOverPdf * val.rgb;
}
}
return acc / sampleCount;
}

10
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitReference.hlsl.meta


fileFormatVersion: 2
guid: 0406917314064054eb0ef42f727a8889
timeCreated: 1500913760
licenseType: Pro
ShaderImporter:
externalObjects: {}
defaultTextures: []
userData:
assetBundleName:
assetBundleVariant:

58
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader


Shader "Hidden/HDRenderPipeline/CopyStencilBuffer"
{
SubShader
{
Pass
{
Stencil
{
Ref 1 // StencilLightingUsage.SplitLighting
Comp Equal
Pass Keep
}
Cull Off
ZTest Always
ZWrite Off
Blend Off
HLSLPROGRAM
#pragma target 4.5
#pragma only_renderers d3d11 ps4 metal // TEMP: until we go further in dev
// #pragma enable_d3d11_debug_symbols
#pragma vertex Vert
#pragma fragment Frag
#include "../../../../ShaderLibrary/Common.hlsl"
#include "../../../ShaderConfig.cs.hlsl"
#include "../../../ShaderVariables.hlsl"
#include "../../../Lighting/LightDefinition.cs.hlsl"
struct Attributes
{
uint vertexID : SV_VertexID;
};
struct Varyings
{
float4 positionCS : SV_Position;
};
Varyings Vert(Attributes input)
{
Varyings output;
output.positionCS = GetFullScreenTriangleVertexPosition(input.vertexID);
return output;
}
// Should use HiS and therefore be faster than a GPU memcpy().
float4 Frag(Varyings input) : SV_Target // use SV_StencilRef in D3D 11.3+
{
return float4(STENCILLIGHTINGUSAGE_SPLIT_LIGHTING, 0, 0, 0);
}
ENDHLSL
}
}
Fallback Off
}

10
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader.meta


fileFormatVersion: 2
guid: 7fd941b2d9d2a39429de64bde023932c
timeCreated: 1499946987
licenseType: Pro
ShaderImporter:
externalObjects: {}
defaultTextures: []
userData:
assetBundleName:
assetBundleVariant:

10
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/SSSProfile/Resources.meta


fileFormatVersion: 2
guid: 59a5ca19094a8c64099eded20685322f
folderAsset: yes
timeCreated: 1500627638
licenseType: Pro
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

10
Assets/ScriptableRenderPipeline/HDRenderPipeline/Resources.meta


fileFormatVersion: 2
guid: 2b0e7252e10375942943003cdd139be0
folderAsset: yes
timeCreated: 1500627615
licenseType: Pro
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

79
Assets/ScriptableRenderPipeline/LightweightPipeline/Materials/Lightweight-DefaultETC1.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Lightweight-DefaultETC1
m_Shader: {fileID: 4800000, guid: 8d2bb70cbf9db8d4da26e15b26e74248, type: 3}
m_ShaderKeywords:
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Cube:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SpecGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _GlossMapScale: 1
- _Glossiness: 0.5
- _GlossinessSource: 0
- _GlossyReflections: 1
- _Mode: 0
- _Parallax: 0.02
- _ReflectionSource: 0
- _Shininess: 1
- _SmoothnessTextureChannel: 0
- _SpecSource: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _UVSec: 0
- _ZWrite: 1
m_Colors:
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}
- _SpecColor: {r: 1, g: 1, b: 1, a: 1}

10
Assets/ScriptableRenderPipeline/LightweightPipeline/Materials/Lightweight-DefaultETC1.mat.meta


fileFormatVersion: 2
guid: 77da5f76bec40af47be3a3b79da0e81c
timeCreated: 1499350842
licenseType: Pro
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

Some files were not shown because too many files were changed in this diff.
