
Merge branch 'master' into StackLit2

Stephane Laroche, 6 years ago
Current commit
a49a696e
42 files changed, with 701 insertions and 1019 deletions
  1. 25  com.unity.render-pipelines.high-definition/CHANGELOG.md
  2. 40  com.unity.render-pipelines.high-definition/HDRP/Debug/DebugColorPicker.shader
  3. 5  com.unity.render-pipelines.high-definition/HDRP/Debug/DebugDisplay.cs
  4. 5  com.unity.render-pipelines.high-definition/HDRP/Debug/DebugDisplay.hlsl
  5. 2  com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/Settings/LightLoopSettingsUI.cs
  6. 44  com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDPBRPass.template
  7. 288  com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDPBRSubShader.cs
  8. 242  com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDSubShaderUtilities.cs
  9. 58  com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDUnlitPassForward.template
  10. 294  com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDUnlitSubShader.cs
  11. 57  com.unity.render-pipelines.high-definition/HDRP/Lighting/LightEvaluation.hlsl
  12. 6  com.unity.render-pipelines.high-definition/HDRP/Material/Decal/Decal.cs.hlsl
  13. 101  com.unity.render-pipelines.high-definition/HDRP/Material/Lit/Lit.hlsl
  14. 110  com.unity.render-pipelines.high-definition/HDRP/Material/StackLit/StackLit.hlsl
  15. 76  com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs
  16. 1  com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/SceneViewDrawMode.cs
  17. 5  com.unity.render-pipelines.lightweight/CHANGELOG.md
  18. 5  com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGraph/lightweightPBRForwardPass.template
  19. 10  com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderPreprocessor.cs
  20. 4  com.unity.render-pipelines.lightweight/LWRP/LightweightForwardRenderer.cs
  21. 4  com.unity.render-pipelines.lightweight/LWRP/LightweightPipeline.cs
  22. 10  com.unity.render-pipelines.lightweight/LWRP/Passes/DirectionalShadowsPass.cs
  23. 1  com.unity.render-pipelines.lightweight/LWRP/Passes/ForwardLitPass.cs
  24. 2  com.unity.render-pipelines.lightweight/LWRP/ShaderLibrary/Input.hlsl
  25. 7  com.unity.render-pipelines.lightweight/LWRP/ShaderLibrary/Shadows.hlsl
  26. 7  com.unity.render-pipelines.lightweight/LWRP/Shaders/LightweightScreenSpaceShadows.shader
  27. 1  com.unity.render-pipelines.lightweight/LWRP/Shaders/LightweightStandard.shader
  28. 1  com.unity.render-pipelines.lightweight/LWRP/Shaders/LightweightStandardSimpleLighting.shader
  29. 1  com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightStandardTerrain.shader
  30. 1  com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightStandardTerrainAddPass.shader
  31. 1  com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightStandardTerrainBase.shader
  32. 1  com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightWavingGrass.shader
  33. 1  com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightWavingGrassBillboard.shader
  34. 68  com.unity.shadergraph/Editor/Data/Util/GraphUtil.cs
  35. 18  com.unity.shadergraph/Editor/Drawing/Blackboard/BlackboardFieldPropertyView.cs
  36. 8  com.unity.shadergraph/Editor/Drawing/Controls/ToggleControl.cs
  37. 8  com.unity.shadergraph/Editor/Drawing/Views/PBRSettingsView.cs
  38. 8  com.unity.shadergraph/Editor/Drawing/Views/Slots/BooleanSlotControlView.cs
  39. 8  com.unity.shadergraph/Editor/Drawing/Views/UnlitSettingsView.cs
  40. 9  com.unity.shadergraph/Editor/Util/CompatibilityExtensions.cs
  41. 168  com.unity.render-pipelines.high-definition/HDRP/Material/MaterialEvaluation.hlsl
  42. 9  com.unity.render-pipelines.high-definition/HDRP/Material/MaterialEvaluation.hlsl.meta

25
com.unity.render-pipelines.high-definition/CHANGELOG.md


All notable changes to this package will be documented in this file.
## [2018.3]
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
### Improvements
## [Unreleased]
### Changed, Removals and deprecations
## [2.0.4-preview]
- Fix issue when disabling rough refraction and building a player. Was causing a crash.
## [2018.2 / next ]
## [2.0.3-preview]
### Improvements
- Increased debug color picker limit up to 260k lux
## [2.0.2-preview]
### Improvements
- Add Light -> Planar Reflection Probe command
- Added a false color mode in rendering debug
- Add support for mesh decals

- Provide a define in lit.hlsl (FORWARD_MATERIAL_READ_FROM_WRITTEN_NORMAL_BUFFER) so that when the normal buffer output is used, the normal and roughness are read from it instead of being calculated (can save performance, but at lower quality due to compression; see the sketch after this changelog section)
- Add color swatch to decal material
### Improvements
### Changed, Removals and deprecations
- Change Render -> Planar Reflection creation to 3D Object -> Mirror
- Change "Enable Reflector" name on SpotLight to "Angle Affect Intensity"

- Fix issue with StackLit in deferred mode with deferredDirectionalShadow due to the GBuffer not being cleared. The GBuffer is still not cleared; the issue was fixed by the new output of the normal buffer.
- Fixed an issue where interpolation volumes were not updated correctly for reflection captures.
- Fixed an exception in Light Loop settings UI
## [2018.2 / 2.0.1-preview]
## [2.0.1-preview]
### Improvements
- Add stripping of shader variants when building a player. Saves shader compile time.
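For context on the FORWARD_MATERIAL_READ_FROM_WRITTEN_NORMAL_BUFFER entry above, here is a minimal HLSL sketch of the intended usage pattern. The texture, helper, and variable names below are illustrative assumptions, not the actual lit.hlsl code:

#ifdef FORWARD_MATERIAL_READ_FROM_WRITTEN_NORMAL_BUFFER
    // Reuse the normal and roughness already written to the normal buffer instead of recomputing them.
    // _NormalBufferTexture and UnpackNormalBufferSketch are placeholder names for illustration only.
    float4 packedNormal = LOAD_TEXTURE2D(_NormalBufferTexture, posInput.positionSS);
    float3 normalWS;
    float perceptualRoughness;
    UnpackNormalBufferSketch(packedNormal, normalWS, perceptualRoughness);
    bsdfData.normalWS = normalWS;
    bsdfData.perceptualRoughness = perceptualRoughness; // lower quality: the buffer is compressed
#else
    // Default path: derive the normal and roughness from the material inputs as usual.
#endif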

40
com.unity.render-pipelines.high-definition/HDRP/Debug/DebugColorPicker.shader


SAMPLER(sampler_DebugColorPickerTexture);
float4 _ColorPickerParam; // 4 increasing threshold
int _ColorPickerMode;
float3 _ColorPickerFontColor;
float _ApplyLinearToSRGB;
float _RequireToFlipInputTexture;

float4 result = SAMPLE_TEXTURE2D(_DebugColorPickerTexture, sampler_DebugColorPickerTexture, input.texcoord);
float4 mousePixelCoord = _MousePixelCoord;
if (_RequireToFlipInputTexture > 0.0)
{
mousePixelCoord.y = _ScreenSize.y - mousePixelCoord.y;
// Note: We must not flip the mousePixelCoord.w coordinate
}
float4 mouseResult = SAMPLE_TEXTURE2D(_DebugColorPickerTexture, sampler_DebugColorPickerTexture, mousePixelCoord.zw);
// Reverse debug exposure in order to display the real values.
// _DebugExposure will be set to zero if the debug view does not need it so we don't need to make a special case here. It's handled in only one place in C#
mouseResult = mouseResult / exp2(_DebugExposure);
//Decompress value if luxMeter is active
if (_DebugLightingMode == DEBUGLIGHTINGMODE_LUX_METER && _ColorPickerMode != COLORPICKERDEBUGMODE_NONE)
result.rgb = result.rgb * LUXMETER_COMPRESSION_RATIO;
float4 finalResult = result;
finalResult = DisplayPixelInformationAtMousePosition(input, result, mouseResult, mousePixelCoord);
{
float4 mousePixelCoord = _MousePixelCoord;
if (_RequireToFlipInputTexture > 0.0)
{
mousePixelCoord.y = _ScreenSize.y - mousePixelCoord.y;
// Note: We must not flip the mousePixelCoord.w coordinate
}
float4 mouseResult = SAMPLE_TEXTURE2D(_DebugColorPickerTexture, sampler_DebugColorPickerTexture, mousePixelCoord.zw);
//Decompress value if luxMeter is active
if (_DebugLightingMode == DEBUGLIGHTINGMODE_LUX_METER)
mouseResult = mouseResult * LUXMETER_COMPRESSION_RATIO;
// Reverse debug exposure in order to display the real values.
// _DebugExposure will be set to zero if the debug view does not need it so we don't need to make a special case here. It's handled in only one place in C#
mouseResult = mouseResult / exp2(_DebugExposure);
result = DisplayPixelInformationAtMousePosition(input, result, mouseResult, mousePixelCoord);
}
return finalResult;
return result;
}
ENDHLSL

5
com.unity.render-pipelines.high-definition/HDRP/Debug/DebugDisplay.cs


return mipMapDebugSettings.debugMipMapMode;
}
public ColorPickerDebugMode GetDebugColorPickerMode()
{
return colorPickerDebugSettings.colorPickerMode;
}
public bool IsDebugDisplayEnabled()
{
return materialDebugSettings.IsDebugDisplayEnabled() || lightingDebugSettings.IsDebugDisplayEnabled() || mipMapDebugSettings.IsDebugDisplayEnabled() || IsDebugFullScreenEnabled();

5
com.unity.render-pipelines.high-definition/HDRP/Debug/DebugDisplay.hlsl


int _DebugLightingSubMode;
int _DebugViewMaterial; // Contains the id (defined in the various materialXXX.cs.hlsl files) of the property to display
int _DebugMipMapMode; // Match enum DebugMipMapMode
int _ColorPickerMode; // Match enum ColorPickerDebugMode
int _DebugStep;
float4 _DebugLightingAlbedo; // x == bool override, yzw = albedo for diffuse
float4 _DebugLightingSmoothness; // x == bool override, y == override value

float4 _MouseClickPixelCoord; // xy unorm, zw norm
float _DebugExposure;
CBUFFER_END
// When displaying the lux meter we compress the light value so that we can display values higher than 65504 (the fp16 maximum)
// The sun is between 100 000 and 150 000 lux, so we use 4 to be able to cover such a range (4 * 65504); see the sketch below
#define LUXMETER_COMPRESSION_RATIO 4
TEXTURE2D(_DebugFont); // Debug font to write string in shader
RWStructuredBuffer<ScreenSpaceTracingDebug> _DebugScreenSpaceTracingData : register(u7); // TODO: Change the register number for PS4
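A minimal sketch of how the compression ratio above is meant to be used. Only the define and the multiply-on-read in DebugColorPicker.shader come from this change; the helper function names here are hypothetical:

// Compress before storing lux values in an fp16 debug buffer (hypothetical helper).
float3 CompressLuxForStorage(float3 lux)
{
    return lux / LUXMETER_COMPRESSION_RATIO; // keeps values up to 4 * 65504 within fp16 range
}

// Decompress when reading back for display, matching the multiply by
// LUXMETER_COMPRESSION_RATIO seen in DebugColorPicker.shader above (hypothetical helper).
float3 DecompressStoredLux(float3 storedValue)
{
    return storedValue * LUXMETER_COMPRESSION_RATIO;
}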

2
com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/Settings/LightLoopSettingsUI.cs


EditorGUILayout.PropertyField(p.enableComputeMaterialVariants, _.GetContent("Enable Compute Material Variants"));
EditorGUI.indentLevel--;
}
EditorGUILayout.EndFadeGroup();
EditorGUILayout.EndFadeGroup();
GUILayout.EndVertical();
//EditorGUI.indentLevel--;

44
com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDPBRPass.template


//-------------------------------------------------------------------------------------
// Render Modes (Blend, Cull, ZTest, Stencil, etc)
//-------------------------------------------------------------------------------------
${Blending}
${Culling}
${ZTest}
${ZWrite}
${Stencil}
${ColorMask}
${Blending}
${Culling}
${ZTest}
${ZWrite}
${Stencil}
${ColorMask}
//-------------------------------------------------------------------------------------
// End Render Modes
//-------------------------------------------------------------------------------------

$BlendMode.Alpha: #define _BLENDMODE_ALPHA 1
$BlendMode.Add: #define _BLENDMODE_ADD 1
//-------------------------------------------------------------------------------------
// End Variant
// End Variant Definitions
//-------------------------------------------------------------------------------------
#pragma vertex Vert

$VertexDescriptionInputs.WorldSpaceNormal: output.WorldSpaceNormal = TransformObjectToWorldNormal(input.normalOS);
$VertexDescriptionInputs.ViewSpaceNormal: output.ViewSpaceNormal = TransformWorldToViewDir(output.WorldSpaceNormal);
$VertexDescriptionInputs.TangentSpaceNormal: output.TangentSpaceNormal = float3(0.0f, 0.0f, 1.0f);
$VertexDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(output.WorldSpacePosition), _ProjectionParams.x);
$VertexDescriptionInputs.uv0: output.uv0 = float4(input.uv0, 0.0f, 0.0f);
$VertexDescriptionInputs.uv1: output.uv1 = float4(input.uv1, 0.0f, 0.0f);

$SurfaceDescriptionInputs.ObjectSpaceNormal: output.ObjectSpaceNormal = mul(output.WorldSpaceNormal, (float3x3) unity_ObjectToWorld); // transposed multiplication by inverse matrix to handle normal scale
$SurfaceDescriptionInputs.ViewSpaceNormal: output.ViewSpaceNormal = mul(output.WorldSpaceNormal, (float3x3) UNITY_MATRIX_I_V); // transposed multiplication by inverse matrix to handle normal scale
$SurfaceDescriptionInputs.TangentSpaceNormal: output.TangentSpaceNormal = float3(0.0f, 0.0f, 1.0f);
// TODO: FragInputs.positionWS is badly named -- it's camera relative, not in world space
// we have to fix it up here to match graph input expectations
$SurfaceDescriptionInputs.WorldSpacePosition: // TODO: FragInputs.positionWS is badly named -- it's camera relative, not in world space
$SurfaceDescriptionInputs.WorldSpacePosition: // we have to fix it up here to match graph input expectations
// TODO: positionSS is SV_Position, graph input expects screenPosition to be 0..1 across the active viewport (?)
$SurfaceDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(input.positionWS), _ProjectionParams.x);
$SurfaceDescriptionInputs.uv0: output.uv0 = float4(input.texCoord0, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv1: output.uv1 = float4(input.texCoord1, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv2: output.uv2 = float4(input.texCoord2, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv3: output.uv3 = float4(input.texCoord3, 0.0f, 0.0f);
$SurfaceDescriptionInputs.VertexColor: output.VertexColor = input.color;
$SurfaceDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(input.positionWS), _ProjectionParams.x);
$SurfaceDescriptionInputs.uv0: output.uv0 = float4(input.texCoord0, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv1: output.uv1 = float4(input.texCoord1, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv2: output.uv2 = float4(input.texCoord2, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv3: output.uv3 = float4(input.texCoord3, 0.0f, 0.0f);
$SurfaceDescriptionInputs.VertexColor: output.VertexColor = input.color;
$SurfaceDescriptionInputs.FaceSign: output.FaceSign = input.isFrontFace;
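Regarding the "transposed multiplication by inverse matrix" comments in the template above, a short HLSL sketch of why that works; the helper name is illustrative:

// mul(rowVector, M) is equivalent to mul(transpose(M), columnVector), so left-multiplying
// the world-space normal by unity_ObjectToWorld applies transpose(ObjectToWorld), i.e. the
// inverse-transpose of WorldToObject, which is the correct transform for normals under
// non-uniform scale.
float3 WorldToObjectNormalSketch(float3 normalWS)
{
    return normalize(mul(normalWS, (float3x3)unity_ObjectToWorld));
}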

288
com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDPBRSubShader.cs


activeFields.Add("AlphaTest");
}
// if (kTesselationMode != TessellationMode.None)
// {
// defines.AddShaderChunk("#define _TESSELLATION_PHONG 1", true);
// }
// #pragma shader_feature _ _VERTEX_DISPLACEMENT _PIXEL_DISPLACEMENT
// switch (kDisplacementMode)
// {
// case DisplacementMode.None:
// break;
// case DisplacementMode.Vertex:
// defines.AddShaderChunk("#define _VERTEX_DISPLACEMENT 1", true);
// break;
// case DisplacementMode.Pixel:
// defines.AddShaderChunk("#define _PIXEL_DISPLACEMENT 1", true);
// Depth offset is only enabled if per pixel displacement is
// if (kDepthOffsetEnable)
// {
// // #pragma shader_feature _DEPTHOFFSET_ON
// defines.AddShaderChunk("#define _DEPTHOFFSET_ON 1", true);
// }
// break;
// case DisplacementMode.Tessellation:
// if (kTessellationEnabled)
// {
// defines.AddShaderChunk("#define _TESSELLATION_DISPLACEMENT 1", true);
// }
// break;
// }
// #pragma shader_feature _VERTEX_DISPLACEMENT_LOCK_OBJECT_SCALE
// #pragma shader_feature _DISPLACEMENT_LOCK_TILING_SCALE
// #pragma shader_feature _PIXEL_DISPLACEMENT_LOCK_OBJECT_SCALE
// #pragma shader_feature _VERTEX_WIND
// #pragma shader_feature _ _REFRACTION_PLANE _REFRACTION_SPHERE
//
// #pragma shader_feature _ _MAPPING_PLANAR _MAPPING_TRIPLANAR // MOVE to a node
// #pragma shader_feature _NORMALMAP_TANGENT_SPACE
// #pragma shader_feature _ _REQUIRE_UV2 _REQUIRE_UV3
// #pragma shader_feature _MASKMAP
// #pragma shader_feature _BENTNORMALMAP
// #pragma shader_feature _EMISSIVE_COLOR_MAP
// #pragma shader_feature _ENABLESPECULAROCCLUSION
// #pragma shader_feature _HEIGHTMAP
// #pragma shader_feature _TANGENTMAP
// #pragma shader_feature _ANISOTROPYMAP
// #pragma shader_feature _SUBSURFACE_RADIUS_MAP
// #pragma shader_feature _THICKNESSMAP
// #pragma shader_feature _SPECULARCOLORMAP
// #pragma shader_feature _TRANSMITTANCECOLORMAP
// Keywords for transparent
// #pragma shader_feature _SURFACE_TYPE_TRANSPARENT
if (masterNode.surfaceType != SurfaceType.Opaque)

// {
// defines.AddShaderChunk("#define _BLENDMODE_PRE_MULTIPLY 1", true);
// }
// #pragma shader_feature _BLENDMODE_PRESERVE_SPECULAR_LIGHTING
// if (kEnableBlendModePreserveSpecularLighting)
// {
// defines.AddShaderChunk("#define _BLENDMODE_PRESERVE_SPECULAR_LIGHTING 1", true);
// }
// #pragma shader_feature _ENABLE_FOG_ON_TRANSPARENT
// if (kEnableFogOnTransparent)
// {
// defines.AddShaderChunk("#define _ENABLE_FOG_ON_TRANSPARENT 1", true);
// }
}
else
{

private static bool GenerateShaderPassLit(AbstractMaterialNode masterNode, Pass pass, GenerationMode mode, SurfaceMaterialOptions materialOptions, ShaderGenerator result, List<string> sourceAssetDependencyPaths)
{
var templateLocation = Path.Combine(Path.Combine(Path.Combine(HDEditorUtils.GetHDRenderPipelinePath(), "Editor"), "ShaderGraph"), pass.TemplateName);
if (!File.Exists(templateLocation))
{
// TODO: produce error here
return false;
}
if (sourceAssetDependencyPaths != null)
sourceAssetDependencyPaths.Add(templateLocation);
// grab all of the active nodes (for pixel and vertex graphs)
var vertexNodes = ListPool<INode>.Get();
NodeUtils.DepthFirstCollectNodesFromNode(vertexNodes, masterNode, NodeUtils.IncludeSelf.Include, pass.VertexShaderSlots);
var pixelNodes = ListPool<INode>.Get();
NodeUtils.DepthFirstCollectNodesFromNode(pixelNodes, masterNode, NodeUtils.IncludeSelf.Include, pass.PixelShaderSlots);
// graph requirements describe what the graph itself requires
var pixelRequirements = ShaderGraphRequirements.FromNodes(pixelNodes, ShaderStageCapability.Fragment, false); // TODO: is ShaderStageCapability.Fragment correct?
var vertexRequirements = ShaderGraphRequirements.FromNodes(vertexNodes, ShaderStageCapability.Vertex, false);
// Function Registry tracks functions to remove duplicates, it wraps a string builder that stores the combined function string
ShaderStringBuilder graphNodeFunctions = new ShaderStringBuilder();
graphNodeFunctions.IncreaseIndent();
var functionRegistry = new FunctionRegistry(graphNodeFunctions);
// TODO: this can be a shared function for all HDRP master nodes -- From here through GraphUtil.GenerateSurfaceDescription(..)
// Build the list of active slots based on what the pass requires
var pixelSlots = HDSubShaderUtilities.FindMaterialSlotsOnNode(pass.PixelShaderSlots, masterNode);
var vertexSlots = HDSubShaderUtilities.FindMaterialSlotsOnNode(pass.VertexShaderSlots, masterNode);
// properties used by either the pixel or the vertex shader
PropertyCollector sharedProperties = new PropertyCollector();
// build the graph outputs structure to hold the results of each active slot (and fill out activeFields to indicate they are active)
string pixelGraphInputStructName = "SurfaceDescriptionInputs";
string pixelGraphOutputStructName = "SurfaceDescription";
string pixelGraphEvalFunctionName = "SurfaceDescriptionFunction";
ShaderStringBuilder pixelGraphEvalFunction = new ShaderStringBuilder();
ShaderStringBuilder pixelGraphOutputs = new ShaderStringBuilder();
// dependency tracker -- set of active fields
// apply master node options to active fields
// build initial requirements
HDRPShaderStructs.AddActiveFieldsFromPixelGraphRequirements(activeFields, pixelRequirements);
// build the graph outputs structure, and populate activeFields with the fields of that structure
GraphUtil.GenerateSurfaceDescriptionStruct(pixelGraphOutputs, pixelSlots, true, pixelGraphOutputStructName, activeFields);
// Build the graph evaluation code, to evaluate the specified slots
GraphUtil.GenerateSurfaceDescriptionFunction(
pixelNodes,
masterNode,
masterNode.owner as AbstractMaterialGraph,
pixelGraphEvalFunction,
functionRegistry,
sharedProperties,
pixelRequirements, // TODO : REMOVE UNUSED
mode,
pixelGraphEvalFunctionName,
pixelGraphOutputStructName,
null,
pixelSlots,
pixelGraphInputStructName);
string vertexGraphInputStructName = "VertexDescriptionInputs";
string vertexGraphOutputStructName = "VertexDescription";
string vertexGraphEvalFunctionName = "VertexDescriptionFunction";
ShaderStringBuilder vertexGraphEvalFunction = new ShaderStringBuilder();
ShaderStringBuilder vertexGraphOutputs = new ShaderStringBuilder();
// check for vertex animation -- enables HAVE_VERTEX_MODIFICATION
bool vertexActive = false;
if (masterNode.IsSlotConnected(PBRMasterNode.PositionSlotId))
{
vertexActive = true;
activeFields.Add("features.modifyMesh");
HDRPShaderStructs.AddActiveFieldsFromVertexGraphRequirements(activeFields, vertexRequirements);
// -------------------------------------
// Generate Output structure for Vertex Description function
GraphUtil.GenerateVertexDescriptionStruct(vertexGraphOutputs, vertexSlots, vertexGraphOutputStructName, activeFields);
// -------------------------------------
// Generate Vertex Description function
GraphUtil.GenerateVertexDescriptionFunction(
masterNode.owner as AbstractMaterialGraph,
vertexGraphEvalFunction,
functionRegistry,
sharedProperties,
mode,
vertexNodes,
vertexSlots,
vertexGraphInputStructName,
vertexGraphEvalFunctionName,
vertexGraphOutputStructName);
}
var blendCode = new ShaderStringBuilder();
var cullCode = new ShaderStringBuilder();
var zTestCode = new ShaderStringBuilder();
var zWriteCode = new ShaderStringBuilder();
var stencilCode = new ShaderStringBuilder();
var colorMaskCode = new ShaderStringBuilder();
HDSubShaderUtilities.BuildRenderStatesFromPassAndMaterialOptions(pass, materialOptions, blendCode, cullCode, zTestCode, zWriteCode, stencilCode, colorMaskCode);
HDRPShaderStructs.AddRequiredFields(pass.RequiredFields, activeFields);
// apply dependencies to the active fields, and build interpolators (TODO: split this function)
var packedInterpolatorCode = new ShaderGenerator();
HDRPShaderStructs.Generate(
packedInterpolatorCode,
activeFields);
// debug output all active fields
var interpolatorDefines = new ShaderGenerator();
{
interpolatorDefines.AddShaderChunk("// ACTIVE FIELDS:");
foreach (string f in activeFields)
{
interpolatorDefines.AddShaderChunk("// " + f);
}
}
// build graph inputs structures
ShaderGenerator pixelGraphInputs = new ShaderGenerator();
ShaderSpliceUtil.BuildType(typeof(HDRPShaderStructs.SurfaceDescriptionInputs), activeFields, pixelGraphInputs);
ShaderGenerator vertexGraphInputs = new ShaderGenerator();
ShaderSpliceUtil.BuildType(typeof(HDRPShaderStructs.VertexDescriptionInputs), activeFields, vertexGraphInputs);
ShaderGenerator defines = new ShaderGenerator();
{
defines.AddShaderChunk(string.Format("#define SHADERPASS {0}", pass.ShaderPassName), true);
if (pass.ExtraDefines != null)
{
foreach (var define in pass.ExtraDefines)
defines.AddShaderChunk(define);
}
defines.AddGenerator(interpolatorDefines);
}
var shaderPassIncludes = new ShaderGenerator();
if (pass.Includes != null)
{
foreach (var include in pass.Includes)
shaderPassIncludes.AddShaderChunk(include);
}
// build graph code
var graph = new ShaderGenerator();
{
graph.AddShaderChunk("// Shared Graph Properties (uniform inputs)");
graph.AddShaderChunk(sharedProperties.GetPropertiesDeclaration(1));
if (vertexActive)
{
graph.AddShaderChunk("// Vertex Graph Inputs");
graph.Indent();
graph.AddGenerator(vertexGraphInputs);
graph.Deindent();
graph.AddShaderChunk("// Vertex Graph Outputs");
graph.Indent();
graph.AddShaderChunk(vertexGraphOutputs.ToString());
graph.Deindent();
}
graph.AddShaderChunk("// Pixel Graph Inputs");
graph.Indent();
graph.AddGenerator(pixelGraphInputs);
graph.Deindent();
graph.AddShaderChunk("// Pixel Graph Outputs");
graph.Indent();
graph.AddShaderChunk(pixelGraphOutputs.ToString());
graph.Deindent();
graph.AddShaderChunk("// Shared Graph Node Functions");
graph.AddShaderChunk(graphNodeFunctions.ToString());
if (vertexActive)
{
graph.AddShaderChunk("// Vertex Graph Evaluation");
graph.Indent();
graph.AddShaderChunk(vertexGraphEvalFunction.ToString());
graph.Deindent();
}
graph.AddShaderChunk("// Pixel Graph Evaluation");
graph.Indent();
graph.AddShaderChunk(pixelGraphEvalFunction.ToString());
graph.Deindent();
}
// build the hash table of all named fragments TODO: could make this Dictionary<string, ShaderGenerator / string> ?
Dictionary<string, string> namedFragments = new Dictionary<string, string>();
namedFragments.Add("${Defines}", defines.GetShaderString(2, false));
namedFragments.Add("${Graph}", graph.GetShaderString(2, false));
namedFragments.Add("${LightMode}", pass.LightMode);
namedFragments.Add("${PassName}", pass.Name);
namedFragments.Add("${Includes}", shaderPassIncludes.GetShaderString(2, false));
namedFragments.Add("${InterpolatorPacking}", packedInterpolatorCode.GetShaderString(2, false));
namedFragments.Add("${Blending}", blendCode.ToString());
namedFragments.Add("${Culling}", cullCode.ToString());
namedFragments.Add("${ZTest}", zTestCode.ToString());
namedFragments.Add("${ZWrite}", zWriteCode.ToString());
namedFragments.Add("${Stencil}", stencilCode.ToString());
namedFragments.Add("${ColorMask}", colorMaskCode.ToString());
namedFragments.Add("${LOD}", materialOptions.lod.ToString());
// process the template to generate the shader code for this pass TODO: could make this a shared function
string[] templateLines = File.ReadAllLines(templateLocation);
System.Text.StringBuilder builder = new System.Text.StringBuilder();
foreach (string line in templateLines)
{
ShaderSpliceUtil.PreprocessShaderCode(line, activeFields, namedFragments, builder);
builder.AppendLine();
}
result.AddShaderChunk(builder.ToString(), false);
return true;
// use standard shader pass generation
return HDSubShaderUtilities.GenerateShaderPass(masterNode, pass, mode, materialOptions, activeFields, result, sourceAssetDependencyPaths);
}
public string GetSubshader(IMasterNode iMasterNode, GenerationMode mode, List<string> sourceAssetDependencyPaths = null)

242
com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDSubShaderUtilities.cs


struct AttributesMesh
{
[Semantic("POSITION")] Vector3 positionOS;
[Semantic("NORMAL")][Optional] Vector3 normalOS;
[Semantic("TANGENT")][Optional] Vector4 tangentOS; // Stores bi-tangent sign in w
[Semantic("TEXCOORD0")][Optional] Vector2 uv0;
[Semantic("TEXCOORD1")][Optional] Vector2 uv1;
[Semantic("TEXCOORD2")][Optional] Vector2 uv2;
[Semantic("TEXCOORD3")][Optional] Vector2 uv3;
[Semantic("COLOR")][Optional] Vector4 color;
[Semantic("NORMAL")][Optional] Vector3 normalOS;
[Semantic("TANGENT")][Optional] Vector4 tangentOS; // Stores bi-tangent sign in w
[Semantic("TEXCOORD0")][Optional] Vector2 uv0;
[Semantic("TEXCOORD1")][Optional] Vector2 uv1;
[Semantic("TEXCOORD2")][Optional] Vector2 uv2;
[Semantic("TEXCOORD3")][Optional] Vector2 uv3;
[Semantic("COLOR")][Optional] Vector4 color;
};
struct VaryingsMeshToPS

new Dependency("SurfaceDescriptionInputs.uv2", "FragInputs.texCoord2"),
new Dependency("SurfaceDescriptionInputs.uv3", "FragInputs.texCoord3"),
new Dependency("SurfaceDescriptionInputs.VertexColor", "FragInputs.color"),
new Dependency("SurfaceDescriptionInputs.FaceSign", "FragInputs.isFrontFace"),
new Dependency("SurfaceDescriptionInputs.FaceSign", "FragInputs.isFrontFace"),
};
};

public static class HDSubShaderUtilities
{
public static bool GenerateShaderPass(AbstractMaterialNode masterNode, Pass pass, GenerationMode mode, SurfaceMaterialOptions materialOptions, HashSet<string> activeFields, ShaderGenerator result, List<string> sourceAssetDependencyPaths)
{
var templateLocation = Path.Combine(Path.Combine(Path.Combine(HDEditorUtils.GetHDRenderPipelinePath(), "Editor"), "ShaderGraph"), pass.TemplateName);
if (!File.Exists(templateLocation))
{
// TODO: produce error here
return false;
}
bool debugOutput = false;
if (sourceAssetDependencyPaths != null)
sourceAssetDependencyPaths.Add(templateLocation);
// grab all of the active nodes (for pixel and vertex graphs)
var vertexNodes = ListPool<INode>.Get();
NodeUtils.DepthFirstCollectNodesFromNode(vertexNodes, masterNode, NodeUtils.IncludeSelf.Include, pass.VertexShaderSlots);
var pixelNodes = ListPool<INode>.Get();
NodeUtils.DepthFirstCollectNodesFromNode(pixelNodes, masterNode, NodeUtils.IncludeSelf.Include, pass.PixelShaderSlots);
// graph requirements describe what the graph itself requires
var pixelRequirements = ShaderGraphRequirements.FromNodes(pixelNodes, ShaderStageCapability.Fragment, false); // TODO: is ShaderStageCapability.Fragment correct?
var vertexRequirements = ShaderGraphRequirements.FromNodes(vertexNodes, ShaderStageCapability.Vertex, false);
// Function Registry tracks functions to remove duplicates, it wraps a string builder that stores the combined function string
ShaderStringBuilder graphNodeFunctions = new ShaderStringBuilder();
graphNodeFunctions.IncreaseIndent();
var functionRegistry = new FunctionRegistry(graphNodeFunctions);
// TODO: this can be a shared function for all HDRP master nodes -- From here through GraphUtil.GenerateSurfaceDescription(..)
// Build the list of active slots based on what the pass requires
var pixelSlots = HDSubShaderUtilities.FindMaterialSlotsOnNode(pass.PixelShaderSlots, masterNode);
var vertexSlots = HDSubShaderUtilities.FindMaterialSlotsOnNode(pass.VertexShaderSlots, masterNode);
// properties used by either the pixel or the vertex shader
PropertyCollector sharedProperties = new PropertyCollector();
// build the graph outputs structure to hold the results of each active slot (and fill out activeFields to indicate they are active)
string pixelGraphInputStructName = "SurfaceDescriptionInputs";
string pixelGraphOutputStructName = "SurfaceDescription";
string pixelGraphEvalFunctionName = "SurfaceDescriptionFunction";
ShaderStringBuilder pixelGraphEvalFunction = new ShaderStringBuilder();
ShaderStringBuilder pixelGraphOutputs = new ShaderStringBuilder();
// build initial requirements
HDRPShaderStructs.AddActiveFieldsFromPixelGraphRequirements(activeFields, pixelRequirements);
// build the graph outputs structure, and populate activeFields with the fields of that structure
GraphUtil.GenerateSurfaceDescriptionStruct(pixelGraphOutputs, pixelSlots, true, pixelGraphOutputStructName, activeFields);
// Build the graph evaluation code, to evaluate the specified slots
GraphUtil.GenerateSurfaceDescriptionFunction(
pixelNodes,
masterNode,
masterNode.owner as AbstractMaterialGraph,
pixelGraphEvalFunction,
functionRegistry,
sharedProperties,
pixelRequirements, // TODO : REMOVE UNUSED
mode,
pixelGraphEvalFunctionName,
pixelGraphOutputStructName,
null,
pixelSlots,
pixelGraphInputStructName);
string vertexGraphInputStructName = "VertexDescriptionInputs";
string vertexGraphOutputStructName = "VertexDescription";
string vertexGraphEvalFunctionName = "VertexDescriptionFunction";
ShaderStringBuilder vertexGraphEvalFunction = new ShaderStringBuilder();
ShaderStringBuilder vertexGraphOutputs = new ShaderStringBuilder();
// check for vertex animation -- enables HAVE_VERTEX_MODIFICATION
bool vertexActive = false;
if (masterNode.IsSlotConnected(PBRMasterNode.PositionSlotId))
{
vertexActive = true;
activeFields.Add("features.modifyMesh");
HDRPShaderStructs.AddActiveFieldsFromVertexGraphRequirements(activeFields, vertexRequirements);
// -------------------------------------
// Generate Output structure for Vertex Description function
GraphUtil.GenerateVertexDescriptionStruct(vertexGraphOutputs, vertexSlots, vertexGraphOutputStructName, activeFields);
// -------------------------------------
// Generate Vertex Description function
GraphUtil.GenerateVertexDescriptionFunction(
masterNode.owner as AbstractMaterialGraph,
vertexGraphEvalFunction,
functionRegistry,
sharedProperties,
mode,
vertexNodes,
vertexSlots,
vertexGraphInputStructName,
vertexGraphEvalFunctionName,
vertexGraphOutputStructName);
}
var blendCode = new ShaderStringBuilder();
var cullCode = new ShaderStringBuilder();
var zTestCode = new ShaderStringBuilder();
var zWriteCode = new ShaderStringBuilder();
var stencilCode = new ShaderStringBuilder();
var colorMaskCode = new ShaderStringBuilder();
HDSubShaderUtilities.BuildRenderStatesFromPassAndMaterialOptions(pass, materialOptions, blendCode, cullCode, zTestCode, zWriteCode, stencilCode, colorMaskCode);
HDRPShaderStructs.AddRequiredFields(pass.RequiredFields, activeFields);
// apply dependencies to the active fields, and build interpolators (TODO: split this function)
var packedInterpolatorCode = new ShaderGenerator();
HDRPShaderStructs.Generate(
packedInterpolatorCode,
activeFields);
// debug output all active fields
var interpolatorDefines = new ShaderGenerator();
if (debugOutput)
{
interpolatorDefines.AddShaderChunk("// ACTIVE FIELDS:");
foreach (string f in activeFields)
{
interpolatorDefines.AddShaderChunk("// " + f);
}
}
// build graph inputs structures
ShaderGenerator pixelGraphInputs = new ShaderGenerator();
ShaderSpliceUtil.BuildType(typeof(HDRPShaderStructs.SurfaceDescriptionInputs), activeFields, pixelGraphInputs);
ShaderGenerator vertexGraphInputs = new ShaderGenerator();
ShaderSpliceUtil.BuildType(typeof(HDRPShaderStructs.VertexDescriptionInputs), activeFields, vertexGraphInputs);
ShaderGenerator defines = new ShaderGenerator();
{
defines.AddShaderChunk(string.Format("#define SHADERPASS {0}", pass.ShaderPassName), true);
if (pass.ExtraDefines != null)
{
foreach (var define in pass.ExtraDefines)
defines.AddShaderChunk(define);
}
defines.AddGenerator(interpolatorDefines);
}
var shaderPassIncludes = new ShaderGenerator();
if (pass.Includes != null)
{
foreach (var include in pass.Includes)
shaderPassIncludes.AddShaderChunk(include);
}
// build graph code
var graph = new ShaderGenerator();
{
graph.AddShaderChunk("// Shared Graph Properties (uniform inputs)");
graph.AddShaderChunk(sharedProperties.GetPropertiesDeclaration(1));
if (vertexActive)
{
graph.AddShaderChunk("// Vertex Graph Inputs");
graph.Indent();
graph.AddGenerator(vertexGraphInputs);
graph.Deindent();
graph.AddShaderChunk("// Vertex Graph Outputs");
graph.Indent();
graph.AddShaderChunk(vertexGraphOutputs.ToString());
graph.Deindent();
}
graph.AddShaderChunk("// Pixel Graph Inputs");
graph.Indent();
graph.AddGenerator(pixelGraphInputs);
graph.Deindent();
graph.AddShaderChunk("// Pixel Graph Outputs");
graph.Indent();
graph.AddShaderChunk(pixelGraphOutputs.ToString());
graph.Deindent();
graph.AddShaderChunk("// Shared Graph Node Functions");
graph.AddShaderChunk(graphNodeFunctions.ToString());
if (vertexActive)
{
graph.AddShaderChunk("// Vertex Graph Evaluation");
graph.Indent();
graph.AddShaderChunk(vertexGraphEvalFunction.ToString());
graph.Deindent();
}
graph.AddShaderChunk("// Pixel Graph Evaluation");
graph.Indent();
graph.AddShaderChunk(pixelGraphEvalFunction.ToString());
graph.Deindent();
}
// build the hash table of all named fragments TODO: could make this Dictionary<string, ShaderGenerator / string> ?
Dictionary<string, string> namedFragments = new Dictionary<string, string>();
namedFragments.Add("${Defines}", defines.GetShaderString(2, false));
namedFragments.Add("${Graph}", graph.GetShaderString(2, false));
namedFragments.Add("${LightMode}", pass.LightMode);
namedFragments.Add("${PassName}", pass.Name);
namedFragments.Add("${Includes}", shaderPassIncludes.GetShaderString(2, false));
namedFragments.Add("${InterpolatorPacking}", packedInterpolatorCode.GetShaderString(2, false));
namedFragments.Add("${Blending}", blendCode.ToString());
namedFragments.Add("${Culling}", cullCode.ToString());
namedFragments.Add("${ZTest}", zTestCode.ToString());
namedFragments.Add("${ZWrite}", zWriteCode.ToString());
namedFragments.Add("${Stencil}", stencilCode.ToString());
namedFragments.Add("${ColorMask}", colorMaskCode.ToString());
namedFragments.Add("${LOD}", materialOptions.lod.ToString());
// process the template to generate the shader code for this pass TODO: could make this a shared function
string[] templateLines = File.ReadAllLines(templateLocation);
System.Text.StringBuilder builder = new System.Text.StringBuilder();
foreach (string line in templateLines)
{
ShaderSpliceUtil.PreprocessShaderCode(line, activeFields, namedFragments, builder, debugOutput);
}
result.AddShaderChunk(builder.ToString(), false);
return true;
}
public static List<MaterialSlot> FindMaterialSlotsOnNode(IEnumerable<int> slots, AbstractMaterialNode node)
{
var activeSlots = new List<MaterialSlot>();

58
com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDUnlitPassForward.template


//-------------------------------------------------------------------------------------
// Render Modes (Blend, Cull, ZTest, Stencil, etc)
//-------------------------------------------------------------------------------------
${Blending}
${Culling}
${ZTest}
${ZWrite}
${Stencil}
${ColorMask}
${Blending}
${Culling}
${ZTest}
${ZWrite}
${Stencil}
${ColorMask}
//-------------------------------------------------------------------------------------
// End Render Modes
//-------------------------------------------------------------------------------------

//#pragma enable_d3d11_debug_symbols
//-------------------------------------------------------------------------------------
// Variant Definitions
// Variant Definitions (active field translations to HDRP defines)
${VariantDefines}
$AlphaTest: #define _ALPHATEST_ON 1
$Material.SubsurfaceScattering: #define _MATERIAL_FEATURE_SUBSURFACE_SCATTERING 1
$Material.Transmission: #define _MATERIAL_FEATURE_TRANSMISSION 1
$Material.Anisotropy: #define _MATERIAL_FEATURE_ANISOTROPY 1
$Material.ClearCoat: #define _MATERIAL_FEATURE_CLEAR_COAT 1
$Material.Iridescence: #define _MATERIAL_FEATURE_IRIDESCENCE 1
$Material.SpecularColor: #define _MATERIAL_FEATURE_SPECULAR_COLOR 1
$SurfaceType.Transparent: #define _SURFACE_TYPE_TRANSPARENT 1
$BlendMode.Alpha: #define _BLENDMODE_ALPHA 1
$BlendMode.Add: #define _BLENDMODE_ADD 1
// End Variant
// End Variant Definitions
//-------------------------------------------------------------------------------------
#pragma vertex Vert

$VertexDescriptionInputs.WorldSpaceNormal: output.WorldSpaceNormal = TransformObjectToWorldNormal(input.normalOS);
$VertexDescriptionInputs.ViewSpaceNormal: output.ViewSpaceNormal = TransformWorldToViewDir(output.WorldSpaceNormal);
$VertexDescriptionInputs.TangentSpaceNormal: output.TangentSpaceNormal = float3(0.0f, 0.0f, 1.0f);
$VertexDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(output.WorldSpacePosition), _ProjectionParams.x);
$VertexDescriptionInputs.uv0: output.uv0 = float4(input.uv0, 0.0f, 0.0f);
$VertexDescriptionInputs.uv1: output.uv1 = float4(input.uv1, 0.0f, 0.0f);

$SurfaceDescriptionInputs.ObjectSpaceNormal: output.ObjectSpaceNormal = mul(output.WorldSpaceNormal, (float3x3) unity_ObjectToWorld); // transposed multiplication by inverse matrix to handle normal scale
$SurfaceDescriptionInputs.ViewSpaceNormal: output.ViewSpaceNormal = mul(output.WorldSpaceNormal, (float3x3) UNITY_MATRIX_I_V); // transposed multiplication by inverse matrix to handle normal scale
$SurfaceDescriptionInputs.TangentSpaceNormal: output.TangentSpaceNormal = float3(0.0f, 0.0f, 1.0f);
// TODO: FragInputs.positionWS is badly named -- it's camera relative, not in world space
// we have to fix it up here to match graph input expectations
$SurfaceDescriptionInputs.WorldSpacePosition: // TODO: FragInputs.positionWS is badly named -- it's camera relative, not in world space
$SurfaceDescriptionInputs.WorldSpacePosition: // we have to fix it up here to match graph input expectations
// TODO: positionSS is SV_Position, graph input expects screenPosition to be 0..1 across the active viewport (?)
$SurfaceDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(input.positionWS), _ProjectionParams.x);
$SurfaceDescriptionInputs.uv0: output.uv0 = float4(input.texCoord0, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv1: output.uv1 = float4(input.texCoord1, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv2: output.uv2 = float4(input.texCoord2, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv3: output.uv3 = float4(input.texCoord3, 0.0f, 0.0f);
$SurfaceDescriptionInputs.VertexColor: output.VertexColor = input.color;
$SurfaceDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(input.positionWS), _ProjectionParams.x);
$SurfaceDescriptionInputs.uv0: output.uv0 = float4(input.texCoord0, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv1: output.uv1 = float4(input.texCoord1, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv2: output.uv2 = float4(input.texCoord2, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv3: output.uv3 = float4(input.texCoord3, 0.0f, 0.0f);
$SurfaceDescriptionInputs.VertexColor: output.VertexColor = input.color;
$SurfaceDescriptionInputs.FaceSign: output.FaceSign = input.isFrontFace;

294
com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDUnlitSubShader.cs


UnlitMasterNode masterNode = iMasterNode as UnlitMasterNode;
if (masterNode == null)
{
return null;
return activeFields;
}
if (masterNode.twoSided.isOn)

activeFields.Add("AlphaTest");
}
// if (kTesselationMode != TessellationMode.None)
// {
// defines.AddShaderChunk("#define _TESSELLATION_PHONG 1", true);
// }
// #pragma shader_feature _ _VERTEX_DISPLACEMENT _PIXEL_DISPLACEMENT
// switch (kDisplacementMode)
// {
// case DisplacementMode.None:
// break;
// case DisplacementMode.Vertex:
// defines.AddShaderChunk("#define _VERTEX_DISPLACEMENT 1", true);
// break;
// case DisplacementMode.Pixel:
// defines.AddShaderChunk("#define _PIXEL_DISPLACEMENT 1", true);
// Depth offset is only enabled if per pixel displacement is
// if (kDepthOffsetEnable)
// {
// // #pragma shader_feature _DEPTHOFFSET_ON
// defines.AddShaderChunk("#define _DEPTHOFFSET_ON 1", true);
// }
// break;
// case DisplacementMode.Tessellation:
// if (kTessellationEnabled)
// {
// defines.AddShaderChunk("#define _TESSELLATION_DISPLACEMENT 1", true);
// }
// break;
// }
// #pragma shader_feature _VERTEX_DISPLACEMENT_LOCK_OBJECT_SCALE
// #pragma shader_feature _DISPLACEMENT_LOCK_TILING_SCALE
// #pragma shader_feature _PIXEL_DISPLACEMENT_LOCK_OBJECT_SCALE
// #pragma shader_feature _VERTEX_WIND
// #pragma shader_feature _ _REFRACTION_PLANE _REFRACTION_SPHERE
//
// #pragma shader_feature _ _MAPPING_PLANAR _MAPPING_TRIPLANAR // MOVE to a node
// #pragma shader_feature _NORMALMAP_TANGENT_SPACE
// #pragma shader_feature _ _REQUIRE_UV2 _REQUIRE_UV3
// #pragma shader_feature _MASKMAP
// #pragma shader_feature _BENTNORMALMAP
// #pragma shader_feature _EMISSIVE_COLOR_MAP
// #pragma shader_feature _ENABLESPECULAROCCLUSION
// #pragma shader_feature _HEIGHTMAP
// #pragma shader_feature _TANGENTMAP
// #pragma shader_feature _ANISOTROPYMAP
// #pragma shader_feature _SUBSURFACE_RADIUS_MAP
// #pragma shader_feature _THICKNESSMAP
// #pragma shader_feature _SPECULARCOLORMAP
// #pragma shader_feature _TRANSMITTANCECOLORMAP
// Keywords for transparent
// #pragma shader_feature _SURFACE_TYPE_TRANSPARENT
if (masterNode.surfaceType != SurfaceType.Opaque)

// {
// defines.AddShaderChunk("#define _BLENDMODE_PRE_MULTIPLY 1", true);
// }
// #pragma shader_feature _BLENDMODE_PRESERVE_SPECULAR_LIGHTING
// if (kEnableBlendModePreserveSpecularLighting)
// {
// defines.AddShaderChunk("#define _BLENDMODE_PRESERVE_SPECULAR_LIGHTING 1", true);
// }
// #pragma shader_feature _ENABLE_FOG_ON_TRANSPARENT
// if (kEnableFogOnTransparent)
// {
// defines.AddShaderChunk("#define _ENABLE_FOG_ON_TRANSPARENT 1", true);
// }
}
else
{

private static bool GenerateShaderPassUnlit(AbstractMaterialNode masterNode, Pass pass, GenerationMode mode, SurfaceMaterialOptions materialOptions, ShaderGenerator result, List<string> sourceAssetDependencyPaths)
{
var templateLocation = Path.Combine(Path.Combine(Path.Combine(HDEditorUtils.GetHDRenderPipelinePath(), "Editor"), "ShaderGraph"), pass.TemplateName);
if (!File.Exists(templateLocation))
{
// TODO: produce error here
return false;
}
if (sourceAssetDependencyPaths != null)
sourceAssetDependencyPaths.Add(templateLocation);
// grab all of the active nodes (for pixel and vertex graphs)
var vertexNodes = ListPool<INode>.Get();
NodeUtils.DepthFirstCollectNodesFromNode(vertexNodes, masterNode, NodeUtils.IncludeSelf.Include, pass.VertexShaderSlots);
var pixelNodes = ListPool<INode>.Get();
NodeUtils.DepthFirstCollectNodesFromNode(pixelNodes, masterNode, NodeUtils.IncludeSelf.Include, pass.PixelShaderSlots);
// graph requirements describe what the graph itself requires
var pixelRequirements = ShaderGraphRequirements.FromNodes(pixelNodes, ShaderStageCapability.Fragment, false); // TODO: is ShaderStageCapability.Fragment correct?
var vertexRequirements = ShaderGraphRequirements.FromNodes(vertexNodes, ShaderStageCapability.Vertex, false);
// Function Registry tracks functions to remove duplicates, it wraps a string builder that stores the combined function string
ShaderStringBuilder graphNodeFunctions = new ShaderStringBuilder();
graphNodeFunctions.IncreaseIndent();
var functionRegistry = new FunctionRegistry(graphNodeFunctions);
// TODO: this can be a shared function for all HDRP master nodes -- From here through GraphUtil.GenerateSurfaceDescription(..)
// Build the list of active slots based on what the pass requires
var pixelSlots = HDSubShaderUtilities.FindMaterialSlotsOnNode(pass.PixelShaderSlots, masterNode);
var vertexSlots = HDSubShaderUtilities.FindMaterialSlotsOnNode(pass.VertexShaderSlots, masterNode);
// properties used by either the pixel or the vertex shader
PropertyCollector sharedProperties = new PropertyCollector();
// build the graph outputs structure to hold the results of each active slot (and fill out activeFields to indicate they are active)
string pixelGraphInputStructName = "SurfaceDescriptionInputs";
string pixelGraphOutputStructName = "SurfaceDescription";
string pixelGraphEvalFunctionName = "SurfaceDescriptionFunction";
ShaderStringBuilder pixelGraphEvalFunction = new ShaderStringBuilder();
ShaderStringBuilder pixelGraphOutputs = new ShaderStringBuilder();
// dependency tracker -- set of active fields
// apply master node options to active fields
// build initial requirements
HDRPShaderStructs.AddActiveFieldsFromPixelGraphRequirements(activeFields, pixelRequirements);
// build the graph outputs structure, and populate activeFields with the fields of that structure
GraphUtil.GenerateSurfaceDescriptionStruct(pixelGraphOutputs, pixelSlots, true, pixelGraphOutputStructName, activeFields);
// Build the graph evaluation code, to evaluate the specified slots
GraphUtil.GenerateSurfaceDescriptionFunction(
pixelNodes,
masterNode,
masterNode.owner as AbstractMaterialGraph,
pixelGraphEvalFunction,
functionRegistry,
sharedProperties,
pixelRequirements, // TODO : REMOVE UNUSED
mode,
pixelGraphEvalFunctionName,
pixelGraphOutputStructName,
null,
pixelSlots,
pixelGraphInputStructName);
string vertexGraphInputStructName = "VertexDescriptionInputs";
string vertexGraphOutputStructName = "VertexDescription";
string vertexGraphEvalFunctionName = "VertexDescriptionFunction";
ShaderStringBuilder vertexGraphEvalFunction = new ShaderStringBuilder();
ShaderStringBuilder vertexGraphOutputs = new ShaderStringBuilder();
// check for vertex animation -- enables HAVE_VERTEX_MODIFICATION
bool vertexActive = false;
if (masterNode.IsSlotConnected(PBRMasterNode.PositionSlotId))
{
vertexActive = true;
activeFields.Add("features.modifyMesh");
HDRPShaderStructs.AddActiveFieldsFromVertexGraphRequirements(activeFields, vertexRequirements);
// -------------------------------------
// Generate Output structure for Vertex Description function
GraphUtil.GenerateVertexDescriptionStruct(vertexGraphOutputs, vertexSlots, vertexGraphOutputStructName, activeFields);
// -------------------------------------
// Generate Vertex Description function
GraphUtil.GenerateVertexDescriptionFunction(
masterNode.owner as AbstractMaterialGraph,
vertexGraphEvalFunction,
functionRegistry,
sharedProperties,
mode,
vertexNodes,
vertexSlots,
vertexGraphInputStructName,
vertexGraphEvalFunctionName,
vertexGraphOutputStructName);
}
var blendCode = new ShaderStringBuilder();
var cullCode = new ShaderStringBuilder();
var zTestCode = new ShaderStringBuilder();
var zWriteCode = new ShaderStringBuilder();
var stencilCode = new ShaderStringBuilder();
var colorMaskCode = new ShaderStringBuilder();
HDSubShaderUtilities.BuildRenderStatesFromPassAndMaterialOptions(pass, materialOptions, blendCode, cullCode, zTestCode, zWriteCode, stencilCode, colorMaskCode);
HDRPShaderStructs.AddRequiredFields(pass.RequiredFields, activeFields);
// apply dependencies to the active fields, and build interpolators (TODO: split this function)
var packedInterpolatorCode = new ShaderGenerator();
HDRPShaderStructs.Generate(
packedInterpolatorCode,
activeFields);
// debug output all active fields
var interpolatorDefines = new ShaderGenerator();
{
interpolatorDefines.AddShaderChunk("// ACTIVE FIELDS:");
foreach (string f in activeFields)
{
interpolatorDefines.AddShaderChunk("// " + f);
}
}
// build graph inputs structures
ShaderGenerator pixelGraphInputs = new ShaderGenerator();
ShaderSpliceUtil.BuildType(typeof(HDRPShaderStructs.SurfaceDescriptionInputs), activeFields, pixelGraphInputs);
ShaderGenerator vertexGraphInputs = new ShaderGenerator();
ShaderSpliceUtil.BuildType(typeof(HDRPShaderStructs.VertexDescriptionInputs), activeFields, vertexGraphInputs);
ShaderGenerator defines = new ShaderGenerator();
{
defines.AddShaderChunk(string.Format("#define SHADERPASS {0}", pass.ShaderPassName), true);
if (pass.ExtraDefines != null)
{
foreach (var define in pass.ExtraDefines)
defines.AddShaderChunk(define);
}
defines.AddGenerator(interpolatorDefines);
}
var shaderPassIncludes = new ShaderGenerator();
if (pass.Includes != null)
{
foreach (var include in pass.Includes)
shaderPassIncludes.AddShaderChunk(include);
}
// build graph code
var graph = new ShaderGenerator();
{
graph.AddShaderChunk("// Shared Graph Properties (uniform inputs)");
graph.AddShaderChunk(sharedProperties.GetPropertiesDeclaration(1));
if (vertexActive)
{
graph.AddShaderChunk("// Vertex Graph Inputs");
graph.Indent();
graph.AddGenerator(vertexGraphInputs);
graph.Deindent();
graph.AddShaderChunk("// Vertex Graph Outputs");
graph.Indent();
graph.AddShaderChunk(vertexGraphOutputs.ToString());
graph.Deindent();
}
graph.AddShaderChunk("// Pixel Graph Inputs");
graph.Indent();
graph.AddGenerator(pixelGraphInputs);
graph.Deindent();
graph.AddShaderChunk("// Pixel Graph Outputs");
graph.Indent();
graph.AddShaderChunk(pixelGraphOutputs.ToString());
graph.Deindent();
graph.AddShaderChunk("// Shared Graph Node Functions");
graph.AddShaderChunk(graphNodeFunctions.ToString());
if (vertexActive)
{
graph.AddShaderChunk("// Vertex Graph Evaluation");
graph.Indent();
graph.AddShaderChunk(vertexGraphEvalFunction.ToString());
graph.Deindent();
}
graph.AddShaderChunk("// Pixel Graph Evaluation");
graph.Indent();
graph.AddShaderChunk(pixelGraphEvalFunction.ToString());
graph.Deindent();
}
// build the hash table of all named fragments TODO: could make this Dictionary<string, ShaderGenerator / string> ?
Dictionary<string, string> namedFragments = new Dictionary<string, string>();
namedFragments.Add("${Defines}", defines.GetShaderString(2, false));
namedFragments.Add("${Graph}", graph.GetShaderString(2, false));
namedFragments.Add("${LightMode}", pass.LightMode);
namedFragments.Add("${PassName}", pass.Name);
namedFragments.Add("${Includes}", shaderPassIncludes.GetShaderString(2, false));
namedFragments.Add("${InterpolatorPacking}", packedInterpolatorCode.GetShaderString(2, false));
namedFragments.Add("${Blending}", blendCode.ToString());
namedFragments.Add("${Culling}", cullCode.ToString());
namedFragments.Add("${ZTest}", zTestCode.ToString());
namedFragments.Add("${ZWrite}", zWriteCode.ToString());
namedFragments.Add("${Stencil}", stencilCode.ToString());
namedFragments.Add("${ColorMask}", colorMaskCode.ToString());
namedFragments.Add("${LOD}", materialOptions.lod.ToString());
// process the template to generate the shader code for this pass TODO: could make this a shared function
string[] templateLines = File.ReadAllLines(templateLocation);
System.Text.StringBuilder builder = new System.Text.StringBuilder();
foreach (string line in templateLines)
{
ShaderSpliceUtil.PreprocessShaderCode(line, activeFields, namedFragments, builder);
builder.AppendLine();
}
result.AddShaderChunk(builder.ToString(), false);
return true;
// use standard shader pass generation
return HDSubShaderUtilities.GenerateShaderPass(masterNode, pass, mode, materialOptions, activeFields, result, sourceAssetDependencyPaths);
public string GetSubshader(IMasterNode inMasterNode, GenerationMode mode, List<string> sourceAssetDependencyPaths = null)
public string GetSubshader(IMasterNode iMasterNode, GenerationMode mode, List<string> sourceAssetDependencyPaths = null)
{
if (sourceAssetDependencyPaths != null)
{

sourceAssetDependencyPaths.Add(AssetDatabase.GUIDToAssetPath("713ced4e6eef4a44799a4dd59041484b"));
}
var masterNode = inMasterNode as UnlitMasterNode;
var masterNode = iMasterNode as UnlitMasterNode;
var subShader = new ShaderGenerator();
subShader.AddShaderChunk("SubShader", true);
subShader.AddShaderChunk("{", true);

57
com.unity.render-pipelines.high-definition/HDRP/Lighting/LightEvaluation.hlsl


weight = Smoothstep01(weight);
weight *= lightData.weight;
}
// Ambient occlusion
struct AmbientOcclusionFactor
{
float3 indirectAmbientOcclusion;
float3 directAmbientOcclusion;
float3 indirectSpecularOcclusion;
};
void GetScreenSpaceAmbientOcclusion(float2 positionSS, float NdotV, float perceptualRoughness, float ambientOcclusionFromData, float specularOcclusionFromData, out AmbientOcclusionFactor aoFactor)
{
// Note: When we ImageLoad outside of the texture size, the value returned by Load is 0 (Note: on Metal it may clamp to the edge value of the texture, which is also fine)
// We use this property to get a neutral value for AO that doesn't consume a sampler and also works with compute shaders (i.e. use ImageLoad)
// We store inverse AO so neutral is black. So whether we sample inside or outside the texture, it returns 0 in the neutral case
// Ambient occlusion used for indirect lighting (reflection probe, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, positionSS).x;
// Ambient occlusion used for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
#else
float indirectAmbientOcclusion = 1.0;
float directAmbientOcclusion = 1.0;
#endif
float roughness = PerceptualRoughnessToRoughness(perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(NdotV), indirectAmbientOcclusion, roughness);
aoFactor.indirectSpecularOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), min(specularOcclusionFromData, specularOcclusion));
aoFactor.indirectAmbientOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), min(ambientOcclusionFromData, indirectAmbientOcclusion));
aoFactor.directAmbientOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), directAmbientOcclusion);
}
void GetScreenSpaceAmbientOcclusionMultibounce(float2 positionSS, float NdotV, float perceptualRoughness, float ambientOcclusionFromData, float specularOcclusionFromData, float3 diffuseColor, float3 fresnel0, out AmbientOcclusionFactor aoFactor)
{
// Use GTAOMultiBounce approximation for ambient occlusion (allows getting a tint from the diffuseColor)
// Note: When we ImageLoad outside of the texture size, the value returned by Load is 0 (Note: on Metal it may clamp to the edge value of the texture, which is also fine)
// We use this property to get a neutral value for AO that doesn't consume a sampler and also works with compute shaders (i.e. use ImageLoad)
// We store inverse AO so neutral is black. So whether we sample inside or outside the texture, it returns 0 in the neutral case
// Ambient occlusion used for indirect lighting (reflection probe, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, positionSS).x;
// Ambient occlusion used for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
#else
float indirectAmbientOcclusion = 1.0;
float directAmbientOcclusion = 1.0;
#endif
float roughness = PerceptualRoughnessToRoughness(perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(NdotV), indirectAmbientOcclusion, roughness);
aoFactor.indirectSpecularOcclusion = GTAOMultiBounce(min(specularOcclusionFromData, specularOcclusion), fresnel0);
aoFactor.indirectAmbientOcclusion = GTAOMultiBounce(min(ambientOcclusionFromData, indirectAmbientOcclusion), diffuseColor);
aoFactor.directAmbientOcclusion = GTAOMultiBounce(directAmbientOcclusion, diffuseColor);
}

6
com.unity.render-pipelines.high-definition/HDRP/Material/Decal/Decal.cs.hlsl


float4 diffuseScaleBias;
float4 normalScaleBias;
float4 maskScaleBias;
float4 baseColor;
float4 baseColor;
};
//

float4 GetMaskScaleBias(DecalData value)
{
return value.maskScaleBias;
}
float4 GetBaseColor(DecalData value)
{
return value.baseColor;
}
//

101
com.unity.render-pipelines.high-definition/HDRP/Material/Lit/Lit.hlsl


#define CLEAR_COAT_IOR 1.5
#define CLEAR_COAT_IETA (1.0 / CLEAR_COAT_IOR) // IETA is the inverse eta, the ratio of IOR of the two interfaces
#define CLEAR_COAT_F0 0.04 // IORToFresnel0(CLEAR_COAT_IOR)
#define CLEAR_COAT_ROUGHNESS 0.001
#define CLEAR_COAT_ROUGHNESS 0.03
#define CLEAR_COAT_PERCEPTUAL_SMOOTHNESS RoughnessToPerceptualSmoothness(CLEAR_COAT_ROUGHNESS)
#define CLEAR_COAT_PERCEPTUAL_ROUGHNESS RoughnessToPerceptualRoughness(CLEAR_COAT_ROUGHNESS)
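As a quick check on CLEAR_COAT_F0, Fresnel reflectance at normal incidence for an IOR of 1.5 against air is 0.04; a hedged sketch (the helper name is illustrative, CoreRP ships an equivalent conversion):
// Sketch: F0 = ((n - 1) / (n + 1))^2 with air as the outer medium.
float IorToFresnel0_Sketch(float ior)
{
    float t = (ior - 1.0) / (ior + 1.0);
    return t * t; // ior = 1.5 -> (0.5 / 2.5)^2 = 0.04
}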

#define USE_DEFERRED_DIRECTIONAL_SHADOWS // Deferred shadows are always enabled for opaque objects
#endif
#include "../../Lighting/LightEvaluation.hlsl"
//-----------------------------------------------------------------------------
// Lighting structure for light accumulation
//-----------------------------------------------------------------------------
// These structures allow lighting to be accumulated across the Lit material
// AggregateLighting is initialized to zero and transferred to EvaluateBSDF, but the LightLoop can't access its content.
struct DirectLighting
{
float3 diffuse;
float3 specular;
};
struct IndirectLighting
{
float3 specularReflected;
float3 specularTransmitted;
};
struct AggregateLighting
{
DirectLighting direct;
IndirectLighting indirect;
};
void AccumulateDirectLighting(DirectLighting src, inout AggregateLighting dst)
{
dst.direct.diffuse += src.diffuse;
dst.direct.specular += src.specular;
}
void AccumulateIndirectLighting(IndirectLighting src, inout AggregateLighting dst)
{
dst.indirect.specularReflected += src.specularReflected;
dst.indirect.specularTransmitted += src.specularTransmitted;
}
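For context, these Accumulate* helpers are what the light loop calls after evaluating each light; a minimal sketch of that usage (the EvaluateBSDF_Directional call and its arguments stand in for the real LightLoop code):
// Sketch: how LightLoop.hlsl accumulates per-light contributions into AggregateLighting.
AggregateLighting aggregateLighting;
ZERO_INITIALIZE(AggregateLighting, aggregateLighting); // init to zero, as the comment above notes

DirectLighting lighting = EvaluateBSDF_Directional(context, V, posInput, preLightData, light, bsdfData, bakeLightingData);
AccumulateDirectLighting(lighting, aggregateLighting);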
#include "HDRP/Material/MaterialEvaluation.hlsl"
#include "HDRP/Lighting/LightEvaluation.hlsl"
//-----------------------------------------------------------------------------
// BSDF shared between directional, punctual and area lights (reference)

PreLightData preLightData, BSDFData bsdfData, BakeLightingData bakeLightingData, AggregateLighting lighting,
out float3 diffuseLighting, out float3 specularLighting)
{
float3 bakeDiffuseLighting = bakeLightingData.bakeDiffuseLighting;
AmbientOcclusionFactor aoFactor;
// Use GTAOMultiBounce approximation for ambient occlusion (allow to get a tint from the baseColor)
#if 0

#endif
// Add indirect diffuse + emissive (if any) - ambient occlusion is multiplied by emissive, which is wrong but not a big deal
bakeDiffuseLighting *= aoFactor.indirectAmbientOcclusion;
lighting.indirect.specularReflected *= aoFactor.indirectSpecularOcclusion;
lighting.direct.diffuse *= aoFactor.directAmbientOcclusion;
ApplyAmbientOcclusionFactor(aoFactor, bakeLightingData, lighting);
// Subsurface scattering mode
uint texturingMode = (bsdfData.materialFeatures >> MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET) & 3;

// diffuse lighting has already had the albedo applied in GetBakedDiffuseLighting().
diffuseLighting = modifiedDiffuseColor * lighting.direct.diffuse + bakeDiffuseLighting;
diffuseLighting = modifiedDiffuseColor * lighting.direct.diffuse + bakeLightingData.bakeDiffuseLighting;
// If refraction is enabled, we use the transmittanceMask to lerp between the current diffuse lighting and the refraction value
// Physically speaking, transmittanceMask should be 1, but for artistic reasons, we let the value vary

specularLighting *= 1.0 + bsdfData.fresnel0 * preLightData.energyCompensation;
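// Hedged note (not part of this change): with E(NdotV, roughness) the pre-integrated
// single-scattering directional albedo (the FGD term), the usual Kulla-Conty style factor is
// energyCompensation = 1.0 / E - 1.0, so the line above scales specular by
// 1 + fresnel0 * (1 / E - 1) to restore the energy lost to multiple scattering.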
#ifdef DEBUG_DISPLAY
if (_DebugLightingMode != 0)
{
// Caution: _DebugLightingMode is used in other parts of the code; don't do anything outside of
// the current cases
switch (_DebugLightingMode)
{
case DEBUGLIGHTINGMODE_LUX_METER:
diffuseLighting = lighting.direct.diffuse + bakeLightingData.bakeDiffuseLighting;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION:
diffuseLighting = aoFactor.indirectAmbientOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION:
diffuseLighting = aoFactor.indirectSpecularOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION:
if (_DebugLightingSubMode != DEBUGSCREENSPACETRACING_COLOR)
diffuseLighting = lighting.indirect.specularTransmitted;
break;
case DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFLECTION:
if (_DebugLightingSubMode != DEBUGSCREENSPACETRACING_COLOR)
diffuseLighting = lighting.indirect.specularReflected;
break;
case DEBUGLIGHTINGMODE_VISUALIZE_SHADOW_MASKS:
#ifdef SHADOWS_SHADOWMASK
diffuseLighting = float3(
bakeLightingData.bakeShadowMask.r / 2 + bakeLightingData.bakeShadowMask.g / 2,
bakeLightingData.bakeShadowMask.g / 2 + bakeLightingData.bakeShadowMask.b / 2,
bakeLightingData.bakeShadowMask.b / 2 + bakeLightingData.bakeShadowMask.a / 2
);
specularLighting = float3(0, 0, 0);
#endif
break ;
}
}
else if (_DebugMipMapMode != DEBUGMIPMAPMODE_NONE)
{
diffuseLighting = bsdfData.diffuseColor;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
}
PostEvaluateBSDFDebugDisplay(aoFactor, bakeLightingData, lighting, bsdfData.diffuseColor, diffuseLighting, specularLighting);
#endif
}

110
com.unity.render-pipelines.high-definition/HDRP/Material/StackLit/StackLit.hlsl


#include "StackLit.cs.hlsl"
#include "../SubsurfaceScattering/SubsurfaceScattering.hlsl"
#include "../NormalBuffer.hlsl"
//#include "CoreRP/ShaderLibrary/VolumeRendering.hlsl"
#include "CoreRP/ShaderLibrary/VolumeRendering.hlsl"
//NEWLITTODO: wire up CBUFFERs for ambient occlusion and other uniforms and samplers used:
//NEWLITTODO: wire up CBUFFERs for uniforms and samplers used:
//
// We need this for AO, Depth/Color pyramids, LTC lights data, FGD pre-integrated data.
//

#define USE_DEFERRED_DIRECTIONAL_SHADOWS // Deferred shadows are always enabled for opaque objects
#endif
#include "../../Lighting/LightEvaluation.hlsl"
#include "../../Lighting/Reflection/VolumeProjection.hlsl"
//-----------------------------------------------------------------------------
// Lighting structure for light accumulation
//-----------------------------------------------------------------------------
// These structures allow lighting to be accumulated across the Lit material
// AggregateLighting is initialized to zero and transferred to EvaluateBSDF, but the LightLoop can't access its content.
//
// In fact, all structures here are opaque but used by LightLoop.hlsl.
// The Accumulate* functions are also used by LightLoop to accumulate the contributions of lights.
//
struct DirectLighting
{
float3 diffuse;
float3 specular;
};
struct IndirectLighting
{
float3 specularReflected;
float3 specularTransmitted;
};
struct AggregateLighting
{
DirectLighting direct;
IndirectLighting indirect;
};
void AccumulateDirectLighting(DirectLighting src, inout AggregateLighting dst)
{
dst.direct.diffuse += src.diffuse;
dst.direct.specular += src.specular;
}
#include "HDRP/Material/MaterialEvaluation.hlsl"
#include "HDRP/Lighting/LightEvaluation.hlsl"
void AccumulateIndirectLighting(IndirectLighting src, inout AggregateLighting dst)
{
dst.indirect.specularReflected += src.specularReflected;
dst.indirect.specularTransmitted += src.specularTransmitted;
}
#include "HDRP/Lighting/Reflection/VolumeProjection.hlsl"
//-----------------------------------------------------------------------------
// BSDF shared between directional, punctual and area lights (reference)

PreLightData preLightData, BSDFData bsdfData, BakeLightingData bakeLightingData, AggregateLighting lighting,
out float3 diffuseLighting, out float3 specularLighting)
{
float3 bakeDiffuseLighting = bakeLightingData.bakeDiffuseLighting;
float3 N; float unclampedNdotV;
float3 N;
float unclampedNdotV;
EvaluateBSDF_GetNormalUnclampedNdotV(bsdfData, preLightData, V, N, unclampedNdotV);
AmbientOcclusionFactor aoFactor;

// Add indirect diffuse + emissive (if any) - ambient occlusion is multiplied by emissive, which is wrong but not a big deal
bakeDiffuseLighting *= aoFactor.indirectAmbientOcclusion;
lighting.direct.diffuse *= aoFactor.directAmbientOcclusion;
ApplyAmbientOcclusionFactor(aoFactor, bakeLightingData, lighting);
// Subsurface scattering mode
uint texturingMode = (bsdfData.materialFeatures >> MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET) & 3;

// diffuse lighting has already had the albedo applied in GetBakedDiffuseLighting().
diffuseLighting = modifiedDiffuseColor * lighting.direct.diffuse + bakeDiffuseLighting;
diffuseLighting = modifiedDiffuseColor * lighting.direct.diffuse + bakeLightingData.bakeDiffuseLighting;
if (_DebugLightingMode != 0)
{
// Caution: _DebugLightingMode is used in other parts of the code; don't do anything outside of
// the current cases
switch (_DebugLightingMode)
{
case DEBUGLIGHTINGMODE_LUX_METER:
diffuseLighting = lighting.direct.diffuse + bakeLightingData.bakeDiffuseLighting;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION:
diffuseLighting = aoFactor.indirectAmbientOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION:
diffuseLighting = aoFactor.indirectSpecularOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION:
//if (_DebugLightingSubMode != DEBUGSCREENSPACETRACING_COLOR)
// diffuseLighting = lighting.indirect.specularTransmitted;
break;
case DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFLECTION:
//if (_DebugLightingSubMode != DEBUGSCREENSPACETRACING_COLOR)
// diffuseLighting = lighting.indirect.specularReflected;
break;
case DEBUGLIGHTINGMODE_VISUALIZE_SHADOW_MASKS:
#ifdef SHADOWS_SHADOWMASK
diffuseLighting = float3(
bakeLightingData.bakeShadowMask.r / 2 + bakeLightingData.bakeShadowMask.g / 2,
bakeLightingData.bakeShadowMask.g / 2 + bakeLightingData.bakeShadowMask.b / 2,
bakeLightingData.bakeShadowMask.b / 2 + bakeLightingData.bakeShadowMask.a / 2
);
specularLighting = float3(0, 0, 0);
#endif
break ;
}
}
else if (_DebugMipMapMode != DEBUGMIPMAPMODE_NONE)
{
diffuseLighting = bsdfData.diffuseColor;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
}
PostEvaluateBSDFDebugDisplay(aoFactor, bakeLightingData, lighting, bsdfData.diffuseColor, diffuseLighting, specularLighting);
#endif
}

76
com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs


else
{
m_CurrentDebugDisplaySettings = m_DebugDisplaySettings;
}
using (new ProfilingSample(cmd, "Volume Update", CustomSamplerId.VolumeUpdate.GetSampler()))
{
LayerMask layerMask = -1;
if (additionalCameraData != null)
{
layerMask = additionalCameraData.volumeLayerMask;
}
else
// Temporary hack:
// For scene view, by default, we use the "main" camera volume layer mask if it exists
// Otherwise we just remove the lighting override layers in the current sky to avoid conflicts
// This is arbitrary and should be editable in the scene view somehow.
if (camera.cameraType == CameraType.SceneView)
var mainCamera = Camera.main;
bool needFallback = true;
if (mainCamera != null)
var mainCamAdditionalData = mainCamera.GetComponent<HDAdditionalCameraData>();
if (mainCamAdditionalData != null)
{
layerMask = mainCamAdditionalData.volumeLayerMask;
needFallback = false;
}
if (needFallback)
{
// If the override layer is "Everything", we fall-back to "Everything" for the current layer mask to avoid issues by having no current layer
// In practice we should never have "Everything" as an override mask as it does not make sense (a warning is issued in the UI)
if (m_Asset.renderPipelineSettings.lightLoopSettings.skyLightingOverrideLayerMask == -1)
layerMask = -1;
else
layerMask = (-1 & ~m_Asset.renderPipelineSettings.lightLoopSettings.skyLightingOverrideLayerMask);
}
VolumeManager.instance.Update(camera.transform, layerMask);
}
var postProcessLayer = camera.GetComponent<PostProcessLayer>();

ClearBuffers(hdCamera, cmd);
// TODO: Add stereo occlusion mask
RenderDepthPrepass(m_CullResults, hdCamera, renderContext, cmd, m_DbufferManager.EnableDBUffer);
RenderDepthPrepass(m_CullResults, hdCamera, renderContext, cmd);
// This will bind the depth buffer if needed for the DBuffer
RenderDBuffer(hdCamera, cmd, renderContext, m_CullResults);

// Forward only renderer: We always render everything
// Deferred renderer: We always render a depth prepass for alpha-tested objects (optimization); other objects are rendered based on the engine configuration.
// Forward opaque with deferred renderer (DepthForwardOnly pass): We always render everything
void RenderDepthPrepass(CullResults cull, HDCamera hdCamera, ScriptableRenderContext renderContext, CommandBuffer cmd, bool forcePrepass)
void RenderDepthPrepass(CullResults cull, HDCamera hdCamera, ScriptableRenderContext renderContext, CommandBuffer cmd)
{
// In the case of the deferred renderer, we can have forward opaque materials. These materials need to be rendered in the depth buffer to correctly build the light list.
// And they will tag the stencil so they are not lit during the deferred lighting pass.

RenderOpaqueRenderList(cull, hdCamera, renderContext, cmd, m_DepthOnlyAndDepthForwardOnlyPassNames, 0, HDRenderQueue.k_RenderQueue_AllOpaque);
}
}
else if (hdCamera.frameSettings.enableDepthPrepassWithDeferredRendering || forcePrepass)
// If we enable DBuffer, we need a full depth prepass
else if (hdCamera.frameSettings.enableDepthPrepassWithDeferredRendering || m_DbufferManager.EnableDBUffer)
using (new ProfilingSample(cmd, "Depth Prepass (deferred)", CustomSamplerId.DepthPrepass.GetSampler()))
using (new ProfilingSample(cmd, m_DbufferManager.EnableDBUffer ? "Depth Prepass (deferred) force by DBuffer" : "Depth Prepass (deferred)", CustomSamplerId.DepthPrepass.GetSampler()))
{
cmd.DisableShaderKeyword("WRITE_NORMAL_BUFFER"); // Note: This only disables the normal buffer output for the Lit shader, not the other shaders that don't use multicompile

return;
}
// TODO: Move allocation in separate method call in start of the render loop
var cameraRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorPyramid)
?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.ColorPyramid, m_BufferPyramid.AllocColorRT);

if (!hdCamera.frameSettings.enableRoughRefraction)
return;
// TODO: Move allocation in separate method call in start of the render loop
var cameraRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.DepthPyramid)
?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.DepthPyramid, m_BufferPyramid.AllocDepthRT);

cmd.SetGlobalInt(HDShaderIDs._DebugLightingMode, (int)m_CurrentDebugDisplaySettings.GetDebugLightingMode());
cmd.SetGlobalInt(HDShaderIDs._DebugLightingSubMode, (int)m_CurrentDebugDisplaySettings.GetDebugLightingSubMode());
cmd.SetGlobalInt(HDShaderIDs._DebugMipMapMode, (int)m_CurrentDebugDisplaySettings.GetDebugMipMapMode());
cmd.SetGlobalInt(HDShaderIDs._ColorPickerMode, (int)m_CurrentDebugDisplaySettings.GetDebugColorPickerMode());
cmd.SetGlobalVector(HDShaderIDs._DebugLightingAlbedo, debugAlbedo);
cmd.SetGlobalVector(HDShaderIDs._DebugLightingSmoothness, debugSmoothness);

// (i.e. we have performed a flip, so we need to flip the input texture)
m_DebugFullScreen.SetFloat(HDShaderIDs._RequireToFlipInputTexture, hdCamera.camera.cameraType != CameraType.SceneView ? 1.0f : 0.0f);
m_DebugFullScreen.SetBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
m_DebugFullScreen.SetTexture(HDShaderIDs._DepthPyramidTexture, hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.DepthPyramid));
HDUtils.DrawFullScreen(cmd, hdCamera, m_DebugFullScreen, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget);
PushColorPickerDebugTexture(hdCamera, cmd, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget);

cmd.SetGlobalTexture(HDShaderIDs._DebugColorPickerTexture, m_DebugColorPickerBuffer); // No SetTexture with RenderTarget identifier... so use SetGlobalTexture
// TODO: Replace with command buffer call when available
m_DebugColorPicker.SetColor(HDShaderIDs._ColorPickerFontColor, colorPickerDebugSettings.fontColor);
m_DebugColorPicker.SetInt(HDShaderIDs._ColorPickerMode, (int)colorPickerDebugSettings.colorPickerMode);
m_DebugColorPicker.SetInt(HDShaderIDs._FalseColorEnabled, falseColorDebugSettings.falseColor ? 1 : 0);
m_DebugColorPicker.SetVector(HDShaderIDs._FalseColorThresholds, falseColorThresholds);
// The material display debug performs an sRGBToLinear conversion, as the final blit currently hardcodes a linearToSrgb conversion. Since this is not done when we read with the color picker,

1
com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/SceneViewDrawMode.cs


cameraMode.drawMode == DrawCameraMode.DeferredNormal ||
cameraMode.drawMode == DrawCameraMode.ValidateAlbedo ||
cameraMode.drawMode == DrawCameraMode.ValidateMetalSpecular ||
cameraMode.drawMode == DrawCameraMode.ShadowMasks ||
cameraMode.drawMode == DrawCameraMode.LightOverlap
)
return false;

5
com.unity.render-pipelines.lightweight/CHANGELOG.md


## [Unreleased]
### Fixed
- GLCore compute buffer compiler error
## [2.0.1-preview]
### Fixed
- VR Single Pass Instancing shadows
## [2.0.0-preview]

5
com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGraph/lightweightPBRForwardPass.template


#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _LOCAL_SHADOWS_ENABLED
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
// -------------------------------------
// Unity defined keywords
#pragma multi_compile _ DIRLIGHTMAP_COMBINED

}
ENDHLSL
}
}

10
com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderPreprocessor.cs


public static readonly ShaderKeyword DirectionalShadows = new ShaderKeyword(LightweightKeywordStrings.DirectionalShadows);
public static readonly ShaderKeyword LocalShadows = new ShaderKeyword(LightweightKeywordStrings.LocalShadows);
public static readonly ShaderKeyword SoftShadows = new ShaderKeyword(LightweightKeywordStrings.SoftShadows);
public static readonly ShaderKeyword CascadeShadows = new ShaderKeyword(LightweightKeywordStrings.CascadeShadows);
public static readonly ShaderKeyword Lightmap = new ShaderKeyword("LIGHTMAP_ON");
public static readonly ShaderKeyword DirectionalLightmap = new ShaderKeyword("DIRLIGHTMAP_COMBINED");

bool StripInvalidVariants(ShaderCompilerData compilerData)
{
bool isShadowVariant = compilerData.shaderKeywordSet.IsEnabled(LightweightKeyword.DirectionalShadows) ||
compilerData.shaderKeywordSet.IsEnabled(LightweightKeyword.LocalShadows);
bool isDirectionalShadows = compilerData.shaderKeywordSet.IsEnabled(LightweightKeyword.DirectionalShadows);
bool isShadowVariant = isDirectionalShadows || compilerData.shaderKeywordSet.IsEnabled(LightweightKeyword.LocalShadows);
if (!isDirectionalShadows && compilerData.shaderKeywordSet.IsEnabled(LightweightKeyword.CascadeShadows))
return true;
if (compilerData.shaderKeywordSet.IsEnabled(LightweightKeyword.SoftShadows) && !isShadowVariant)
if (!isShadowVariant && compilerData.shaderKeywordSet.IsEnabled(LightweightKeyword.SoftShadows))
return true;
if (compilerData.shaderKeywordSet.IsEnabled(LightweightKeyword.VertexLights) &&

4
com.unity.render-pipelines.lightweight/LWRP/LightweightForwardRenderer.cs


get
{
return SystemInfo.supportsComputeShaders &&
!Application.isMobilePlatform && Application.platform != RuntimePlatform.WebGLPlayer;
SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLCore &&
!Application.isMobilePlatform &&
Application.platform != RuntimePlatform.WebGLPlayer;
}
}

4
com.unity.render-pipelines.lightweight/LWRP/LightweightPipeline.cs


bool supportsScreenSpaceShadows = SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;
shadowData.renderDirectionalShadows = pipelineAsset.supportsDirectionalShadows && hasDirectionalShadowCastingLight;
shadowData.requiresScreenSpaceShadowResolve = shadowData.renderDirectionalShadows && supportsScreenSpaceShadows;
// we resolve shadows in screen space when cascades are enabled to save ALU, as computing the cascade index + shadowCoord per fragment is expensive
shadowData.requiresScreenSpaceShadowResolve = shadowData.renderDirectionalShadows && supportsScreenSpaceShadows && pipelineAsset.cascadeCount > 1;
shadowData.directionalLightCascadeCount = (shadowData.requiresScreenSpaceShadowResolve) ? pipelineAsset.cascadeCount : 1;
shadowData.directionalShadowAtlasWidth = pipelineAsset.directionalShadowAtlasResolution;
shadowData.directionalShadowAtlasHeight = pipelineAsset.directionalShadowAtlasResolution;

10
com.unity.render-pipelines.lightweight/LWRP/Passes/DirectionalShadowsPass.cs


if (success)
{
shadowQuality = (shadowData.supportsSoftShadows) ? light.shadows : LightShadows.Hard;
// In order to avoid a shader variant explosion, we only do hard shadows when sampling the shadowmap in the lit pass.
// The GLES2 platform is forced to hard single-cascade shadows.
if (!shadowData.requiresScreenSpaceShadowResolve)
shadowQuality = LightShadows.Hard;
SetupDirectionalShadowReceiverConstants(ref context, cmd, ref shadowData, shadowLight);
SetupDirectionalShadowReceiverConstants(cmd, ref shadowData, shadowLight);
}
}
context.ExecuteCommandBuffer(cmd);

shadowData.renderedDirectionalShadowQuality = shadowQuality;
}
void SetupDirectionalShadowReceiverConstants(ref ScriptableRenderContext context, CommandBuffer cmd, ref ShadowData shadowData, VisibleLight shadowLight)
void SetupDirectionalShadowReceiverConstants(CommandBuffer cmd, ref ShadowData shadowData, VisibleLight shadowLight)
{
Light light = shadowLight.light;

1
com.unity.render-pipelines.lightweight/LWRP/Passes/ForwardLitPass.cs


CoreUtils.SetKeyword(cmd, LightweightKeywordStrings.DirectionalShadows, directionalShadowQuality != LightShadows.None);
CoreUtils.SetKeyword(cmd, LightweightKeywordStrings.LocalShadows, localShadowQuality != LightShadows.None);
CoreUtils.SetKeyword(cmd, LightweightKeywordStrings.SoftShadows, hasSoftShadows);
CoreUtils.SetKeyword(cmd, LightweightKeywordStrings.CascadeShadows, shadowData.directionalLightCascadeCount > 1);
// TODO: Remove this. legacy particles support will be removed from Unity in 2018.3. This should be a shader_feature instead with prop exposed in the Standard particles shader.
CoreUtils.SetKeyword(cmd, "SOFTPARTICLES_ON", cameraData.requiresSoftParticles);

2
com.unity.render-pipelines.lightweight/LWRP/ShaderLibrary/Input.hlsl


// Must match the compute buffer check in LightweightPipeline.cs
// GLES check here because of WebGL 1.0 support
// TODO: check performance of using StructuredBuffer on mobile as well
#if defined(SHADER_API_MOBILE) || defined(SHADER_API_GLES)
#if defined(SHADER_API_MOBILE) || defined(SHADER_API_GLES) || defined(SHADER_API_GLCORE)
#define USE_STRUCTURED_BUFFER_FOR_LIGHT_DATA 0
#else
#define USE_STRUCTURED_BUFFER_FOR_LIGHT_DATA 1

7
com.unity.render-pipelines.lightweight/LWRP/ShaderLibrary/Shadows.hlsl


#define MAX_SHADOW_CASCADES 4
#ifndef SHADOWS_SCREEN
#ifdef SHADER_API_GLES
#define SHADOWS_SCREEN 0
#else
#if defined(_SHADOWS_ENABLED) && defined(_SHADOWS_CASCADE) && !defined(SHADER_API_GLES)
#else
#define SHADOWS_SCREEN 0
#endif
#endif
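The branch taken when the new condition holds is elided by the diff; read as a whole, the resolved block plausibly looks like the sketch below (the value 1 in the first branch is an assumption, not shown here):
#ifndef SHADOWS_SCREEN
#if defined(_SHADOWS_ENABLED) && defined(_SHADOWS_CASCADE) && !defined(SHADER_API_GLES)
#define SHADOWS_SCREEN 1 // assumed: resolve directional shadows in screen space when cascades are enabled
#else
#define SHADOWS_SCREEN 0
#endif
#endif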

half shadowStrength = GetMainLightShadowStrength();
return SampleShadowmap(shadowCoord, TEXTURE2D_PARAM(_DirectionalShadowmapTexture, sampler_DirectionalShadowmapTexture), shadowSamplingData, shadowStrength);
#endif
}
half LocalLightRealtimeShadowAttenuation(int lightIndex, float3 positionWS)

7
com.unity.render-pipelines.lightweight/LWRP/Shaders/LightweightScreenSpaceShadows.shader


HLSLINCLUDE
// Note: Screen-space shadow resolve is only performed when shadow cascades are enabled
// Shadow cascades require the cascade index and shadowCoord to be computed per pixel.
#define _SHADOWS_CASCADE
#pragma prefer_hlslcc gles
#pragma exclude_renderers d3d11_9x
//Keep compiler quiet about Shadows.hlsl.

float3 wpos = mul(unity_CameraToWorld, float4(vpos, 1)).xyz;
//Fetch shadow coordinates for cascade.
float4 coords = TransformWorldToShadowCoord(wpos);
// Screenspace shadowmap is only used for directional lights which use orthogonal projection.
ShadowSamplingData shadowSamplingData = GetMainLightShadowSamplingData();

HLSLPROGRAM
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
#pragma vertex Vertex
#pragma fragment Fragment

1
com.unity.render-pipelines.lightweight/LWRP/Shaders/LightweightStandard.shader


#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _LOCAL_SHADOWS_ENABLED
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
// -------------------------------------
// Unity defined keywords

1
com.unity.render-pipelines.lightweight/LWRP/Shaders/LightweightStandardSimpleLighting.shader


#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _LOCAL_SHADOWS_ENABLED
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
// -------------------------------------
// Unity defined keywords

1
com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightStandardTerrain.shader


#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _LOCAL_SHADOWS_ENABLED
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
// -------------------------------------
// Unity defined keywords

1
com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightStandardTerrainAddPass.shader


#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _LOCAL_SHADOWS_ENABLED
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
// -------------------------------------
// Unity defined keywords

1
com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightStandardTerrainBase.shader


#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _LOCAL_SHADOWS_ENABLED
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
// -------------------------------------
// Unity defined keywords

1
com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightWavingGrass.shader


#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _LOCAL_SHADOWS_ENABLED
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
// -------------------------------------
// Unity defined keywords

1
com.unity.render-pipelines.lightweight/LWRP/Shaders/Terrain/LightweightWavingGrassBillboard.shader


#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _LOCAL_SHADOWS_ENABLED
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
// -------------------------------------
// Unity defined keywords

68
com.unity.shadergraph/Editor/Data/Util/GraphUtil.cs


}
}
public static System.Text.StringBuilder PreprocessShaderCode(string code, HashSet<string> activeFields, Dictionary<string, string> namedFragments = null, System.Text.StringBuilder result = null)
// returns the offset of the first non-whitespace character, in the range [start, end] inclusive ... will return end if none found
private static int SkipWhitespace(string str, int start, int end)
{
int index = start;
while (index < end)
{
char c = str[index];
if (!Char.IsWhiteSpace(c))
{
break;
}
index++;
}
return index;
}
public static System.Text.StringBuilder PreprocessShaderCode(string code, HashSet<string> activeFields, Dictionary<string, string> namedFragments, System.Text.StringBuilder result, bool debugOutput)
bool skipEndln = false;
while (cur < end)
{

{
// found $ escape sequence
// first append everything before the beginning of the escape sequence
AppendSubstring(result, code, cur, true, dollar, false);
// next find the end of the line (or if none found, the end of the code)
// find the end of the line (or if none found, the end of the code)
int endln = code.IndexOf('\n', dollar + 1);
if (endln < 0)
{

int nameLength = curlyend - dollar + 1;
if ((curlyend < 0) || (nameLength <= 0))
{
// no } found, or zero length name
// append everything before the beginning of the escape sequence
AppendSubstring(result, code, cur, true, dollar, false);
if (curlyend < 0)
{
result.Append("// ERROR: unterminated escape sequence ('${' and '}' must be matched)\n");

// ugh, this probably allocates memory -- wish we could do the name lookup direct from a substring
string name = code.Substring(dollar, nameLength);
// append everything before the beginning of the escape sequence
AppendSubstring(result, code, cur, true, dollar, false);
string fragment;
if ((namedFragments != null) && namedFragments.TryGetValue(name, out fragment))
{

if ((colon < 0) || (predicateLength <= 0))
{
// no colon found... error! Spit out error and context
// append everything before the beginning of the escape sequence
AppendSubstring(result, code, cur, true, dollar, false);
if (colon < 0)
{
result.Append("// ERROR: unterminated escape sequence ('$' and ':' must be matched)\n");

// colon found!
// ugh, this probably allocates memory -- wish we could do the field lookup direct from a substring
string predicate = code.Substring(dollar + 1, predicateLength);
int nonwhitespace = SkipWhitespace(code, colon + 1, endln);
// append everything before the beginning of the escape sequence
AppendSubstring(result, code, cur, true, dollar, false);
result.Append(' ', predicateLength + 2);
AppendSubstring(result, code, colon, false, endln, false);
AppendSubstring(result, code, nonwhitespace, true, endln, false);
// predicate is not active -- comment out line
result.Append("//");
result.Append(' ', predicateLength);
AppendSubstring(result, code, colon, false, endln, false);
// predicate is not active
if (debugOutput)
{
// append everything before the beginning of the escape sequence
AppendSubstring(result, code, cur, true, dollar, false);
result.Append("// ");
AppendSubstring(result, code, nonwhitespace, true, endln, false);
}
else
{
skipEndln = true;
}
}
if (!skipEndln)
{
result.AppendLine();
}
return result;
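To make the predicate handling above concrete, here is a hedged illustration of how one template line is rewritten; the field name SurfaceDescriptionInputs.WorldSpaceNormal is purely illustrative:
// Template input line:
//     $SurfaceDescriptionInputs.WorldSpaceNormal: float3 WorldSpaceNormal;
// Predicate in activeFields: the "$...:" prefix is blanked out with spaces and the code is kept:
//                                                 float3 WorldSpaceNormal;
// Predicate not active, debugOutput == true: the code is emitted commented out:
//     // float3 WorldSpaceNormal;
// Predicate not active, debugOutput == false: the line is skipped entirely.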

18
com.unity.shadergraph/Editor/Drawing/Blackboard/BlackboardFieldPropertyView.cs


m_Graph = graph;
m_Property = property;
m_ExposedToogle = new Toggle(() =>
{
property.generatePropertyBlock = m_ExposedToogle.value;
DirtyNodes(ModificationScope.Graph);
});
m_ExposedToogle = new Toggle();
m_ExposedToogle.OnToggleChanged(evt =>
{
property.generatePropertyBlock = evt.newValue;
DirtyNodes(ModificationScope.Graph);
});
m_ExposedToogle.value = property.generatePropertyBlock;
AddRow("Exposed", m_ExposedToogle);

else if (property is BooleanShaderProperty)
{
var booleanProperty = (BooleanShaderProperty)property;
Action onBooleanChanged = () =>
EventCallback<ChangeEvent<bool>> onBooleanChanged = evt =>
booleanProperty.value = !booleanProperty.value;
booleanProperty.value = evt.newValue;
var field = new Toggle(onBooleanChanged);
var field = new Toggle();
field.OnToggleChanged(onBooleanChanged);
field.value = booleanProperty.value;
AddRow("Default", field);
}

8
com.unity.shadergraph/Editor/Drawing/Controls/ToggleControl.cs


var panel = new VisualElement { name = "togglePanel" };
if (!string.IsNullOrEmpty(label))
panel.Add(new Label(label));
Action changedToggle = () => { OnChangeToggle(); };
m_Toggle = new UnityEngine.Experimental.UIElements.Toggle(changedToggle);
m_Toggle = new Toggle();
m_Toggle.OnToggleChanged(OnChangeToggle);
m_Toggle.SetEnabled(value.isEnabled);
m_Toggle.value = value.isOn;
panel.Add(m_Toggle);

}
}
void OnChangeToggle()
void OnChangeToggle(ChangeEvent<bool> evt)
value.isOn = !value.isOn;
value.isOn = evt.newValue;
m_PropertyInfo.SetValue(m_Node, value, null);
this.MarkDirtyRepaint();
}

8
com.unity.shadergraph/Editor/Drawing/Views/PBRSettingsView.cs


ps.Add(new PropertyRow(new Label("Two Sided")), (row) =>
{
row.Add(new Toggle(null), (toggle) =>
row.Add(new Toggle(), (toggle) =>
toggle.OnToggle(ChangeTwoSided);
toggle.OnToggleChanged(ChangeTwoSided);
});
});

m_Node.alphaMode = (AlphaMode)evt.newValue;
}
void ChangeTwoSided()
void ChangeTwoSided(ChangeEvent<bool> evt)
td.isOn ^= true;
td.isOn = evt.newValue;
m_Node.twoSided = td;
}
}

8
com.unity.shadergraph/Editor/Drawing/Views/Slots/BooleanSlotControlView.cs


{
AddStyleSheetPath("Styles/Controls/BooleanSlotControlView");
m_Slot = slot;
Action changedToggle = () => { OnChangeToggle(); };
var toggleField = new UnityEngine.Experimental.UIElements.Toggle(changedToggle);
var toggleField = new Toggle();
toggleField.OnToggleChanged(OnChangeToggle);
void OnChangeToggle()
void OnChangeToggle(ChangeEvent<bool> evt)
value = !value;
value = evt.newValue;
m_Slot.value = value;
m_Slot.owner.Dirty(ModificationScope.Node);
}

8
com.unity.shadergraph/Editor/Drawing/Views/UnlitSettingsView.cs


ps.Add(new PropertyRow(new Label("Two Sided")), (row) =>
{
row.Add(new Toggle(null), (toggle) =>
row.Add(new Toggle(), (toggle) =>
toggle.OnToggle(ChangeTwoSided);
toggle.OnToggleChanged(ChangeTwoSided);
});
});

m_Node.alphaMode = (AlphaMode)evt.newValue;
}
void ChangeTwoSided()
void ChangeTwoSided(ChangeEvent<bool> evt)
td.isOn ^= true;
td.isOn = evt.newValue;
m_Node.twoSided = td;
}
}

9
com.unity.shadergraph/Editor/Util/CompatibilityExtensions.cs


element.ReleaseMouseCapture();
}
#endif
public static void OnToggleChanged(this Toggle toggle, EventCallback<ChangeEvent<bool>> callback)
{
#if UNITY_2018_3_OR_NEWER
toggle.OnValueChanged(callback);
#else
toggle.OnToggle(() => callback(ChangeEvent<bool>.GetPooled(!toggle.value, toggle.value)));
#endif
}
}
static class TrickleDownEnum

168
com.unity.render-pipelines.high-definition/HDRP/Material/MaterialEvaluation.hlsl


// This file includes various functions used to evaluate materials
//-----------------------------------------------------------------------------
// Lighting structure for light accumulation
//-----------------------------------------------------------------------------
// These structures allow lighting to be accumulated across the Lit material
// AggregateLighting is initialized to zero and transferred to EvaluateBSDF, but the LightLoop can't access its content.
struct DirectLighting
{
float3 diffuse;
float3 specular;
};
struct IndirectLighting
{
float3 specularReflected;
float3 specularTransmitted;
};
struct AggregateLighting
{
DirectLighting direct;
IndirectLighting indirect;
};
void AccumulateDirectLighting(DirectLighting src, inout AggregateLighting dst)
{
dst.direct.diffuse += src.diffuse;
dst.direct.specular += src.specular;
}
void AccumulateIndirectLighting(IndirectLighting src, inout AggregateLighting dst)
{
dst.indirect.specularReflected += src.specularReflected;
dst.indirect.specularTransmitted += src.specularTransmitted;
}
//-----------------------------------------------------------------------------
// Ambient occlusion helper
//-----------------------------------------------------------------------------
// Ambient occlusion
struct AmbientOcclusionFactor
{
float3 indirectAmbientOcclusion;
float3 directAmbientOcclusion;
float3 indirectSpecularOcclusion;
};
void GetScreenSpaceAmbientOcclusion(float2 positionSS, float NdotV, float perceptualRoughness, float ambientOcclusionFromData, float specularOcclusionFromData, out AmbientOcclusionFactor aoFactor)
{
// Note: When we ImageLoad outside of the texture size, the value returned by Load is 0 (Note: on Metal it may clamp to the texture's edge value, which is also fine)
// We use this property to get a neutral value for AO that doesn't consume a sampler and also works with compute shaders (i.e. use ImageLoad)
// We store inverse AO so neutral is black: whether we sample inside or outside the texture, it returns 0 in the neutral case
// Ambient occlusion used for indirect lighting (reflection probes, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, positionSS).x;
// Ambient occlusion used for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
#else
float indirectAmbientOcclusion = 1.0;
float directAmbientOcclusion = 1.0;
#endif
float roughness = PerceptualRoughnessToRoughness(perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(NdotV), indirectAmbientOcclusion, roughness);
aoFactor.indirectSpecularOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), min(specularOcclusionFromData, specularOcclusion));
aoFactor.indirectAmbientOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), min(ambientOcclusionFromData, indirectAmbientOcclusion));
aoFactor.directAmbientOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), directAmbientOcclusion);
}
void GetScreenSpaceAmbientOcclusionMultibounce(float2 positionSS, float NdotV, float perceptualRoughness, float ambientOcclusionFromData, float specularOcclusionFromData, float3 diffuseColor, float3 fresnel0, out AmbientOcclusionFactor aoFactor)
{
// Use the GTAOMultiBounce approximation for ambient occlusion (allows getting a tint from the diffuseColor)
// Note: When we ImageLoad outside of the texture size, the value returned by Load is 0 (Note: on Metal it may clamp to the texture's edge value, which is also fine)
// We use this property to get a neutral value for AO that doesn't consume a sampler and also works with compute shaders (i.e. use ImageLoad)
// We store inverse AO so neutral is black: whether we sample inside or outside the texture, it returns 0 in the neutral case
// Ambient occlusion used for indirect lighting (reflection probes, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, positionSS).x;
// Ambient occlusion used for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
#else
float indirectAmbientOcclusion = 1.0;
float directAmbientOcclusion = 1.0;
#endif
float roughness = PerceptualRoughnessToRoughness(perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(NdotV), indirectAmbientOcclusion, roughness);
aoFactor.indirectSpecularOcclusion = GTAOMultiBounce(min(specularOcclusionFromData, specularOcclusion), fresnel0);
aoFactor.indirectAmbientOcclusion = GTAOMultiBounce(min(ambientOcclusionFromData, indirectAmbientOcclusion), diffuseColor);
aoFactor.directAmbientOcclusion = GTAOMultiBounce(directAmbientOcclusion, diffuseColor);
}
void ApplyAmbientOcclusionFactor(AmbientOcclusionFactor aoFactor, inout BakeLightingData bakeLightingData, inout AggregateLighting lighting)
{
// Note: in the case of Lit, bakeLightingData.bakeDiffuseLighting contains indirect diffuse + emissive,
// so ambient occlusion is multiplied by emissive, which is wrong but not a big deal
bakeLightingData.bakeDiffuseLighting *= aoFactor.indirectAmbientOcclusion;
lighting.indirect.specularReflected *= aoFactor.indirectSpecularOcclusion;
lighting.direct.diffuse *= aoFactor.directAmbientOcclusion;
}
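Putting the helpers in this file together, the intended call pattern from a material's PostEvaluateBSDF (as the Lit.hlsl fragment earlier in this change does) is roughly the following; the exact BSDFData field names are taken from Lit and should be treated as assumptions here:
// Sketch: fill aoFactor from the screen-space AO texture and material data,
// then attenuate baked diffuse, indirect specular and direct diffuse in one call.
AmbientOcclusionFactor aoFactor;
GetScreenSpaceAmbientOcclusionMultibounce(posInput.positionSS, preLightData.NdotV, bsdfData.perceptualRoughness,
                                          bsdfData.ambientOcclusion, bsdfData.specularOcclusion,
                                          bsdfData.diffuseColor, bsdfData.fresnel0, aoFactor);
ApplyAmbientOcclusionFactor(aoFactor, bakeLightingData, lighting);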
#ifdef DEBUG_DISPLAY
// mipmapColor is the color used to store texture streaming information in XXXData.hlsl (look for DEBUGMIPMAPMODE_NONE)
void PostEvaluateBSDFDebugDisplay( AmbientOcclusionFactor aoFactor, BakeLightingData bakeLightingData, AggregateLighting lighting, float3 mipmapColor,
inout float3 diffuseLighting, inout float3 specularLighting)
{
if (_DebugLightingMode != 0)
{
// Caution: _DebugLightingMode is used in other parts of the code; don't do anything outside of
// the current cases
switch (_DebugLightingMode)
{
case DEBUGLIGHTINGMODE_LUX_METER:
diffuseLighting = lighting.direct.diffuse + bakeLightingData.bakeDiffuseLighting;
//Compress lighting values for color picker if enabled
if (_ColorPickerMode != COLORPICKERDEBUGMODE_NONE)
diffuseLighting = diffuseLighting / LUXMETER_COMPRESSION_RATIO;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION:
diffuseLighting = aoFactor.indirectAmbientOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION:
diffuseLighting = aoFactor.indirectSpecularOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION:
if (_DebugLightingSubMode != DEBUGSCREENSPACETRACING_COLOR)
diffuseLighting = lighting.indirect.specularTransmitted;
break;
case DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFLECTION:
if (_DebugLightingSubMode != DEBUGSCREENSPACETRACING_COLOR)
diffuseLighting = lighting.indirect.specularReflected;
break;
case DEBUGLIGHTINGMODE_VISUALIZE_SHADOW_MASKS:
#ifdef SHADOWS_SHADOWMASK
diffuseLighting = float3(
bakeLightingData.bakeShadowMask.r / 2 + bakeLightingData.bakeShadowMask.g / 2,
bakeLightingData.bakeShadowMask.g / 2 + bakeLightingData.bakeShadowMask.b / 2,
bakeLightingData.bakeShadowMask.b / 2 + bakeLightingData.bakeShadowMask.a / 2
);
specularLighting = float3(0, 0, 0);
#endif
break ;
}
}
else if (_DebugMipMapMode != DEBUGMIPMAPMODE_NONE)
{
diffuseLighting = mipmapColor;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
}
}
#endif

9
com.unity.render-pipelines.high-definition/HDRP/Material/MaterialEvaluation.hlsl.meta


fileFormatVersion: 2
guid: 3c80046db57367445b2a7c154c18879e
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
userData:
assetBundleName:
assetBundleVariant: