Pass
{
    // based on HDUnlitPassForward.template
    Name "${PassName}"
    Tags { "LightMode" = "${LightMode}" }

    //-------------------------------------------------------------------------------------
    // Render Modes (Blend, Cull, ZTest, Stencil, etc)
    //-------------------------------------------------------------------------------------
    ${Blending}
    ${Culling}
    ${ZTest}
    ${ZWrite}
    ${Stencil}
    ${ColorMask}
    //-------------------------------------------------------------------------------------
    // End Render Modes
    //-------------------------------------------------------------------------------------

    HLSLPROGRAM

    #pragma target 4.5
    #pragma only_renderers d3d11 ps4 xboxone vulkan metal switch
    //#pragma enable_d3d11_debug_symbols

    //-------------------------------------------------------------------------------------
    // Variant Definitions
    //-------------------------------------------------------------------------------------
    ${VariantDefines}
    //-------------------------------------------------------------------------------------
    // End Variant
    //-------------------------------------------------------------------------------------

    #pragma vertex Vert
    #pragma fragment Frag

    #define UNITY_MATERIAL_UNLIT // Needs to be defined before including Material.hlsl

    #include "CoreRP/ShaderLibrary/Common.hlsl"
    #include "CoreRP/ShaderLibrary/Wind.hlsl"

    #include "ShaderGraphLibrary/Functions.hlsl"

    // define FragInputs structure
    #include "HDRP/ShaderPass/FragInputs.hlsl"
    #include "HDRP/ShaderPass/ShaderPass.cs.hlsl"

    //-------------------------------------------------------------------------------------
    // Defines
    //-------------------------------------------------------------------------------------
    ${Defines}

    // This translates the new dependency tracker into the old preprocessor definitions
    // used by the existing HDRP shader code. Lines prefixed with "$Field:" are emitted by
    // the generator only when that field is active.
    $AttributesMesh.normalOS:         #define ATTRIBUTES_NEED_NORMAL
    $AttributesMesh.tangentOS:        #define ATTRIBUTES_NEED_TANGENT
    $AttributesMesh.uv0:              #define ATTRIBUTES_NEED_TEXCOORD0
    $AttributesMesh.uv1:              #define ATTRIBUTES_NEED_TEXCOORD1
    $AttributesMesh.uv2:              #define ATTRIBUTES_NEED_TEXCOORD2
    $AttributesMesh.uv3:              #define ATTRIBUTES_NEED_TEXCOORD3
    $AttributesMesh.color:            #define ATTRIBUTES_NEED_COLOR
    $VaryingsMeshToPS.positionWS:     #define VARYINGS_NEED_POSITION_WS
    $VaryingsMeshToPS.normalWS:       #define VARYINGS_NEED_TANGENT_TO_WORLD
    $VaryingsMeshToPS.texCoord0:      #define VARYINGS_NEED_TEXCOORD0
    $VaryingsMeshToPS.texCoord1:      #define VARYINGS_NEED_TEXCOORD1
    $VaryingsMeshToPS.texCoord2:      #define VARYINGS_NEED_TEXCOORD2
    $VaryingsMeshToPS.texCoord3:      #define VARYINGS_NEED_TEXCOORD3
    $VaryingsMeshToPS.color:          #define VARYINGS_NEED_COLOR
    $VaryingsMeshToPS.cullFace:       #define VARYINGS_NEED_CULLFACE
    $features.modifyMesh:             #define HAVE_MESH_MODIFICATION
    //-------------------------------------------------------------------------------------
    // End Defines
    //-------------------------------------------------------------------------------------

    #include "ShaderGraphLibrary/Functions.hlsl"
    #include "HDRP/ShaderVariables.hlsl"
    #ifdef DEBUG_DISPLAY
        #include "HDRP/Debug/DebugDisplay.hlsl"
    #endif

    #if (SHADERPASS == SHADERPASS_FORWARD)
        // used for shaders that want to do lighting (and materials)
        #include "HDRP/Lighting/Lighting.hlsl"
    #else
        // used for shaders that don't need lighting
        #include "HDRP/Material/Material.hlsl"
    #endif

    #include "HDRP/Material/MaterialUtilities.hlsl"
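
    // Usage sketch (variable names are placeholders): the matrix built by BuildWorldToTangent()
    // below stores the tangent frame as rows (tangent, bitangent, normal), matching how this file
    // later reads worldToTangent[0..2] and builds tangentSpaceTransform. With HLSL's row-vector
    // convention that means:
    //
    //     float3 dirTS      = mul(worldToTangent, dirWS);              // world -> tangent
    //     float3 backToWS   = normalize(mul(dirTS, worldToTangent));   // tangent -> world (rows are only uniformly rescaled, so renormalize)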
    // this function assumes the bitangent flip is encoded in tangentWS.w
    // TODO: move this function to HDRP shared file, once we merge with HDRP repo
    float3x3 BuildWorldToTangent(float4 tangentWS, float3 normalWS)
    {
        // tangentWS must not be normalized (mikkts requirement)

        // Normalize normalWS vector but keep the renormFactor to apply it to bitangent and tangent
        float3 unnormalizedNormalWS = normalWS;
        float renormFactor = 1.0 / length(unnormalizedNormalWS);

        // bitangent on the fly option in xnormal to reduce vertex shader outputs.
        // this is the mikktspace transformation (must use unnormalized attributes)
        float3x3 worldToTangent = CreateWorldToTangent(unnormalizedNormalWS, tangentWS.xyz, tangentWS.w > 0.0 ? 1.0 : -1.0);

        // surface gradient based formulation requires a unit length initial normal. We can maintain compliance with mikkts
        // by uniformly scaling all 3 vectors since normalization of the perturbed normal will cancel it.
        worldToTangent[0] = worldToTangent[0] * renormFactor;
        worldToTangent[1] = worldToTangent[1] * renormFactor;
        worldToTangent[2] = worldToTangent[2] * renormFactor;       // normalizes the interpolated vertex normal

        return worldToTangent;
    }

    //-------------------------------------------------------------------------------------
    // Interpolator Packing And Struct Declarations
    //-------------------------------------------------------------------------------------
    ${InterpolatorPacking}
    //-------------------------------------------------------------------------------------
    // End Interpolator Packing And Struct Declarations
    //-------------------------------------------------------------------------------------

    //-------------------------------------------------------------------------------------
    // Graph generated code
    //-------------------------------------------------------------------------------------
    ${Graph}
    //-------------------------------------------------------------------------------------
    // End graph generated code
    //-------------------------------------------------------------------------------------
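
    // Hedged sketch of what ${Graph} above is expected to expand to (the exact code depends on
    // the graph and the ShaderGraph version): the graph's property declarations plus the structs
    // and evaluation functions consumed by the rest of this file, roughly:
    //
    //     struct VertexDescriptionInputs  { /* only the fields requested via $VertexDescriptionInputs.* */ };
    //     struct VertexDescription        { float3 Position; };
    //     VertexDescription VertexDescriptionFunction(VertexDescriptionInputs IN) { /* vertex graph */ }
    //
    //     struct SurfaceDescriptionInputs { /* only the fields requested via $SurfaceDescriptionInputs.* */ };
    //     struct SurfaceDescription       { float3 Color; float Alpha; float AlphaClipThreshold; /* ... */ };
    //     SurfaceDescription SurfaceDescriptionFunction(SurfaceDescriptionInputs IN) { /* pixel graph */ }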
    #ifdef HAVE_MESH_MODIFICATION
    // TODO: we should share this between template files somehow
    VertexDescriptionInputs AttributesMeshToVertexDescriptionInputs(AttributesMesh input)
    {
        VertexDescriptionInputs output;
        ZERO_INITIALIZE(VertexDescriptionInputs, output);

        $VertexDescriptionInputs.ObjectSpaceNormal:          output.ObjectSpaceNormal =          input.normalOS;
        $VertexDescriptionInputs.WorldSpaceNormal:           output.WorldSpaceNormal =           TransformObjectToWorldNormal(input.normalOS);
        $VertexDescriptionInputs.ViewSpaceNormal:            output.ViewSpaceNormal =            TransformWorldToViewDir(output.WorldSpaceNormal);
        $VertexDescriptionInputs.TangentSpaceNormal:         output.TangentSpaceNormal =         float3(0.0f, 0.0f, 1.0f);
        $VertexDescriptionInputs.ObjectSpaceTangent:         output.ObjectSpaceTangent =         input.tangentOS;
        $VertexDescriptionInputs.WorldSpaceTangent:          output.WorldSpaceTangent =          TransformObjectToWorldDir(input.tangentOS.xyz);
        $VertexDescriptionInputs.ViewSpaceTangent:           output.ViewSpaceTangent =           TransformWorldToViewDir(output.WorldSpaceTangent);
        $VertexDescriptionInputs.TangentSpaceTangent:        output.TangentSpaceTangent =        float3(1.0f, 0.0f, 0.0f);
        $VertexDescriptionInputs.ObjectSpaceBiTangent:       output.ObjectSpaceBiTangent =       normalize(cross(input.normalOS, input.tangentOS) * (input.tangentOS.w > 0.0f ? 1.0f : -1.0f) * GetOddNegativeScale());
        $VertexDescriptionInputs.WorldSpaceBiTangent:        output.WorldSpaceBiTangent =        TransformObjectToWorldDir(output.ObjectSpaceBiTangent);
        $VertexDescriptionInputs.ViewSpaceBiTangent:         output.ViewSpaceBiTangent =         TransformWorldToViewDir(output.WorldSpaceBiTangent);
        $VertexDescriptionInputs.TangentSpaceBiTangent:      output.TangentSpaceBiTangent =      float3(0.0f, 1.0f, 0.0f);
        $VertexDescriptionInputs.ObjectSpacePosition:        output.ObjectSpacePosition =        input.positionOS;
        $VertexDescriptionInputs.WorldSpacePosition:         output.WorldSpacePosition =         TransformObjectToWorld(input.positionOS);
        $VertexDescriptionInputs.ViewSpacePosition:          output.ViewSpacePosition =          TransformWorldToView(output.WorldSpacePosition);
        $VertexDescriptionInputs.TangentSpacePosition:       output.TangentSpacePosition =       float3(0.0f, 0.0f, 0.0f);
        $VertexDescriptionInputs.WorldSpaceViewDirection:    output.WorldSpaceViewDirection =    GetWorldSpaceNormalizeViewDir(output.WorldSpacePosition);
        $VertexDescriptionInputs.ObjectSpaceViewDirection:   output.ObjectSpaceViewDirection =   TransformWorldToObjectDir(output.WorldSpaceViewDirection);
        $VertexDescriptionInputs.ViewSpaceViewDirection:     output.ViewSpaceViewDirection =     TransformWorldToViewDir(output.WorldSpaceViewDirection);
        $VertexDescriptionInputs.TangentSpaceViewDirection:  float3x3 tangentSpaceTransform =    float3x3(output.WorldSpaceTangent, output.WorldSpaceBiTangent, output.WorldSpaceNormal);
        $VertexDescriptionInputs.TangentSpaceViewDirection:  output.TangentSpaceViewDirection =  mul(tangentSpaceTransform, output.WorldSpaceViewDirection);
        $VertexDescriptionInputs.ScreenPosition:             output.ScreenPosition =             ComputeScreenPos(TransformWorldToHClip(output.WorldSpacePosition), _ProjectionParams.x);
        $VertexDescriptionInputs.uv0:                        output.uv0 =                        float4(input.uv0, 0.0f, 0.0f);
        $VertexDescriptionInputs.uv1:                        output.uv1 =                        float4(input.uv1, 0.0f, 0.0f);
        $VertexDescriptionInputs.uv2:                        output.uv2 =                        float4(input.uv2, 0.0f, 0.0f);
        $VertexDescriptionInputs.uv3:                        output.uv3 =                        float4(input.uv3, 0.0f, 0.0f);
        $VertexDescriptionInputs.VertexColor:                output.VertexColor =                input.color;

        return output;
    }

    AttributesMesh ApplyMeshModification(AttributesMesh input)
    {
        // build graph inputs
        VertexDescriptionInputs vertexDescriptionInputs = AttributesMeshToVertexDescriptionInputs(input);

        // evaluate vertex graph
        VertexDescription vertexDescription = VertexDescriptionFunction(vertexDescriptionInputs);

        // copy graph output to the results
        $VertexDescription.Position: input.positionOS = vertexDescription.Position;

        return input;
    }
    #endif // HAVE_MESH_MODIFICATION
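
    // When HAVE_MESH_MODIFICATION is defined, the HDRP vertex code pulled in via ${Includes}
    // (VertMesh) is expected to call ApplyMeshModification() on the incoming AttributesMesh
    // before the usual object -> world -> clip transforms, along these lines (sketch of the
    // caller, not code emitted by this template):
    //
    //     #ifdef HAVE_MESH_MODIFICATION
    //         input = ApplyMeshModification(input);
    //     #endif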
    // TODO: Do we want to build include functionality for sharing these preprocessed functions across templates?
    FragInputs BuildFragInputs(VaryingsMeshToPS input)
    {
        FragInputs output;
        ZERO_INITIALIZE(FragInputs, output);

        // Init to some default value to keep the compiler quiet (else it outputs a 'divide by zero'
        // warning even if the value is not used).
        // TODO: this is a really poor workaround, but the variable is used in a bunch of places
        // to compute normals which are then passed on elsewhere to compute other values...
        output.worldToTangent = k_identity3x3;

        output.positionSS = input.positionCS;       // input.positionCS is SV_Position

        $FragInputs.positionWS:      output.positionWS =      input.positionWS;
        $FragInputs.worldToTangent:  output.worldToTangent =  BuildWorldToTangent(input.tangentWS, input.normalWS);
        $FragInputs.texCoord0:       output.texCoord0 =       input.texCoord0;
        $FragInputs.texCoord1:       output.texCoord1 =       input.texCoord1;
        $FragInputs.texCoord2:       output.texCoord2 =       input.texCoord2;
        $FragInputs.texCoord3:       output.texCoord3 =       input.texCoord3;
        $FragInputs.color:           output.color =           input.color;

        #if SHADER_STAGE_FRAGMENT
        $FragInputs.isFrontFace:     output.isFrontFace =     IS_FRONT_VFACE(input.cullFace, true, false);    // TODO: SHADER_STAGE_FRAGMENT only
        $FragInputs.isFrontFace:     // Handle handedness of the view matrix (in Unity the view matrix defaults to a determinant of -1).
        $FragInputs.isFrontFace:     // When rendering a cubemap the handedness is flipped (due to the convention used for cubemaps), giving a determinant of +1.
        $FragInputs.isFrontFace:     output.isFrontFace =     _DetViewMatrix < 0.0 ? output.isFrontFace : !output.isFrontFace;
        #endif // SHADER_STAGE_FRAGMENT

        return output;
    }
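
    // Note: with HDRP's camera-relative rendering, FragInputs.positionWS holds a camera-relative
    // position rather than an absolute world position (see the TODO inside the next function).
    // A graph expecting true world space needs the fix-up used below, e.g.
    // (absolutePositionWS is just an illustrative name):
    //
    //     float3 absolutePositionWS = input.positionWS + _WorldSpaceCameraPos;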
    SurfaceDescriptionInputs FragInputsToSurfaceDescriptionInputs(FragInputs input, float3 viewWS)
    {
        SurfaceDescriptionInputs output;
        ZERO_INITIALIZE(SurfaceDescriptionInputs, output);

        $SurfaceDescriptionInputs.WorldSpaceNormal:           output.WorldSpaceNormal =           normalize(input.worldToTangent[2].xyz);
        $SurfaceDescriptionInputs.ObjectSpaceNormal:          output.ObjectSpaceNormal =          mul(output.WorldSpaceNormal, (float3x3) unity_ObjectToWorld);    // transposed multiplication by inverse matrix to handle normal scale
        $SurfaceDescriptionInputs.ViewSpaceNormal:            output.ViewSpaceNormal =            mul(output.WorldSpaceNormal, (float3x3) UNITY_MATRIX_I_V);       // transposed multiplication by inverse matrix to handle normal scale
        $SurfaceDescriptionInputs.TangentSpaceNormal:         output.TangentSpaceNormal =         float3(0.0f, 0.0f, 1.0f);
        $SurfaceDescriptionInputs.WorldSpaceTangent:          output.WorldSpaceTangent =          input.worldToTangent[0].xyz;
        $SurfaceDescriptionInputs.ObjectSpaceTangent:         output.ObjectSpaceTangent =         mul((float3x3) unity_WorldToObject, output.WorldSpaceTangent);
        $SurfaceDescriptionInputs.ViewSpaceTangent:           output.ViewSpaceTangent =           mul((float3x3) UNITY_MATRIX_V, output.WorldSpaceTangent);
        $SurfaceDescriptionInputs.TangentSpaceTangent:        output.TangentSpaceTangent =        float3(1.0f, 0.0f, 0.0f);
        $SurfaceDescriptionInputs.WorldSpaceBiTangent:        output.WorldSpaceBiTangent =        input.worldToTangent[1].xyz;
        $SurfaceDescriptionInputs.ObjectSpaceBiTangent:       output.ObjectSpaceBiTangent =       mul((float3x3) unity_WorldToObject, output.WorldSpaceBiTangent);
        $SurfaceDescriptionInputs.ViewSpaceBiTangent:         output.ViewSpaceBiTangent =         mul((float3x3) UNITY_MATRIX_V, output.WorldSpaceBiTangent);
        $SurfaceDescriptionInputs.TangentSpaceBiTangent:      output.TangentSpaceBiTangent =      float3(0.0f, 1.0f, 0.0f);
        $SurfaceDescriptionInputs.WorldSpaceViewDirection:    output.WorldSpaceViewDirection =    normalize(viewWS);
        $SurfaceDescriptionInputs.ObjectSpaceViewDirection:   output.ObjectSpaceViewDirection =   mul((float3x3) unity_WorldToObject, output.WorldSpaceViewDirection);
        $SurfaceDescriptionInputs.ViewSpaceViewDirection:     output.ViewSpaceViewDirection =     mul((float3x3) UNITY_MATRIX_V, output.WorldSpaceViewDirection);
        $SurfaceDescriptionInputs.TangentSpaceViewDirection:  float3x3 tangentSpaceTransform =    float3x3(output.WorldSpaceTangent, output.WorldSpaceBiTangent, output.WorldSpaceNormal);
        $SurfaceDescriptionInputs.TangentSpaceViewDirection:  output.TangentSpaceViewDirection =  mul(tangentSpaceTransform, output.WorldSpaceViewDirection);

        // TODO: FragInputs.positionWS is badly named -- it's camera relative, not in world space
        // we have to fix it up here to match graph input expectations
        $SurfaceDescriptionInputs.WorldSpacePosition:         output.WorldSpacePosition =         input.positionWS + _WorldSpaceCameraPos;
        $SurfaceDescriptionInputs.ObjectSpacePosition:        output.ObjectSpacePosition =        mul(unity_WorldToObject, float4(input.positionWS + _WorldSpaceCameraPos, 1.0f)).xyz;
        $SurfaceDescriptionInputs.ViewSpacePosition:          float4 posViewSpace =               mul(UNITY_MATRIX_V, float4(input.positionWS, 1.0f));
        $SurfaceDescriptionInputs.ViewSpacePosition:          output.ViewSpacePosition =          posViewSpace.xyz / posViewSpace.w;
        $SurfaceDescriptionInputs.TangentSpacePosition:       output.TangentSpacePosition =       float3(0.0f, 0.0f, 0.0f);

        // TODO: positionSS is SV_Position, graph input expects screenPosition to be 0..1 across the active viewport (?)
        $SurfaceDescriptionInputs.ScreenPosition:             output.ScreenPosition =             ComputeScreenPos(TransformWorldToHClip(input.positionWS), _ProjectionParams.x);

        $SurfaceDescriptionInputs.uv0:                        output.uv0 =                        float4(input.texCoord0, 0.0f, 0.0f);
        $SurfaceDescriptionInputs.uv1:                        output.uv1 =                        float4(input.texCoord1, 0.0f, 0.0f);
        $SurfaceDescriptionInputs.uv2:                        output.uv2 =                        float4(input.texCoord2, 0.0f, 0.0f);
        $SurfaceDescriptionInputs.uv3:                        output.uv3 =                        float4(input.texCoord3, 0.0f, 0.0f);
        $SurfaceDescriptionInputs.VertexColor:                output.VertexColor =                input.color;
        $SurfaceDescriptionInputs.FaceSign:                   output.FaceSign =                   input.isFrontFace;

        return output;
    }

    // existing HDRP code uses the combined function to go directly from packed to frag inputs
    FragInputs UnpackVaryingsMeshToFragInputs(PackedVaryingsMeshToPS input)
    {
        VaryingsMeshToPS unpacked = UnpackVaryingsMeshToPS(input);
        return BuildFragInputs(unpacked);
    }

    void BuildSurfaceData(FragInputs fragInputs, SurfaceDescription surfaceDescription, float3 V, out SurfaceData surfaceData)
    {
        // setup defaults -- these are used if the graph doesn't output a value
        ZERO_INITIALIZE(SurfaceData, surfaceData);

        // copy across graph values, if defined
        $SurfaceDescription.Color: surfaceData.color = surfaceDescription.Color;
    }
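
    // GetSurfaceAndBuiltinData() below is the per-pixel entry point that the HDRP pass code
    // included via ${Includes} (e.g. the forward unlit ShaderPass file) is expected to call.
    // Rough flow, as implemented below:
    //   1. apply the double-sided flip/mirror correction to the tangent frame
    //   2. build SurfaceDescriptionInputs and evaluate the graph (SurfaceDescriptionFunction)
    //   3. optionally alpha test (DoAlphaTest)
    //   4. fill SurfaceData via BuildSurfaceData() and fill BuiltinData manually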
    void GetSurfaceAndBuiltinData(FragInputs fragInputs, float3 V, inout PositionInputs posInput, out SurfaceData surfaceData, out BuiltinData builtinData)
    {
        // this applies the double sided tangent space correction -- see 'ApplyDoubleSidedFlipOrMirror()'
        $DoubleSided:           if (!fragInputs.isFrontFace) {
        $DoubleSided.Flip:          fragInputs.worldToTangent[1] = -fragInputs.worldToTangent[1];      // bitangent
        $DoubleSided.Flip:          fragInputs.worldToTangent[2] = -fragInputs.worldToTangent[2];      // normal
        $DoubleSided.Mirror:        fragInputs.worldToTangent[2] = -fragInputs.worldToTangent[2];      // normal
        $DoubleSided:           }

        SurfaceDescriptionInputs surfaceDescriptionInputs = FragInputsToSurfaceDescriptionInputs(fragInputs, V);
        SurfaceDescription surfaceDescription = SurfaceDescriptionFunction(surfaceDescriptionInputs);

        // Perform alpha test very early to save performance (a killed pixel will not sample textures)
        // TODO: split graph evaluation to grab just alpha dependencies first? tricky..
        $AlphaTest: DoAlphaTest(surfaceDescription.Alpha, surfaceDescription.AlphaClipThreshold);

        BuildSurfaceData(fragInputs, surfaceDescription, V, surfaceData);

        // Builtin Data -- we don't call GetBuiltinData(fragInputs, surfaceData, ...) because
        // that function assumes specific global properties are defined.
        // For ShaderGraph shaders we fill it out here instead.
        ZERO_INITIALIZE(BuiltinData, builtinData);

        builtinData.opacity = surfaceDescription.Alpha;
        builtinData.bakeDiffuseLighting = float3(0.0, 0.0, 0.0);
        $SurfaceDescription.Emission: builtinData.emissiveColor = surfaceDescription.Emission;
        builtinData.velocity = float2(0.0, 0.0);
        builtinData.shadowMask0 = 0.0;
        builtinData.shadowMask1 = 0.0;
        builtinData.shadowMask2 = 0.0;
        builtinData.shadowMask3 = 0.0;
        builtinData.distortion = float2(0.0, 0.0);      // surfaceDescription.Distortion -- if distortion pass
        builtinData.distortionBlur = 0.0;               // surfaceDescription.DistortionBlur -- if distortion pass
        builtinData.depthOffset = 0.0;                  // not applied here:
        // ApplyPerPixelDisplacement(input, V, layerTexCoord, blendMasks);
        // #ifdef _DEPTHOFFSET_ON
        //     ApplyDepthOffsetPositionInput(V, depthOffset, GetWorldToHClipMatrix(), posInput);
        // #endif
    }

    //-------------------------------------------------------------------------------------
    // Pass Includes
    //-------------------------------------------------------------------------------------
    ${Includes}
    //-------------------------------------------------------------------------------------
    // End Pass Includes
    //-------------------------------------------------------------------------------------

    ENDHLSL
}