#ifndef LIGHTWEIGHT_LIGHTING_INCLUDED
#define LIGHTWEIGHT_LIGHTING_INCLUDED

#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/EntityLighting.hlsl"
#include "CoreRP/ShaderLibrary/ImageBasedLighting.hlsl"
#include "Core.hlsl"
#include "Shadows.hlsl"

// If lightmap is not defined then we evaluate GI (ambient + probes) from SH.
// We might do it fully or partially in vertex to save shader ALU.
#if !defined(LIGHTMAP_ON)
// TODO: Control things like these by exposing SHADER_QUALITY levels (low, medium, high)
    #if defined(SHADER_API_GLES) || !defined(_NORMALMAP)
        // Evaluates SH fully in vertex
        #define EVALUATE_SH_VERTEX
    #elif !SHADER_HINT_NICE_QUALITY
        // Evaluates L2 SH in vertex and L0L1 in pixel
        #define EVALUATE_SH_MIXED
    #endif
        // Otherwise evaluate SH fully per-pixel
#endif

#ifdef LIGHTMAP_ON
    #define DECLARE_LIGHTMAP_OR_SH(lmName, shName, index) float2 lmName : TEXCOORD##index
    #define OUTPUT_LIGHTMAP_UV(lightmapUV, lightmapScaleOffset, OUT) OUT.xy = lightmapUV.xy * lightmapScaleOffset.xy + lightmapScaleOffset.zw;
    #define OUTPUT_SH(normalWS, OUT)
#else
    #define DECLARE_LIGHTMAP_OR_SH(lmName, shName, index) half3 shName : TEXCOORD##index
    #define OUTPUT_LIGHTMAP_UV(lightmapUV, lightmapScaleOffset, OUT)
    #define OUTPUT_SH(normalWS, OUT) OUT.xyz = SampleSHVertex(normalWS)
#endif

///////////////////////////////////////////////////////////////////////////////
//                           Light Helpers                                   //
///////////////////////////////////////////////////////////////////////////////

// Abstraction over Light input constants
struct LightInput
{
    float4  position;
    half3   color;
    half4   distanceAndSpotAttenuation;
    half4   spotDirection;
};

// Abstraction over Light shading data.
struct Light
{
    int     index;
    half3   direction;
    half3   color;
    half    attenuation;
    half    subtractiveModeAttenuation;
};

///////////////////////////////////////////////////////////////////////////////
//                        Attenuation Functions                              //
///////////////////////////////////////////////////////////////////////////////

// Matches Unity Vanilla attenuation.
// Attenuation smoothly decreases to light range.
half DistanceAttenuation(half distanceSqr, half2 distanceAttenuation)
{
    // We use a shared distance attenuation for additional directional and punctual lights.
    // For directional lights attenuation will be 1.
    half lightAtten = 1.0h / distanceSqr;

#if defined(SHADER_HINT_NICE_QUALITY)
    // Use the smoothing factor also used in the Unity lightmapper.
    half factor = distanceSqr * distanceAttenuation.x;
    half smoothFactor = saturate(1.0h - factor * factor);
    smoothFactor = smoothFactor * smoothFactor;
#else
    // We need to smoothly fade attenuation to light range. We start fading linearly at 80% of light range.
    // Therefore:
    // fadeDistanceSqr = (0.8 * 0.8 * lightRangeSqr)
    // smoothFactor = (lightRangeSqr - distanceSqr) / (lightRangeSqr - fadeDistanceSqr)
    // We can rewrite that to fit a MAD by doing
    // distanceSqr * (1.0 / (fadeDistanceSqr - lightRangeSqr)) + (-lightRangeSqr / (fadeDistanceSqr - lightRangeSqr))
    // distanceSqr *          distanceAttenuation.x            +          distanceAttenuation.y
    half smoothFactor = saturate(distanceSqr * distanceAttenuation.x + distanceAttenuation.y);
#endif

    return lightAtten * smoothFactor;
}
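
// Illustrative sketch (assumption, not pipeline code): how the packed fade terms consumed
// above could be derived from a light's squared range. The real values are precomputed on
// the CPU side and uploaded with the light constants; the function and variable names
// below are hypothetical.
half2 ComputeDistanceAttenuationTermsExample(half lightRangeSqr)
{
    half fadeStartDistanceSqr = 0.8h * 0.8h * lightRangeSqr;            // start fading at 80% of range
    half fadeRangeSqr = fadeStartDistanceSqr - lightRangeSqr;           // negative span of the fade region
    half oneOverFadeRangeSqr = 1.0h / fadeRangeSqr;                     // -> distanceAttenuation.x
    half lightRangeSqrOverFadeRangeSqr = -lightRangeSqr / fadeRangeSqr; // -> distanceAttenuation.y
    return half2(oneOverFadeRangeSqr, lightRangeSqrOverFadeRangeSqr);
}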

half SpotAttenuation(half3 spotDirection, half3 lightDirection, half2 spotAttenuation)
{
    // Spot Attenuation with a linear falloff can be defined as
    // (SdotL - cosOuterAngle) / (cosInnerAngle - cosOuterAngle)
    // If we precompute the terms to fit a MAD instruction, this can be rewritten as
    // invAngleRange = 1.0 / (cosInnerAngle - cosOuterAngle)
    // SdotL * invAngleRange + (-cosOuterAngle * invAngleRange)
    // SdotL * spotAttenuation.x + spotAttenuation.y
    half SdotL = dot(spotDirection, lightDirection);
    half atten = saturate(SdotL * spotAttenuation.x + spotAttenuation.y);
    return atten * atten;
}
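
// Illustrative sketch (assumption, not pipeline code): how the packed spot terms consumed
// above map to the cone angles. The real values are precomputed on the CPU side; the
// names below are hypothetical.
half2 ComputeSpotAttenuationTermsExample(half cosInnerAngle, half cosOuterAngle)
{
    half invAngleRange = 1.0h / (cosInnerAngle - cosOuterAngle);
    return half2(invAngleRange, -cosOuterAngle * invAngleRange); // -> spotAttenuation.xy
}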

half4 GetLightDirectionAndAttenuation(LightInput lightInput, float3 positionWS)
{
    half4 directionAndAttenuation;
    float3 posToLightVec = lightInput.position.xyz - positionWS * lightInput.position.w;
    float distanceSqr = max(dot(posToLightVec, posToLightVec), FLT_MIN);

    directionAndAttenuation.xyz = half3(posToLightVec * rsqrt(distanceSqr));
    directionAndAttenuation.w = DistanceAttenuation(distanceSqr, lightInput.distanceAndSpotAttenuation.xy);
    directionAndAttenuation.w *= SpotAttenuation(lightInput.spotDirection.xyz, directionAndAttenuation.xyz, lightInput.distanceAndSpotAttenuation.zw);

    return directionAndAttenuation;
}

///////////////////////////////////////////////////////////////////////////////
//                         Light Abstraction                                 //
///////////////////////////////////////////////////////////////////////////////

Light GetMainLight()
{
    Light light;
    light.index = 0;
    light.direction = _MainLightPosition.xyz;
    light.attenuation = 1.0;
    light.subtractiveModeAttenuation = _MainLightPosition.w;
    light.color = _MainLightColor.rgb;

    return light;
}

Light GetLight(half i, float3 positionWS)
{
    LightInput lightInput;

#if USE_STRUCTURED_BUFFER_FOR_LIGHT_DATA
    int lightIndex = _LightIndexBuffer[unity_LightIndicesOffsetAndCount.x + i];
#else
    // The following code is more optimal than indexing unity_4LightIndices0.
    // Conditional moves are branch free even on mali-400.
    half i_rem = (i < 2.0h) ? i : i - 2.0h;
    half2 lightIndex2 = (i < 2.0h) ? unity_4LightIndices0.xy : unity_4LightIndices0.zw;
    int lightIndex = (i_rem < 1.0h) ? lightIndex2.x : lightIndex2.y;
#endif

    // The following code will turn into a branching madhouse on platforms that don't support
    // dynamic indexing. Ideally we need to configure light data at a cluster-of-objects
    // granularity level. We will only be able to do that when scriptable culling kicks in.
    // TODO: Use StructuredBuffer on PC/Console and profile access speed on mobiles that support it.
    float4 positionAndSubtractiveLightMode = _AdditionalLightPosition[lightIndex];
    lightInput.position = float4(positionAndSubtractiveLightMode.xyz, 1.);
    lightInput.color = _AdditionalLightColor[lightIndex].rgb;
    lightInput.distanceAndSpotAttenuation = _AdditionalLightAttenuation[lightIndex];
    lightInput.spotDirection = _AdditionalLightSpotDir[lightIndex];

    half4 directionAndRealtimeAttenuation = GetLightDirectionAndAttenuation(lightInput, positionWS);

    Light light;
    light.index = lightIndex;
    light.direction = directionAndRealtimeAttenuation.xyz;
    light.attenuation = directionAndRealtimeAttenuation.w;
    light.subtractiveModeAttenuation = positionAndSubtractiveLightMode.w;
    light.color = lightInput.color;

    return light;
}

half GetPixelLightCount()
{
    // TODO: we need to expose in the SRP API an ability for the pipeline to cap the number of lights
    // during culling. This way we could do the loop branch with a uniform.
    // This would also be helpful to support baking lights that exceed the limit into SH.
    return min(_AdditionalLightCount.x, unity_LightIndicesOffsetAndCount.y);
}

///////////////////////////////////////////////////////////////////////////////
//                          BRDF Functions                                   //
///////////////////////////////////////////////////////////////////////////////

#define kDieletricSpec half4(0.04, 0.04, 0.04, 1.0 - 0.04) // standard dielectric reflectivity coef at incident angle (= 4%)

struct BRDFData
{
    half3 diffuse;
    half3 specular;
    half perceptualRoughness;
    half roughness;
    half roughness2;
    half grazingTerm;

    // We save some light invariant BRDF terms so we don't have to recompute
    // them in the light loop. Take a look at the DirectBDRF function for a detailed explanation.
    half normalizationTerm;     // roughness * 4.0 + 2.0
    half roughness2MinusOne;    // roughness² - 1.0
};

half ReflectivitySpecular(half3 specular)
{
#if defined(SHADER_API_GLES)
    return specular.r; // Red channel - because most metals are either monochrome or have a reddish/yellowish tint
#else
    return max(max(specular.r, specular.g), specular.b);
#endif
}

half OneMinusReflectivityMetallic(half metallic)
{
    // We'll need oneMinusReflectivity, so
    //   1-reflectivity = 1-lerp(dielectricSpec, 1, metallic) = lerp(1-dielectricSpec, 0, metallic)
    // store (1-dielectricSpec) in kDieletricSpec.a, then
    //   1-reflectivity = lerp(alpha, 0, metallic) = alpha + metallic*(0 - alpha)
    //                  = alpha - metallic * alpha
    half oneMinusDielectricSpec = kDieletricSpec.a;
    return oneMinusDielectricSpec - metallic * oneMinusDielectricSpec;
}

inline void InitializeBRDFData(half3 albedo, half metallic, half3 specular, half smoothness, half alpha, out BRDFData outBRDFData)
{
#ifdef _SPECULAR_SETUP
    half reflectivity = ReflectivitySpecular(specular);
    half oneMinusReflectivity = 1.0 - reflectivity;

    outBRDFData.diffuse = albedo * (half3(1.0h, 1.0h, 1.0h) - specular);
    outBRDFData.specular = specular;
#else
    half oneMinusReflectivity = OneMinusReflectivityMetallic(metallic);
    half reflectivity = 1.0 - oneMinusReflectivity;

    outBRDFData.diffuse = albedo * oneMinusReflectivity;
    outBRDFData.specular = lerp(kDieletricSpec.rgb, albedo, metallic);
#endif

    outBRDFData.grazingTerm = saturate(smoothness + reflectivity);
    outBRDFData.perceptualRoughness = PerceptualSmoothnessToPerceptualRoughness(smoothness);
    outBRDFData.roughness = PerceptualRoughnessToRoughness(outBRDFData.perceptualRoughness);
    outBRDFData.roughness2 = outBRDFData.roughness * outBRDFData.roughness;

    outBRDFData.normalizationTerm = outBRDFData.roughness * 4.0h + 2.0h;
    outBRDFData.roughness2MinusOne = outBRDFData.roughness2 - 1.0h;

#ifdef _ALPHAPREMULTIPLY_ON
    outBRDFData.diffuse *= alpha;
    alpha = alpha * oneMinusReflectivity + reflectivity;
#endif
}

half3 EnvironmentBRDF(BRDFData brdfData, half3 indirectDiffuse, half3 indirectSpecular, half fresnelTerm)
{
    half3 c = indirectDiffuse * brdfData.diffuse;
    float surfaceReduction = 1.0 / (brdfData.roughness2 + 1.0);
    c += surfaceReduction * indirectSpecular * lerp(brdfData.specular, brdfData.grazingTerm, fresnelTerm);
    return c;
}

// Based on Minimalist CookTorrance BRDF
// Implementation is slightly different from original derivation: http://www.thetenthplanet.de/archives/255
//
// * NDF [Modified] GGX
// * Modified Kelemen and Szirmay-Kalos for Visibility term
// * Fresnel approximated with 1/LdotH
half3 DirectBDRF(BRDFData brdfData, half3 normalWS, half3 lightDirectionWS, half3 viewDirectionWS)
{
#ifndef _SPECULARHIGHLIGHTS_OFF
    half3 halfDir = SafeNormalize(lightDirectionWS + viewDirectionWS);

    half NoH = saturate(dot(normalWS, halfDir));
    half LoH = saturate(dot(lightDirectionWS, halfDir));

    // GGX Distribution multiplied by combined approximation of Visibility and Fresnel
    // BRDFspec = (D * V * F) / 4.0
    // D = roughness² / ( NoH² * (roughness² - 1) + 1 )²
    // V * F = 1.0 / ( LoH² * (roughness + 0.5) )
    // See "Optimizing PBR for Mobile" from the Siggraph 2015 Moving Mobile Graphics course
    // https://community.arm.com/events/1155
    // Final BRDFspec = roughness² / ( NoH² * (roughness² - 1) + 1 )² * (LoH² * (roughness + 0.5) * 4.0)
    // We further optimize a few light invariant terms:
    // brdfData.normalizationTerm = (roughness + 0.5) * 4.0 rewritten as roughness * 4.0 + 2.0 to fit a MAD.
    half d = NoH * NoH * brdfData.roughness2MinusOne + 1.00001h;

    half LoH2 = LoH * LoH;
    half specularTerm = brdfData.roughness2 / ((d * d) * max(0.1h, LoH2) * brdfData.normalizationTerm);

    // On mobiles (where half actually means something) the denominator has a risk of overflow.
    // The clamp below was added specifically to "fix" that, but the dx compiler (we convert bytecode to metal/gles)
    // sees that specularTerm has only non-negative terms, so it skips max(0,..) in clamp (leaving only min(100,...)).
#if defined (SHADER_API_MOBILE)
    specularTerm = specularTerm - HALF_MIN;
    specularTerm = clamp(specularTerm, 0.0, 100.0); // Prevent FP16 overflow on mobiles
#endif

    half3 color = specularTerm * brdfData.specular + brdfData.diffuse;
    return color;
#else
    return brdfData.diffuse;
#endif
}
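
// Illustrative reference form of the specular term above (assumption: kept only for clarity,
// this is not what the pipeline executes). It expands brdfData.normalizationTerm and
// brdfData.roughness2MinusOne back into the D and V*F factors quoted in the DirectBDRF comment;
// it omits the max() clamp and the 1.00001 epsilon used in the optimized path.
half SpecularTermReferenceExample(half roughness, half roughness2, half NoH, half LoH)
{
    half d = NoH * NoH * (roughness2 - 1.0h) + 1.0h;
    half D = roughness2 / (d * d);                      // [Modified] GGX NDF as quoted above
    half VF = 1.0h / (LoH * LoH * (roughness + 0.5h));  // combined Visibility and Fresnel approximation
    return D * VF / 4.0h;                               // BRDFspec = (D * V * F) / 4.0
}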

///////////////////////////////////////////////////////////////////////////////
//                      Global Illumination                                  //
///////////////////////////////////////////////////////////////////////////////

// Samples SH L0, L1 and L2 terms
half3 SampleSH(half3 normalWS)
{
    // LPPV is not supported in Lightweight Pipeline
    real4 SHCoefficients[7];
    SHCoefficients[0] = unity_SHAr;
    SHCoefficients[1] = unity_SHAg;
    SHCoefficients[2] = unity_SHAb;
    SHCoefficients[3] = unity_SHBr;
    SHCoefficients[4] = unity_SHBg;
    SHCoefficients[5] = unity_SHBb;
    SHCoefficients[6] = unity_SHC;

    return max(half3(0, 0, 0), SampleSH9(SHCoefficients, normalWS));
}

// SH Vertex Evaluation. Depending on the target, SH sampling might be
// done completely per vertex, or mixed with the L2 term per vertex and L0, L1
// per pixel. See SampleSHPixel.
half3 SampleSHVertex(half3 normalWS)
{
#if defined(EVALUATE_SH_VERTEX)
    return max(half3(0, 0, 0), SampleSH(normalWS));
#elif defined(EVALUATE_SH_MIXED)
    // no max since this is only the L2 contribution
    return SHEvalLinearL2(normalWS, unity_SHBr, unity_SHBg, unity_SHBb, unity_SHC);
#endif

    // Fully per-pixel. Nothing to compute.
    return half3(0.0, 0.0, 0.0);
}

// SH Pixel Evaluation. Depending on the target, SH sampling might be done
// mixed or fully in pixel. See SampleSHVertex.
half3 SampleSHPixel(half3 L2Term, half3 normalWS)
{
#if defined(EVALUATE_SH_VERTEX)
    return L2Term;
#elif defined(EVALUATE_SH_MIXED)
    half3 L0L1Term = SHEvalLinearL0L1(normalWS, unity_SHAr, unity_SHAg, unity_SHAb);
    return max(half3(0, 0, 0), L2Term + L0L1Term);
#endif

    // Default: Evaluate SH fully per-pixel
    return SampleSH(normalWS);
}

// Sample baked lightmap. Non-Directional, or Directional if available.
// Realtime GI is not supported.
half3 SampleLightmap(float2 lightmapUV, half3 normalWS)
{
#ifdef UNITY_LIGHTMAP_FULL_HDR
    bool encodedLightmap = false;
#else
    bool encodedLightmap = true;
#endif

    // The shader library's lightmap sampling functions transform the lightmap uv coords to apply bias and scale.
    // However, the lightweight pipeline already transformed those coords in vertex. We pass half4(1, 1, 0, 0) and
    // the compiler will optimize the transform away.
    half4 transformCoords = half4(1, 1, 0, 0);

#ifdef DIRLIGHTMAP_COMBINED
    return SampleDirectionalLightmap(TEXTURE2D_PARAM(unity_Lightmap, samplerunity_Lightmap),
        TEXTURE2D_PARAM(unity_LightmapInd, samplerunity_Lightmap),
        lightmapUV, transformCoords, normalWS, encodedLightmap, unity_Lightmap_HDR);
#else
    return SampleSingleLightmap(TEXTURE2D_PARAM(unity_Lightmap, samplerunity_Lightmap), lightmapUV, transformCoords, encodedLightmap, unity_Lightmap_HDR);
#endif
}

// We either sample GI from the baked lightmap or from probes.
// If lightmap: sampleData.xy = lightmapUV
// If probe: sampleData.xyz = L2 SH terms
#ifdef LIGHTMAP_ON
#define SAMPLE_GI(lmName, shName, normalWSName) SampleLightmap(lmName, normalWSName)
#else
#define SAMPLE_GI(lmName, shName, normalWSName) SampleSHPixel(shName, normalWSName)
#endif
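
// Illustrative usage sketch (assumption: the Varyings layout and variable names below are
// hypothetical, not pipeline code). A typical forward pass wires the lightmap/SH macros
// declared earlier together with SAMPLE_GI like this:
//
//     struct Varyings
//     {
//         float4 positionCS : SV_POSITION;
//         half3  normalWS   : TEXCOORD0;
//         DECLARE_LIGHTMAP_OR_SH(lightmapUV, vertexSH, 1);
//     };
//
//     // Vertex:
//     //   OUTPUT_LIGHTMAP_UV(input.lightmapUV, unity_LightmapST, output.lightmapUV);
//     //   OUTPUT_SH(output.normalWS, output.vertexSH);
//     // Fragment:
//     //   half3 bakedGI = SAMPLE_GI(input.lightmapUV, input.vertexSH, input.normalWS);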

half3 GlossyEnvironmentReflection(half3 reflectVector, half perceptualRoughness, half occlusion)
{
#if !defined(_GLOSSYREFLECTIONS_OFF)
    half mip = PerceptualRoughnessToMipmapLevel(perceptualRoughness);
    half4 encodedIrradiance = SAMPLE_TEXTURECUBE_LOD(unity_SpecCube0, samplerunity_SpecCube0, reflectVector, mip);

#if !defined(UNITY_USE_NATIVE_HDR)
    half3 irradiance = DecodeHDREnvironment(encodedIrradiance, unity_SpecCube0_HDR);
#else
    half3 irradiance = encodedIrradiance.rgb;
#endif

    return irradiance * occlusion;
#endif // GLOSSY_REFLECTIONS

    return _GlossyEnvironmentColor.rgb * occlusion;
}

half3 SubtractDirectMainLightFromLightmap(Light mainLight, half3 normalWS, half3 bakedGI)
{
    // Let's try to make realtime shadows work on a surface which already contains
    // baked lighting and shadowing from the main sun light.
    // Summary:
    // 1) Calculate a possible value in the shadow by subtracting the estimated light contribution from the places occluded by the realtime shadow:
    //      a) preserves other baked lights and light bounces
    //      b) eliminates shadows on the geometry facing away from the light
    // 2) Clamp against the user defined ShadowColor.
    // 3) Pick the original lightmap value if it is the darkest one.

    // 1) Gives a good estimate of illumination as if the light would've been shadowed during the bake.
    // We only subtract the main directional light. This is accounted for in the contribution term below.
    half shadowStrength = _ShadowData.x;
    half contributionTerm = saturate(dot(mainLight.direction, normalWS));
    half3 lambert = mainLight.color * contributionTerm;
    half3 estimatedLightContributionMaskedByInverseOfShadow = lambert * (1.0 - mainLight.attenuation);
    half3 subtractedLightmap = bakedGI - estimatedLightContributionMaskedByInverseOfShadow;

    // 2) Allows the user to define the overall ambient of the scene and control the situation when the realtime shadow becomes too dark.
    half3 realtimeShadow = max(subtractedLightmap, _SubtractiveShadowColor.xyz);
    realtimeShadow = lerp(bakedGI, realtimeShadow, shadowStrength);

    // 3) Pick the darkest color
    return min(bakedGI, realtimeShadow);
}

half3 GlobalIllumination(BRDFData brdfData, half3 bakedGI, half occlusion, half3 normalWS, half3 viewDirectionWS)
{
    half3 reflectVector = reflect(-viewDirectionWS, normalWS);
    half fresnelTerm = Pow4(1.0 - saturate(dot(normalWS, viewDirectionWS)));

    half3 indirectDiffuse = bakedGI * occlusion;
    half3 indirectSpecular = GlossyEnvironmentReflection(reflectVector, brdfData.perceptualRoughness, occlusion);
    return EnvironmentBRDF(brdfData, indirectDiffuse, indirectSpecular, fresnelTerm);
}

void MixRealtimeAndBakedGI(inout Light light, half3 normalWS, inout half3 bakedGI, half4 shadowMask)
{
#if defined(_MIXED_LIGHTING_SUBTRACTIVE) && defined(LIGHTMAP_ON)
    bakedGI = SubtractDirectMainLightFromLightmap(light, normalWS, bakedGI);
#endif

#if defined(LIGHTMAP_ON)
    #if defined(_MIXED_LIGHTING_SHADOWMASK)
        // TODO:
    #elif defined(_MIXED_LIGHTING_SUBTRACTIVE)
        // Subtractive Light mode has the direct light contribution baked into the lightmap for mixed lights.
        // We need to remove the direct realtime contribution from mixed lights.
        // subtractiveModeAttenuation is set to 0.0 if this light's occlusion was baked into the lightmap, 1.0 otherwise.
        light.attenuation *= light.subtractiveModeAttenuation;
    #endif
#endif
}

///////////////////////////////////////////////////////////////////////////////
//                      Lighting Functions                                   //
///////////////////////////////////////////////////////////////////////////////

half3 LightingLambert(half3 lightColor, half3 lightDir, half3 normal)
{
    half NdotL = saturate(dot(normal, lightDir));
    return lightColor * NdotL;
}

half3 LightingSpecular(half3 lightColor, half3 lightDir, half3 normal, half3 viewDir, half4 specularGloss, half shininess)
{
    half3 halfVec = SafeNormalize(lightDir + viewDir);
    half NdotH = saturate(dot(normal, halfVec));
    half modifier = pow(NdotH, shininess) * specularGloss.a;
    half3 specularReflection = specularGloss.rgb * modifier;
    return lightColor * specularReflection;
}

half3 LightingPhysicallyBased(BRDFData brdfData, half3 lightColor, half3 lightDirectionWS, half lightAttenuation, half3 normalWS, half3 viewDirectionWS)
{
    half NdotL = saturate(dot(normalWS, lightDirectionWS));
    half3 radiance = lightColor * (lightAttenuation * NdotL);
    return DirectBDRF(brdfData, normalWS, lightDirectionWS, viewDirectionWS) * radiance;
}

half3 LightingPhysicallyBased(BRDFData brdfData, Light light, half3 normalWS, half3 viewDirectionWS)
{
    return LightingPhysicallyBased(brdfData, light.color, light.direction, light.attenuation, normalWS, viewDirectionWS);
}

half3 VertexLighting(float3 positionWS, half3 normalWS)
{
    half3 vertexLightColor = half3(0.0, 0.0, 0.0);

#if defined(_VERTEX_LIGHTS)
    int vertexLightStart = _AdditionalLightCount.x;
    int vertexLightEnd = min(_AdditionalLightCount.y, unity_LightIndicesOffsetAndCount.y);
    for (int lightIter = vertexLightStart; lightIter < vertexLightEnd; ++lightIter)
    {
        Light light = GetLight(lightIter, positionWS);

        half3 lightColor = light.color * light.attenuation;
        vertexLightColor += LightingLambert(lightColor, light.direction, normalWS);
    }
#endif

    return vertexLightColor;
}

///////////////////////////////////////////////////////////////////////////////
//                      Fragment Functions                                   //
//           Used by ShaderGraph and other built-in renderers                //
///////////////////////////////////////////////////////////////////////////////

half4 LightweightFragmentPBR(InputData inputData, half3 albedo, half metallic, half3 specular,
    half smoothness, half occlusion, half3 emission, half alpha)
{
    BRDFData brdfData;
    InitializeBRDFData(albedo, metallic, specular, smoothness, alpha, brdfData);

    Light mainLight = GetMainLight();
    mainLight.attenuation = MainLightRealtimeShadowAttenuation(inputData.shadowCoord);
    MixRealtimeAndBakedGI(mainLight, inputData.normalWS, inputData.bakedGI, half4(0, 0, 0, 0));

    half3 color = GlobalIllumination(brdfData, inputData.bakedGI, occlusion, inputData.normalWS, inputData.viewDirectionWS);
    color += LightingPhysicallyBased(brdfData, mainLight, inputData.normalWS, inputData.viewDirectionWS);

#ifdef _ADDITIONAL_LIGHTS
    int pixelLightCount = GetPixelLightCount();
    for (int i = 0; i < pixelLightCount; ++i)
    {
        Light light = GetLight(i, inputData.positionWS);
        light.attenuation *= LocalLightRealtimeShadowAttenuation(light.index, inputData.positionWS);
        color += LightingPhysicallyBased(brdfData, light, inputData.normalWS, inputData.viewDirectionWS);
    }
#endif

    color += inputData.vertexLighting * brdfData.diffuse;
    color += emission;
    return half4(color, alpha);
}
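
// Illustrative usage sketch (assumption: how InputData gets filled and the surface value
// names below come from the caller's pass and are hypothetical). A Lit-style fragment
// shader would typically end with a call such as:
//
//     InputData inputData;
//     // ... fill positionWS, normalWS, viewDirectionWS, shadowCoord, vertexLighting, bakedGI ...
//     half4 color = LightweightFragmentPBR(inputData, surfaceAlbedo, surfaceMetallic,
//         surfaceSpecular, surfaceSmoothness, surfaceOcclusion, surfaceEmission, surfaceAlpha);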

half4 LightweightFragmentBlinnPhong(InputData inputData, half3 diffuse, half4 specularGloss, half shininess, half3 emission, half alpha)
{
    Light mainLight = GetMainLight();
    mainLight.attenuation = MainLightRealtimeShadowAttenuation(inputData.shadowCoord);
    MixRealtimeAndBakedGI(mainLight, inputData.normalWS, inputData.bakedGI, half4(0, 0, 0, 0));

    half3 attenuatedLightColor = mainLight.color * mainLight.attenuation;
    half3 diffuseColor = inputData.bakedGI + LightingLambert(attenuatedLightColor, mainLight.direction, inputData.normalWS);
    half3 specularColor = LightingSpecular(attenuatedLightColor, mainLight.direction, inputData.normalWS, inputData.viewDirectionWS, specularGloss, shininess);

#ifdef _ADDITIONAL_LIGHTS
    int pixelLightCount = GetPixelLightCount();
    for (int i = 0; i < pixelLightCount; ++i)
    {
        Light light = GetLight(i, inputData.positionWS);
        light.attenuation *= LocalLightRealtimeShadowAttenuation(light.index, inputData.positionWS);
        half3 attenuatedLightColor = light.color * light.attenuation;
        diffuseColor += LightingLambert(attenuatedLightColor, light.direction, inputData.normalWS);
        specularColor += LightingSpecular(attenuatedLightColor, light.direction, inputData.normalWS, inputData.viewDirectionWS, specularGloss, shininess);
    }
#endif

    half3 fullDiffuse = diffuseColor + inputData.vertexLighting;
    half3 finalColor = fullDiffuse * diffuse + emission;

#if defined(_SPECGLOSSMAP) || defined(_SPECULAR_COLOR)
    finalColor += specularColor;
#endif

    return half4(finalColor, alpha);
}

#endif