return lighting;
}
DirectLighting EvaluateBSDF_Area(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData,
BSDFData bsdfData, BakeLightingData bakeLightingData)
{
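// Area lights are dispatched by shape: line (tube) lights and rectangle lights each have a dedicated LTC-based evaluation (EvaluateBSDF_Line / EvaluateBSDF_Rect).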
if (lightData.lightType == GPULIGHTTYPE_LINE)
{
return EvaluateBSDF_Line(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
else
{
return EvaluateBSDF_Rect(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
}
IndirectLighting EvaluateBSDF_SSLighting(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, BSDFData bsdfData,
int GPUImageBasedLightingType,
inout float hierarchyWeight)
{
IndirectLighting lighting;
ZERO_INITIALIZE(IndirectLighting, lighting);
switch (GPUImageBasedLightingType)
{
case GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION:
{
#if HAS_REFRACTION
// Refraction process:
// 1. Depending on the shape model, we calculate the refracted point in world space and the optical depth
// 2. We calculate the screen space position of the refracted point
// 3. If this point is available (ie: in color buffer and point is not in front of the object)
// a. Get the corresponding color depending on the roughness from the gaussian pyramid of the color buffer
// b. Multiply by the transmittance for absorption (depends on the optical depth)
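// EstimateRaycast is assumed to approximate where the refracted ray (transparentPositionWS, transparentRefractV) hits the scene, giving the world space "back point" seen through the surface (step 1 above).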
float3 refractedBackPointWS = EstimateRaycast(V, posInput, preLightData.transparentPositionWS, preLightData.transparentRefractV);
// Calculate screen space coordinates of refracted point in back plane
float2 refractedBackPointNDC = ComputeNormalizedDeviceCoordinates(refractedBackPointWS, UNITY_MATRIX_VP);
uint2 depthSize = uint2(_PyramidDepthMipSize.xy);
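// refractedBackPointNDC is in [0, 1]; scaling by the depth pyramid size gives the texel coordinates used by the load below.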
float refractedBackPointDepth = LinearEyeDepth(LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, refractedBackPointNDC * depthSize, 0).r, _ZBufferParams);
// Exit if texel is out of color buffer
// Or if the texel is from an object in front of the object
if (refractedBackPointDepth < posInput.linearDepth
|| any(refractedBackPointNDC < 0.0)
|| any(refractedBackPointNDC > 1.0))
{
// Do nothing and don't update the hierarchy weight so we can fall back on refraction probe
return lighting;
}
// Map the roughness to the correct mip map level of the color pyramid
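// transparentSSMipLevel is assumed to be precomputed from the perceptual roughness in GetPreLightData.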
lighting.specularTransmitted = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, s_trilinear_clamp_sampler, refractedBackPointNDC * _GaussianPyramidColorMipSize.xy, preLightData.transparentSSMipLevel).rgb;
// Beer-Lambert law for absorption
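// transparentTransmittance is assumed to be precomputed as exp(-absorptionCoefficient * opticalDepth).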
lighting.specularTransmitted *= preLightData.transparentTransmittance;
float weight = 1.0;
UpdateLightingHierarchyWeights(hierarchyWeight, weight); // Shouldn't be needed, but safer in case we decide to change hierarchy priority
// We use specularFGD as an approximation of the fresnel effect (it also handles smoothness), so take the remaining energy for transmission
lighting.specularTransmitted *= (1.0 - preLightData.specularFGD) * weight;
#else
// No refraction, no need to go further
hierarchyWeight = 1.0;
#endif
break;
}
case GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION:
{
// TODO
break;
}
}
return lighting;
}
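// Illustrative only: a light loop would typically evaluate the screen space refraction with something like
// the following (variable names such as context, refractionHierarchyWeight and aggregateLighting are assumed,
// not taken from this file):
//   IndirectLighting ssLighting = EvaluateBSDF_SSLighting(context, V, posInput, preLightData, bsdfData,
//                                                         GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION, refractionHierarchyWeight);
//   AccumulateIndirectLighting(ssLighting, aggregateLighting);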
//-----------------------------------------------------------------------------