// Color pyramid (width, height, lodCount, unused)
float4 _GaussianPyramidColorMipSize;
TEXTURE2D(_GaussianPyramidColorTexture);
SAMPLER2D(sampler_GaussianPyramidColorTexture);
SAMPLER2D(sampler_PyramidDepthTexture);
SamplerState ltc_trilinear_clamp_sampler;
// TODO: This should be set in a constant buffer at pass frequency (together with _ScreenSize)
TEXTURE2D(_PreIntegratedFGD);
TEXTURE2D_ARRAY(_LtcData); // We pack the 3 Ltc data inside a texture array
// Encode normal on 20bit with oct compression + 2bit of sign
float2 octNormalWS = PackNormalOctEncode((surfaceData.materialId == MATERIALID_LIT_CLEAR_COAT) ? surfaceData.coatNormalWS : surfaceData.normalWS);
// To have more precision encode the sign of xy in a separate uint
uint octNormalSign = (octNormalWS.x < 0.0 ? 1 : 0) | (octNormalWS.y < 0.0 ? 2 : 0);
// Store octNormalSign on two bits with perceptualRoughness
outGBuffer1 = float4(abs(octNormalWS), PackFloatInt10bit(PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothness), octNormalSign, 4.0), PackMaterialId(surfaceData.materialId));
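// Illustrative round trip of the sign-bit trick above (hypothetical helper, not part of
// the GBuffer code): abs(oct) plus two negativity bits losslessly restore the signed
// octahedral coordinates, so the signs can be packed elsewhere (here, with roughness).
float3 RoundTripOctNormalWithSignBits_Sketch(float3 N)
{
    float2 oct      = PackNormalOctEncode(N);                        // signed, in [-1, 1]
    uint   signBits = (oct.x < 0.0 ? 1 : 0) | (oct.y < 0.0 ? 2 : 0); // 2 bits
    float2 stored   = abs(oct);                                      // what the GBuffer keeps
    float2 restored = float2((signBits & 1) ? -stored.x : stored.x,
                             (signBits & 2) ? -stored.y : stored.y); // signs re-applied at decode
    return UnpackNormalOctEncode(restored);                          // == N up to oct precision
}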
{
// Encode tangent on 16bit with oct compression
float2 octTangentWS = PackNormalOctEncode(surfaceData.tangentWS);
// To have more precision encode the sign of xy in a separate uint
uint octTangentSign = (octTangentWS.x < 0.0 ? 1 : 0) | (octTangentWS.y < 0.0 ? 2 : 0);
outGBuffer2 = float4(abs(octTangentWS), surfaceData.anisotropy, PackFloatInt8bit(surfaceData.metallic, octTangentSign, 4.0));
}
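// Similar sketch for the metallic channel (hypothetical helper, illustration only):
// PackFloatInt8bit stores metallic together with a small integer (here the 2 tangent
// sign bits, maxi = 4.0) in one 8-bit value; UnpackFloatInt8bit recovers both at decode.
void RoundTripMetallicAndSignBits_Sketch(float metallic, uint tangentSignBits,
                                         out float metallicOut, out int signBitsOut)
{
    float packedValue = PackFloatInt8bit(metallic, tangentSignBits, 4.0); // as written to outGBuffer2.a
    UnpackFloatInt8bit(packedValue, 4.0, metallicOut, signBitsOut);       // as read back in DecodeFromGBuffer
}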
else if (bsdfData.materialId == MATERIALID_LIT_CLEAR_COAT)
{
int octNormalSign;
UnpackFloatInt10bit(inGBuffer1.b, 4.0, bsdfData.perceptualRoughness, octNormalSign);
inGBuffer1.r = (octNormalSign & 1) ? -inGBuffer1.r : inGBuffer1.r;
inGBuffer1.g = (octNormalSign & 2) ? -inGBuffer1.g : inGBuffer1.g;
bsdfData.normalWS = UnpackNormalOctEncode(float2(inGBuffer1.r, inGBuffer1.g));
bsdfData.roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
}
else if (bsdfData.materialId == MATERIALID_LIT_ANISO && HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_ANISO))
{
float metallic;
int octTangentSign;
UnpackFloatInt8bit(inGBuffer2.a, 4.0, metallic, octTangentSign);
inGBuffer2.r = (octTangentSign & 1) ? -inGBuffer2.r : inGBuffer2.r;
inGBuffer2.g = (octTangentSign & 2) ? -inGBuffer2.g : inGBuffer2.g;
float3 tangentWS = UnpackNormalOctEncode(inGBuffer2.rg);
float anisotropy = inGBuffer2.b;
FillMaterialIdAnisoData(bsdfData.roughness, bsdfData.normalWS, tangentWS, anisotropy, bsdfData);
}
float3 refractedBackPointWS = float3(0.0, 0.0, 0.0);
float opticalDepth = 0.0;
uint2 depthSize = uint2(_PyramidDepthMipSize.xy);
// For all refraction approximations, to calculate the refracted point in world space,
// we approximate the scene as a plane (the back plane) with normal -V at the depth of the hit point.
// (This avoids raymarching the depth texture to find the refracted point.)
float3 R = refract(-V, bsdfData.normalWS, 1.0 / bsdfData.ior);
// Get the depth of the approximated back plane
float pyramidDepth = LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, posInput.positionSS * (depthSize >> 2), 2).r;
float depth = LinearEyeDepth(pyramidDepth, _ZBufferParams);
// Distance from point to the back plane
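// A minimal sketch (hypothetical helper, not the code elided above) of the plane
// approximation: starting from the surface position, march along the refracted ray R
// until it crosses the back plane of normal -V located 'distanceToBackPlane' behind
// the surface along the view direction.
float3 RefractedBackPointOnPlane_Sketch(float3 positionWS, float3 V, float3 R, float distanceToBackPlane)
{
    float rayLength = distanceToBackPlane / max(dot(R, -V), 1e-5); // ray-plane intersection
    return positionWS + rayLength * R;
}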
// For the sphere refraction model, the light is refracted twice: in and out of the tangent sphere.
// Calculate the screen space coordinates of the refracted point on the back plane
float4 refractedBackPointCS = mul(_ViewProjMatrix, float4(refractedBackPointWS, 1.0));
float2 refractedBackPointSS = ComputeScreenSpacePosition(refractedBackPointCS);
float refractedBackPointDepth = LinearEyeDepth(LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, refractedBackPointSS * depthSize, 0).r, _ZBufferParams);
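// For reference, the clip -> screen step above amounts to a perspective divide followed by a
// [-1, 1] -> [0, 1] remap (the exact y convention depends on ComputeScreenSpacePosition and
// the target graphics API):
// float2 ndc = refractedBackPointCS.xy / refractedBackPointCS.w; // NDC, in [-1, 1]
// float2 uv  = ndc * 0.5 + 0.5;                                  // screen UV, may need a y flip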
// Exit if the texel is outside the color buffer,
// or if the texel belongs to an object in front of the refracting object
{
diffuseLighting = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, ltc_trilinear_clamp_sampler, posInput.positionSS, 0.0).rgb;
float mipLevel = PerceptualRoughnessToMipmapLevel(bsdfData.perceptualRoughness, uint(_GaussianPyramidColorMipSize.z));
diffuseLighting = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, ltc_trilinear_clamp_sampler, refractedBackPointSS, mipLevel).rgb;
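// For intuition only: rougher surfaces read a blurrier (higher) mip of the Gaussian color
// pyramid, clamped to the LOD count stored in _GaussianPyramidColorMipSize.z. A simple
// stand-in mapping (the actual PerceptualRoughnessToMipmapLevel curve may differ) would be
//   mipLevel = perceptualRoughness * (lodCount - 1).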
// Beer-Lambert law for absorption
float3 transmittance = exp(-bsdfData.absorptionCoefficient * opticalDepth);
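// Worked example (assuming world units are meters): with absorptionCoefficient = (0.5, 1.0, 2.0)/m
// and opticalDepth = 0.1 m, transmittance = exp(-(0.05, 0.10, 0.20)) ~= (0.95, 0.90, 0.82),
// so the refracted color is tinted toward the least absorbed (red) channel.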
// Use perfect flat transparency when we cannot fetch the correct pixel color for the refracted point
diffuseLighting = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, ltc_trilinear_clamp_sampler, posInput.positionSS, 0.0).rgb;
#endif
}