#include "UnityStandardCore.cginc"
#include "ShaderBase.h"
#include "../../fptl/LightDefinitions.cs.hlsl"
#define MAX_SHADOW_LIGHTS 10
#define MAX_SHADOWMAP_PER_LIGHT 6
struct VertexOutputForwardNew
{
float4 pos : SV_POSITION;
float4 tex : TEXCOORD0;
half4 ambientOrLightmapUV : TEXCOORD1; // SH or Lightmap UV
half4 tangentToWorldAndParallax[3] : TEXCOORD2; // [3x3:tangentToWorld | 1x3:empty]
float4 posWorld : TEXCOORD8;
float4 posView : TEXCOORD9;
LIGHTING_COORDS(5,6)
UNITY_FOG_COORDS(7)
};
VertexOutputForwardNew vertForward(VertexInput v) // NOTE: enclosing vertex function reconstructed; the name vertForward is assumed
{
VertexOutputForwardNew o;
float4 posWorld = mul(unity_ObjectToWorld, v.vertex);
o.posWorld = posWorld;
o.posView = mul(unity_WorldToCamera, posWorld);
o.pos = UnityObjectToClipPos(v.vertex);
o.tex = TexCoords(v);
return o;
}
static FragmentCommonData gdata; // per-fragment material data, set up in the fragment shader and read by the Eval* helpers below
static float occlusion; // per-fragment ambient occlusion term, likewise set before the light/reflection lists are evaluated
struct LightInput
{
float4 lightData;
half4 pos;
half4 color;
half4 lightDir;
float4x4 lightMat;
float4x4 worldToLightMat;
};

// reflections
UNITY_DECLARE_ABSTRACT_CUBE_ARRAY(_reflCubeTextures);
UNITY_DECLARE_TEXCUBE(_reflRootCubeTexture);
uniform float _reflRootHdrDecodeMult;
uniform float _reflRootHdrDecodeExp;

StructuredBuffer<SFiniteLightData> g_vProbeData;

// ---- Utilities ---- //
void GetCountAndStart(out uint start, out uint nrLights, uint model)
{
start = model==REFLECTION_LIGHT ? g_numLights : 0; // offset by numLights entries
nrLights = model==REFLECTION_LIGHT ? g_numReflectionProbes : g_numLights;
}
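// e.g. model==REFLECTION_LIGHT selects the index range [g_numLights, g_numLights + g_numReflectionProbes),
// while any other model selects [0, g_numLights); the offset suggests lights and reflection probes sit in one packed list.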
float GetLinearZFromSVPosW(float posW)
{
#if USE_LEFTHAND_CAMERASPACE
float linZ = posW;
#else
float linZ = -posW;
#endif
return linZ;
}
// ---- Reflections ---- //
half3 Unity_GlossyEnvironment (UNITY_ARGS_ABSTRACT_CUBE_ARRAY(tex), int sliceIndex, half4 hdr, Unity_GlossyEnvironmentData glossIn);
half3 distanceFromAABB(half3 p, half3 aabbMin, half3 aabbMax)
{
return max(max(p - aabbMax, aabbMin - p), half3(0.0, 0.0, 0.0));
}
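// e.g. a point inside the AABB returns (0,0,0), so length(distanceFromAABB(p, mn, mx)) measures how far p lies outside the box.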
float3 EvalIndirectSpecular(UnityLight light, UnityIndirect ind)
{
return occlusion * UNITY_BRDF_PBS(gdata.diffColor, gdata.specColor, gdata.oneMinusReflectivity, gdata.smoothness, gdata.normalWorld, -gdata.eyeVec, light, ind);
}
float3 RenderReflectionList(uint start, uint numReflProbes, float3 vP, float3 vNw, float3 Vworld, float smoothness)
{
float3 worldNormalRefl = reflect(-Vworld, vNw);
float3 vspaceRefl = mul((float3x3) g_mWorldToView, worldNormalRefl).xyz;
float percRoughness = SmoothnessToPerceptualRoughness(smoothness);
UnityLight light;
light.color = 0;
light.dir = 0;
float3 ints = 0;
// root ibl begin
{
Unity_GlossyEnvironmentData g;
g.roughness = percRoughness;
g.reflUVW = worldNormalRefl;
half3 env0 = Unity_GlossyEnvironment(UNITY_PASS_TEXCUBE(_reflRootCubeTexture), float4(_reflRootHdrDecodeMult, _reflRootHdrDecodeExp, 0.0, 0.0), g);
//half3 env0 = Unity_GlossyEnvironment(UNITY_PASS_TEXCUBEARRAY(_reflCubeTextures), _reflRootSliceIndex, float4(_reflRootHdrDecodeMult, _reflRootHdrDecodeExp, 0.0, 0.0), g);
UnityIndirect ind;
ind.diffuse = 0;
ind.specular = env0;// * data.occlusion;
ints = EvalIndirectSpecular(light, ind);
}
// root ibl end
for (uint uIndex=0; uIndex<numReflProbes; uIndex++)
{
SFiniteLightData lgtDat = g_vProbeData[uIndex];
float3 vLp = lgtDat.lightPos.xyz;
float3 vecToSurfPos = vP - vLp; // vector from reflection volume to surface position in camera space
float3 posInReflVolumeSpace = float3( dot(vecToSurfPos, lgtDat.lightAxisX), dot(vecToSurfPos, lgtDat.lightAxisY), dot(vecToSurfPos, lgtDat.lightAxisZ) );
float blendDistance = lgtDat.probeBlendDistance;//unity_SpecCube1_ProbePosition.w; // will be set to blend distance for this probe
float3 sampleDir;
if((lgtDat.flags&IS_BOX_PROJECTED)!=0)
{
// For box projection, use expanded bounds as they are rendered; otherwise
// box projection artifacts when outside of the box.
//float4 boxMin = unity_SpecCube0_BoxMin - float4(blendDistance,blendDistance,blendDistance,0);
//float4 boxMax = unity_SpecCube0_BoxMax + float4(blendDistance,blendDistance,blendDistance,0);
//sampleDir = BoxProjectedCubemapDirection (worldNormalRefl, worldPos, unity_SpecCube0_ProbePosition, boxMin, boxMax);
float4 boxOuterDistance = float4( lgtDat.boxInnerDist + float3(blendDistance, blendDistance, blendDistance), 0.0 );
#if 0
// if rotation is NOT supported
sampleDir = BoxProjectedCubemapDirection(worldNormalRefl, posInReflVolumeSpace, float4(lgtDat.localCubeCapturePoint, 1.0), -boxOuterDistance, boxOuterDistance);
#else
float3 volumeSpaceRefl = float3( dot(vspaceRefl, lgtDat.lightAxisX), dot(vspaceRefl, lgtDat.lightAxisY), dot(vspaceRefl, lgtDat.lightAxisZ) );
float3 vPR = BoxProjectedCubemapDirection(volumeSpaceRefl, posInReflVolumeSpace, float4(lgtDat.localCubeCapturePoint, 1.0), -boxOuterDistance, boxOuterDistance); // Volume space corrected reflection vector
sampleDir = mul( (float3x3) g_mViewToWorld, vPR.x*lgtDat.lightAxisX + vPR.y*lgtDat.lightAxisY + vPR.z*lgtDat.lightAxisZ );
#endif
}
else
sampleDir = worldNormalRefl;
Unity_GlossyEnvironmentData g;
g.roughness = percRoughness;
g.reflUVW = sampleDir;
half3 env0 = Unity_GlossyEnvironment(UNITY_PASS_ABSTRACT_CUBE_ARRAY(_reflCubeTextures), lgtDat.sliceIndex, float4(lgtDat.lightIntensity, lgtDat.decodeExp, 0.0, 0.0), g);
UnityIndirect ind;
ind.diffuse = 0;
ind.specular = env0;// * data.occlusion;
//half3 rgb = UNITY_BRDF_PBS(0, data.specularColor, oneMinusReflectivity, data.smoothness, data.normalWorld, vWSpaceVDir, light, ind).rgb;
half3 rgb = EvalIndirectSpecular(light, ind);
// Calculate falloff value, so reflections on the edges of the Volume would gradually blend to previous reflection.
// Also this ensures that pixels not located in the reflection Volume AABB won't
// accidentally pick up reflections from this Volume.
//half3 distance = distanceFromAABB(worldPos, unity_SpecCube0_BoxMin.xyz, unity_SpecCube0_BoxMax.xyz);
half3 distance = distanceFromAABB(posInReflVolumeSpace, -lgtDat.boxInnerDist, lgtDat.boxInnerDist);
half falloff = saturate(1.0 - length(distance)/blendDistance);
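// e.g. a point on the inner box face gives length(distance)==0 and falloff 1 (full probe contribution),
// while a point blendDistance or further outside gives falloff 0 and keeps the previously accumulated result.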
ints = lerp(ints, rgb, falloff);
}
return ints;
}
float3 GetViewPosFromLinDepth(float2 v2ScrPos, float fLinDepth)
{
float fSx = g_mScrProjection[0].x;
//float fCx = g_mScrProjection[2].x;
float fCx = g_mScrProjection[0].z;
float fSy = g_mScrProjection[1].y;
//float fCy = g_mScrProjection[2].y;
float fCy = g_mScrProjection[1].z;

#if USE_LEFTHAND_CAMERASPACE
return fLinDepth*float3( ((v2ScrPos.x-fCx)/fSx), ((v2ScrPos.y-fCy)/fSy), 1.0 );
#else
return fLinDepth*float3( -((v2ScrPos.x+fCx)/fSx), -((v2ScrPos.y+fCy)/fSy), 1.0 );
#endif
}

half3 Unity_GlossyEnvironment (UNITY_ARGS_ABSTRACT_CUBE_ARRAY(tex), int sliceIndex, half4 hdr, Unity_GlossyEnvironmentData glossIn)
{
#if UNITY_GLOSS_MATCHES_MARMOSET_TOOLBAG2 && (SHADER_TARGET >= 30)
// TODO: remove pow, store cubemap mips differently
half perceptualRoughness = pow(glossIn.roughness, 3.0/4.0);
#else
half perceptualRoughness = glossIn.roughness; // MM: switched to this
#endif
//perceptualRoughness = sqrt(sqrt(2/(64.0+2))); // spec power to the square root of real roughness

#if 0
float m = perceptualRoughness*perceptualRoughness; // m is the real roughness parameter
const float fEps = 1.192092896e-07F; // smallest such that 1.0+FLT_EPSILON != 1.0 (+1e-4h is NOT good here. is visibly very wrong)
float n = (2.0/max(fEps, m*m))-2.0; // remap to spec power. See eq. 21 in --> https://dl.dropboxusercontent.com/u/55891920/papers/mm_brdf.pdf
n /= 4; // remap from n_dot_h formulation to n_dot_r. See section "Pre-convolved Cube Maps vs Path Tracers" --> https://s3.amazonaws.com/docs.knaldtech.com/knald/1.0.0/lys_power_drops.html
perceptualRoughness = pow( 2/(n+2), 0.25); // remap back to square root of real roughness
#else
// MM: came up with a surprisingly close approximation to what the #if 0'ed out code above does.
perceptualRoughness = perceptualRoughness*(1.7 - 0.7*perceptualRoughness);
#endif

half mip = perceptualRoughness * UNITY_SPECCUBE_LOD_STEPS;
half4 rgbm = UNITY_SAMPLE_ABSTRACT_CUBE_ARRAY_LOD(tex, float4(glossIn.reflUVW.xyz, sliceIndex), mip);

return DecodeHDR(rgbm, hdr);
}
#define INITIALIZE_LIGHT(light, lightIndex) \
light.lightData = gPerLightData[lightIndex]; \
light.pos = gLightPos[lightIndex]; \
light.color = gLightColor[lightIndex]; \
light.lightDir = gLightDirection[lightIndex]; \
light.lightMat = gLightMatrix[lightIndex]; \
light.worldToLightMat = gWorldToLightMatrix[lightIndex];
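// Example use inside a light loop (hypothetical variables li and uLgtIndex):
//     LightInput li;
//     INITIALIZE_LIGHT(li, uLgtIndex);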
float3 ExecuteReflectionList(out uint numReflectionProbesProcessed, uint2 pixCoord, float3 vP, float3 vNw, float3 Vworld, float smoothness)
{
uint start = 0, numReflectionProbes = 0;
GetCountAndStart(start, numReflectionProbes, REFLECTION_LIGHT);
numReflectionProbesProcessed = numReflectionProbes; // mainly for debugging/heat maps
return RenderReflectionList(start, numReflectionProbes, vP, vNw, Vworld, smoothness);
}
// ---- Lights ---- //
float3 RenderLightList(uint start, uint numLights, float3 vPw, float3 Vworld)
{
float3 ints = 0;

// NOTE: the per-light accumulation loop (presumably fetching each light via INITIALIZE_LIGHT into a LightInput) is elided in this excerpt.

return ints;
}
float3 ExecuteLightList(out uint numLightsProcessed, uint2 pixCoord, float3 vPw, float3 Vworld)
{
uint start = 0, numLights = 0;
GetCountAndStart(start, numLights, DIRECT_LIGHT);
numLightsProcessed = numLights; // the out parameter must be written; mainly for debugging/heat maps
return RenderLightList(start, numLights, vPw, Vworld);
}
// fragment shader main
half4 fragForward(VertexOutputForwardNew i) : SV_Target
{
//float linZ = GetLinearZFromSVPosW(i.pos.w); // matching script side where camera space is right handed.
float3 vP = i.posView.xyz;
float3 vPw = i.posWorld.xyz;
float3 Vworld = normalize(_WorldSpaceCameraPos.xyz - vPw);

// NOTE: the material/GI setup is elided in this excerpt; gdata, occlusion and gi are assumed
// to be filled in here (e.g. via FragmentSetup/Occlusion/FragmentGI from UnityStandardCore).
UnityGI gi = (UnityGI)0;
uint2 pixCoord = (uint2) i.pos.xy;
uint numLightsProcessed = 0, numReflectionsProcessed = 0;
float3 res = 0;

// direct lighting
res += ExecuteLightList(numLightsProcessed, pixCoord, vPw, Vworld);

// specular GI
res += ExecuteReflectionList(numReflectionsProcessed, pixCoord, vP, gdata.normalWorld, Vworld, gdata.smoothness);

// diffuse GI
res += UNITY_BRDF_PBS (gdata.diffColor, gdata.specColor, gdata.oneMinusReflectivity, gdata.smoothness, gdata.normalWorld, -gdata.eyeVec, gi.light, gi.indirect).xyz;

return half4(res, 1.0); // NOTE: alpha/fog handling from the full shader is omitted here.
}