
Merge pull request #492 from Unity-Technologies/Improve-reflection-probe/Yibing-Project-2

Improve reflection probe

Committed via GitHub, 7 years ago
Current commit: e3daac5b
6 files changed, with 64 insertions and 61 deletions
  1. ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs (12 changed lines)
  2. ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs.hlsl (18 changed lines)
  3. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs (12 changed lines)
  4. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.hlsl (1 changed line)
  5. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl (12 changed lines)
  6. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl (70 changed lines)

ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs (12 changed lines)


}
+ // Guideline for reflection volume: In HDRenderPipeline we separate the projection volume (the proxy of the scene) from the influence volume (what pixel on the screen is affected)
+ // However we add the constrain that the shape of the projection and influence volume is the same (i.e if we have a sphere shape projection volume, we have a shape influence).
+ // It allow to have more coherence for the dynamic if in shader code.
+ // Users can also chose to not have any projection, in this case we use the property minProjectionDistance to minimize code change. minProjectionDistance is set to huge number
+ // that simulate effect of no shape projection
[GenerateHLSL]
public struct EnvLightData
{

public float blendDistance; // blend transition outside the volume
public Vector3 right;
- public int unused0;
+ // This is use in case we want to force infinite projection distance (i.e no projection);
+ public float minProjectionDistance;
- public float unused1;
+ public float unused0;
- public float unused2;
+ public float unused1;
};
// Usage of StencilBits.Lighting on 2 bits.
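The guideline comment added above is the heart of the change: the projection (proxy) volume and the influence volume share one shape, and "no projection" is emulated by a huge minProjectionDistance instead of a separate code path. A minimal HLSL sketch of why that works, using illustrative names rather than the pipeline's own helpers:

```hlsl
// Sketch only: why clamping the proxy intersection distance with a huge
// minProjectionDistance turns the parallax correction into a no-op.
float3 GetProbeSamplingDir(float3 positionWS, float3 R, float3 probePositionWS,
                           float proxyIntersectionDist, float minProjectionDistance)
{
    // With projection enabled, minProjectionDistance is 0.0 and dist is the real
    // ray/proxy intersection distance, so R is bent toward the capture point.
    // With projection disabled, minProjectionDistance is 65504.0 (the largest
    // finite half float); dist * R then dwarfs the probe offset and the result
    // points effectively along R again, i.e. no shape projection.
    float dist = max(proxyIntersectionDist, minProjectionDistance);
    return (positionWS + dist * R) - probePositionWS;
}
```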

ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs.hlsl (18 changed lines)


float3 up;
float blendDistance;
float3 right;
- int unused0;
+ float minProjectionDistance;
- float unused1;
+ float unused0;
- float unused2;
+ float unused1;
};
//

{
return value.right;
}
- int GetUnused0(EnvLightData value)
+ float GetMinProjectionDistance(EnvLightData value)
- return value.unused0;
+ return value.minProjectionDistance;
- float GetUnused1(EnvLightData value)
+ float GetUnused0(EnvLightData value)
- return value.unused1;
+ return value.unused0;
- float GetUnused2(EnvLightData value)
+ float GetUnused1(EnvLightData value)
- return value.unused2;
+ return value.unused1;
}

ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs (12 changed lines)


// CAUTION: localToWorld is the transform for the widget of the reflection probe. i.e the world position of the point use to do the cubemap capture (mean it include the local offset)
envLightData.positionWS = probe.localToWorld.GetColumn(3);
envLightData.envShapeType = EnvShapeType.None;
- if (probe.boxProjection != 0)
+ if (probe.boxProjection == 0)
+ // If user request to have no projection, then setup a high number for minProjectionDistance
+ // this will mimic infinite shape projection
+ envLightData.minProjectionDistance = 65504.0f;
}
else
{
envLightData.envShapeType = EnvShapeType.Box;
+ envLightData.minProjectionDistance = 0.0f;
}
// remove scale from the matrix (Scale in this matrix is use to scale the widget)
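The 65504.0f written on the C# side above is not arbitrary: it is the largest finite value a 16-bit half float can hold, so it behaves as "effectively infinite" while remaining representable if the value ever passes through half-precision storage or math. A one-liner to sanity-check that constant in HLSL:

```hlsl
// Sketch only: 0x7BFF is the bit pattern of the largest finite fp16 value.
// f16tof32 decodes it to 65504.0, the number used as the "no projection" distance.
float LargestFiniteHalf()
{
    return f16tof32(0x7BFF); // == 65504.0
}
```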

ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.hlsl (1 changed line)


float directAmbientOcclusion; // Ambient occlusion use for direct lighting (directional, punctual, area)
// Not visible from Material (user should not use these properties in Material file)
int sampleShadow;
int sampleReflection;
ShadowContext shadowContext;
};

ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl (12 changed lines)


// We store inverse AO so neutral is black. So either we sample inside or outside the texture it return 0 in case of neutral
context.indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, posInput.unPositionSS).x;
context.directAmbientOcclusion = lerp(1.0, context.indirectAmbientOcclusion, _AmbientOcclusionDirectLightStrenght);
context.sampleShadow = 0;
context.sampleReflection = 0;
context.shadowContext = InitShadowContext();
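The comment at the top of this hunk describes the convention for the screen-space AO target: it stores inverse AO, so a neutral texel, including an out-of-range load that returns 0, decodes back to "no occlusion". A small sketch of that decode and of the direct-lighting strength lerp, with placeholder parameter names standing in for the texture load and for _AmbientOcclusionDirectLightStrenght:

```hlsl
// Sketch only: decoding an "inverse AO" value where 0 means neutral.
void DecodeAmbientOcclusion(float inverseAOSample, float directLightStrength,
                            out float indirectAO, out float directAO)
{
    // A cleared or out-of-bounds texel reads as 0, which decodes to 1.0 (unoccluded),
    // so the neutral case needs no special handling.
    indirectAO = 1.0 - inverseAOSample;
    // Direct lighting is only partially darkened by screen-space AO; the strength
    // parameter blends between "ignore AO" (1.0) and the full indirect AO value.
    directAO = lerp(1.0, indirectAO, directLightStrength);
}
```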

{
#ifdef LIGHTLOOP_TILE_PASS
- // TODO: Convert the for loop below to a while on each type as we know we are sorted!
+ // TODO: Convert the for loop below to a while on each type as we know we are sorted and compare performance.
uint punctualLightStart;
uint punctualLightCount;
GetCountAndStart(posInput, LIGHTCATEGORY_PUNCTUAL, punctualLightStart, punctualLightCount);

float3 localDiffuseLighting, localSpecularLighting;
+ int punctualIndex = FetchIndex(punctualLightStart, i);
- EvaluateBSDF_Punctual( context, V, posInput, preLightData, _LightDatas[FetchIndex(punctualLightStart, i)], bsdfData,
+ EvaluateBSDF_Punctual( context, V, posInput, preLightData, _LightDatas[punctualIndex], bsdfData, _LightDatas[punctualIndex].lightType,
localDiffuseLighting, localSpecularLighting);
accLighting.punctualDiffuseLighting += localDiffuseLighting;

{
float3 localDiffuseLighting, localSpecularLighting;
- EvaluateBSDF_Punctual( context, V, posInput, preLightData, _LightDatas[i], bsdfData,
+ EvaluateBSDF_Punctual( context, V, posInput, preLightData, _LightDatas[i], bsdfData, _LightDatas[i].lightType,
localDiffuseLighting, localSpecularLighting);
accLighting.punctualDiffuseLighting += localDiffuseLighting;

#else
uint envLightIndex = i;
#endif
- EvaluateBSDF_Env(context, V, posInput, preLightData, _EnvLightDatas[envLightIndex], bsdfData, localDiffuseLighting, localSpecularLighting, weight);
+ EvaluateBSDF_Env(context, V, posInput, preLightData, _EnvLightDatas[envLightIndex], bsdfData, _EnvLightDatas[envLightIndex].envShapeType, localDiffuseLighting, localSpecularLighting, weight);
applyWeigthedIblLighting(localDiffuseLighting, localSpecularLighting, weight, accLighting.envDiffuseLighting, accLighting.envSpecularLighting, totalIblWeight);
}
}

// The sky is a single cubemap texture separate from the reflection probe texture array (different resolution and compression)
context.sampleReflection = SINGLE_PASS_CONTEXT_SAMPLE_SKY;
EnvLightData envLightSky = InitSkyEnvLightData(0); // The sky data are generated on the fly so the compiler can optimize the code
- EvaluateBSDF_Env(context, V, posInput, preLightData, envLightSky, bsdfData, localDiffuseLighting, localSpecularLighting, weight);
+ EvaluateBSDF_Env(context, V, posInput, preLightData, envLightSky, bsdfData, ENVSHAPETYPE_SKY, localDiffuseLighting, localSpecularLighting, weight);
applyWeigthedIblLighting(localDiffuseLighting, localSpecularLighting, weight, accLighting.envDiffuseLighting, accLighting.envSpecularLighting, totalIblWeight);
}
}
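The two call-site changes above show why EvaluateBSDF_Env now takes the shape as a separate argument: inside the tile loop it still receives the per-probe value (_EnvLightDatas[envLightIndex].envShapeType), but the sky path can pass the compile-time constant ENVSHAPETYPE_SKY, letting the compiler fold the shape branch away. A reduced sketch of that pattern, with _SKETCH names used so as not to duplicate the pipeline's constants:

```hlsl
// Sketch only: a literal shape argument lets the compiler specialize the function.
#define ENVSHAPETYPE_SPHERE_SKETCH 1
#define ENVSHAPETYPE_BOX_SKETCH    2
#define ENVSHAPETYPE_SKY_SKETCH    3

float3 ProjectSketch(int envShapeType, float3 R, float sphereDist, float boxDist)
{
    // When called as ProjectSketch(ENVSHAPETYPE_SKY_SKETCH, ...) both branches
    // below are dead code and get eliminated; when called with a value read from
    // a buffer, the branch stays dynamic, as the Note comment added in Lit.hlsl
    // below describes.
    if (envShapeType == ENVSHAPETYPE_SPHERE_SKETCH)
        return R * sphereDist; // stand-in for the sphere projection path
    else if (envShapeType == ENVSHAPETYPE_BOX_SKETCH)
        return R * boxDist;    // stand-in for the box projection path
    else
        return R;              // sky: no projection at all
}
```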

ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl (70 changed lines)


}
void EvaluateBSDF_Punctual( LightLoopContext lightLoopContext,
- float3 V, PositionInputs posInput, PreLightData preLightData, LightData lightData, BSDFData bsdfData,
+ float3 V, PositionInputs posInput,
+ PreLightData preLightData, LightData lightData, BSDFData bsdfData, int GPULightType,
- int lightType = lightData.lightType;
+ int lightType = GPULightType;
// All punctual light type in the same formula, attenuation is neutral depends on light type.
// light.positionWS is the normalize light direction in case of directional light and invSqrAttenuationRadius is 0
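The two comment lines above summarize how one punctual formula covers spot, point and directional lights: for a directional light, lightData.positionWS holds the normalized light direction and invSqrAttenuationRadius is 0, so the distance window degenerates to 1. A hedged sketch of a typical smooth windowed attenuation of that form (a common formulation, not copied from Lit.hlsl):

```hlsl
// Sketch only: one distance attenuation shared by all punctual light types.
// unL is the unnormalized vector toward the light (a unit direction for
// directional lights); invSqrAttenuationRadius is 1/radius^2 (0 = no radius).
float SmoothWindowedAttenuation(float3 unL, float invSqrAttenuationRadius)
{
    float sqrDist = dot(unL, unL);
    // Window term: for a directional light invSqrAttenuationRadius is 0, so the
    // factor is 0 and the window is exactly 1, i.e. the radius falloff is neutral.
    float factor = sqrDist * invSqrAttenuationRadius;
    float smoothWindow = saturate(1.0 - factor * factor);
    smoothWindow *= smoothWindow;
    // Inverse-square falloff, clamped near the light position; for a directional
    // light sqrDist is 1 (unit direction), so this term is neutral as well.
    return smoothWindow / max(sqrDist, 0.0001);
}
```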

#endif // LIT_DISPLAY_REFERENCE_AREA
}
+ void EvaluateBSDF_Area( LightLoopContext lightLoopContext,
+ float3 V, PositionInputs posInput,
+ PreLightData preLightData, LightData lightData, BSDFData bsdfData, int GPULightType,
+ out float3 diffuseLighting, out float3 specularLighting)
{
if (GPULightType == GPULIGHTTYPE_LINE)
{

// _preIntegratedFGD and _CubemapLD are unique for each BRDF
void EvaluateBSDF_Env( LightLoopContext lightLoopContext,
- float3 V, PositionInputs posInput, PreLightData preLightData, EnvLightData lightData, BSDFData bsdfData,
+ float3 V, PositionInputs posInput,
+ PreLightData preLightData, EnvLightData lightData, BSDFData bsdfData, int envShapeType,
weight = float2(0.0, 1.0);
float3 positionWS = posInput.positionWS;
#ifdef LIT_DISPLAY_REFERENCE_IBL

*/
diffuseLighting = float3(0.0, 0.0, 0.0);
weight = float2(0.0, 1.0);
#else
// also think about how such a loop can handle 2 cubemap at the same time as old unity. Macro can allow to do that
// but we need to have UNITY_SAMPLE_ENV_LOD replace by a true function instead that is define by the lighting arcitecture.
// Also not sure how to deal with 2 intersection....
// Box and sphere are related to light property (but we have also distance based roughness etc...)
- // In this code we redefine a bit the behavior of the reflcetion proble. We separate the projection volume (the proxy of the scene) form the influence volume (what pixel on the screen is affected)
+ // Guideline for reflection volume: In HDRenderPipeline we separate the projection volume (the proxy of the scene) from the influence volume (what pixel on the screen is affected)
+ // However we add the constrain that the shape of the projection and influence volume is the same (i.e if we have a sphere shape projection volume, we have a shape influence).
+ // It allow to have more coherence for the dynamic if in shader code.
+ // Users can also chose to not have any projection, in this case we use the property minProjectionDistance to minimize code change. minProjectionDistance is set to huge number
+ // that simulate effect of no shape projection
+ // 1. First determine the projection volume
- float3 R = preLightData.iblDirWS;
- float3 coatR = preLightData.coatIblDirWS;
- // This mean that location and oritention matter. So after intersection of proxy volume we need to convert back to world.
+ // This mean that location and orientation matter. So after intersection of proxy volume we need to convert back to world.
+ float3 R = preLightData.iblDirWS;
+ float3 coatR = preLightData.coatIblDirWS;
- if (lightData.envShapeType == ENVSHAPETYPE_SPHERE)
+ // Note: using envShapeType instead of lightData.envShapeType allow to make compiler optimization in case the type is know (like for sky)
+ if (envShapeType == ENVSHAPETYPE_SPHERE)
- // 1. First process the projection
+ dist = max(dist, lightData.minProjectionDistance); // Setup projection to infinite if requested (mean no projection shape)
// We can reuse dist calculate in LS directly in WS as there is no scaling. Also the offset is already include in lightData.positionWS
R = (positionWS + dist * R) - lightData.positionWS;
// Test again for clear code

dist = SphereRayIntersectSimple(positionLS, dirLS, sphereOuterDistance);
coatR = (positionWS + dist * coatR) - lightData.positionWS;
}
- // 2. Process the influence
- float distFade = max(length(positionLS) - lightData.innerDistance.x, 0.0);
- weight.y = saturate(1.0 - distFade / max(lightData.blendDistance, 0.0001)); // avoid divide by zero
- else if (lightData.envShapeType == ENVSHAPETYPE_BOX)
+ else if (envShapeType == ENVSHAPETYPE_BOX)
+ dist = max(dist, lightData.minProjectionDistance); // Setup projection to infinite if requested (mean no projection shape)
// No need to normalize for fetching cubemap
// We can reuse dist calculate in LS directly in WS as there is no scaling. Also the offset is already include in lightData.positionWS
R = (positionWS + dist * R) - lightData.positionWS;

dist = BoxRayIntersectSimple(positionLS, dirLS, -boxOuterDistance, boxOuterDistance);
coatR = (positionWS + dist * coatR) - lightData.positionWS;
}
}
+ // 2. Apply the influence volume (Box volume is used for culling whatever the influence shape)
+ // TODO: In the future we could have an influence volume inside the projection volume (so with a different transform, in this case we will need another transform)
+ weight.y = 1.0;
+ if (lightData.envShapeType == ENVSHAPETYPE_SPHERE)
+ {
+ float distFade = max(length(positionLS) - lightData.innerDistance.x, 0.0);
+ weight.y = saturate(1.0 - distFade / max(lightData.blendDistance, 0.0001)); // avoid divide by zero
+ }
+ else if (lightData.envShapeType == ENVSHAPETYPE_BOX ||
+ lightData.envShapeType == ENVSHAPETYPE_NONE)
+ {
+ // Influence volume
+ // Calculate falloff value, so reflections on the edges of the volume would gradually blend to previous reflection.
+ float distFade = DistancePointBox(positionLS, -lightData.innerDistance, lightData.innerDistance);
+ weight.y = saturate(1.0 - distFade / max(lightData.blendDistance, 0.0001)); // avoid divide by zero

weight.x = 0.0;
weight.y = Smoothstep01(weight.y);
float3 F = 1.0;
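The new step 2 computes the influence blend: the distance from the shaded point to the inner volume is turned into a linear fade over blendDistance and then remapped by Smoothstep01. A condensed sketch of the box case, with DistancePointBoxSketch standing in for the pipeline's DistancePointBox helper:

```hlsl
// Sketch only: influence-volume blend weight for the box case, condensed from the
// hunk above. positionLS is the shaded point in probe local space, innerDistance
// the half-extents of the full-weight inner box, blendDistance the fade range.
float DistancePointBoxSketch(float3 p, float3 boxMin, float3 boxMax)
{
    // Distance from p to the box surface; 0.0 whenever p is inside the box.
    return length(max(max(boxMin - p, p - boxMax), float3(0.0, 0.0, 0.0)));
}

float ComputeInfluenceWeight(float3 positionLS, float3 innerDistance, float blendDistance)
{
    float distFade = DistancePointBoxSketch(positionLS, -innerDistance, innerDistance);
    float weight   = saturate(1.0 - distFade / max(blendDistance, 0.0001)); // avoid divide by zero
    // Smoothstep the linear fade so the transition eases in and out, matching the
    // final weight.y = Smoothstep01(weight.y) remap.
    return smoothstep(0.0, 1.0, weight);
}
```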
