
[PlanarReflection] Use Object space vp matrices to sample in planar reflection texture

/main
Frédéric Vauchelles, 7 years ago
Current commit
4790fa79
Showing 3 changed files with 4 additions and 20 deletions
  1. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (1 change)
  2. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (19 changes)
  3. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl (4 changes)
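The gist of the change: previously the shader received both a world-space capture view-projection matrix (_Env2DCaptureVP) and the capture position (_Env2DCapturePositionWS), and had to add the capture position back before projecting. This commit folds the capture-position translation into the matrix on the CPU, so the shader can project the capture-relative ("object space") coordinate directly and the position array can be dropped. A minimal C# sketch of the equivalence (illustrative names, not code from this commit):

    using UnityEngine;

    static class PlanarCaptureVPSketch
    {
        // Old scheme: the shader adds the capture position back before projecting
        // with a world-space view-projection matrix.
        public static Vector4 ProjectOld(Matrix4x4 worldVP, Vector3 capturePos, Vector3 captureRelativePos)
        {
            Vector3 worldPos = capturePos + captureRelativePos;
            return worldVP * new Vector4(worldPos.x, worldPos.y, worldPos.z, 1f);
        }

        // New scheme: the translation is pre-multiplied into the matrix once on the CPU,
        // so the shader projects the capture-relative position as-is.
        public static Vector4 ProjectNew(Matrix4x4 worldVP, Vector3 capturePos, Vector3 captureRelativePos)
        {
            Matrix4x4 captureVP = worldVP * Matrix4x4.Translate(capturePos);
            return captureVP * new Vector4(captureRelativePos.x, captureRelativePos.y, captureRelativePos.z, 1f);
        }
        // Both methods return the same clip-space point for any input.
    }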

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (1 change)


public static readonly int _CookieCubeTextures = Shader.PropertyToID("_CookieCubeTextures");
public static readonly int _EnvCubemapTextures = Shader.PropertyToID("_EnvCubemapTextures");
public static readonly int _Env2DTextures = Shader.PropertyToID("_Env2DTextures");
public static readonly int _Env2DCapturePositionWS = Shader.PropertyToID("_Env2DCapturePositionWS");
public static readonly int _Env2DCaptureVP = Shader.PropertyToID("_Env2DCaptureVP");
public static readonly int _DirectionalLightDatas = Shader.PropertyToID("_DirectionalLightDatas");
public static readonly int _DirectionalLightCount = Shader.PropertyToID("_DirectionalLightCount");

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (19 changes)


ReflectionProbeCache m_ReflectionProbeCache;
TextureCache2D m_CookieTexArray;
TextureCacheCubemap m_CubeCookieTexArray;
List<Vector4> m_Env2DCapturePositionWS = new List<Vector4>();
List<Matrix4x4> m_Env2DCaptureVP = new List<Matrix4x4>();
public class LightList

var gpuProj = GL.GetGPUProjectionMatrix(projection, true); // Had to change this from 'false'
var gpuView = worldToCamera;
var vp = gpuProj * gpuView;
m_Env2DCapturePositionWS.Add(capturePosition);
// We transform it to object space by translating the capturePosition
var vp = gpuProj * gpuView * Matrix4x4.Translate(capturePosition);
m_Env2DCaptureVP.Add(vp);
break;
}
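For reference, the composition used in the hunk above: the projection matrix is converted to the API-specific GPU convention with renderIntoTexture set to true (the planar capture is rendered into a texture), multiplied by the world-to-camera matrix, and then by a translation to the capture position. A self-contained sketch, assuming the matrices come from a capture camera (the helper and parameter names are illustrative):

    using UnityEngine;

    static class PlanarCaptureMatrix
    {
        // Illustrative helper, not part of the commit: builds the capture-space VP
        // the same way the hunk above does.
        public static Matrix4x4 BuildCaptureVP(Camera captureCamera, Vector3 capturePosition)
        {
            // API-specific GPU projection; 'true' = rendering into a texture
            // (accounts for flipped-Y / depth-range conventions per graphics API).
            var gpuProj = GL.GetGPUProjectionMatrix(captureCamera.projectionMatrix, true);
            var gpuView = captureCamera.worldToCameraMatrix;

            // World-space VP, then move the origin to the capture position so the
            // matrix maps capture-relative ("object space") positions to clip space.
            return gpuProj * gpuView * Matrix4x4.Translate(capturePosition);
        }
    }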

m_enableBakeShadowMask = false;
m_Env2DCaptureVP.Clear();
m_Env2DCapturePositionWS.Clear();
m_lightList.Clear();
Vector3 camPosWS = camera.transform.position;

}
}
}
// We make the light position camera-relative as late as possible in order
// to allow the preceding code to work with the absolute world space coordinates.
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
for (var i = 0; i < m_Env2DCaptureVP.Count; i++)
m_Env2DCaptureVP[i] = m_Env2DCaptureVP[i] * Matrix4x4.Translate(camPosWS);
for (var i = 0; i < m_Env2DCapturePositionWS.Count; i++)
m_Env2DCapturePositionWS[i] = (Vector3)m_Env2DCapturePositionWS[i] - camPosWS;
}
}
m_lightCount = m_lightList.lights.Count + m_lightList.envLights.Count;
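The camera-relative block above relies on the fact that post-multiplying a view-projection matrix by Translate(camPosWS) makes it accept camera-relative positions: for P_rel = P_world - camPosWS, (VP * Translate(camPosWS)) * P_rel equals VP * P_world. A small numeric check of that identity (illustrative, not part of the commit):

    using UnityEngine;

    static class CameraRelativeVPCheck
    {
        // Verifies that shifting the matrix by the camera position is equivalent
        // to projecting the absolute world-space point with the original matrix.
        public static bool Holds(Matrix4x4 vp, Vector3 camPosWS, Vector3 posRelativeToCamera)
        {
            Vector4 viaShiftedVP = (vp * Matrix4x4.Translate(camPosWS))
                * new Vector4(posRelativeToCamera.x, posRelativeToCamera.y, posRelativeToCamera.z, 1f);

            Vector3 posWS = posRelativeToCamera + camPosWS;
            Vector4 viaWorldVP = vp * new Vector4(posWS.x, posWS.y, posWS.z, 1f);

            return (viaShiftedVP - viaWorldVP).sqrMagnitude < 1e-4f;
        }
    }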

cmd.SetGlobalTexture(HDShaderIDs._EnvCubemapTextures, m_ReflectionProbeCache.GetTexCache());
cmd.SetGlobalTexture(HDShaderIDs._Env2DTextures, m_ReflectionPlanarProbeCache.GetTexCache());
if (m_Env2DCaptureVP.Count > 0)
{
cmd.SetGlobalVectorArray(HDShaderIDs._Env2DCapturePositionWS, m_Env2DCapturePositionWS);
}
cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, s_DirectionalLightDatas);
cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, m_lightList.directionalLights.Count);
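The m_Env2DCaptureVP list is presumably uploaded with a matching SetGlobalMatrixArray call (not shown in this snippet) alongside the planar texture array. A hedged sketch of such a bind; the size constant here is hypothetical and would have to match MAX_ENV2D_LIGHT on the HLSL side:

    using System.Collections.Generic;
    using UnityEngine;
    using UnityEngine.Rendering;

    static class Env2DBindingSketch
    {
        // Hypothetical limit; must match MAX_ENV2D_LIGHT declared in LightLoopDef.hlsl.
        const int k_MaxEnv2DLight = 32;

        static readonly int s_Env2DCaptureVP = Shader.PropertyToID("_Env2DCaptureVP");

        public static void Bind(CommandBuffer cmd, List<Matrix4x4> captureVPs)
        {
            // The shader-side array has a fixed size, so never upload more entries.
            Debug.Assert(captureVPs.Count <= k_MaxEnv2DLight,
                "More 2D env captures than the shader-side array can hold.");

            if (captureVPs.Count > 0)
                cmd.SetGlobalMatrixArray(s_Env2DCaptureVP, captureVPs);
        }
    }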

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl (4 changes)


// Use texture array for reflection (or LatLong 2D array for mobile)
TEXTURECUBE_ARRAY_ABSTRACT(_EnvCubemapTextures);
TEXTURE2D_ARRAY(_Env2DTextures);
float3 _Env2DCapturePositionWS[MAX_ENV2D_LIGHT];
float4x4 _Env2DCaptureVP[MAX_ENV2D_LIGHT];
TEXTURE2D(_DeferredShadowTexture);

{
if (cacheType == ENVCACHETYPE_TEXTURE2D)
{
float2 ndc = ComputeNormalizedDeviceCoordinates(_Env2DCapturePositionWS[index] + texCoord, _Env2DCaptureVP[index]);
//_Env2DCaptureVP is in capture space
float2 ndc = ComputeNormalizedDeviceCoordinates(texCoord, _Env2DCaptureVP[index]);
float4 color = SAMPLE_TEXTURE2D_ARRAY_LOD(_Env2DTextures, s_trilinear_clamp_sampler, ndc, index, 0);
// Discard pixels out of oblique projection
// We only check RGB because the texture may have BC6H compression
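On the shader side, ComputeNormalizedDeviceCoordinates projects the capture-relative coordinate with the capture-space VP and remaps the result to [0,1] for sampling the planar texture array; the comments above note that out-of-projection texels cannot be flagged through alpha because BC6H-compressed textures carry no alpha channel. A CPU-side C# mirror of that projection, as a sketch under the usual clip-space conventions (not the actual HLSL implementation):

    using UnityEngine;

    static class CaptureProjectionSketch
    {
        // Project a capture-relative position with the capture-space VP,
        // perspective-divide, and remap [-1,1] to [0,1] texture coordinates.
        public static Vector2 ComputeNdc(Matrix4x4 captureVP, Vector3 captureRelativePos)
        {
            Vector4 clip = captureVP * new Vector4(captureRelativePos.x, captureRelativePos.y, captureRelativePos.z, 1f);
            Vector2 ndc = new Vector2(clip.x, clip.y) / clip.w;   // perspective divide
            return ndc * 0.5f + new Vector2(0.5f, 0.5f);          // [-1,1] -> [0,1]
        }
    }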
