
Merge pull request #978 from Unity-Technologies/hdrp-xr-lighting

Stereo-ized HDRP Lighting Update (WIP)
main
GitHub · 7 years ago
Current commit 771cccee
9 files changed, with 273 additions and 62 deletions
  1. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (87)
  2. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (4)
  3. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (7)
  4. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (199)
  5. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute (2)
  6. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild.compute (2)
  7. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/scrbound.compute (24)
  8. ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl (4)
  9. ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariablesMatrixDefsHDCamera.hlsl (6)

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (87 changes)


public Matrix4x4[] viewMatrixStereo;
public Matrix4x4[] projMatrixStereo;
public Vector4 centerEyeTranslationOffset;
// Non oblique projection matrix (RHS)
public Matrix4x4 nonObliqueProjMatrix

var gpuView = camera.worldToCameraMatrix;
var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);
// In stereo, this corresponds to the center eye position
var relPos = pos; // World-origin-relative
relPos = Vector3.zero; // Camera-relative
}
var gpuVP = gpuNonJitteredProj * gpuView;

screenSize = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
}
// Stopgap method used to extract stereo combined matrix state.
public void UpdateStereoDependentState(FrameSettings frameSettings, ref ScriptableCullingParameters cullingParams)
{
if (!frameSettings.enableStereo)
return;
// What constants in UnityPerPass need updating for stereo considerations?
// _ViewProjMatrix - It is used directly for generating tessellation factors. This should be the same
// across both eyes for consistency, and to keep shadow-generation eye-independent
// _ViewParam - Used for isFrontFace determination, should be the same for both eyes. There is the scenario
// where there might be multi-eye sets divergent enough that this assumption is not valid,
// but that's a future problem
// _InvProjParam - Intention was for generating linear depths, but not currently used. Will need to be stereo-ized if
// actually needed.
// _FrustumPlanes - Also used for generating tessellation factors. Should be fine to use the combined stereo VP
// to calculate frustum planes.
// TODO: Would it be worth calculating my own combined view/proj matrix in Update?
// In engine, we modify the view and proj matrices accordingly in order to generate the single cull
// * Get the center eye view matrix, and pull it back to cover both eyes
// * Generate an expanded projection matrix (one method: max bound of the left/right proj matrices)
// and move near/far planes to match near/far locations of proj matrices located at eyes.
// I think using the cull matrices is valid, as long as I only use them for tess factors in shader.
// Using them for other calculations (like light list generation) could be problematic.
var stereoCombinedViewMatrix = cullingParams.cullStereoView;
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
// This is pulled back from the center eye, so set the offset
var translation = stereoCombinedViewMatrix.GetColumn(3);
translation += centerEyeTranslationOffset;
stereoCombinedViewMatrix.SetColumn(3, translation);
}
viewMatrix = stereoCombinedViewMatrix;
var stereoCombinedProjMatrix = cullingParams.cullStereoProj;
projMatrix = GL.GetGPUProjectionMatrix(stereoCombinedProjMatrix, true);
viewParam = new Vector4(viewMatrix.determinant, 0.0f, 0.0f, 0.0f);
frustum = Frustum.Create(viewProjMatrix, true, true);
// Left, right, top, bottom, near, far.
for (int i = 0; i < 6; i++)
{
frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
}
}
void ConfigureStereoMatrices()
{
for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)

projMatrixStereo[eyeIndex] = camera.GetStereoProjectionMatrix((Camera.StereoscopicEye)eyeIndex);
projMatrixStereo[eyeIndex] = GL.GetGPUProjectionMatrix(projMatrixStereo[eyeIndex], true);
}
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
var leftTranslation = viewMatrixStereo[0].GetColumn(3);
var rightTranslation = viewMatrixStereo[1].GetColumn(3);
var centerTranslation = (leftTranslation + rightTranslation) / 2;
var centerOffset = -centerTranslation;
centerOffset.w = 0;
// TODO: Grabbing the CenterEye transform would be preferable, but XRNode.CenterEye
// doesn't always seem to be valid.
for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
{
var translation = viewMatrixStereo[eyeIndex].GetColumn(3);
translation += centerOffset;
viewMatrixStereo[eyeIndex].SetColumn(3, translation);
}
centerEyeTranslationOffset = centerOffset;
}
// TODO: Fetch the single cull matrix stuff

public void SetupGlobalStereoParams(CommandBuffer cmd)
{
var viewProjStereo = new Matrix4x4[2];
var invViewStereo = new Matrix4x4[2];
var invProjStereo = new Matrix4x4[2];
var invViewProjStereo = new Matrix4x4[2];

invProjStereo[eyeIndex] = proj.inverse;
var vp = proj * viewMatrixStereo[eyeIndex];
invViewProjStereo[eyeIndex] = vp.inverse;
var view = viewMatrixStereo[eyeIndex];
invViewStereo[eyeIndex] = view.inverse;
viewProjStereo[eyeIndex] = proj * view;
invViewProjStereo[eyeIndex] = viewProjStereo[eyeIndex].inverse;
cmd.SetGlobalMatrixArray(HDShaderIDs._ViewMatrixStereo, viewMatrixStereo);
cmd.SetGlobalMatrixArray(HDShaderIDs._ViewProjMatrixStereo, viewProjStereo);
cmd.SetGlobalMatrixArray(HDShaderIDs._InvViewMatrixStereo, invViewStereo);
cmd.SetGlobalMatrixArray(HDShaderIDs._InvProjMatrixStereo, invProjStereo);
cmd.SetGlobalMatrixArray(HDShaderIDs._InvViewProjMatrixStereo, invViewProjStereo);
}
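The excerpt above omits the per-eye loop that fills these arrays before the globals are set. A minimal sketch of what that loop presumably looks like, assuming viewMatrixStereo and projMatrixStereo were already populated by ConfigureStereoMatrices (hypothetical reconstruction, not the PR's code):

// Hypothetical reconstruction of the omitted per-eye loop.
for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
{
    var proj = projMatrixStereo[eyeIndex]; // already GPU-space (GL.GetGPUProjectionMatrix)
    var view = viewMatrixStereo[eyeIndex];

    invViewStereo[eyeIndex] = view.inverse;
    invProjStereo[eyeIndex] = proj.inverse;
    viewProjStereo[eyeIndex] = proj * view;
    invViewProjStereo[eyeIndex] = viewProjStereo[eyeIndex].inverse;
}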

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (4 changes)


if (m_CurrentWidth > 0 && m_CurrentHeight > 0)
m_LightLoop.ReleaseResolutionDependentBuffers();
m_LightLoop.AllocResolutionDependentBuffers(hdCamera.actualWidth, hdCamera.actualHeight);
m_LightLoop.AllocResolutionDependentBuffers((int)hdCamera.screenSize.x, (int)hdCamera.screenSize.y, m_FrameSettings.enableStereo);
}
// Warning: (resolutionChanged == false) if you open a new Editor tab of the same size!

UpdateShadowSettings();
m_SkyManager.UpdateCurrentSkySettings(hdCamera);
// TODO: Float HDCamera setup higher in order to pass stereo into GetCullingParameters
ScriptableCullingParameters cullingParams;
if (!CullResults.GetCullingParameters(camera, m_FrameSettings.enableStereo, out cullingParams))
{

m_LightLoop.UpdateCullingParameters(ref cullingParams);
hdCamera.UpdateStereoDependentState(m_FrameSettings, ref cullingParams);
#if UNITY_EDITOR
// emit scene view UI

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (7 changes)


public static readonly int g_vBoundsBuffer = Shader.PropertyToID("g_vBoundsBuffer");
public static readonly int _LightVolumeData = Shader.PropertyToID("_LightVolumeData");
public static readonly int g_data = Shader.PropertyToID("g_data");
public static readonly int g_mProjection = Shader.PropertyToID("g_mProjection");
public static readonly int g_mInvProjection = Shader.PropertyToID("g_mInvProjection");
public static readonly int g_mProjectionArr = Shader.PropertyToID("g_mProjectionArr");
public static readonly int g_mInvProjectionArr = Shader.PropertyToID("g_mInvProjectionArr");
public static readonly int g_viDimensions = Shader.PropertyToID("g_viDimensions");
public static readonly int g_vLightList = Shader.PropertyToID("g_vLightList");

public static readonly int _TaaFrameIndex = Shader.PropertyToID("_TaaFrameIndex");
public static readonly int _TaaFrameRotation = Shader.PropertyToID("_TaaFrameRotation");
public static readonly int _ViewMatrixStereo = Shader.PropertyToID("_ViewMatrixStereo");
public static readonly int _ViewProjMatrixStereo = Shader.PropertyToID("_ViewProjMatrixStereo");
public static readonly int _InvViewMatrixStereo = Shader.PropertyToID("_InvViewMatrixStereo");
public static readonly int _InvProjMatrixStereo = Shader.PropertyToID("_InvProjMatrixStereo");
public static readonly int _InvViewProjMatrixStereo = Shader.PropertyToID("_InvViewProjMatrixStereo");

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (199 changes)


public const int k_MaxEnvLightsOnScreen = 64;
public const int k_MaxShadowOnScreen = 16;
public const int k_MaxCascadeCount = 4; // Should not be less than m_Settings.directionalLightCascadeCount
public const int k_MaxStereoEyes = 2;
public static readonly Vector3 k_BoxCullingExtentThreshold = Vector3.one * 0.01f;
// Static keyword is required here else we get a "DestroyBuffer can only be called from the main thread"

public List<SFiniteLightBound> bounds;
public List<LightVolumeData> lightVolumes;
public List<SFiniteLightBound> rightEyeBounds;
public List<LightVolumeData> rightEyeLightVolumes;
public void Clear()
{

bounds.Clear();
lightVolumes.Clear();
rightEyeBounds.Clear();
rightEyeLightVolumes.Clear();
}
public void Allocate()

bounds = new List<SFiniteLightBound>();
lightVolumes = new List<LightVolumeData>();
rightEyeBounds = new List<SFiniteLightBound>();
rightEyeLightVolumes = new List<LightVolumeData>();
}
}

int GetNumTileFtplX(HDCamera hdCamera)
{
return (hdCamera.actualWidth + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
return ((int)hdCamera.screenSize.x + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
return (hdCamera.actualHeight + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
return ((int)hdCamera.screenSize.y + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
return (hdCamera.actualWidth + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
return ((int)hdCamera.screenSize.x + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
return (hdCamera.actualHeight + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
return ((int)hdCamera.screenSize.y + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
}
public bool GetFeatureVariantsEnabled()

s_GenAABBKernel = buildScreenAABBShader.FindKernel("ScreenBoundsAABB");
s_AABBBoundsBuffer = new ComputeBuffer(2 * k_MaxLightsOnScreen, 3 * sizeof(float));
s_ConvexBoundsBuffer = new ComputeBuffer(k_MaxLightsOnScreen, System.Runtime.InteropServices.Marshal.SizeOf(typeof(SFiniteLightBound)));
s_LightVolumeDataBuffer = new ComputeBuffer(k_MaxLightsOnScreen, System.Runtime.InteropServices.Marshal.SizeOf(typeof(LightVolumeData)));
// The bounds and light volumes are view-dependent, and AABB is additionally projection dependent.
// The view and proj matrices are per eye in stereo. This means we have to double the size of these buffers.
// TODO: Maybe in stereo, we will only support half as many lights total, in order to minimize buffer size waste.
// Alternatively, we could re-size these buffers if any stereo camera is active, instead of unilaterally increasing buffer size.
// TODO: I don't think k_MaxLightsOnScreen corresponds to the actual correct light count for cullable light types (punctual, area, env, decal)
s_AABBBoundsBuffer = new ComputeBuffer(k_MaxStereoEyes * 2 * k_MaxLightsOnScreen, 3 * sizeof(float));
s_ConvexBoundsBuffer = new ComputeBuffer(k_MaxStereoEyes * k_MaxLightsOnScreen, System.Runtime.InteropServices.Marshal.SizeOf(typeof(SFiniteLightBound)));
s_LightVolumeDataBuffer = new ComputeBuffer(k_MaxStereoEyes * k_MaxLightsOnScreen, System.Runtime.InteropServices.Marshal.SizeOf(typeof(LightVolumeData)));
s_DispatchIndirectBuffer = new ComputeBuffer(LightDefinitions.s_NumFeatureVariants * 3, sizeof(uint), ComputeBufferType.IndirectArguments);
// Cluster

return 8 * (1 << k_Log2NumClusters); // total footprint for all layers of the tile (measured in light index entries)
}
// TODO: Add proper stereo support
public void AllocResolutionDependentBuffers(int width, int height)
public void AllocResolutionDependentBuffers(int width, int height, bool stereoEnabled)
var nrStereoLayers = stereoEnabled ? 2 : 1;
var nrTiles = nrTilesX * nrTilesY;
var nrTiles = nrTilesX * nrTilesY * nrStereoLayers;
s_TileFeatureFlags = new ComputeBuffer(nrTilesX * nrTilesY, sizeof(uint));
s_TileFeatureFlags = new ComputeBuffer(nrTiles, sizeof(uint));
var nrClusterTiles = nrClustersX * nrClustersY;
var nrClusterTiles = nrClustersX * nrClustersY * nrStereoLayers;
s_PerVoxelOffset = new ComputeBuffer((int)LightCategory.Count * (1 << k_Log2NumClusters) * nrClusterTiles, sizeof(uint));
s_PerVoxelLightLists = new ComputeBuffer(NumLightIndicesPerClusteredTile() * nrClusterTiles, sizeof(uint));

{
var nrBigTilesX = (width + 63) / 64;
var nrBigTilesY = (height + 63) / 64;
var nrBigTiles = nrBigTilesX * nrBigTilesY;
var nrBigTiles = nrBigTilesX * nrBigTilesY * nrStereoLayers;
s_BigTileLightList = new ComputeBuffer(LightDefinitions.s_MaxNrBigTileLightsPlusOne * nrBigTiles, sizeof(uint));
}
}
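As a quick sanity check of that sizing (illustrative arithmetic only, assuming the usual 16-pixel FPTL tile size alongside the 64-pixel big tiles shown above), a 1920x1080 target with stereo enabled would allocate:

// Illustrative arithmetic, not code from the PR.
int width = 1920, height = 1080;
int nrStereoLayers = 2;                                       // stereoEnabled == true

int nrTilesX = (width + 15) / 16;                             // 120 FPTL tiles across
int nrTilesY = (height + 15) / 16;                            // 68 FPTL tiles down
int nrTiles = nrTilesX * nrTilesY * nrStereoLayers;           // 16,320 tile entries

int nrBigTilesX = (width + 63) / 64;                          // 30
int nrBigTilesY = (height + 63) / 64;                         // 17
int nrBigTiles = nrBigTilesX * nrBigTilesY * nrStereoLayers;  // 1,020 big-tile entries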

return Matrix4x4.Scale(new Vector3(1, 1, -1)) * camera.worldToCameraMatrix;
}
static Matrix4x4 WorldToViewStereo(Camera camera, Camera.StereoscopicEye eyeIndex)
{
return Matrix4x4.Scale(new Vector3(1, 1, -1)) * camera.GetStereoViewMatrix(eyeIndex);
}
// For the light culling system, we need non-oblique projection matrices
static Matrix4x4 CameraProjectionNonObliqueLHS(HDCamera camera)
{

}
static Matrix4x4 CameraProjectionStereoLHS(Camera camera, Camera.StereoscopicEye eyeIndex)
{
return camera.GetStereoProjectionMatrix(eyeIndex) * Matrix4x4.Scale(new Vector3(1, 1, -1));
}
public Vector3 GetLightColor(VisibleLight light)
{
return new Vector3(light.finalColor.r, light.finalColor.g, light.finalColor.b);

// TODO: we should be able to do this calculation only with LightData without VisibleLight light, but for now pass both
public void GetLightVolumeDataAndBound(LightCategory lightCategory, GPULightType gpuLightType, LightVolumeType lightVolumeType,
VisibleLight light, LightData lightData, Vector3 lightDimensions, Matrix4x4 worldToView)
VisibleLight light, LightData lightData, Vector3 lightDimensions, Matrix4x4 worldToView,
Camera.StereoscopicEye eyeIndex = Camera.StereoscopicEye.Left)
{
// Then Culling side
var range = lightDimensions.z;

Debug.Assert(false, "TODO: encountered an unknown GPULightType.");
}
m_lightList.bounds.Add(bound);
m_lightList.lightVolumes.Add(lightVolumeData);
if (eyeIndex == Camera.StereoscopicEye.Left)
{
m_lightList.bounds.Add(bound);
m_lightList.lightVolumes.Add(lightVolumeData);
}
else
{
m_lightList.rightEyeBounds.Add(bound);
m_lightList.rightEyeLightVolumes.Add(lightVolumeData);
}
}

m_lightList.envLights.Add(envLightData);
return true;
}
public void GetEnvLightVolumeDataAndBound(ProbeWrapper probe, LightVolumeType lightVolumeType, Matrix4x4 worldToView)
public void GetEnvLightVolumeDataAndBound(ProbeWrapper probe, LightVolumeType lightVolumeType, Matrix4x4 worldToView, Camera.StereoscopicEye eyeIndex = Camera.StereoscopicEye.Left)
{
var bound = new SFiniteLightBound();
var lightVolumeData = new LightVolumeData();

}
}
m_lightList.bounds.Add(bound);
m_lightList.lightVolumes.Add(lightVolumeData);
if (eyeIndex == Camera.StereoscopicEye.Left)
{
m_lightList.bounds.Add(bound);
m_lightList.lightVolumes.Add(lightVolumeData);
}
else
{
m_lightList.rightEyeBounds.Add(bound);
m_lightList.rightEyeLightVolumes.Add(lightVolumeData);
}
}
public int GetCurrentShadowCount()

m_enableBakeShadowMask = false;
m_lightList.Clear();
Vector3 camPosWS = camera.transform.position;
var stereoEnabled = m_FrameSettings.enableStereo;
// Note: Lights with zero intensity/color are culled on the C++ side, so there is no need to test for that here
if (cullResults.visibleLights.Count != 0 || cullResults.visibleReflectionProbes.Count != 0)
{

//TODO: Do not call ToArray here to avoid GC, refactor API
int[] shadowRequests = m_ShadowRequests.ToArray();
int[] shadowDataIndices;
m_ShadowMgr.ProcessShadowRequests(m_FrameId, cullResults, camera, ShaderConfig.s_CameraRelativeRendering != 0, cullResults.visibleLights,
ref shadowRequestCount, shadowRequests, out shadowDataIndices);

// For now we will still apply the maximum of shadow here but we don't apply the sorting by priority + slot allocation yet
// 2. Go through all lights, convert them to GPU format.
// Create simultaneously data for culling (LigthVolumeData and rendering)
// Simultaneously create data for culling (LightVolumeData and SFiniteLightBound)
Vector3 camPosWS = camera.transform.position;
var rightEyeWorldToView = Matrix4x4.identity;
if (stereoEnabled)
{
worldToView = WorldToViewStereo(camera, Camera.StereoscopicEye.Left);
rightEyeWorldToView = WorldToViewStereo(camera, Camera.StereoscopicEye.Right);
}
for (int sortIndex = 0; sortIndex < sortCount; ++sortIndex)
{

// Then the culling side. Must be called in this order, as we pass the created light data to the function
GetLightVolumeDataAndBound(lightCategory, gpuLightType, lightVolumeType, light, m_lightList.lights[m_lightList.lights.Count - 1], lightDimensions, worldToView);
if (stereoEnabled)
GetLightVolumeDataAndBound(lightCategory, gpuLightType, lightVolumeType, light, m_lightList.lights[m_lightList.lights.Count - 1], lightDimensions, rightEyeWorldToView, Camera.StereoscopicEye.Right);
// We make the light position camera-relative as late as possible in order
// to allow the preceding code to work with the absolute world space coordinates.

if (GetEnvLightData(cmd, camera, probeWrapper))
{
GetEnvLightVolumeDataAndBound(probeWrapper, lightVolumeType, worldToView);
if (stereoEnabled)
GetEnvLightVolumeDataAndBound(probeWrapper, lightVolumeType, rightEyeWorldToView, Camera.StereoscopicEye.Right);
// We make the light position camera-relative as late as possible in order
// to allow the preceding code to work with the absolute world space coordinates.

Debug.Assert(m_lightList.bounds.Count == m_lightCount);
Debug.Assert(m_lightList.lightVolumes.Count == m_lightCount);
m_lightList.bounds.AddRange(DecalSystem.m_Bounds);
m_lightList.lightVolumes.AddRange(DecalSystem.m_LightVolumes);
m_lightCount += DecalSystem.m_DecalDatasCount;
if (DecalSystem.m_DecalDatasCount > 0)
{
// TODO: This adds the entire array to the buffer, likely introducing
// empty entries into the buffer. Currently, stereo assumes this buffer to
// be tightly packed. Decals are currently disabled for stereo anyway, so we
// will coordinate on a proper solution for this.
m_lightList.bounds.AddRange(DecalSystem.m_Bounds);
m_lightList.lightVolumes.AddRange(DecalSystem.m_LightVolumes);
m_lightCount += DecalSystem.m_DecalDatasCount;
}
if (stereoEnabled)
{
// TODO: Proper decal + stereo cull management
Debug.Assert(m_lightList.rightEyeBounds.Count == m_lightCount);
Debug.Assert(m_lightList.rightEyeLightVolumes.Count == m_lightCount);
// TODO: GC considerations?
m_lightList.bounds.AddRange(m_lightList.rightEyeBounds);
m_lightList.lightVolumes.AddRange(m_lightList.rightEyeLightVolumes);
}
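After this append, each shared list holds the left-eye entries first, immediately followed by the right-eye entries, so once UpdateDataBuffers uploads them the compute kernels can address a light's bound for either eye with a simple offset. A tiny illustrative sketch of that index math (the helper is invented, not part of the PR):

// Illustrative only; mirrors eyeIndex * g_iNrVisibLights in scrbound.compute.
static int EyeAdjustedLightIndex(int lightIndex, int eyeIndex, int lightsPerEye)
{
    return lightIndex + eyeIndex * lightsPerEye;
}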
UpdateDataBuffers();
m_maxShadowDistance = shadowSettings.maxShadowDistance;

var camera = hdCamera.camera;
cmd.BeginSample("Build Light List");
var w = camera.pixelWidth;
var h = camera.pixelHeight;
var w = (int)hdCamera.screenSize.x;
var h = (int)hdCamera.screenSize.y;
// camera-to-screen matrix (and its inverse)
var proj = CameraProjectionNonObliqueLHS(hdCamera);
var projscr = temp * proj;
var invProjscr = projscr.inverse;
// camera-to-screen matrix (and its inverse)
var projArr = new Matrix4x4[2];
var projscrArr = new Matrix4x4[2];
var invProjscrArr = new Matrix4x4[2];
if (m_FrameSettings.enableStereo)
{
for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
{
projArr[eyeIndex] = CameraProjectionStereoLHS(hdCamera.camera, (Camera.StereoscopicEye)eyeIndex);
projscrArr[eyeIndex] = temp * projArr[eyeIndex];
invProjscrArr[eyeIndex] = projscrArr[eyeIndex].inverse;
}
}
else
{
projArr[0] = CameraProjectionNonObliqueLHS(hdCamera);
projscrArr[0] = temp * projArr[0];
invProjscrArr[0] = projscrArr[0].inverse;
}
// generate screen-space AABBs (used for both fptl and clustered).
if (m_lightCount != 0)
{

temp.SetRow(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));
var projh = temp * proj;
var invProjh = projh.inverse;
var projhArr = new Matrix4x4[2];
var invProjhArr = new Matrix4x4[2];
if (m_FrameSettings.enableStereo)
{
for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
{
projhArr[eyeIndex] = temp * projArr[eyeIndex];
invProjhArr[eyeIndex] = projhArr[eyeIndex].inverse;
}
}
else
{
projhArr[0] = temp * projArr[0];
invProjhArr[0] = projhArr[0].inverse;
}
// In the stereo case, we have two sets of light bounds to iterate over (bounds are in per-eye view space)
cmd.SetComputeMatrixParam(buildScreenAABBShader, HDShaderIDs.g_mProjection, projh);
cmd.SetComputeMatrixParam(buildScreenAABBShader, HDShaderIDs.g_mInvProjection, invProjh);
cmd.SetComputeMatrixArrayParam(buildScreenAABBShader, HDShaderIDs.g_mProjectionArr, projhArr);
cmd.SetComputeMatrixArrayParam(buildScreenAABBShader, HDShaderIDs.g_mInvProjectionArr, invProjhArr);
// In stereo, we output two sets of AABB bounds
cmd.DispatchCompute(buildScreenAABBShader, s_GenAABBKernel, (m_lightCount + 7) / 8, 1, 1);
int tgY = m_FrameSettings.enableStereo ? 2 : 1;
cmd.DispatchCompute(buildScreenAABBShader, s_GenAABBKernel, (m_lightCount + 7) / 8, tgY, 1);
}
// enable coarse 2D pass on 64x64 tiles (used for both fptl and clustered).

cmd.SetComputeIntParam(buildPerBigTileLightListShader, HDShaderIDs._EnvLightIndexShift, m_lightList.lights.Count);
cmd.SetComputeIntParam(buildPerBigTileLightListShader, HDShaderIDs._DecalIndexShift, m_lightList.lights.Count + m_lightList.envLights.Count);
cmd.SetComputeIntParam(buildPerBigTileLightListShader, HDShaderIDs.g_iNrVisibLights, m_lightCount);
cmd.SetComputeMatrixParam(buildPerBigTileLightListShader, HDShaderIDs.g_mScrProjection, projscr);
cmd.SetComputeMatrixParam(buildPerBigTileLightListShader, HDShaderIDs.g_mInvScrProjection, invProjscr);
cmd.SetComputeMatrixParam(buildPerBigTileLightListShader, HDShaderIDs.g_mScrProjection, projscrArr[0]);
cmd.SetComputeMatrixParam(buildPerBigTileLightListShader, HDShaderIDs.g_mInvScrProjection, invProjscrArr[0]);
cmd.SetComputeFloatParam(buildPerBigTileLightListShader, HDShaderIDs.g_fNearPlane, camera.nearClipPlane);
cmd.SetComputeFloatParam(buildPerBigTileLightListShader, HDShaderIDs.g_fFarPlane, camera.farClipPlane);
cmd.SetComputeBufferParam(buildPerBigTileLightListShader, s_GenListPerBigTileKernel, HDShaderIDs.g_vLightList, s_BigTileLightList);

cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs._LightVolumeData, s_LightVolumeDataBuffer);
cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
cmd.SetComputeMatrixParam(buildPerTileLightListShader, HDShaderIDs.g_mScrProjection, projscr);
cmd.SetComputeMatrixParam(buildPerTileLightListShader, HDShaderIDs.g_mInvScrProjection, invProjscr);
cmd.SetComputeMatrixParam(buildPerTileLightListShader, HDShaderIDs.g_mScrProjection, projscrArr[0]);
cmd.SetComputeMatrixParam(buildPerTileLightListShader, HDShaderIDs.g_mInvScrProjection, invProjscrArr[0]);
cmd.SetComputeTextureParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs.g_depth_tex, cameraDepthBufferRT);
cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs.g_vLightList, s_LightList);
if (m_FrameSettings.lightLoopSettings.enableBigTilePrepass)

}
// Cluster
VoxelLightListGeneration(cmd, hdCamera, projscr, invProjscr, cameraDepthBufferRT);
VoxelLightListGeneration(cmd, hdCamera, projscrArr[0], invProjscrArr[0], cameraDepthBufferRT);
if (enableFeatureVariants)
{

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute (2 changes)


InterlockedAdd(g_LayeredSingleIdxBuffer[0], (uint) iSpaceAvail, start); // alloc list memory
}
// All our cull data are in the same list, but at render time envLights are separated so we need to shit the index
// All our cull data are in the same list, but at render time envLights are separated so we need to shift the index
// to make it work correctly
int shiftIndex[LIGHTCATEGORY_COUNT];
ZERO_INITIALIZE_ARRAY(int, shiftIndex, LIGHTCATEGORY_COUNT);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild.compute (2 changes)


int localOffs=0;
int offs = tileIDX.y*nrTilesX + tileIDX.x;
// All our cull data are in the same list, but at render time envLights are separated so we need to shit the index
// All our cull data are in the same list, but at render time envLights are separated so we need to shift the index
// to make it work correctly
int shiftIndex[LIGHTCATEGORY_COUNT];
ZERO_INITIALIZE_ARRAY(int, shiftIndex, LIGHTCATEGORY_COUNT);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/scrbound.compute (24 changes)


uniform int g_isOrthographic;
uniform int g_iNrVisibLights;
uniform float4x4 g_mInvProjection;
uniform float4x4 g_mProjection;
uniform float4x4 g_mInvProjectionArr[2];
uniform float4x4 g_mProjectionArr[2];
StructuredBuffer<SFiniteLightBound> g_data : register( t0 );

void ScreenBoundsAABB(uint threadID : SV_GroupIndex, uint3 u3GroupID : SV_GroupID)
{
uint groupID = u3GroupID.x;
uint eyeIndex = u3GroupID.y; // currently, can only be 0 or 1
// The g_ prefix is preserved in order to make cross-pipeline (FPTL) updates easier
float4x4 g_mInvProjection = g_mInvProjectionArr[eyeIndex];
float4x4 g_mProjection = g_mProjectionArr[eyeIndex];
//uint vindex = groupID * NR_THREADS + threadID;
unsigned int g = groupID;

const int lgtIndex = subLigt+(int) g*8;
const int sideIndex = (int) (t%8);
SFiniteLightBound lgtDat = g_data[lgtIndex];
const int eyeAdjustedLgtIndex = lgtIndex + (eyeIndex * g_iNrVisibLights);
SFiniteLightBound lgtDat = g_data[eyeAdjustedLgtIndex];
const float3 boxX = lgtDat.boxAxisX.xyz;
const float3 boxY = lgtDat.boxAxisY.xyz;

//g_vBoundsBuffer[lgtIndex+g_iNrVisibLights] = float3(0.5*vMax.x+0.5, -0.5*vMin.y+0.5, vMax.z*VIEWPORT_SCALE_Z);
// changed for unity
g_vBoundsBuffer[lgtIndex+0] = float3(0.5*vMin.x+0.5, 0.5*vMin.y+0.5, vMin.z*VIEWPORT_SCALE_Z);
g_vBoundsBuffer[lgtIndex+(int) g_iNrVisibLights] = float3(0.5*vMax.x+0.5, 0.5*vMax.y+0.5, vMax.z*VIEWPORT_SCALE_Z);
// Each light's AABB is represented by two float3s, the min and max of the box.
// And for stereo, we have two sets of lights. Therefore, each eye has a set of mins, followed by
// a set of maxs, and each set is equal to g_iNrVisibLights.
const int eyeBaseIndex = eyeIndex * g_iNrVisibLights * 2;
const int minIndex = eyeBaseIndex + lgtIndex + 0;
const int maxIndex = eyeBaseIndex + lgtIndex + (int)g_iNrVisibLights;
g_vBoundsBuffer[minIndex] = float3(0.5*vMin.x + 0.5, 0.5*vMin.y + 0.5, vMin.z*VIEWPORT_SCALE_Z);
g_vBoundsBuffer[maxIndex] = float3(0.5*vMax.x + 0.5, 0.5*vMax.y + 0.5, vMax.z*VIEWPORT_SCALE_Z);
}
}
}
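Put another way, each eye owns a contiguous block of 2 * g_iNrVisibLights entries in g_vBoundsBuffer: all of that eye's AABB minima, followed by all of its maxima. A small C#-style sketch of the addressing, for illustration only (the helper and parameter names are invented):

// Illustrative only; mirrors the minIndex/maxIndex math in ScreenBoundsAABB above.
static (int minIndex, int maxIndex) BoundsBufferIndices(int lightIndex, int eyeIndex, int visibleLightCount)
{
    int eyeBase = eyeIndex * visibleLightCount * 2;   // each eye's block is [mins..., maxs...]
    return (eyeBase + lightIndex, eyeBase + lightIndex + visibleLightCount);
}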

ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl (4 changes)


#if defined(USING_STEREO_MATRICES)
CBUFFER_START(UnityPerPassStereo)
float4x4 _ViewMatrixStereo[2];
// Proj not needed...yet?
float4x4 _ViewProjMatrixStereo[2];
float4x4 _InvViewMatrixStereo[2];
float4x4 _InvProjMatrixStereo[2];
float4x4 _InvViewProjMatrixStereo[2];
CBUFFER_END

ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariablesMatrixDefsHDCamera.hlsl (6 changes)


#define UNITY_MATRIX_M unity_ObjectToWorld
#define UNITY_MATRIX_I_M unity_WorldToObject
#define UNITY_MATRIX_V unity_StereoMatrixV[unity_StereoEyeIndex]
#define UNITY_MATRIX_I_V unity_StereoMatrixInvV[unity_StereoEyeIndex]
#define UNITY_MATRIX_V _ViewMatrixStereo[unity_StereoEyeIndex]
#define UNITY_MATRIX_I_V _InvViewMatrixStereo[unity_StereoEyeIndex]
#define UNITY_MATRIX_VP unity_StereoMatrixVP[unity_StereoEyeIndex]
#define UNITY_MATRIX_VP _ViewProjMatrixStereo[unity_StereoEyeIndex]
#define UNITY_MATRIX_I_VP _InvViewProjMatrixStereo[unity_StereoEyeIndex]
#else
