
Merge pull request #1864 from Unity-Technologies/HDRP/fix-gcalloc

Removed a bunch of GC.Alloc each frame.
Branch: main
Committed by GitHub, 6 years ago
Current commit: e5d59bcd
6 files changed, 70 insertions(+), 46 deletions(-)
  1. com.unity.render-pipelines.core/CoreRP/CoreResources/TexturePadding.cs (15 changes)
  2. com.unity.render-pipelines.core/CoreRP/Textures/RTHandleSystem.cs (2 changes)
  3. com.unity.render-pipelines.high-definition/HDRP/Lighting/Light/HDAdditionalLightData.cs (4 changes)
  4. com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/LightLoop.cs (79 changes)
  5. com.unity.render-pipelines.high-definition/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (8 changes)
  6. com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs (8 changes)

com.unity.render-pipelines.core/CoreRP/CoreResources/TexturePadding.cs (15 changes)


int m_KMainTop;
int m_KMainRight;
+ // Avoid garbage generated by .SetComputeIntParams methods (using params int[] will generate an array on the fly)
+ int[] m_IntParams = new int[2];
public TexturePadding(ComputeShader cs)
{
m_CS = cs;

{
if (from.width < to.width)
{
- cmd.SetComputeIntParams(m_CS, _RectOffset, from.width, 0);
+ m_IntParams[0] = from.width;
+ m_IntParams[1] = 0;
+ cmd.SetComputeIntParams(m_CS, _RectOffset, m_IntParams);
- cmd.SetComputeIntParams(m_CS, _RectOffset, 0, from.height);
+ m_IntParams[0] = 0;
+ m_IntParams[1] = from.height;
+ cmd.SetComputeIntParams(m_CS, _RectOffset, m_IntParams);
- cmd.SetComputeIntParams(m_CS, _RectOffset, from.width, from.height);
+ m_IntParams[0] = from.width;
+ m_IntParams[1] = from.height;
+ cmd.SetComputeIntParams(m_CS, _RectOffset, m_IntParams);
cmd.SetComputeTextureParam(m_CS, m_KMainTopRight, _InOutTexture, inOutTexture);
cmd.DispatchCompute(m_CS, m_KMainTopRight, to.width - from.width, to.height - from.height, 1);
}
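The pattern above trades the convenience of the `params int[]` overload for a cached two-element array, because every call through a `params` parameter builds a fresh array on the managed heap. A minimal stand-alone sketch of the same idea (the class and method are hypothetical; `CommandBuffer.SetComputeIntParams` and `Shader.PropertyToID` are the real Unity APIs involved):

    using UnityEngine;
    using UnityEngine.Rendering;

    class IntParamsCacheExample
    {
        static readonly int _RectOffset = Shader.PropertyToID("_RectOffset");
        readonly int[] m_IntParams = new int[2]; // allocated once, reused every call
        ComputeShader m_CS;                      // assigned elsewhere, e.g. in the constructor

        void SetRectOffset(CommandBuffer cmd, int x, int y)
        {
            // Allocates: the params overload builds a temporary int[] for every call.
            // cmd.SetComputeIntParams(m_CS, _RectOffset, x, y);

            // Reuses the cached array instead; an existing int[] binds to the params
            // parameter directly, so nothing new is allocated on the managed heap.
            m_IntParams[0] = x;
            m_IntParams[1] = y;
            cmd.SetComputeIntParams(m_CS, _RectOffset, m_IntParams);
        }
    }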

com.unity.render-pipelines.core/CoreRP/Textures/RTHandleSystem.cs (2 changes)


public void DemandResize(RTHandle rth)
{
- Assert.IsTrue(m_ResizeOnDemandRTs.Contains(rth), string.Format("The RTHandle {0} is not an resize on demand handle in this RTHandleSystem. Please call SwitchToResizeOnDemand(rth, true) before resizing on demand.", rth));
+ Assert.IsTrue(m_ResizeOnDemandRTs.Contains(rth), "The RTHandle is not an resize on demand handle in this RTHandleSystem. Please call SwitchToResizeOnDemand(rth, true) before resizing on demand.");
for (int i = 0, c = (int)RTCategory.Count; i < c; ++i)
{
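The allocation removed here is the `string.Format` call: its arguments are evaluated before `Assert.IsTrue` runs, so the formatted message was built on every call even when the assertion passed. A hedged sketch of the before and after (types and names are illustrative; only `UnityEngine.Assertions.Assert.IsTrue(bool, string)` is the real API):

    using System.Collections.Generic;
    using UnityEngine.Assertions;

    class AssertMessageExample
    {
        readonly HashSet<object> m_Tracked = new HashSet<object>();

        void Validate(object handle)
        {
            // Before: the message is formatted on every call, pass or fail.
            // Assert.IsTrue(m_Tracked.Contains(handle), string.Format("Handle {0} is not tracked.", handle));

            // After: a constant literal, no per-call allocation (the dynamic detail is traded away).
            Assert.IsTrue(m_Tracked.Contains(handle), "Handle is not tracked by this system.");
        }
    }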

com.unity.render-pipelines.high-definition/HDRP/Lighting/Light/HDAdditionalLightData.cs (4 changes)


// This function return a mask of light layers as uint and handle the case of Everything as being 0xFF and not -1
public uint GetLightLayers()
{
- int value = Convert.ToInt32(lightLayers);
+ int value = (int)(lightLayers);
return value < 0 ? (uint)LightLayerEnum.Everything : (uint)value;
}

- m_Version = currentVersion;
+ version = currentVersion;
#pragma warning restore 0618
}
}
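In `GetLightLayers`, `Convert.ToInt32` has no overload for an arbitrary enum, so the call binds to `Convert.ToInt32(object)` and boxes the enum on every invocation; the plain cast compiles to an integer conversion with no allocation. That reading of the motivation is an inference from the surrounding PR, illustrated here with a stand-in enum:

    using System;

    enum ExampleLayers { Nothing = 0, Everything = 0xFF }

    static class EnumConversionExample
    {
        static uint GetLayers(ExampleLayers layers)
        {
            // int value = Convert.ToInt32(layers); // binds to ToInt32(object): boxes the enum each call
            int value = (int)layers;                // plain conversion, no allocation
            return value < 0 ? (uint)ExampleLayers.Everything : (uint)value;
        }
    }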

com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/LightLoop.cs (79 changes)


// For now we don't use shadow cascade borders.
static public readonly bool s_UseCascadeBorders = false;
+ // Keep sorting array around to avoid garbage
+ uint[] m_SortKeys = null;
+ void UpdateSortKeysArray(int count)
+ {
+     if (m_SortKeys == null || count > m_SortKeys.Length)
+     {
+         m_SortKeys = new uint[count];
+     }
+ }
+ // Matrix used for Light list building
+ // Keep them around to avoid allocations
+ Matrix4x4[] m_LightListProjMatrices = new Matrix4x4[2];
+ Matrix4x4[] m_LightListProjscrMatrices = new Matrix4x4[2];
+ Matrix4x4[] m_LightListInvProjscrMatrices = new Matrix4x4[2];
+ Matrix4x4[] m_LightListProjHMatrices = new Matrix4x4[2];
+ Matrix4x4[] m_LightListInvProjHMatrices = new Matrix4x4[2];
public class LightList
{
public List<DirectionalLightData> directionalLights;

int areaLightCount = 0;
int lightCount = Math.Min(cullResults.visibleLights.Count, k_MaxLightsOnScreen);
- var sortKeys = new uint[lightCount];
+ UpdateSortKeysArray(lightCount);
int sortCount = 0;
for (int lightIndex = 0, numLights = cullResults.visibleLights.Count; (lightIndex < numLights) && (sortCount < lightCount); ++lightIndex)
{

uint shadow = m_ShadowIndices.ContainsKey(lightIndex) ? 1u : 0;
// 5 bit (0x1F) light category, 5 bit (0x1F) GPULightType, 5 bit (0x1F) lightVolume, 1 bit for shadow casting, 16 bit index
- sortKeys[sortCount++] = (uint)lightCategory << 27 | (uint)gpuLightType << 22 | (uint)lightVolumeType << 17 | shadow << 16 | (uint)lightIndex;
+ m_SortKeys[sortCount++] = (uint)lightCategory << 27 | (uint)gpuLightType << 22 | (uint)lightVolumeType << 17 | shadow << 16 | (uint)lightIndex;
- CoreUtils.QuickSort(sortKeys, 0, sortCount - 1); // Call our own quicksort instead of Array.Sort(sortKeys, 0, sortCount) so we don't allocate memory (note the SortCount-1 that is different from original call).
+ CoreUtils.QuickSort(m_SortKeys, 0, sortCount - 1); // Call our own quicksort instead of Array.Sort(sortKeys, 0, sortCount) so we don't allocate memory (note the SortCount-1 that is different from original call).
// TODO: Refactor shadow management
// The good way of managing shadow:
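Together, the two replacements above change the per-frame flow from "allocate a key array, fill it, sort it" to "reuse a cached array, fill a prefix, sort that prefix". A condensed sketch of that flow (the wrapper method is hypothetical; `UpdateSortKeysArray`, `m_SortKeys` and `CoreUtils.QuickSort` are the names the diff introduces or already uses):

    void BuildSortedLightKeys(int lightCount)
    {
        UpdateSortKeysArray(lightCount);       // grows m_SortKeys only when lightCount exceeds its length
        int sortCount = 0;
        for (int i = 0; i < lightCount; i++)
            m_SortKeys[sortCount++] = (uint)i; // the real loop packs category/type/volume/shadow/index bits
        // Sort only the prefix written this frame; the cached array may be longer than sortCount.
        CoreUtils.QuickSort(m_SortKeys, 0, sortCount - 1);
    }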

for (int sortIndex = 0; sortIndex < sortCount; ++sortIndex)
{
// In 1. we have already classify and sorted the light, we need to use this sorted order here
- uint sortKey = sortKeys[sortIndex];
+ uint sortKey = m_SortKeys[sortIndex];
LightCategory lightCategory = (LightCategory)((sortKey >> 27) & 0x1F);
GPULightType gpuLightType = (GPULightType)((sortKey >> 22) & 0x1F);
LightVolumeType lightVolumeType = (LightVolumeType)((sortKey >> 17) & 0x1F);
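The packing line and the three decodes above are two halves of one bitfield: 5-bit category, 5-bit GPU light type, 5-bit volume type, 1 shadow bit, 16-bit light index. A worked round trip with arbitrary values, just to make the shifts and masks concrete:

    static void SortKeyRoundTripExample()
    {
        // Pack: category in bits 31..27, GPU light type in 26..22, volume type in 21..17,
        // shadow flag in bit 16, light index in bits 15..0.
        uint lightCategory = 1, gpuLightType = 3, lightVolumeType = 2, shadow = 1, lightIndex = 42;
        uint sortKey = lightCategory << 27 | gpuLightType << 22 | lightVolumeType << 17 | shadow << 16 | lightIndex;

        // Unpack: shift back down and mask to the field width.
        uint category  = (sortKey >> 27) & 0x1F;  // 1
        uint type      = (sortKey >> 22) & 0x1F;  // 3
        uint volume    = (sortKey >> 17) & 0x1F;  // 2
        uint hasShadow = (sortKey >> 16) & 0x1;   // 1
        uint index     = sortKey & 0xFFFF;        // 42
    }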

var totalProbes = cullResults.visibleReflectionProbes.Count + reflectionProbeCullResults.visiblePlanarReflectionProbeCount;
int probeCount = Math.Min(totalProbes, k_MaxEnvLightsOnScreen);
- sortKeys = new uint[probeCount];
+ UpdateSortKeysArray(probeCount);
sortCount = 0;
for (int probeIndex = 0, numProbes = totalProbes; (probeIndex < numProbes) && (sortCount < probeCount); probeIndex++)

var logVolume = CalculateProbeLogVolume(probe.bounds);
- sortKeys[sortCount++] = PackProbeKey(logVolume, lightVolumeType, 0u, probeIndex); // Sort by volume
+ m_SortKeys[sortCount++] = PackProbeKey(logVolume, lightVolumeType, 0u, probeIndex); // Sort by volume
}
else
{

var logVolume = CalculateProbeLogVolume(probe.bounds);
- sortKeys[sortCount++] = PackProbeKey(logVolume, lightVolumeType, 1u, planarProbeIndex); // Sort by volume
+ m_SortKeys[sortCount++] = PackProbeKey(logVolume, lightVolumeType, 1u, planarProbeIndex); // Sort by volume
- CoreUtils.QuickSort(sortKeys, 0, sortCount - 1); // Call our own quicksort instead of Array.Sort(sortKeys, 0, sortCount) so we don't allocate memory (note the SortCount-1 that is different from original call).
+ CoreUtils.QuickSort(m_SortKeys, 0, sortCount - 1); // Call our own quicksort instead of Array.Sort(sortKeys, 0, sortCount) so we don't allocate memory (note the SortCount-1 that is different from original call).
- uint sortKey = sortKeys[sortIndex];
+ uint sortKey = m_SortKeys[sortIndex];
LightVolumeType lightVolumeType;
int probeIndex;
int listType;

bool isOrthographic = camera.orthographic;
// camera to screen matrix (and it's inverse)
- var projArr = new Matrix4x4[2];
- var projscrArr = new Matrix4x4[2];
- var invProjscrArr = new Matrix4x4[2];
if (m_FrameSettings.enableStereo)
{
// XRTODO: If possible, we could generate a non-oblique stereo projection

// Once we generate this non-oblique projection matrix, it can be shared across both eyes (un-array)
for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
{
- projArr[eyeIndex] = CameraProjectionStereoLHS(hdCamera.camera, (Camera.StereoscopicEye)eyeIndex);
- projscrArr[eyeIndex] = temp * projArr[eyeIndex];
- invProjscrArr[eyeIndex] = projscrArr[eyeIndex].inverse;
+ m_LightListProjMatrices[eyeIndex] = CameraProjectionStereoLHS(hdCamera.camera, (Camera.StereoscopicEye)eyeIndex);
+ m_LightListProjscrMatrices[eyeIndex] = temp * m_LightListProjMatrices[eyeIndex];
+ m_LightListInvProjscrMatrices[eyeIndex] = m_LightListProjscrMatrices[eyeIndex].inverse;
- projArr[0] = GeometryUtils.GetProjectionMatrixLHS(hdCamera.camera);
- projscrArr[0] = temp * projArr[0];
- invProjscrArr[0] = projscrArr[0].inverse;
+ m_LightListProjMatrices[0] = GeometryUtils.GetProjectionMatrixLHS(hdCamera.camera);
+ m_LightListProjscrMatrices[0] = temp * m_LightListProjMatrices[0];
+ m_LightListInvProjscrMatrices[0] = m_LightListProjscrMatrices[0].inverse;
- var isProjectionOblique = GeometryUtils.IsProjectionMatrixOblique(projArr[0]);
+ var isProjectionOblique = GeometryUtils.IsProjectionMatrixOblique(m_LightListProjMatrices[0]);
// generate screen-space AABBs (used for both fptl and clustered).
if (m_lightCount != 0)

temp.SetRow(2, new Vector4(0.0f, 0.0f, 0.5f, 0.5f));
temp.SetRow(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));
- var projhArr = new Matrix4x4[2];
- var invProjhArr = new Matrix4x4[2];
- projhArr[eyeIndex] = temp * projArr[eyeIndex];
- invProjhArr[eyeIndex] = projhArr[eyeIndex].inverse;
+ m_LightListProjHMatrices[eyeIndex] = temp * m_LightListProjMatrices[eyeIndex];
+ m_LightListInvProjHMatrices[eyeIndex] = m_LightListProjHMatrices[eyeIndex].inverse;
- projhArr[0] = temp * projArr[0];
- invProjhArr[0] = projhArr[0].inverse;
+ m_LightListProjHMatrices[0] = temp * m_LightListProjMatrices[0];
+ m_LightListInvProjHMatrices[0] = m_LightListProjHMatrices[0].inverse;
}
var genAABBKernel = isProjectionOblique ? s_GenAABBKernel_Oblique : s_GenAABBKernel;

cmd.SetComputeIntParam(buildScreenAABBShader, HDShaderIDs.g_iNrVisibLights, m_lightCount);
cmd.SetComputeBufferParam(buildScreenAABBShader, genAABBKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
- cmd.SetComputeMatrixArrayParam(buildScreenAABBShader, HDShaderIDs.g_mProjectionArr, projhArr);
- cmd.SetComputeMatrixArrayParam(buildScreenAABBShader, HDShaderIDs.g_mInvProjectionArr, invProjhArr);
+ cmd.SetComputeMatrixArrayParam(buildScreenAABBShader, HDShaderIDs.g_mProjectionArr, m_LightListProjHMatrices);
+ cmd.SetComputeMatrixArrayParam(buildScreenAABBShader, HDShaderIDs.g_mInvProjectionArr, m_LightListInvProjHMatrices);
// In stereo, we output two sets of AABB bounds
cmd.SetComputeBufferParam(buildScreenAABBShader, genAABBKernel, HDShaderIDs.g_vBoundsBuffer, s_AABBBoundsBuffer);

cmd.SetComputeIntParam(buildPerBigTileLightListShader, HDShaderIDs._EnvLightIndexShift, m_lightList.lights.Count);
cmd.SetComputeIntParam(buildPerBigTileLightListShader, HDShaderIDs._DecalIndexShift, m_lightList.lights.Count + m_lightList.envLights.Count);
- cmd.SetComputeMatrixArrayParam(buildPerBigTileLightListShader, HDShaderIDs.g_mScrProjectionArr, projscrArr);
- cmd.SetComputeMatrixArrayParam(buildPerBigTileLightListShader, HDShaderIDs.g_mInvScrProjectionArr, invProjscrArr);
+ cmd.SetComputeMatrixArrayParam(buildPerBigTileLightListShader, HDShaderIDs.g_mScrProjectionArr, m_LightListProjscrMatrices);
+ cmd.SetComputeMatrixArrayParam(buildPerBigTileLightListShader, HDShaderIDs.g_mInvScrProjectionArr, m_LightListInvProjscrMatrices);
cmd.SetComputeFloatParam(buildPerBigTileLightListShader, HDShaderIDs.g_fNearPlane, camera.nearClipPlane);
cmd.SetComputeFloatParam(buildPerBigTileLightListShader, HDShaderIDs.g_fFarPlane, camera.farClipPlane);

cmd.SetComputeBufferParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs._LightVolumeData, s_LightVolumeDataBuffer);
cmd.SetComputeBufferParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
- cmd.SetComputeMatrixParam(buildPerTileLightListShader, HDShaderIDs.g_mScrProjection, projscrArr[0]);
- cmd.SetComputeMatrixParam(buildPerTileLightListShader, HDShaderIDs.g_mInvScrProjection, invProjscrArr[0]);
+ cmd.SetComputeMatrixParam(buildPerTileLightListShader, HDShaderIDs.g_mScrProjection, m_LightListProjscrMatrices[0]);
+ cmd.SetComputeMatrixParam(buildPerTileLightListShader, HDShaderIDs.g_mInvScrProjection, m_LightListInvProjscrMatrices[0]);
cmd.SetComputeTextureParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs.g_depth_tex, cameraDepthBufferRT);
cmd.SetComputeBufferParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs.g_vLightList, s_LightList);

}
// Cluster
- VoxelLightListGeneration(cmd, hdCamera, projscrArr, invProjscrArr, cameraDepthBufferRT);
+ VoxelLightListGeneration(cmd, hdCamera, m_LightListProjscrMatrices, m_LightListInvProjscrMatrices, cameraDepthBufferRT);
if (enableFeatureVariants)
{
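The remaining LightLoop changes apply the same rule to the projection matrices: the `new Matrix4x4[2]` temporaries created every frame for the compute dispatches become the fields declared at the top of the class, overwritten in place and handed to the command buffer. A condensed sketch of one such call site (the method is hypothetical and assumes it lives inside LightLoop next to the cached fields; the field and shader-ID names are the ones from the diff):

    // Cached fields, declared once on the class (see the additions at the top of this file's diff):
    //   Matrix4x4[] m_LightListProjscrMatrices    = new Matrix4x4[2];
    //   Matrix4x4[] m_LightListInvProjscrMatrices = new Matrix4x4[2];
    void PushScreenProjection(CommandBuffer cmd, ComputeShader shader, Matrix4x4 temp, Matrix4x4 proj)
    {
        // Overwrite the cached entries instead of building "new Matrix4x4[2]" every frame.
        m_LightListProjscrMatrices[0] = temp * proj;
        m_LightListInvProjscrMatrices[0] = m_LightListProjscrMatrices[0].inverse;

        // Matrix4x4 is a struct; reusing the persistent arrays means no managed allocation here.
        cmd.SetComputeMatrixArrayParam(shader, HDShaderIDs.g_mScrProjectionArr, m_LightListProjscrMatrices);
        cmd.SetComputeMatrixArrayParam(shader, HDShaderIDs.g_mInvScrProjectionArr, m_LightListInvProjscrMatrices);
    }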

com.unity.render-pipelines.high-definition/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (8 changes)


// VisualEnvironment sets global fog parameters: _GlobalAnisotropy, _GlobalScattering, _GlobalExtinction.
- if (!hdCamera.frameSettings.enableVolumetrics || visualEnvironment.fogType != FogType.Volumetric)
+ if (!hdCamera.frameSettings.enableVolumetrics || visualEnvironment.fogType.value != FogType.Volumetric)
{
// Set the neutral black texture.
cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, CoreUtils.blackVolumeTexture);

return densityVolumes;
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
- if (visualEnvironment.fogType != FogType.Volumetric)
+ if (visualEnvironment.fogType.value != FogType.Volumetric)
return densityVolumes;
using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))

return;
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
- if (visualEnvironment.fogType != FogType.Volumetric)
+ if (visualEnvironment.fogType.value != FogType.Volumetric)
return;
using (new ProfilingSample(cmd, "Volume Voxelization"))

return;
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
- if (visualEnvironment.fogType != FogType.Volumetric)
+ if (visualEnvironment.fogType.value != FogType.Volumetric)
return;
using (new ProfilingSample(cmd, "Volumetric Lighting"))
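All four hunks in this file make the same substitution: read the fog type through the parameter's `.value` property and compare plain enum values. The diff does not state why; a plausible assumption is that the previous comparison against the `VolumeParameter` wrapper went through a path that boxed the enum each call, which the explicit `.value` read avoids. The resulting early-out shape, taken from the hunks above:

    var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
    // Read the raw enum once, then compare value types directly.
    // (Assumption about the motivation: the old wrapper comparison could allocate via boxing; the diff only shows the swap.)
    if (visualEnvironment.fogType.value != FogType.Volumetric)
        return;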

com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs (8 changes)


{
currentFrameSettings.enablePostprocess = false;
}
// Disable SSS if luxmeter is enabled
if (debugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.LuxMeter)
{

{
VolumeManager.instance.Update(hdCamera.volumeAnchor, hdCamera.volumeLayerMask);
}
// Do anything we need to do upon a new frame.
// The NewFrame must be after the VolumeManager update and before Resize because it uses properties set in NewFrame
m_LightLoop.NewFrame(currentFrameSettings);

m_SkyManager.RenderSky(hdCamera, m_LightLoop.GetCurrentSunLight(), m_CameraColorBuffer, m_CameraDepthStencilBuffer, m_CurrentDebugDisplaySettings, cmd);
- if (visualEnv.fogType != FogType.None)
+ if (visualEnv.fogType.value != FogType.None)
m_SkyManager.RenderOpaqueAtmosphericScattering(cmd);
}

public void ApplyDebugDisplaySettings(HDCamera hdCamera, CommandBuffer cmd)
{
// See ShaderPassForward.hlsl: for forward shaders, if DEBUG_DISPLAY is enabled and no DebugLightingMode or DebugMipMapMod
// modes have been set, lighting is automatically skipped (To avoid some crashed due to lighting RT not set on console).
// However debug mode like colorPickerModes and false color don't need DEBUG_DISPLAY and must work with the lighting.
// So we will enabled DEBUG_DISPLAY independently
