浏览代码

Port VBuffers to the RTHandle system

/main
Evgenii Golubev 7 年前
当前提交
4c75cacd
共有 6 个文件被更改,包括 207 次插入、265 次删除
  1. 37
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs
  2. 15
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs
  3. 39
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs
  4. 8
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl
  5. 369
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs
  6. 4
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLightingController.cs

37
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs


public Vector4 projectionParams;
public Vector4 screenParams;
public VolumetricLightingSystem.VBufferParameters[] vBufferParams; // Double-buffered
public PostProcessRenderContext postprocessRenderContext;
public Matrix4x4[] viewMatrixStereo;

projMatrixStereo = new Matrix4x4[2];
postprocessRenderContext = new PostProcessRenderContext();
Reset();
}

isFirstFrame = true;
}
// Grab the HDCamera tied to a given Camera and update it.
public static HDCamera Get(Camera camera, PostProcessLayer postProcessLayer, FrameSettings frameSettings)
// Looks up the HDCamera previously registered for the given Camera.
// Will return NULL if the camera does not exist.
public static HDCamera Get(Camera camera)
{
    // TryGetValue leaves the out parameter at its default (null) on a miss,
    // so the result can be returned directly.
    HDCamera found;
    s_Cameras.TryGetValue(camera, out found);
    return found;
}
// Pass all the systems that may want to initialize per-camera data here.
// That way you will never create an HDCamera and forget to initialize the data.
public static HDCamera Create(Camera camera, VolumetricLightingSystem vlSys)
HDCamera hdcam;
HDCamera hdCamera = new HDCamera(camera);
s_Cameras.Add(camera, hdCamera);
if (!s_Cameras.TryGetValue(camera, out hdcam))
if (vlSys != null)
hdcam = new HDCamera(camera);
s_Cameras.Add(camera, hdcam);
// Have to perform a NULL check here because the Reflection System internally allocates HDCameras.
vlSys.InitializePerCameraData(hdCamera);
hdcam.Update(postProcessLayer, frameSettings);
return hdcam;
return hdCamera;
}
public static void ClearAll()

// Allocate buffers frames and return current frame
public RTHandleSystem.RTHandle AllocHistoryFrameRT(int id, Func<string, int, RTHandleSystem, RTHandleSystem.RTHandle> allocator)
{
m_HistoryRTSystem.AllocBuffer(id, (rts, i) => allocator(camera.name, i, rts), 2);
const int bufferCount = 2; // Hard-coded for now. Will have to see if this is enough...
m_HistoryRTSystem.AllocBuffer(id, (rts, i) => allocator(camera.name, i, rts), bufferCount);
return m_HistoryRTSystem.GetFrameRT(id, 0);
}

15
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs


// TODO: For MSAA, we'll need to add a Draw path in order to support MSAA properly
m_DeferredShadowBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, enableRandomWrite: true, name: "DeferredShadow");
m_VolumetricLightingSystem.CreateBuffers();
if (Debug.isDebugBuild)
{
m_DebugColorPickerBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "DebugColorPicker");

m_LightLoop.AllocResolutionDependentBuffers((int)hdCamera.screenSize.x, (int)hdCamera.screenSize.y, m_FrameSettings.enableStereo);
}
// Warning: (resolutionChanged == false) if you open a new Editor tab of the same size!
m_VolumetricLightingSystem.ResizeVBufferAndUpdateProperties(hdCamera, m_FrameCount);
// update recorded window resolution
m_CurrentWidth = hdCamera.actualWidth;

m_FrameSettings.enablePostprocess = false;
}
var hdCamera = HDCamera.Get(camera, postProcessLayer, m_FrameSettings);
var hdCamera = HDCamera.Get(camera);
if (hdCamera == null)
{
hdCamera = HDCamera.Create(camera, m_VolumetricLightingSystem);
}
hdCamera.Update(postProcessLayer, m_FrameSettings);
m_VolumetricLightingSystem.UpdatePerCameraData(hdCamera);
Resize(hdCamera);

39
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs


for (var i = 0; i < length; i++)
{
var probe = m_PlanarReflectionProbe_RealtimeUpdate_WorkArray[i];
var hdCamera = HDCamera.Get(renderCamera, null, probe.frameSettings);
var hdCamera = HDCamera.Get(renderCamera);
if (hdCamera == null)
{
// Warning: this is a bad design pattern.
// An individual system should not create an HDCamera (which is a shared resource).
hdCamera = HDCamera.Create(renderCamera, null);
}
hdCamera.Update(null, probe.frameSettings);
if (!IsRealtimeTextureValid(probe.realtimeTexture, hdCamera))
{
if (probe.realtimeTexture != null)

for (var i = 0; i < length; i++)
{
var probe = m_PlanarReflectionProbe_RealtimeUpdate_WorkArray[i];
var hdCamera = HDCamera.Get(camera, null, probe.frameSettings);
var hdCamera = HDCamera.Get(camera);
if (hdCamera == null)
{
// Warning: this is a bad design pattern.
// An individual system should not create an HDCamera (which is a shared resource).
hdCamera = HDCamera.Create(camera, null);
}
hdCamera.Update(null, probe.frameSettings);
if (!IsRealtimeTextureValid(probe.realtimeTexture, hdCamera))
{
if (probe.realtimeTexture != null)

probe.frameSettings.CopyTo(s_RenderCameraData.GetFrameSettings());
return HDCamera.Get(camera, null, probe.frameSettings);
var hdCamera = HDCamera.Get(camera);
if (hdCamera == null)
{
// Warning: this is a bad design pattern.
// An individual system should not create an HDCamera (which is a shared resource).
hdCamera = HDCamera.Create(camera, null);
}
hdCamera.Update(null, probe.frameSettings);
return hdCamera;
}
static Camera GetRenderCamera()

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl


// if (clampToBorder), samples outside of the buffer return 0 (we perform a smooth fade).
// Otherwise, the sampler simply clamps the texture coordinate to the edge of the texture.
// Warning: clamping to border may not work as expected with the quadratic filter due to its extent.
// TODO: clear the history buffer to black to avoid false positives from reprojection.
// Fix clamp to border to clamp UVs rather clamp values to black.
float4 SampleVBuffer(TEXTURE3D_ARGS(VBuffer, clampSampler),
float2 positionNDC,
float linearDepth,

369
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs


using System;
using UnityEngine.Rendering;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

public float vBufferNearPlane; // Distance in meters
public float vBufferFarPlane; // Distance in meters
public float depthSliceDistributionUniformity; // Controls the exponential depth distribution: [0, 1]
} // struct ControllerParameters
public class VBuffer
{
public struct Parameters
public static ControllerParameters GetDefaults()
public Vector4 resolution;
public Vector2 sliceCount;
public Vector4 depthEncodingParams;
public Vector4 depthDecodingParams;
// Builds the parameter set for a volumetric buffer of the given dimensions.
// w, h: buffer resolution in texels; d: number of depth slices.
public Parameters(int w, int h, int d, ControllerParameters controlParams)
{
    // Store each size together with its reciprocal so shaders avoid per-texel divides.
    resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
    sliceCount = new Vector2(d, 1.0f / d);
    // C# forbids calling a method before every struct member is assigned,
    // so seed the depth params with zero and let Update() compute the real values.
    depthEncodingParams = depthDecodingParams = Vector4.zero;
    Update(controlParams);
}
// Recomputes the logarithmic depth encoding/decoding parameters from the
// controller settings. Resolution-independent, so it can run every frame.
public void Update(ControllerParameters controlParams)
{
    float nearPlane = controlParams.vBufferNearPlane;
    float farPlane  = controlParams.vBufferFarPlane;
    // Remap the uniformity slider from [0, 1] onto the exponent control [2, 0].
    float c = 2 - 2 * controlParams.depthSliceDistributionUniformity;

    depthEncodingParams = ComputeLogarithmicDepthEncodingParams(nearPlane, farPlane, c);
    depthDecodingParams = ComputeLogarithmicDepthDecodingParams(nearPlane, farPlane, c);
}
} // struct Parameters
ControllerParameters parameters;
const int k_NumFrames = 2; // Double-buffer history and feedback
const int k_NumBuffers = 4; // See the list below
const int k_IndexDensity = 0;
const int k_IndexIntegral = 1;
const int k_IndexHistory = 2; // Depends on frame ID
const int k_IndexFeedback = 3; // Depends on frame ID
long m_ViewID = -1; // (m_ViewID > 0) if valid
RenderTexture[] m_Textures = null;
RenderTargetIdentifier[] m_Identifiers = null;
Parameters[] m_Params = null; // For the current and the previous frame
parameters.vBufferNearPlane = 0.5f;
parameters.vBufferFarPlane = 64.0f;
parameters.depthSliceDistributionUniformity = 0.75f;
public long GetViewID()
{
return m_ViewID;
return parameters;
} // struct ControllerParameters
// A VBuffer is usable only after Create() has assigned a positive view ID
// and while its textures are still alive.
public bool IsValid()
{
    if (m_ViewID <= 0)
        return false;

    // A domain reload can destroy the textures out from under us,
    // so check the first one as a sentinel.
    return m_Textures != null && m_Textures[0] != null;
}
public struct VBufferParameters
{
public Vector4 resolution;
public Vector2 sliceCount;
public Vector4 depthEncodingParams;
public Vector4 depthDecodingParams;
public Parameters GetParameters(uint frameIndex)
public VBufferParameters(int w, int h, int d, ControllerParameters controlParams)
return m_Params[frameIndex & 1];
}
resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
sliceCount = new Vector2(d, 1.0f / d);
depthEncodingParams = Vector4.zero; // C# doesn't allow function calls before all members have been init
depthDecodingParams = Vector4.zero; // C# doesn't allow function calls before all members have been init
public void SetParameters(Parameters parameters, uint frameIndex)
{
m_Params[frameIndex & 1] = parameters;
Update(controlParams);
public RenderTargetIdentifier GetDensityBuffer()
public void Update(ControllerParameters controlParams)
Debug.Assert(IsValid());
return m_Identifiers[k_IndexDensity];
}
float n = controlParams.vBufferNearPlane;
float f = controlParams.vBufferFarPlane;
float c = 2 - 2 * controlParams.depthSliceDistributionUniformity; // remap [0, 1] -> [2, 0]
public RenderTargetIdentifier GetLightingIntegralBuffer() // Of the current frame
{
Debug.Assert(IsValid());
return m_Identifiers[k_IndexIntegral];
depthEncodingParams = ComputeLogarithmicDepthEncodingParams(n, f, c);
depthDecodingParams = ComputeLogarithmicDepthDecodingParams(n, f, c);
// Returns the lighting buffer produced by the previous frame (read for reprojection).
public RenderTargetIdentifier GetLightingHistoryBuffer(uint frameIndex) // From the previous frame
{
    Debug.Assert(IsValid());
    // History and feedback swap roles every frame; even frames read the first
    // of the two ping-pong slots, odd frames the second.
    uint parity = frameIndex & 1;
    return m_Identifiers[k_IndexHistory + parity];
}
// Returns the lighting buffer this frame writes for the next frame to consume.
public RenderTargetIdentifier GetLightingFeedbackBuffer(uint frameIndex) // For the next frame
{
    Debug.Assert(IsValid());
    // Mirror image of GetLightingHistoryBuffer: the slot not being read this
    // frame is the one written as feedback.
    uint parity = frameIndex & 1;
    return m_Identifiers[k_IndexFeedback - parity];
}
// Allocates the 3D render targets and per-frame parameters for one view.
// viewID must be a valid (positive) view identifier; w/h/d are the buffer
// width, height and slice count, and must all be positive.
public void Create(long viewID, int w, int h, int d, ControllerParameters controlParams)
{
    Debug.Assert(viewID > 0);
    Debug.Assert(w > 0 && h > 0 && d > 0);

    // Release whatever a previous Create() allocated before reallocating.
    Destroy();

    m_ViewID      = viewID;
    m_Textures    = new RenderTexture[k_NumBuffers];
    m_Identifiers = new RenderTargetIdentifier[k_NumBuffers];
    m_Params      = new Parameters[k_NumFrames];

    for (int bufferIdx = 0; bufferIdx < k_NumBuffers; bufferIdx++)
    {
        var texture = new RenderTexture(w, h, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
        texture.hideFlags         = HideFlags.HideAndDontSave;
        texture.filterMode        = FilterMode.Trilinear; // Custom
        texture.dimension         = TextureDimension.Tex3D; // TODO: request the thick 3D tiling layout
        texture.volumeDepth       = d;
        texture.enableRandomWrite = true;
        texture.name              = CoreUtils.GetRenderTargetAutoName(w, h, d, RenderTextureFormat.ARGBHalf, string.Format("VBuffer{0}", bufferIdx));
        texture.Create();
        // TODO: clear the texture. Clearing 3D textures does not appear to work right now.

        m_Textures[bufferIdx]    = texture;
        m_Identifiers[bufferIdx] = new RenderTargetIdentifier(texture);
    }

    // Start with the same parameters for both frames. Then incrementally update them.
    var initialParams = new Parameters(w, h, d, controlParams);
    m_Params[0] = initialParams;
    m_Params[1] = initialParams;
}
// Releases the GPU textures and resets the VBuffer to its invalid state.
// Safe to call repeatedly, including on a never-created instance.
public void Destroy()
{
    if (m_Textures != null)
    {
        foreach (var texture in m_Textures)
        {
            // Entries may already be null (e.g. after a domain reload).
            if (texture != null)
                texture.Release();
        }
    }

    m_ViewID      = -1;
    m_Textures    = null;
    m_Identifiers = null;
    m_Params      = null;
}
} // class VBuffer
} // struct Parameters
public VolumetricLightingPreset preset { get { return (VolumetricLightingPreset)Math.Min(ShaderConfig.s_VolumetricLightingPreset, (int)VolumetricLightingPreset.Count); } }

List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumeBounds = null;
List<DensityVolumeData> m_VisibleVolumeData = null;
public const int k_MaxVisibleVolumeCount = 512;

m_VolumeVoxelizationCS = asset.renderPipelineResources.volumeVoxelizationCS;
m_VolumetricLightingCS = asset.renderPipelineResources.volumetricLightingCS;
CreateBuffers();
public void CreateBuffers()
// RTHandleSystem API expects a function which computes the resolution. We define it here.
Vector2Int ComputeVBufferSizeXY(Vector2Int screenSize)
if (preset == VolumetricLightingPreset.Off) return;
int t = ComputeVBufferTileSize(preset);
// Ceil(ScreenSize / TileSize).
int w = (screenSize.x + (t - 1)) / t;
int h = (screenSize.y + (t - 1)) / t;
return new Vector2Int(w, h);
}
// BufferedRTHandleSystem API expects an allocator function. We define it here.
RTHandleSystem.RTHandle HistoryBufferAllocatorFunction(string viewName, int frameIndex, RTHandleSystem rtHandleSystem)
{
    // The history system double-buffers, so only the frame parity matters for naming.
    frameIndex &= 1;

    int sliceCount     = ComputeVBufferSliceCount(preset);
    string bufferName  = string.Format("{0}_VBufferHistory{1}", viewName, frameIndex);

    return rtHandleSystem.Alloc(scaleFunc: ComputeVBufferSizeXY,
        slices: sliceCount,
        dimension: TextureDimension.Tex3D,
        colorFormat: RenderTextureFormat.ARGBHalf,
        sRGB: false,
        enableRandomWrite: true,
        enableMSAA: false,
        /* useDynamicScale: true, // <- TODO */
        name: bufferName
    );
}
void CreateBuffers()
{
m_VBuffers = new List<VBuffer>();
s_VisibleVolumeBoundsBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(OrientedBBox)));
s_VisibleVolumeDataBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(DensityVolumeData)));
s_VisibleVolumeBoundsBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, Marshal.SizeOf(typeof(OrientedBBox)));
s_VisibleVolumeDataBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, Marshal.SizeOf(typeof(DensityVolumeData)));
int tileSize = ComputeVBufferTileSize(preset);
int depth = ComputeVBufferSliceCount(preset);
int d = ComputeVBufferSliceCount(preset);
m_DensityBufferHandle = RTHandles.Alloc(scaleFunc: size => new Vector2Int((size.x + (tileSize - 1)) / tileSize, (size.y + (tileSize - 1)) / tileSize),
slices: depth,
m_DensityBufferHandle = RTHandles.Alloc(scaleFunc: ComputeVBufferSizeXY,
slices: d,
dimension: TextureDimension.Tex3D,
colorFormat: RenderTextureFormat.ARGBHalf,
sRGB: false,

name: "VBufferDensity");
m_LightingBufferHandle = RTHandles.Alloc(scaleFunc: size => new Vector2Int((size.x + (tileSize - 1)) / tileSize, (size.y + (tileSize - 1)) / tileSize),
slices: depth,
m_LightingBufferHandle = RTHandles.Alloc(scaleFunc: ComputeVBufferSizeXY,
slices: d,
dimension: TextureDimension.Tex3D,
colorFormat: RenderTextureFormat.ARGBHalf,
sRGB: false,

name: "VBufferIntegral");
}
public void DestroyBuffers()
VBufferParameters ComputeVBufferParameters(HDCamera camera)
for (int i = 0, n = m_VBuffers.Count; i < n; i++)
ControllerParameters controlParams;
var controller = camera.camera.GetComponent<VolumetricLightingController>();
if (controller != null)
{
controlParams = controller.parameters;
}
else
m_VBuffers[i].Destroy();
controlParams = ControllerParameters.GetDefaults();
RTHandles.Release(m_DensityBufferHandle);
RTHandles.Release(m_LightingBufferHandle);
int w = 0, h = 0, d = 0;
ComputeVBufferResolutionAndScale(preset, camera.camera.pixelWidth, camera.camera.pixelHeight, ref w, ref h, ref d);
CoreUtils.SafeRelease(s_VisibleVolumeBoundsBuffer);
CoreUtils.SafeRelease(s_VisibleVolumeDataBuffer);
m_VBuffers = null;
m_VisibleVolumeBounds = null;
m_VisibleVolumeData = null;
// Start with the same parameters for both frames. Then update them one by one every frame.
return new VBufferParameters(w, h, d, controlParams);
public void Cleanup()
public void InitializePerCameraData(HDCamera camera)
DestroyBuffers();
// Start with the same parameters for both frames. Then update them one by one every frame.
var parameters = ComputeVBufferParameters(camera);
camera.vBufferParams = new VBufferParameters[2];
camera.vBufferParams[0] = parameters;
camera.vBufferParams[1] = parameters;
m_VolumeVoxelizationCS = null;
m_VolumetricLightingCS = null;
if (camera.camera.cameraType == CameraType.Game ||
camera.camera.cameraType == CameraType.SceneView)
{
// We don't need reprojection for other view types, such as reflection and preview.
camera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting, HistoryBufferAllocatorFunction);
}
public void ResizeVBufferAndUpdateProperties(HDCamera camera, uint frameIndex)
// This function relies on being called once per camera per frame.
// The results are undefined otherwise.
public void UpdatePerCameraData(HDCamera camera)
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment == null || visualEnvironment.fogType != FogType.Volumetric) return;
var controller = camera.camera.GetComponent<VolumetricLightingController>();
if (camera.camera.cameraType == CameraType.SceneView)
{
// HACK: since it's not possible to add a component to a scene camera,
// we take one from the "main" camera (if present).
Camera mainCamera = Camera.main;
if (mainCamera != null)
{
controller = mainCamera.GetComponent<VolumetricLightingController>();
}
}
if (controller == null) return;
int screenWidth = (int)camera.screenSize.x;
int screenHeight = (int)camera.screenSize.y;
long viewID = camera.GetViewID();
Debug.Assert(viewID > 0);
var parameters = ComputeVBufferParameters(camera);
int w = 0, h = 0, d = 0;
ComputeVBufferResolutionAndScale(preset, screenWidth, screenHeight, ref w, ref h, ref d);
// Double-buffer. I assume the cost of copying is negligible (don't want to use the frame index).
camera.vBufferParams[1] = camera.vBufferParams[0];
camera.vBufferParams[0] = parameters;
VBuffer vBuffer = FindVBuffer(viewID);
// Note: resizing of history buffer is automatic (handled by the BufferedRTHandleSystem).
}
if (vBuffer != null)
{
VBuffer.Parameters frameParams = vBuffer.GetParameters(frameIndex);
// Found, check resolution.
if (w == frameParams.resolution.x &&
h == frameParams.resolution.y &&
d == frameParams.sliceCount.x)
{
// The resolution matches.
// Depth parameters may have changed, so update those.
frameParams.Update(controller.parameters);
vBuffer.SetParameters(frameParams, frameIndex);
void DestroyBuffers()
{
RTHandles.Release(m_DensityBufferHandle);
RTHandles.Release(m_LightingBufferHandle);
return;
}
}
else
{
// Not found - grow the array.
vBuffer = new VBuffer();
m_VBuffers.Add(vBuffer);
}
CoreUtils.SafeRelease(s_VisibleVolumeBoundsBuffer);
CoreUtils.SafeRelease(s_VisibleVolumeDataBuffer);
vBuffer.Create(viewID, w, h, d, controller.parameters);
m_VisibleVolumeBounds = null;
m_VisibleVolumeData = null;
VBuffer FindVBuffer(long viewID)
public void Cleanup()
Debug.Assert(viewID > 0);
VBuffer vBuffer = null;
if (m_VBuffers != null)
{
int n = m_VBuffers.Count;
if (preset == VolumetricLightingPreset.Off) return;
for (int i = 0; i < n; i++)
{
// Check whether domain reload killed it...
if (viewID == m_VBuffers[i].GetViewID() && m_VBuffers[i].IsValid())
{
vBuffer = m_VBuffers[i];
}
}
}
DestroyBuffers();
return vBuffer;
m_VolumeVoxelizationCS = null;
m_VolumetricLightingCS = null;
}
static int ComputeVBufferTileSize(VolumetricLightingPreset preset)

int t = ComputeVBufferTileSize(preset);
// Ceil(ScreenSize / TileSize).
w = (screenWidth + t - 1) / t;
h = (screenHeight + t - 1) / t;
w = (screenWidth + (t - 1)) / t;
h = (screenHeight + (t - 1)) / t;
d = ComputeVBufferSliceCount(preset);
return new Vector2((float)screenWidth / (float)(w * t), (float)screenHeight / (float)(h * t));

if (preset == VolumetricLightingPreset.Off) return;
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric) return;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null)
if (visualEnvironment.fogType != FogType.Volumetric)
{
// Set the neutral black texture.
cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, CoreUtils.blackVolumeTexture);

SetPreconvolvedAmbientLightProbe(cmd, fog.anisotropy);
var currFrameParams = vBuffer.GetParameters(frameIndex);
var prevFrameParams = vBuffer.GetParameters(frameIndex - 1);
var currFrameParams = camera.vBufferParams[0];
var prevFrameParams = camera.vBufferParams[1];
cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, currFrameParams.resolution);
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, currFrameParams.sliceCount);

var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric) return densityVolumes;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null) return densityVolumes;
using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))
{

var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric) return;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null) return;
using (new ProfilingSample(cmd, "Volume Voxelization"))
{

int kernel = m_VolumeVoxelizationCS.FindKernel(enableClustered ? "VolumeVoxelizationClustered"
: "VolumeVoxelizationBruteforce");
var frameParams = vBuffer.GetParameters(frameIndex);
var frameParams = camera.vBufferParams[0];
Vector4 resolution = frameParams.resolution;
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;

var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric) return;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null) return;
using (new ProfilingSample(cmd, "Volumetric Lighting"))
{
// Only available in the Play Mode because all the frame counters in the Edit Mode are broken.

: "VolumetricLightingBruteforce");
}
var frameParams = vBuffer.GetParameters(frameIndex);
var frameParams = camera.vBufferParams[0];
Vector4 resolution = frameParams.resolution;
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
// Compose the matrix which allows us to compute the world space view direction.

cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, m_LightingBufferHandle); // Write
if (enableReprojection)
{
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer(frameIndex)); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer(frameIndex)); // Write
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, camera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting)); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, camera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting)); // Write
}
int w = (int)resolution.x;

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLightingController.cs


public VolumetricLightingController()
{
parameters.vBufferNearPlane = 0.5f;
parameters.vBufferFarPlane = 64.0f;
parameters.depthSliceDistributionUniformity = 0.75f;
parameters = VolumetricLightingSystem.ControllerParameters.GetDefaults();
}
private void Awake()

正在加载...
取消
保存