
Merge pull request #1141 from Unity-Technologies/feature/PlanarReflectionUI

Planar probe convolution
main
GitHub, 6 years ago
Current commit: 38f1b362
10 changed files with 377 additions and 147 deletions
  1. CHANGELOG.md (5 changed lines)
  2. ScriptableRenderPipeline/Core/CoreRP/CoreResources/TexturePadding.cs (2 changed lines)
  3. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Drawers.cs (11 changed lines)
  4. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Handles.cs (17 changed lines)
  5. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (7 changed lines)
  6. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl (2 changed lines)
  7. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GGXConvolution/RuntimeFilterIBL.cs (79 changed lines)
  8. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs (155 changed lines)
  9. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs (235 changed lines)
  10. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs.meta (11 changed lines)

CHANGELOG.md (5 changed lines)


and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- Planar Reflection Probes now support roughness (gaussian convolution of the captured probe)
- Planar Probe update modes are now forced to (Realtime, Every Update, Mirror Camera)
- Removed the Planar Probe mirror plane position and normal fields from the inspector; the mirror plane and normal gizmos are now always displayed
## [0.1.6] - 2018-xx-yy

ScriptableRenderPipeline/Core/CoreRP/CoreResources/TexturePadding.cs (2 changed lines)


m_KMainTop = m_CS.FindKernel("KMainTop");
m_KMainRight = m_CS.FindKernel("KMainRight");
}
public void Pad(CommandBuffer cmd, RTHandle source, RectInt from, RectInt to)
public void Pad(CommandBuffer cmd, RenderTexture source, RectInt from, RectInt to)
{
if (from.width < to.width)
{
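The only change here is the Pad signature: it now takes a plain RenderTexture instead of an RTHandle, so the same padding pass can also run on the temporary planar-probe mip targets allocated in RuntimeFilterIBL further down this diff (existing RTHandle call sites keep compiling through RTHandle's implicit conversion to RenderTexture). A minimal usage sketch, assuming an already constructed TexturePadding instance and a temporary mip RenderTexture (texturePadding, cmd and tempMipRT are placeholder names, not from the diff):

    // Sketch only: pad the live 960x540 viewport of a temporary mip RT up to a
    // 16-aligned work rect, as the pyramid code does before its 8x8 compute kernel.
    RectInt srcRect  = new RectInt(0, 0, 960, 540);
    RectInt workRect = new RectInt(
        0, 0,
        Mathf.CeilToInt(srcRect.width  / 16.0f) * 16,   // 960
        Mathf.CeilToInt(srcRect.height / 16.0f) * 16);  // 544
    texturePadding.Pad(cmd, tempMipRT, srcRect, workRect);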

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Drawers.cs (11 changed lines)


static void Drawer_SectionCaptureMirror(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)
{
EditorGUILayout.PropertyField(d.captureMirrorPlaneLocalPosition, _.GetContent("Plane Position"));
EditorGUILayout.PropertyField(d.captureMirrorPlaneLocalNormal, _.GetContent("Plane Normal"));
// EditorGUILayout.PropertyField(d.captureMirrorPlaneLocalPosition, _.GetContent("Plane Position"));
// EditorGUILayout.PropertyField(d.captureMirrorPlaneLocalNormal, _.GetContent("Plane Normal"));
}
static void Drawer_SectionCaptureSettings(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)

static void Drawer_SectionProbeModeRealtimeSettings(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)
{
GUI.enabled = false;
d.refreshMode.enumValueIndex = (int)ReflectionProbeRefreshMode.EveryFrame;
d.capturePositionMode.enumValueIndex = (int)PlanarReflectionProbe.CapturePositionMode.MirrorCamera;
GUI.enabled = true;
}
static void Drawer_SectionInfluenceSettings(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)

static void Drawer_FieldCaptureType(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)
{
GUI.enabled = false;
d.mode.enumValueIndex = (int)ReflectionProbeMode.Realtime;
GUI.enabled = true;
}
static void Drawer_FieldProxyVolumeReference(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Handles.cs (17 changed lines)


}
}
if (d.useMirrorPlane)
{
var m = Handles.matrix;
Handles.matrix = mat;
Handles.color = k_GizmoMirrorPlaneCamera;
Handles.ArrowHandleCap(
0,
d.captureMirrorPlaneLocalPosition,
Quaternion.LookRotation(d.captureMirrorPlaneLocalNormal),
HandleUtility.GetHandleSize(d.captureMirrorPlaneLocalPosition),
Event.current.type
);
Handles.matrix = m;
}
if (d.proxyVolumeReference != null)
ReflectionProxyVolumeComponentUI.DrawHandles_EditNone(s.reflectionProxyVolume, d.proxyVolumeReference);
}

var showFrustrum = s.showCaptureHandles
|| EditMode.editMode == EditCenter;
var showCaptureMirror = (s.showCaptureHandles && d.useMirrorPlane)
var showCaptureMirror = d.useMirrorPlane
|| EditMode.editMode == EditMirrorPosition
|| EditMode.editMode == EditMirrorRotation;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (7 changed lines)


m_Asset = asset;
m_GPUCopy = new GPUCopy(asset.renderPipelineResources.copyChannelCS);
// removed:
m_BufferPyramid = new BufferPyramid(
);
// added:
var bufferPyramidProcessor = new BufferPyramidProcessor(
);
m_BufferPyramid = new BufferPyramid(bufferPyramidProcessor);
EncodeBC6H.DefaultInstance = EncodeBC6H.DefaultInstance ?? new EncodeBC6H(asset.renderPipelineResources.encodeBC6HCS);

m_MaterialList.ForEach(material => material.Build(asset));
m_IBLFilterGGX = new IBLFilterGGX(asset.renderPipelineResources);
m_IBLFilterGGX = new IBLFilterGGX(asset.renderPipelineResources, bufferPyramidProcessor);
m_LightLoop.Build(asset, m_ShadowSettings, m_IBLFilterGGX);
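Read together, this hunk builds a single BufferPyramidProcessor and shares it between the camera buffer pyramid and the IBL filter. A sketch of the likely wiring after the change; only copyChannelCS is visible in this hunk, so the other resource field names (and the TexturePadding constructor argument) are assumptions:

    // Sketch, not the verbatim pipeline code.
    var gpuCopy = new GPUCopy(asset.renderPipelineResources.copyChannelCS);
    var bufferPyramidProcessor = new BufferPyramidProcessor(
        asset.renderPipelineResources.colorPyramidCS,   // assumed field name
        asset.renderPipelineResources.depthPyramidCS,   // assumed field name
        gpuCopy,
        new TexturePadding(asset.renderPipelineResources.texturePaddingCS)  // assumed field/constructor
    );
    m_BufferPyramid = new BufferPyramid(bufferPyramidProcessor);
    m_IBLFilterGGX = new IBLFilterGGX(asset.renderPipelineResources, bufferPyramidProcessor);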

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl (2 changed lines)


//_Env2DCaptureVP is in capture space
float3 ndc = ComputeNormalizedDeviceCoordinatesWithZ(texCoord, _Env2DCaptureVP[index]);
color.rgb = SAMPLE_TEXTURE2D_ARRAY_LOD(_Env2DTextures, s_trilinear_clamp_sampler, ndc.xy, index, 0).rgb;
color.rgb = SAMPLE_TEXTURE2D_ARRAY_LOD(_Env2DTextures, s_trilinear_clamp_sampler, ndc.xy, index, lod).rgb;
color.a = any(ndc.xyz < 0) || any(ndc.xyz > 1) ? 0.0 : 1.0;
#ifdef DEBUG_DISPLAY

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GGXConvolution/RuntimeFilterIBL.cs (79 changed lines)


using UnityEngine.Rendering;
using System;
using System.Collections.Generic;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

RenderPipelineResources m_RenderPipelineResources;
BufferPyramidProcessor m_BufferPyramidProcessor;
List<RenderTexture> m_PlanarColorMips = new List<RenderTexture>();
public IBLFilterGGX(RenderPipelineResources renderPipelineResources)
public IBLFilterGGX(RenderPipelineResources renderPipelineResources, BufferPyramidProcessor processor)
m_BufferPyramidProcessor = processor;
}
public bool IsInitialized()

{
CoreUtils.Destroy(m_GgxConvolveMaterial);
CoreUtils.Destroy(m_GgxIblSampleData);
for (var i = 0; i < m_PlanarColorMips.Count; ++i)
m_PlanarColorMips[i].Release();
m_PlanarColorMips.Clear();
}
void FilterCubemapCommon( CommandBuffer cmd,

public void FilterPlanarTexture(CommandBuffer cmd, Texture source, RenderTexture target)
{
// TODO: planar convolution
cmd.CopyTexture(source, 0, 0, target, 0, 0);
var lodCount = Mathf.Max(Mathf.FloorToInt(Mathf.Log(Mathf.Min(source.width, source.height), 2f)), 0);
for (var i = 0 ; i < lodCount - 0; ++i)
{
var width = target.width >> (i + 1);
var height = target.height >> (i + 1);
var rtHash = HashRenderTextureProperties(
width,
height,
target.depth,
target.format,
target.sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear
);
var lodIsMissing = i >= m_PlanarColorMips.Count;
RenderTexture rt = null;
var createRT = lodIsMissing
|| (rt = m_PlanarColorMips[i]) == null
|| rtHash != HashRenderTextureProperties(
rt.width, rt.height, rt.depth, rt.format, rt.sRGB
? RenderTextureReadWrite.sRGB
: RenderTextureReadWrite.Linear
);
if (createRT && rt)
rt.Release();
if (createRT)
{
rt = new RenderTexture(
width,
height,
target.depth,
target.format,
target.sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear
);
rt.enableRandomWrite = true;
rt.name = "Planar Convolution Tmp RT";
rt.hideFlags = HideFlags.HideAndDontSave;
rt.Create();
}
if (lodIsMissing)
m_PlanarColorMips.Add(rt);
else if (createRT)
m_PlanarColorMips[i] = rt;
}
m_BufferPyramidProcessor.RenderColorPyramid(
new RectInt(0, 0, source.width, source.height),
cmd,
source,
target,
m_PlanarColorMips,
lodCount
);
}
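For orientation, a worked example of what the loop above caches (numbers are illustrative, not taken from the diff):

    // For a 1920x1080 planar capture: lodCount = floor(log2(1080)) = 10, and the
    // temporary targets are (target.width >> (i + 1)) x (target.height >> (i + 1)):
    //   i = 0 -> 960x540, i = 1 -> 480x270, i = 2 -> 240x135, ... , i = 9 -> 1x1
    // Each RT is keyed by HashRenderTextureProperties, so it is only recreated when
    // the capture resolution, depth, format or sRGB flag changes.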
// Filters MIP map levels (other than 0) with GGX using multiple importance sampling.

m_GgxConvolveMaterial.SetTexture("_MarginalRowDensities", marginalRowCdf);
FilterCubemapCommon(cmd, source, target, m_faceWorldToViewMatrixMatrices);
}
int HashRenderTextureProperties(
int width,
int height,
int depth,
RenderTextureFormat format,
RenderTextureReadWrite sRGB)
{
return width.GetHashCode()
^ height.GetHashCode()
^ depth.GetHashCode()
^ format.GetHashCode()
^ sRGB.GetHashCode();
}
}
}
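A hypothetical call site for the new entry point (planar probe capture itself is not part of this diff; probeCapture and probeTarget are placeholder render textures, with probeTarget owning the mip chain):

    var convolveCmd = CommandBufferPool.Get("Planar probe convolution");
    m_IBLFilterGGX.FilterPlanarTexture(convolveCmd, probeCapture, probeTarget);
    renderContext.ExecuteCommandBuffer(convolveCmd);
    CommandBufferPool.Release(convolveCmd);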

ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs (155 changed lines)


{
class BufferPyramid
{
static readonly int _Size = Shader.PropertyToID("_Size");
static readonly int _Source = Shader.PropertyToID("_Source");
static readonly int _Result = Shader.PropertyToID("_Result");
static readonly int _SrcSize = Shader.PropertyToID("_SrcSize");
const int k_DepthBlockSize = 4;
GPUCopy m_GPUCopy;
TexturePadding m_TexturePadding;
ComputeShader m_ColorPyramidCS;
int m_ColorPyramidKernel;
ComputeShader m_DepthPyramidCS;
int[] m_DepthKernels = null;
int depthKernel8 { get { return m_DepthKernels[0]; } }
int depthKernel1 { get { return m_DepthKernels[1]; } }
// removed: the old constructor that owned all the compute setup
public BufferPyramid(
ComputeShader colorPyramidCS,
ComputeShader depthPyramidCS,
GPUCopy gpuCopy,
TexturePadding texturePadding
)
{
m_ColorPyramidCS = colorPyramidCS;
m_ColorPyramidKernel = m_ColorPyramidCS.FindKernel("KMain");
m_DepthPyramidCS = depthPyramidCS;
m_GPUCopy = gpuCopy;
m_DepthKernels = new int[]
{
m_DepthPyramidCS.FindKernel("KDepthDownSample8"),
m_DepthPyramidCS.FindKernel("KDepthDownSample1")
};
m_TexturePadding = texturePadding;
}
// added: the pyramid now just wraps a shared BufferPyramidProcessor
BufferPyramidProcessor m_Processor;
public BufferPyramid(BufferPyramidProcessor processor)
{
m_Processor = processor;
}
float GetXRscale()

cmd.SetGlobalVector(HDShaderIDs._DepthPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));
// removed: the per-mip downsample loop (moved into BufferPyramidProcessor.RenderDepthPyramid)
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, depthTexture, m_DepthPyramidBuffer, new RectInt(0, 0, hdCamera.actualWidth, hdCamera.actualHeight));
RTHandle src = m_DepthPyramidBuffer;
for (var i = 0; i < lodCount; i++)
{
RTHandle dest = m_DepthPyramidMips[i];
var srcMip = new RectInt(0, 0, hdCamera.actualWidth >> i, hdCamera.actualHeight >> i);
var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
var kernel = depthKernel1;
var kernelSize = 1;
var srcWorkMip = srcMip;
var dstWorkMip = dstMip;
if (dstWorkMip.width >= 8 && dstWorkMip.height >= 8)
{
srcWorkMip.width = Mathf.CeilToInt(srcWorkMip.width / 16.0f) * 16;
srcWorkMip.height = Mathf.CeilToInt(srcWorkMip.height / 16.0f) * 16;
dstWorkMip.width = srcWorkMip.width >> 1;
dstWorkMip.height = srcWorkMip.height >> 1;
m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);
kernel = depthKernel8;
kernelSize = 8;
}
else
{
m_TexturePadding.Pad(cmd, src, srcMip, new RectInt(0, 0, src.rt.width, src.rt.height));
}
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Source, src);
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(
srcWorkMip.width, srcWorkMip.height,
(1.0f / srcWorkMip.width) * scale.x, (1.0f / srcWorkMip.height) * scale.y)
);
cmd.DispatchCompute(
m_DepthPyramidCS,
kernel,
Mathf.CeilToInt(dstWorkMip.width / (float)kernelSize),
Mathf.CeilToInt(dstWorkMip.height / (float)kernelSize),
1
);
var dstMipWidthToCopy = Mathf.Min(dest.rt.width, dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(dest.rt.height, dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(m_DepthPyramidMips[i], 0, 0, 0, 0, dstMipWidthToCopy, dstMipHeightToCopy, m_DepthPyramidBuffer, 0, i + 1, 0, 0);
src = dest;
}
// added: delegate the whole chain to the shared processor
m_Processor.RenderDepthPyramid(
hdCamera.actualWidth, hdCamera.actualHeight,
cmd,
depthTexture,
m_DepthPyramidBuffer,
m_DepthPyramidMips,
lodCount,
scale
);
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, m_DepthPyramidBuffer);
}

cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));
// removed: the per-mip loop (moved into BufferPyramidProcessor.RenderColorPyramid)
// Copy mip 0
// Here we blit a "camera space" texture into a square texture but we want to keep the original viewport.
// Other BlitCameraTexture version will setup the viewport based on the destination RT scale (square here) so we need override it here.
HDUtils.BlitCameraTexture(cmd, hdCamera, colorTexture, m_ColorPyramidBuffer, new Rect(0.0f, 0.0f, hdCamera.actualWidth, hdCamera.actualHeight));
RTHandle src = m_ColorPyramidBuffer;
for (var i = 0; i < lodCount; i++)
{
RTHandle dest = m_ColorPyramidMips[i];
var srcMip = new RectInt(0, 0, hdCamera.actualWidth >> i, hdCamera.actualHeight >> i);
var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
var srcWorkMip = new RectInt(
0,
0,
Mathf.CeilToInt(srcMip.width / 16.0f) * 16,
Mathf.CeilToInt(srcMip.height / 16.0f) * 16
);
var dstWorkMip = new RectInt(0, 0, srcWorkMip.width >> 1, srcWorkMip.height >> 1);
m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);
// TODO: Add proper stereo support to the compute job
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Source, src);
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Result, dest);
// _Size is used as a scale inside the whole render target so here we need to keep the full size (and not the scaled size depending on the current camera)
cmd.SetComputeVectorParam(
m_ColorPyramidCS,
_Size,
new Vector4(dest.rt.width, dest.rt.height, 1f / dest.rt.width, 1f / dest.rt.height)
);
cmd.DispatchCompute(
m_ColorPyramidCS,
m_ColorPyramidKernel,
dstWorkMip.width / 8,
dstWorkMip.height / 8,
1
);
var dstMipWidthToCopy = Mathf.Min(dest.rt.width, dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(dest.rt.height, dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(
m_ColorPyramidMips[i],
0, 0, 0, 0,
dstMipWidthToCopy, dstMipHeightToCopy, m_ColorPyramidBuffer, 0, i + 1, 0, 0
);
src = dest;
}
// added: delegate to the shared processor
m_Processor.RenderColorPyramid(
hdCamera,
cmd,
colorTexture,
m_ColorPyramidBuffer,
m_ColorPyramidMips,
lodCount,
scale
);
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, m_ColorPyramidBuffer);
}
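After this change, both BufferPyramid entry points reduce to setting the global constants and delegating to the processor; the color path likely ends up reading roughly like this (a sketch assembled from the lines above, not the verbatim file):

    cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
    cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));
    m_Processor.RenderColorPyramid(hdCamera, cmd, colorTexture, m_ColorPyramidBuffer, m_ColorPyramidMips, lodCount, scale);
    cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, m_ColorPyramidBuffer);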

ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs (235 changed lines)


using System.Collections.Generic;
using UnityEngine.Assertions;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
public class BufferPyramidProcessor
{
static readonly int _Size = Shader.PropertyToID("_Size");
static readonly int _Source = Shader.PropertyToID("_Source");
static readonly int _Result = Shader.PropertyToID("_Result");
static readonly int _SrcSize = Shader.PropertyToID("_SrcSize");
const int k_DepthBlockSize = 4;
GPUCopy m_GPUCopy;
TexturePadding m_TexturePadding;
ComputeShader m_ColorPyramidCS;
int m_ColorPyramidKernel;
ComputeShader m_DepthPyramidCS;
int[] m_DepthKernels = null;
int depthKernel8 { get { return m_DepthKernels[0]; } }
int depthKernel1 { get { return m_DepthKernels[1]; } }
List<RenderTexture> m_RenderColorPyramid_CastTmp = new List<RenderTexture>();
public BufferPyramidProcessor(
ComputeShader colorPyramidCS,
ComputeShader depthPyramidCS,
GPUCopy gpuCopy,
TexturePadding texturePadding
)
{
m_ColorPyramidCS = colorPyramidCS;
m_ColorPyramidKernel = m_ColorPyramidCS.FindKernel("KMain");
m_DepthPyramidCS = depthPyramidCS;
m_GPUCopy = gpuCopy;
m_DepthKernels = new int[]
{
m_DepthPyramidCS.FindKernel("KDepthDownSample8"),
m_DepthPyramidCS.FindKernel("KDepthDownSample1")
};
m_TexturePadding = texturePadding;
}
public void RenderDepthPyramid(
int width, int height,
CommandBuffer cmd,
RTHandle sourceTexture,
RTHandle targetTexture,
List<RTHandle> mips,
int lodCount,
Vector2 scale
)
{
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, sourceTexture, targetTexture, new RectInt(0, 0, width, height));
RTHandle src = targetTexture;
for (var i = 0; i < lodCount; i++)
{
RTHandle dest = mips[i];
var srcMip = new RectInt(0, 0, width >> i, height >> i);
var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
var kernel = depthKernel1;
var kernelSize = 1;
var srcWorkMip = srcMip;
var dstWorkMip = dstMip;
if (dstWorkMip.width >= 8 && dstWorkMip.height >= 8)
{
srcWorkMip.width = Mathf.CeilToInt(srcWorkMip.width / 16.0f) * 16;
srcWorkMip.height = Mathf.CeilToInt(srcWorkMip.height / 16.0f) * 16;
dstWorkMip.width = srcWorkMip.width >> 1;
dstWorkMip.height = srcWorkMip.height >> 1;
m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);
kernel = depthKernel8;
kernelSize = 8;
}
else
{
m_TexturePadding.Pad(cmd, src, srcMip, new RectInt(0, 0, src.rt.width, src.rt.height));
}
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Source, src);
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(
srcWorkMip.width, srcWorkMip.height,
(1.0f / srcWorkMip.width) * scale.x, (1.0f / srcWorkMip.height) * scale.y)
);
cmd.DispatchCompute(
m_DepthPyramidCS,
kernel,
Mathf.CeilToInt(dstWorkMip.width / (float)kernelSize),
Mathf.CeilToInt(dstWorkMip.height / (float)kernelSize),
1
);
var dstMipWidthToCopy = Mathf.Min(dest.rt.width, dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(dest.rt.height, dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(mips[i], 0, 0, 0, 0, dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0);
src = dest;
}
}
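A worked pass through the 8x8 branch above, with illustrative numbers:

    // width x height = 1920x1080, i = 0:
    //   srcMip     = 1920x1080, dstMip = 960x540
    //   srcWorkMip = 1920x1088 (rounded up to multiples of 16), dstWorkMip = 960x544
    //   dispatch   = ceil(960/8) x ceil(544/8) x 1 = 120 x 68 x 1 thread groups
    //   finally min(dest.rt size, 960x544) texels are copied into mip 1 of targetTexture.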
public void RenderColorPyramid(
HDCamera hdCamera,
CommandBuffer cmd,
RTHandle sourceTexture,
RTHandle targetTexture,
List<RTHandle> mips,
int lodCount,
Vector2 scale
)
{
// Copy mip 0
// Here we blit a "camera space" texture into a square texture but we want to keep the original viewport.
// Other BlitCameraTexture version will setup the viewport based on the destination RT scale (square here) so we need override it here.
HDUtils.BlitCameraTexture(cmd, hdCamera, sourceTexture, targetTexture, new Rect(0.0f, 0.0f, hdCamera.actualWidth, hdCamera.actualHeight));
m_RenderColorPyramid_CastTmp.Clear();
for (var i = 0 ; i < mips.Count; ++i)
m_RenderColorPyramid_CastTmp.Add(mips[i]);
RenderColorPyramidMips(
new RectInt(0, 0, hdCamera.actualWidth, hdCamera.actualHeight),
cmd,
targetTexture,
m_RenderColorPyramid_CastTmp,
lodCount,
scale
);
}
public void RenderColorPyramid(
RectInt srcRect,
CommandBuffer cmd,
Texture sourceTexture,
RenderTexture targetTexture,
List<RenderTexture> mips,
int lodCount
)
{
Assert.AreEqual(0, srcRect.x, "Offset are not supported");
Assert.AreEqual(0, srcRect.y, "Offset are not supported");
Assert.IsTrue(srcRect.width > 0);
Assert.IsTrue(srcRect.height > 0);
var scale = new Vector2(
sourceTexture.width / (float)srcRect.width,
sourceTexture.height / (float)srcRect.height
);
cmd.Blit(sourceTexture, targetTexture, scale, Vector2.zero);
RenderColorPyramidMips(
srcRect,
cmd,
targetTexture,
mips,
lodCount,
scale
);
}
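This RenderTexture overload is the one RuntimeFilterIBL.FilterPlanarTexture uses for planar probes; a short note on the scale it computes:

    // When the caller passes the full capture as srcRect
    // (new RectInt(0, 0, source.width, source.height)), scale evaluates to (1, 1),
    // so the Blit simply copies mip 0 of the capture into the convolution target
    // before RenderColorPyramidMips builds the remaining levels.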
void RenderColorPyramidMips(
RectInt srcRect,
CommandBuffer cmd,
RenderTexture targetTexture,
List<RenderTexture> mips,
int lodCount,
Vector2 scale
)
{
Assert.AreEqual(0, srcRect.x, "Offset are not supported");
Assert.AreEqual(0, srcRect.y, "Offset are not supported");
Assert.IsTrue(srcRect.width > 0);
Assert.IsTrue(srcRect.height > 0);
var src = targetTexture;
for (var i = 0; i < lodCount; i++)
{
var dest = mips[i];
var srcMip = new RectInt(0, 0, srcRect.width >> i, srcRect.height >> i);
var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
var srcWorkMip = new RectInt(
0,
0,
Mathf.CeilToInt(srcMip.width / 16.0f) * 16,
Mathf.CeilToInt(srcMip.height / 16.0f) * 16
);
var dstWorkMip = new RectInt(0, 0, srcWorkMip.width >> 1, srcWorkMip.height >> 1);
m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);
// TODO: Add proper stereo support to the compute job
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Source, src);
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Result, dest);
// _Size is used as a scale inside the whole render target so here we need to keep the full size (and not the scaled size depending on the current camera)
cmd.SetComputeVectorParam(
m_ColorPyramidCS,
_Size,
new Vector4(dest.width, dest.height, 1f / dest.width, 1f / dest.height)
);
cmd.DispatchCompute(
m_ColorPyramidCS,
m_ColorPyramidKernel,
dstWorkMip.width / 8,
dstWorkMip.height / 8,
1
);
var dstMipWidthToCopy = Mathf.Min(dest.width, dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(dest.height, dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(
mips[i],
0, 0, 0, 0,
dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0
);
src = dest;
}
}
}
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs.meta (11 changed lines)


fileFormatVersion: 2
guid: 5555ed542465a3a42aa9f58eccc62c68
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: