
Cleaned up old pyramid code

/main
Thomas, 6 years ago
Current commit a9219e03
5 files changed, 4 insertions(+) and 450 deletions(-)
  1. com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs (20 changes)
  2. com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramid.cs.meta (11 changes)
  3. com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramid.cs (176 changes)
  4. com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs.meta (11 changes)
  5. com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs (236 changes)

com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs (20 changes)


Material m_CopyStencilForNoLighting;
Material m_CopyDepth;
GPUCopy m_GPUCopy;
- BufferPyramid m_BufferPyramid;
MipGenerator m_MipGenerator;
IBLFilterGGX m_IBLFilterGGX = null;

m_MipGenerator = new MipGenerator(m_Asset);
- var bufferPyramidProcessor = new BufferPyramidProcessor(
-     asset.renderPipelineResources.colorPyramidCS,
-     asset.renderPipelineResources.depthPyramidCS,
-     m_GPUCopy,
-     new TexturePadding(asset.renderPipelineResources.texturePaddingCS)
- );
- m_BufferPyramid = new BufferPyramid(bufferPyramidProcessor);
EncodeBC6H.DefaultInstance = EncodeBC6H.DefaultInstance ?? new EncodeBC6H(asset.renderPipelineResources.encodeBC6HCS);
m_ReflectionProbeCullResults = new ReflectionProbeCullResults(asset.reflectionSystemParameters);

{
m_GbufferManager.DestroyBuffers();
m_DbufferManager.DestroyBuffers();
- m_BufferPyramid.DestroyBuffers();
m_MipGenerator.Release();
RTHandles.Release(m_CameraColorBuffer);
RTHandles.Release(m_CameraColorBufferMipChain);

RenderColorPyramid(hdCamera, cmd, false);
AccumulateDistortion(m_CullResults, hdCamera, renderContext, cmd);
- RenderDistortion(hdCamera, cmd, m_Asset.renderPipelineResources);
+ RenderDistortion(hdCamera, cmd);
StopStereoRendering(renderContext, hdCamera);

}
}
- void RenderDistortion(HDCamera hdCamera, CommandBuffer cmd, RenderPipelineResources resources)
+ void RenderDistortion(HDCamera hdCamera, CommandBuffer cmd)
{
if (!hdCamera.frameSettings.enableDistortion)
return;

- var pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, m_CameraColorBuffer);
- // Need to account for the fact that the gaussian pyramid is actually rendered inside the camera viewport in a square texture so we multiply by the PyramidToScreen scale
- var size = new Vector4(hdCamera.screenSize.x, hdCamera.screenSize.y, pyramidScale.x / hdCamera.screenSize.x, pyramidScale.y / hdCamera.screenSize.y);
+ var size = new Vector4(hdCamera.screenSize.x, hdCamera.screenSize.y);
uint x, y, z;
m_applyDistortionCS.GetKernelThreadGroupSizes(m_applyDistortionKernel, out x, out y, out z);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._DistortionTexture, m_DistortionBuffer);
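
For context on the removed distortion lines above: the color pyramid was blitted into the camera viewport of a larger, square power-of-two texture, so the distortion pass had to rescale screen pixel coordinates into pyramid UVs via the pyramid-to-screen scale. Below is a minimal standalone sketch of that coordinate remapping, assuming a 1920x1080 viewport inside a 2048x2048 pyramid (plain C#; illustrative CPU-side math only, not the shader or pipeline code itself, and the class/variable names are hypothetical):

using System;

static class PyramidUvRemapSketch
{
    static void Main()
    {
        // Camera viewport size vs. the square power-of-two pyramid texture it was rendered into (assumed sizes).
        const float screenW = 1920f, screenH = 1080f;
        const float pyramidW = 2048f, pyramidH = 2048f;

        // GetPyramidToScreenScale: fraction of the pyramid texture the viewport actually covers.
        float scaleX = screenW / pyramidW;   // 0.9375
        float scaleY = screenH / pyramidH;   // 0.52734375

        // The removed size vector packed (screenW, screenH, scaleX / screenW, scaleY / screenH);
        // the last two components reduce to 1 / pyramidW and 1 / pyramidH, i.e. a per-pixel UV step.
        float stepU = scaleX / screenW;      // == 1 / 2048
        float stepV = scaleY / screenH;      // == 1 / 2048

        // A pixel at the centre of the viewport lands well below UV 0.5 in the pyramid,
        // because the viewport only occupies a scaleX-by-scaleY portion of the texture.
        float u = 960f * stepU;              // 0.46875
        float v = 540f * stepV;              // ~0.2637
        Console.WriteLine($"pixel (960, 540) -> pyramid UV ({u}, {v})");
    }
}

Once the square pyramid is gone, that remapping is no longer needed, which is presumably why the replacement size vector above carries only the screen dimensions.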

com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramid.cs.meta (11 changes)


fileFormatVersion: 2
guid: ea3e7945ee7dc7a479b9e6846a0c544c
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramid.cs (176 changes)


using System;
using System.Collections.Generic;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
    class BufferPyramid
    {
        List<RTHandleSystem.RTHandle> m_ColorPyramidMips = new List<RTHandleSystem.RTHandle>();
        List<RTHandleSystem.RTHandle> m_DepthPyramidMips = new List<RTHandleSystem.RTHandle>();
        BufferPyramidProcessor m_Processor;
        public BufferPyramid(BufferPyramidProcessor processor)
        {
            m_Processor = processor;
        }
        float GetXRscale()
        {
            // for stereo double-wide, each half of the texture will represent a single eye's pyramid
            float scale = 1.0f;
            //if (m_Asset.renderPipelineSettings.supportsStereo && (desc.dimension != TextureDimension.Tex2DArray))
            //    scale = 2.0f; // double-wide
            return scale;
        }
        public void DestroyBuffers()
        {
            foreach (var rth in m_ColorPyramidMips)
                RTHandles.Release(rth);
            foreach (var rth in m_DepthPyramidMips)
                RTHandles.Release(rth);
        }
        public int GetPyramidLodCount(Vector2Int size)
        {
            var minSize = Mathf.Min(size.x, size.y);
            return Mathf.Max(0, Mathf.FloorToInt(Mathf.Log(minSize, 2f)));
        }
        Vector2Int CalculatePyramidMipSize(Vector2Int baseMipSize, int mipIndex)
        {
            return new Vector2Int(baseMipSize.x >> mipIndex, baseMipSize.y >> mipIndex);
        }
        Vector2Int CalculatePyramidSize(Vector2Int size)
        {
            // Instead of using the screen size, we round up to the next power of 2 because currently some platforms don't support NPOT Render Texture with mip maps (PS4 for example)
            // Then we render in a Screen Sized viewport.
            // Note that even if PS4 supported POT Mips, the buffers would be padded to the next power of 2 anyway (TODO: check with other platforms...)
            int pyramidSize = (int)Mathf.NextPowerOfTwo(Mathf.Max(size.x, size.y));
            return new Vector2Int((int)(pyramidSize * GetXRscale()), pyramidSize);
        }
        void UpdatePyramidMips(HDCamera camera, RenderTextureFormat format, List<RTHandleSystem.RTHandle> mipList, int lodCount)
        {
            int currentLodCount = mipList.Count;
            if (lodCount > currentLodCount)
            {
                for (int i = currentLodCount; i < lodCount; ++i)
                {
                    int mipIndexCopy = i + 1; // Don't remove this copy! It's important for the value to be correctly captured by the lambda.
                    var newMip = RTHandles.Alloc(size => CalculatePyramidMipSize(CalculatePyramidSize(size), mipIndexCopy), colorFormat: format, sRGB: false, enableRandomWrite: true, useMipMap: false, filterMode: FilterMode.Bilinear, name: string.Format("PyramidMip{0}", i));
                    mipList.Add(newMip);
                }
            }
        }
        public Vector2 GetPyramidToScreenScale(HDCamera camera, RTHandleSystem.RTHandle rth)
        {
            return new Vector2((float)camera.actualWidth / rth.rt.width, (float)camera.actualHeight / rth.rt.height);
        }
        public void RenderDepthPyramid(
            HDCamera hdCamera,
            CommandBuffer cmd,
            ScriptableRenderContext renderContext,
            RTHandleSystem.RTHandle sourceDepthTexture,
            RTHandleSystem.RTHandle targetDepthTexture)
        {
            int lodCount = Mathf.Min(
                GetPyramidLodCount(targetDepthTexture.referenceSize),
                GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
            );
            if (lodCount == 0)
            {
                Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping DepthPyramid calculation.");
                return;
            }
            UpdatePyramidMips(hdCamera, targetDepthTexture.rt.format, m_DepthPyramidMips, lodCount);
            Vector2 scale = GetPyramidToScreenScale(hdCamera, targetDepthTexture);
            cmd.SetGlobalVector(HDShaderIDs._DepthPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
            cmd.SetGlobalVector(HDShaderIDs._DepthPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));
            m_Processor.RenderDepthPyramid(
                hdCamera.actualWidth, hdCamera.actualHeight,
                cmd,
                sourceDepthTexture,
                targetDepthTexture,
                m_DepthPyramidMips,
                lodCount,
                scale
            );
            cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, targetDepthTexture);
        }
        public void RenderColorPyramid(
            HDCamera hdCamera,
            CommandBuffer cmd,
            ScriptableRenderContext renderContext,
            RTHandleSystem.RTHandle sourceColorTexture,
            RTHandleSystem.RTHandle targetColorTexture)
        {
            int lodCount = Mathf.Min(
                GetPyramidLodCount(targetColorTexture.referenceSize),
                GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
            );
            if (lodCount == 0)
            {
                Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping ColorPyramid calculation.");
                return;
            }
            UpdatePyramidMips(hdCamera, targetColorTexture.rt.format, m_ColorPyramidMips, lodCount);
            Vector2 scale = GetPyramidToScreenScale(hdCamera, targetColorTexture);
            cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
            cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));
            m_Processor.RenderColorPyramid(
                hdCamera,
                cmd,
                sourceColorTexture,
                targetColorTexture,
                m_ColorPyramidMips,
                lodCount,
                scale
            );
            cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, targetColorTexture);
        }
        public RTHandleSystem.RTHandle AllocColorRT(string id, int frameIndex, RTHandleSystem rtHandleSystem)
        {
            return rtHandleSystem.Alloc(
                size => CalculatePyramidSize(size),
                filterMode: FilterMode.Trilinear,
                colorFormat: RenderTextureFormat.ARGBHalf,
                sRGB: false,
                useMipMap: true,
                autoGenerateMips: false,
                enableRandomWrite: true,
                name: string.Format("ColorPyramid-{0}-{1}", id, frameIndex)
            );
        }
        public RTHandleSystem.RTHandle AllocDepthRT(string id, int frameIndex, RTHandleSystem rtHandleSystem)
        {
            return rtHandleSystem.Alloc(
                size => CalculatePyramidSize(size),
                filterMode: FilterMode.Trilinear,
                colorFormat: RenderTextureFormat.RGFloat,
                sRGB: false,
                useMipMap: true,
                autoGenerateMips: false,
                enableRandomWrite: true, // Need randomReadWrite because we downsample the first mip with a compute shader.
                name: string.Format("DepthPyramid-{0}-{1}", id, frameIndex)
            );
        }
    }
}
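
Since BufferPyramid is deleted here, it may help to record the sizing math it implemented: CalculatePyramidSize rounds the larger screen dimension up to the next power of two to get a square texture, GetPyramidLodCount yields floor(log2(min(width, height))) mips, and the render methods then clamp that against the camera viewport's own LOD count. Below is a minimal standalone sketch of that arithmetic (plain C#, no Unity dependency; the class and helper names are illustrative, not from the deleted file):

using System;

static class PyramidSizingSketch
{
    // Mirrors CalculatePyramidSize: round the larger dimension up to the next power of two (square texture, XR double-wide scaling ignored).
    static int NextPowerOfTwo(int v)
    {
        int p = 1;
        while (p < v) p <<= 1;
        return p;
    }

    // Mirrors GetPyramidLodCount: number of mips below mip 0, i.e. floor(log2(min(w, h))).
    static int PyramidLodCount(int width, int height)
    {
        int minSize = Math.Min(width, height);
        return Math.Max(0, (int)Math.Floor(Math.Log(minSize, 2)));
    }

    static void Main()
    {
        int screenW = 1920, screenH = 1080;
        int pyramidSize = NextPowerOfTwo(Math.Max(screenW, screenH));   // 2048

        // RenderColorPyramid/RenderDepthPyramid take the min of the texture's and the viewport's LOD counts.
        int lodCount = Math.Min(PyramidLodCount(pyramidSize, pyramidSize),
                                PyramidLodCount(screenW, screenH));     // 10, limited by the 1080px viewport

        Console.WriteLine($"pyramid {pyramidSize}x{pyramidSize}, usable LODs = {lodCount}");
    }
}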

com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs.meta (11 changes)


fileFormatVersion: 2
guid: 5555ed542465a3a42aa9f58eccc62c68
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs (236 changes)


using System.Collections.Generic;
using UnityEngine.Assertions;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
    public class BufferPyramidProcessor
    {
        static readonly int _Size = Shader.PropertyToID("_Size");
        static readonly int _Source = Shader.PropertyToID("_Source");
        static readonly int _Result = Shader.PropertyToID("_Result");
        static readonly int _SrcSize = Shader.PropertyToID("_SrcSize");
        const int k_DepthBlockSize = 4;
        GPUCopy m_GPUCopy;
        TexturePadding m_TexturePadding;
        ComputeShader m_ColorPyramidCS;
        int m_ColorPyramidKernel;
        ComputeShader m_DepthPyramidCS;
        int[] m_DepthKernels = null;
        int depthKernel8 { get { return m_DepthKernels[0]; } }
        int depthKernel1 { get { return m_DepthKernels[1]; } }
        List<RenderTexture> m_RenderColorPyramid_CastTmp = new List<RenderTexture>();
        public BufferPyramidProcessor(
            ComputeShader colorPyramidCS,
            ComputeShader depthPyramidCS,
            GPUCopy gpuCopy,
            TexturePadding texturePadding
        )
        {
            m_ColorPyramidCS = colorPyramidCS;
            m_ColorPyramidKernel = m_ColorPyramidCS.FindKernel("KColorGaussian");
            m_DepthPyramidCS = depthPyramidCS;
            m_GPUCopy = gpuCopy;
            m_DepthKernels = new int[]
            {
                //m_DepthPyramidCS.FindKernel("KDepthDownSample8"),
                //m_DepthPyramidCS.FindKernel("KDepthDownSample1")
            };
            m_TexturePadding = texturePadding;
        }
        public void RenderDepthPyramid(
            int width, int height,
            CommandBuffer cmd,
            RTHandleSystem.RTHandle sourceTexture,
            RTHandleSystem.RTHandle targetTexture,
            List<RTHandleSystem.RTHandle> mips,
            int lodCount,
            Vector2 scale
        )
        {
            m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, sourceTexture, targetTexture, new RectInt(0, 0, width, height));
            var src = targetTexture;
            for (var i = 0; i < lodCount; i++)
            {
                var dest = mips[i];
                var srcMip = new RectInt(0, 0, width >> i, height >> i);
                var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
                var kernel = depthKernel1;
                var kernelSize = 1;
                var srcWorkMip = srcMip;
                var dstWorkMip = dstMip;
                if (dstWorkMip.width >= 8 && dstWorkMip.height >= 8)
                {
                    srcWorkMip.width = Mathf.CeilToInt(srcWorkMip.width / 16.0f) * 16;
                    srcWorkMip.height = Mathf.CeilToInt(srcWorkMip.height / 16.0f) * 16;
                    dstWorkMip.width = srcWorkMip.width >> 1;
                    dstWorkMip.height = srcWorkMip.height >> 1;
                    m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);
                    kernel = depthKernel8;
                    kernelSize = 8;
                }
                else
                {
                    m_TexturePadding.Pad(cmd, src, srcMip, new RectInt(0, 0, src.rt.width, src.rt.height));
                }
                cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Source, src);
                cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
                // The compute shader work in texture space
                // So we must provide the texture's size
                cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(
                    src.rt.width, src.rt.height,
                    (1.0f / src.rt.width), (1.0f / src.rt.height))
                );
                cmd.DispatchCompute(
                    m_DepthPyramidCS,
                    kernel,
                    Mathf.CeilToInt(dstWorkMip.width / (float)kernelSize),
                    Mathf.CeilToInt(dstWorkMip.height / (float)kernelSize),
                    1
                );
                var dstMipWidthToCopy = Mathf.Min(Mathf.Min(targetTexture.rt.width >> (i + 1), dstWorkMip.width), mips[i].rt.width);
                var dstMipHeightToCopy = Mathf.Min(Mathf.Min(targetTexture.rt.height >> (i + 1), dstWorkMip.height), mips[i].rt.height);
                // If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
                cmd.CopyTexture(mips[i], 0, 0, 0, 0, dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0);
                src = dest;
            }
        }
        public void RenderColorPyramid(
            HDCamera hdCamera,
            CommandBuffer cmd,
            RTHandleSystem.RTHandle sourceTexture,
            RTHandleSystem.RTHandle targetTexture,
            List<RTHandleSystem.RTHandle> mips,
            int lodCount,
            Vector2 scale
        )
        {
            // Copy mip 0
            // Here we blit a "camera space" texture into a square texture but we want to keep the original viewport.
            // Other BlitCameraTexture version will setup the viewport based on the destination RT scale (square here) so we need override it here.
            HDUtils.BlitCameraTexture(cmd, hdCamera, sourceTexture, targetTexture, new Rect(0.0f, 0.0f, hdCamera.actualWidth, hdCamera.actualHeight));
            m_RenderColorPyramid_CastTmp.Clear();
            for (var i = 0; i < mips.Count; ++i)
                m_RenderColorPyramid_CastTmp.Add(mips[i]);
            RenderColorPyramidMips(
                new RectInt(0, 0, hdCamera.actualWidth, hdCamera.actualHeight),
                cmd,
                targetTexture,
                m_RenderColorPyramid_CastTmp,
                lodCount,
                scale
            );
        }
        public void RenderColorPyramid(
            RectInt srcRect,
            CommandBuffer cmd,
            Texture sourceTexture,
            RenderTexture targetTexture,
            List<RenderTexture> mips,
            int lodCount
        )
        {
            Assert.AreEqual(0, srcRect.x, "Offset are not supported");
            Assert.AreEqual(0, srcRect.y, "Offset are not supported");
            Assert.IsTrue(srcRect.width > 0);
            Assert.IsTrue(srcRect.height > 0);
            var scale = new Vector2(
                sourceTexture.width / (float)srcRect.width,
                sourceTexture.height / (float)srcRect.height
            );
            cmd.Blit(sourceTexture, targetTexture, scale, Vector2.zero);
            RenderColorPyramidMips(
                srcRect,
                cmd,
                targetTexture,
                mips,
                lodCount,
                scale
            );
        }
        void RenderColorPyramidMips(
            RectInt srcRect,
            CommandBuffer cmd,
            RenderTexture targetTexture,
            List<RenderTexture> mips,
            int lodCount,
            Vector2 scale
        )
        {
            Assert.AreEqual(0, srcRect.x, "Offset are not supported");
            Assert.AreEqual(0, srcRect.y, "Offset are not supported");
            Assert.IsTrue(srcRect.width > 0);
            Assert.IsTrue(srcRect.height > 0);
            var src = targetTexture;
            for (var i = 0; i < lodCount; i++)
            {
                var dest = mips[i];
                var srcMip = new RectInt(0, 0, srcRect.width >> i, srcRect.height >> i);
                var srcWorkMip = new RectInt(
                    0,
                    0,
                    Mathf.CeilToInt(srcMip.width / 16.0f) * 16,
                    Mathf.CeilToInt(srcMip.height / 16.0f) * 16
                );
                var dstWorkMip = new RectInt(0, 0, srcWorkMip.width >> 1, srcWorkMip.height >> 1);
                m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);
                // TODO: Add proper stereo support to the compute job
                cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Source, src);
                cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Result, dest);
                // _Size is used as a scale inside the whole render target so here we need to keep the full size (and not the scaled size depending on the current camera)
                cmd.SetComputeVectorParam(
                    m_ColorPyramidCS,
                    _Size,
                    new Vector4(src.width >> 1, src.height >> 1, 1f / (src.width >> 1), 1f / (src.height >> 1))
                );
                cmd.DispatchCompute(
                    m_ColorPyramidCS,
                    m_ColorPyramidKernel,
                    dstWorkMip.width / 8,
                    dstWorkMip.height / 8,
                    1
                );
                var dstMipWidthToCopy = Mathf.Min(Mathf.Min(targetTexture.width >> (i + 1), dstWorkMip.width), mips[i].width);
                var dstMipHeightToCopy = Mathf.Min(Mathf.Min(targetTexture.height >> (i + 1), dstWorkMip.height), mips[i].height);
                // If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
                cmd.CopyTexture(
                    mips[i],
                    0, 0, 0, 0,
                    dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0
                );
                src = dest;
            }
        }
    }
}
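
Since BufferPyramidProcessor is also deleted, the work-rect and dispatch arithmetic inside RenderColorPyramidMips is worth keeping on record: each source mip rect is padded up to a multiple of 16 so that, after halving, the 8x8 Gaussian downsample kernel tiles the destination exactly, and the dispatch issues one thread group per 8x8 destination tile. Below is a minimal standalone sketch of that arithmetic (plain C#; the class and helper names are illustrative, not from the deleted file):

using System;

static class PyramidDispatchSketch
{
    // Matches the Mathf.CeilToInt(x / 16.0f) * 16 rounding used for the padded work rect.
    static int RoundUpToMultipleOf16(int v) => ((v + 15) / 16) * 16;

    static void Main()
    {
        int baseW = 1920, baseH = 1080;   // assumed mip 0 viewport
        for (int i = 0; i < 4; i++)
        {
            int srcW = baseW >> i, srcH = baseH >> i;                                     // source mip rect
            int workW = RoundUpToMultipleOf16(srcW), workH = RoundUpToMultipleOf16(srcH); // padded so the kernel tiles it
            int dstW = workW >> 1, dstH = workH >> 1;                                     // destination mip, half the padded rect
            int groupsX = dstW / 8, groupsY = dstH / 8;                                   // one 8x8 thread group per tile, no remainder
            Console.WriteLine($"mip {i}: src {srcW}x{srcH}, work {workW}x{workH}, dst {dstW}x{dstH}, dispatch {groupsX}x{groupsY}x1");
        }
    }
}

Because the padded width and height are multiples of 16, the halved destination rect is always a multiple of 8, so the integer division in the dispatch never drops pixels; the extra padded texels are trimmed by the clamped CopyTexture back into the target mip.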