
Gaussian/Depth pyramids now allocate buffers with the next power of two size (to account for platforms where NPOT RenderTexture mips are not supported yet)

Branch: main
Julien Ignace, 7 years ago
Current commit: c81b04cf
4 changed files with 93 additions and 63 deletions
1. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (17 changes)
2. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs (39 changes)
3. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl (2 changes)
4. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs (98 changes)

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (17 changes)


using (new ProfilingSample(cmd, "ApplyDistortion", CustomSamplerId.ApplyDistortion.GetSampler()))
{
- var size = new Vector4(hdCamera.screenSize.x, hdCamera.screenSize.y, hdCamera.scaleBias.x / hdCamera.screenSize.x, hdCamera.scaleBias.y / hdCamera.screenSize.y);
+ Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera);
+ var size = new Vector4(hdCamera.screenSize.x, hdCamera.screenSize.y, pyramidScale.x / hdCamera.screenSize.x, pyramidScale.y / hdCamera.screenSize.y);
uint x, y, z;
m_applyDistortionCS.GetKernelThreadGroupSizes(m_applyDistortionKernel, out x, out y, out z);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._DistortionTexture, m_DistortionBuffer);

{
RenderTargetIdentifier source = m_CameraColorBuffer;
#if UNITY_EDITOR
if(tempHACK)
#else
// In theory, in the player the only place where we have post-processing is the main camera, with the RTHandle reference size, so we won't need to copy.
bool tempHACK = false;
#endif
if (tempHACK)
{
// TEMPORARY:
// Since we don't render to the full render textures, we need to feed the post-processing stack with the right scale/bias.

}
else
{
// Note: Here we don't use GetDepthTexture() to get the depth texture but m_CameraDepthStencilBuffer as the Forward transparent pass can
// write extra data to deal with DOF/MB
cmd.SetGlobalTexture(HDShaderIDs._CameraDepthTexture, m_CameraDepthStencilBuffer);
cmd.SetGlobalTexture(HDShaderIDs._CameraMotionVectorsTexture, m_VelocityBuffer);
}
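The "tempHACK" branch above exists because the post-processing stack samples its input as a plain full-sized texture, while the shared RTHandles may hold the camera image only in a sub-viewport. A hedged sketch of the kind of copy this implies, assuming the surrounding HDRenderPipeline members (cmd, hdCamera, m_CameraColorBuffer); the _PostProcessSource id is a hypothetical name, not from the commit:

// Copy just the camera viewport out of the shared RTHandle into a camera-sized
// temporary, so downstream code can sample it with unscaled UVs.
int tempId = Shader.PropertyToID("_PostProcessSource"); // hypothetical name
cmd.GetTemporaryRT(tempId, hdCamera.actualWidth, hdCamera.actualHeight, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf);
cmd.CopyTexture(m_CameraColorBuffer, 0, 0, 0, 0, hdCamera.actualWidth, hdCamera.actualHeight, new RenderTargetIdentifier(tempId), 0, 0, 0, 0);
source = new RenderTargetIdentifier(tempId);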

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs (39 changes)


}
}
+ public static void BlitTexture(CommandBuffer cmd, RTHandle source, RTHandle destination, Vector4 scaleBias, float mipLevel, bool bilinear)
+ {
+ s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
+ s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, scaleBias);
+ s_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, mipLevel);
+ cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), bilinear ? 1 : 0, MeshTopology.Triangles, 3, 1, s_PropertyBlock);
+ }
// In the context of HDRP, the internal render targets used during the render loop are the same for all cameras, no matter the size of the camera.
// It means that we can end up rendering inside a partial viewport for one of these "camera space" renderings.
// In this case, we need to make sure that when we blit from one such camera texture to another, we only blit the necessary portion corresponding to the camera viewport.

// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);
+ BlitTexture(cmd, source, destination, camera.scaleBias, mipLevel, bilinear);
}
- s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
- s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, camera.scaleBias);
- s_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, mipLevel);
- cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), bilinear ? 1 : 0, MeshTopology.Triangles, 3, 1, s_PropertyBlock);
+ // Here, both source and destination are camera-scaled, but we want to override the scale/bias parameter.
+ public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
+ {
+ // Will set the correct camera viewport as well.
+ SetRenderTarget(cmd, camera, destination);
+ BlitTexture(cmd, source, destination, scaleBias, mipLevel, bilinear);
+ }
+ public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
+ {
+ SetRenderTarget(cmd, camera, destination);
+ cmd.SetViewport(destViewport);
+ BlitTexture(cmd, source, destination, camera.scaleBias, mipLevel, bilinear);
+ }
// This particular case is for blitting a camera-scaled texture into a non-scaling texture. So we set up the full viewport (implicit in cmd.Blit) but have to scale the input UVs.

//s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
//s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, camera.scaleBias);
cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), 0, MeshTopology.Triangles, 3, 1);
}
- // Here, both source and destination are camera-scaled, but we want to override the scale/bias parameter.
- public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
- {
- // Will set the correct camera viewport as well.
- SetRenderTarget(cmd, camera, destination);
- s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
- s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, scaleBias);
- s_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, mipLevel);
- cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), bilinear ? 1 : 0, MeshTopology.Triangles, 3, 1, s_PropertyBlock);
- }
// These methods should be used to render full-screen triangles sampling auto-scaling RTs.
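Taken together, these helpers give three ways to blit camera-scaled RTHandles. A hypothetical usage sketch (the buffer fields are illustrative, not from the commit, and the defaulted parameter names are assumed to match the overloads shown above), inside a method with cmd and hdCamera in scope:

// Default: blit only the camera viewport, using the camera's own scale/bias.
HDUtils.BlitCameraTexture(cmd, hdCamera, m_SourceColorBuffer, m_DestColorBuffer, mipLevel: 0.0f, bilinear: true);
// Override the scale/bias, e.g. to sample the full source texture.
HDUtils.BlitCameraTexture(cmd, hdCamera, m_SourceColorBuffer, m_DestColorBuffer, new Vector4(1.0f, 1.0f, 0.0f, 0.0f));
// Force an explicit destination viewport, as the color pyramid does below.
HDUtils.BlitCameraTexture(cmd, hdCamera, m_SourceColorBuffer, m_DestColorBuffer, new Rect(0f, 0f, hdCamera.actualWidth, hdCamera.actualHeight));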

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl (2 changes)


}
// Map the roughness to the correct mip map level of the color pyramid
- lighting.specularTransmitted = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, s_trilinear_clamp_sampler, refractedBackPointNDC * _ScreenToTargetScale.xy, preLightData.transparentSSMipLevel).rgb;
+ lighting.specularTransmitted = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, s_trilinear_clamp_sampler, refractedBackPointNDC * _GaussianPyramidColorMipSize.xy, preLightData.transparentSSMipLevel).rgb;
// Beer-Lambert law for absorption
lighting.specularTransmitted *= preLightData.transparentTransmittance;
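Since the color pyramid is now padded to a power of two, a full-screen NDC coordinate no longer maps 1:1 onto the pyramid; it must be squeezed into the camera's sub-rectangle first, which is why the sample is scaled by _GaussianPyramidColorMipSize.xy (now holding the pyramid-to-screen scale). A worked example in C# with illustrative values for a 1920x1080 camera in a 2048x2048 pyramid:

Vector2 pyramidScale = new Vector2(0.9375f, 0.52734375f); // 1920/2048, 1080/2048
Vector2 ndc = new Vector2(0.5f, 0.5f);                    // screen center
Vector2 pyramidUV = Vector2.Scale(ndc, pyramidScale);     // (0.46875, 0.263671875)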

ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs (98 changes)


m_DepthPyramidKernel_1 = m_DepthPyramidCS.FindKernel("KMain_1");
}
+ float GetXRscale()
+ {
+ // for stereo double-wide, each half of the texture will represent a single eye's pyramid
+ float scale = 1.0f;
+ //if (m_Asset.renderPipelineSettings.supportsStereo && (desc.dimension != TextureDimension.Tex2DArray))
+ // scale = 2.0f; // double-wide
+ return scale;
+ }
+ public void CreateBuffers()
+ {
+ m_ColorPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, useMipMap: true, autoGenerateMips: false);
+ m_DepthPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.RFloat, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true); // Need randomReadWrite because we downsample the first mip with a compute shader.
+ }
+ public void DestroyBuffers()
+ {
+ RTHandle.Release(m_ColorPyramidBuffer);
+ RTHandle.Release(m_DepthPyramidBuffer);
+ foreach (var rth in m_ColorPyramidMips)
+ {
+ RTHandle.Release(rth);
+ }
+ foreach (var rth in m_DepthPyramidMips)
+ {
+ RTHandle.Release(rth);
+ }
+ }
public int GetPyramidLodCount(HDCamera camera)
{
var minSize = Mathf.Min(camera.actualWidth, camera.actualHeight);

Vector2Int CalculatePyramidMipSize(Vector2Int baseMipSize, int mipIndex)
{
- float scale = GetXRscale();
- return new Vector2Int((int)(baseMipSize.x * scale) >> mipIndex, baseMipSize.y >> mipIndex);
+ return new Vector2Int(baseMipSize.x >> mipIndex, baseMipSize.y >> mipIndex);
}
+ Vector2Int CalculatePyramidSize(Vector2Int size)
+ {
+ // Instead of using the screen size, we round up to the next power of two because currently some platforms don't support NPOT render textures with mip maps (PS4, for example).
+ // Then we render in a screen-sized viewport.
+ // Note that even if PS4 supported NPOT mips, the buffers would be padded to the next power of two anyway (TODO: check with other platforms...)
+ int pyramidSize = (int)Mathf.NextPowerOfTwo(Mathf.Max(size.x, size.y));
+ return new Vector2Int((int)(pyramidSize * GetXRscale()), pyramidSize);
+ }
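An illustrative trace of the sizes this produces (not part of the commit, assuming a UnityEngine context): with no XR double-wide, a 1920x1080 camera gets a 2048x2048 pyramid, and CalculatePyramidMipSize then simply halves it per level:

Vector2Int baseSize = new Vector2Int(2048, 2048); // CalculatePyramidSize for 1920x1080
for (int mip = 0; mip <= 4; ++mip)
{
    // 2048x2048, 1024x1024, 512x512, 256x256, 128x128
    Debug.Log($"mip {mip}: {baseSize.x >> mip}x{baseSize.y >> mip}");
}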
void UpdatePyramidMips(HDCamera camera, RenderTextureFormat format, List<RTHandle> mipList, int lodCount)

for (int i = currentLodCount; i < lodCount; ++i)
{
int mipIndexCopy = i + 1; // Don't remove this copy! It's important for the value to be correctly captured by the lambda.
- RTHandle newMip = RTHandle.Alloc(size => CalculatePyramidMipSize(size, mipIndexCopy), colorFormat: format, sRGB: false, enableRandomWrite: true, useMipMap: false, filterMode: FilterMode.Bilinear);
+ RTHandle newMip = RTHandle.Alloc(size => CalculatePyramidMipSize(CalculatePyramidSize(size), mipIndexCopy), colorFormat: format, sRGB: false, enableRandomWrite: true, useMipMap: false, filterMode: FilterMode.Bilinear);
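"Don't remove this copy" is about C# closure semantics: a lambda captures the variable, not its value, and every iteration of a for loop shares the same loop variable. A standalone sketch (not from the commit) of the bug the copy avoids:

using System;
using System.Collections.Generic;

class CaptureSketch
{
    static void Main()
    {
        var badFuncs = new List<Func<int>>();
        var goodFuncs = new List<Func<int>>();
        for (int i = 0; i < 3; ++i)
        {
            badFuncs.Add(() => i + 1); // captures the shared 'i'; every call returns 4 after the loop
            int mipIndexCopy = i + 1;  // fresh local per iteration, as in UpdatePyramidMips
            goodFuncs.Add(() => mipIndexCopy); // each lambda captures its own copy
        }
        foreach (var f in badFuncs) Console.Write(f() + " ");  // 4 4 4
        foreach (var f in goodFuncs) Console.Write(f() + " "); // 1 2 3
    }
}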
+ public Vector2 GetPyramidToScreenScale(HDCamera camera)
+ {
+ return new Vector2((float)camera.actualWidth / m_DepthPyramidBuffer.rt.width, (float)camera.actualHeight / m_DepthPyramidBuffer.rt.height);
+ }
public void RenderDepthPyramid(
HDCamera hdCamera,
CommandBuffer cmd,

cmd.SetGlobalVector(HDShaderIDs._DepthPyramidMipSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, lodCount, 0.0f));
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, depthTexture, m_DepthPyramidBuffer, new Vector2(hdCamera.actualWidth, hdCamera.actualHeight));
+ Vector2 scale = GetPyramidToScreenScale(hdCamera);
RTHandle src = m_DepthPyramidBuffer;
for (var i = 0; i < lodCount; i++)

cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Source, src);
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
- cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(srcMipWidth, srcMipHeight, hdCamera.scaleBias.x / srcMipWidth, hdCamera.scaleBias.y / srcMipHeight));
+ cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(srcMipWidth, srcMipHeight, (1.0f / srcMipWidth) * scale.x, (1.0f / srcMipHeight) * scale.y));
cmd.DispatchCompute(
m_DepthPyramidCS,

1);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
- cmd.CopyTexture(m_DepthPyramidMips[i], 0, 0, m_DepthPyramidBuffer, 0, i + 1);
+ cmd.CopyTexture(m_DepthPyramidMips[i], 0, 0, 0, 0, dstMipWidth, dstMipHeight, m_DepthPyramidBuffer, 0, i + 1, 0, 0);
src = dest;
}

int lodCount = GetPyramidLodCount(hdCamera);
UpdatePyramidMips(hdCamera, m_ColorPyramidBuffer.rt.format, m_ColorPyramidMips, lodCount);
- cmd.SetGlobalVector(HDShaderIDs._GaussianPyramidColorMipSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, lodCount, 0.0f));
+ Vector2 scale = GetPyramidToScreenScale(hdCamera);
+ cmd.SetGlobalVector(HDShaderIDs._GaussianPyramidColorMipSize, new Vector4(scale.x, scale.y, lodCount, 0.0f));
- HDUtils.BlitCameraTexture(cmd, hdCamera, colorTexture, m_ColorPyramidBuffer); // true : bilinear
+ // Here we blit a "camera space" texture into a square texture, but we want to keep the original viewport.
+ // Other BlitCameraTexture versions will set up the viewport based on the destination RT scale (square here), so we need to override it here.
+ HDUtils.BlitCameraTexture(cmd, hdCamera, colorTexture, m_ColorPyramidBuffer, new Rect(0.0f, 0.0f, hdCamera.actualWidth, hdCamera.actualHeight));
RTHandle src = m_ColorPyramidBuffer;
for (var i = 0; i < lodCount; i++)

Mathf.CeilToInt(dstMipHeight / 8f),
1);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
- cmd.CopyTexture(m_ColorPyramidMips[i], 0, 0, m_ColorPyramidBuffer, 0, i + 1);
+ cmd.CopyTexture(m_ColorPyramidMips[i], 0, 0, 0, 0, dstMipWidth, dstMipHeight, m_ColorPyramidBuffer, 0, i + 1, 0, 0);
}
- float GetXRscale()
- {
- float scale = 1.0f;
- //if (m_Asset.renderPipelineSettings.supportsStereo && (desc.dimension != TextureDimension.Tex2DArray))
- // scale = 2.0f; // double-wide
- return scale;
- }
- public void CreateBuffers()
- {
- Vector2 sizeScale = Vector2.one;
- sizeScale.x *= GetXRscale();
- m_ColorPyramidBuffer = RTHandle.Alloc(sizeScale, filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: true, useMipMap: true, autoGenerateMips: false);
- m_DepthPyramidBuffer = RTHandle.Alloc(sizeScale, filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.RFloat, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true); // Need randomReadWrite because we downsample the first mip with a compute shader.
- }
- public void DestroyBuffers()
- {
- RTHandle.Release(m_ColorPyramidBuffer);
- RTHandle.Release(m_DepthPyramidBuffer);
- foreach (var rth in m_ColorPyramidMips)
- {
- RTHandle.Release(rth);
- }
- foreach (var rth in m_DepthPyramidMips)
- {
- RTHandle.Release(rth);
- }
- }
}
}