
Merge branch 'master' into gpu-terrain

GitHub, 6 years ago
Current commit dbde5edb
60 files changed, with 6234 insertions and 4442 deletions
  1. 998
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1101_Unlit.png
  2. 6
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1101_Unlit.png.meta
  3. 999
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1102_Unlit_Distortion.png
  4. 17
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1102_Unlit_Distortion.png.meta
  5. 998
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1103_Unlit_Distortion_DepthTest.png
  6. 6
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1103_Unlit_Distortion_DepthTest.png.meta
  7. 999
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1205_Lit_Transparent_Refraction.png
  8. 6
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1205_Lit_Transparent_Refraction.png.meta
  9. 998
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1206_Lit_Transparent_Distortion.png
  10. 6
      TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1206_Lit_Transparent_Distortion.png.meta
  11. 3
      TestProjects/LWGraphicsTest/ProjectSettings/EditorBuildSettings.asset
  12. 6
      com.unity.render-pipelines.core/CoreRP/Textures/DepthBits.cs
  13. 3
      com.unity.render-pipelines.high-definition/CHANGELOG.md
  14. 4
      com.unity.render-pipelines.high-definition/HDRP/Camera/HDCameraFrameHistoryType.cs
  15. 60
      com.unity.render-pipelines.high-definition/HDRP/Editor/Material/Decal/DecalProjectorComponentEditor.cs
  16. 1
      com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/PlanarReflectionProbeCache.cs
  17. 7
      com.unity.render-pipelines.high-definition/HDRP/Material/Decal/DecalProjectorComponent.cs
  18. 83
      com.unity.render-pipelines.high-definition/HDRP/Material/GGXConvolution/RuntimeFilterIBL.cs
  19. 2
      com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDCustomSamplerId.cs
  20. 114
      com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs
  21. 5
      com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDStringConstants.cs
  22. 9
      com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/ApplyDistorsion.compute
  23. 58
      com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/ColorPyramid.compute
  24. 62
      com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/DepthPyramid.compute
  25. 20
      com.unity.render-pipelines.lightweight/CHANGELOG.md
  26. 18
      com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineLightEditor.cs
  27. 42
      com.unity.render-pipelines.lightweight/LWRP/LightweightPipeline.cs
  28. 76
      com.unity.render-pipelines.lightweight/LWRP/Passes/SetupLightweightConstanstPass.cs
  29. 3
      com.unity.render-pipelines.lightweight/LWRP/ShaderLibrary/Input.hlsl
  30. 43
      com.unity.render-pipelines.lightweight/LWRP/ShaderLibrary/Lighting.hlsl
  31. 999
      com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/OSXEditor/Metal/053_UnlitShader.png
  32. 999
      com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/WindowsEditor/Direct3D11/053_UnlitShader.png
  33. 173
      com.unity.render-pipelines.high-definition/HDRP/Editor/Material/Decal/DecalProjectorComponentHandle.cs
  34. 11
      com.unity.render-pipelines.high-definition/HDRP/Editor/Material/Decal/DecalProjectorComponentHandle.cs.meta
  35. 136
      com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/MipGenerator.cs
  36. 11
      com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/MipGenerator.cs.meta
  37. 163
      com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineCameraEditor.cs
  38. 22
      com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/OSXEditor/Metal/054_Lighting_Attenuation.png
  39. 88
      com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/OSXEditor/Metal/054_Lighting_Attenuation.png.meta
  40. 17
      com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/WindowsEditor/Direct3D11/054_Lighting_Attenuation.png
  41. 88
      com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/WindowsEditor/Direct3D11/054_Lighting_Attenuation.png.meta
  42. 8
      com.unity.testing.srp.lightweight/Tests/Scenes/052_Lighting_Attenuation.meta
  43. 8
      com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation.meta
  44. 1001
      com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation.unity
  45. 7
      com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation.unity.meta
  46. 8
      com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/LightingData.asset.meta
  47. 88
      com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/Lightmap-0_comp_dir.png.meta
  48. 88
      com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/Lightmap-0_comp_light.exr.meta
  49. 7
      com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/LightingData.asset
  50. 87
      com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/Lightmap-0_comp_dir.png
  51. 408
      com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/Lightmap-0_comp_light.exr
  52. 11
      com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs.meta
  53. 236
      com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs
  54. 11
      com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramid.cs.meta
  55. 176
      com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramid.cs
  56. 173
      com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightCameraEditor.cs
  57. 0
      /com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineLightEditor.cs.meta
  58. 0
      /com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineLightEditor.cs
  59. 0
      /com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineCameraEditor.cs.meta

998
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1101_Unlit.png
File diff is too large to display.

6
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1101_Unlit.png.meta


fileFormatVersion: 2
guid: a3f1cb86a1e031a44bd3380ab40642cf
guid: c14640e6cd052a54dbedd03d1de4b094
serializedVersion: 6
serializedVersion: 7
mipmaps:
mipMapMode: 0
enableMipMap: 0

edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

999
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1102_Unlit_Distortion.png
File diff is too large to display.

17
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1102_Unlit_Distortion.png.meta


fileFormatVersion: 2
guid: 74ce626098389e144ae57a1c9e1ad613
guid: 63d4d615f7b52154984007cacaf2f082
serializedVersion: 6
serializedVersion: 7
mipmaps:
mipMapMode: 0
enableMipMap: 0

allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []

edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

998
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1103_Unlit_Distortion_DepthTest.png
File diff is too large to display.

6
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1103_Unlit_Distortion_DepthTest.png.meta


fileFormatVersion: 2
guid: 5a59cd7fd9c84cb4ca0f998edbb72442
guid: 1e3e18c578889334ba442b1bdab350d1
serializedVersion: 6
serializedVersion: 7
mipmaps:
mipMapMode: 0
enableMipMap: 0

edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

999
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1205_Lit_Transparent_Refraction.png
File diff is too large to display.

6
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1205_Lit_Transparent_Refraction.png.meta


fileFormatVersion: 2
guid: 5812afd0981a5ff4392f8a3ad2fa1080
guid: 5909ed2ce8074034fa8a856ce381404f
serializedVersion: 6
serializedVersion: 7
mipmaps:
mipMapMode: 0
enableMipMap: 0

edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

998
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1206_Lit_Transparent_Distortion.png
File diff is too large to display.

6
TestProjects/HDRP_Tests/Assets/ReferenceImages/Linear/WindowsEditor/Direct3D11/1206_Lit_Transparent_Distortion.png.meta


fileFormatVersion: 2
guid: b2dd9eeef86426449b5f1a674ab0a5cf
guid: e520e0ec1f31e394fbb8f6b074f6731a
serializedVersion: 6
serializedVersion: 7
mipmaps:
mipMapMode: 0
enableMipMap: 0

edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

3
TestProjects/LWGraphicsTest/ProjectSettings/EditorBuildSettings.asset


- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/053_UnlitShader.unity
guid: a28e1d48e6e3c0e42a4050ab4e770bf8
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation.unity
guid: 93a99004f07ca6f4dbbc9ccb319c7698
m_configObjects: {}

6
com.unity.render-pipelines.core/CoreRP/Textures/DepthBits.cs


using System.Collections.Generic;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering
{
public enum DepthBits

Depth16 = 16,
Depth24 = 24
Depth24 = 24,
Depth32 = 32
}
}

3
com.unity.render-pipelines.high-definition/CHANGELOG.md


### Fixed
- Fixed an issue where sometimes the deferred shadow texture would not be valid, causing wrong rendering.
### Changed
- Changed the way depth & color pyramids are built to be faster and better quality, thus improving the look of distortion and refraction.
## [3.3.0-preview]
### Added

4
com.unity.render-pipelines.high-definition/HDRP/Camera/HDCameraFrameHistoryType.cs


using UnityEngine.Serialization;
DepthPyramid,
ColorPyramid,
VolumetricLighting,
Count
}

60
com.unity.render-pipelines.high-definition/HDRP/Editor/Material/Decal/DecalProjectorComponentEditor.cs


private SerializedProperty m_UVScaleProperty;
private SerializedProperty m_UVBiasProperty;
private SerializedProperty m_AffectsTransparencyProperty;
private SerializedProperty m_Center;
private BoxBoundsHandle m_Handle = new BoxBoundsHandle();
private SerializedProperty m_IsCropModeEnabledProperty;
private DecalProjectorComponentHandle m_Handle = new DecalProjectorComponentHandle();
private void OnEnable()
{

m_UVScaleProperty = serializedObject.FindProperty("m_UVScale");
m_UVBiasProperty = serializedObject.FindProperty("m_UVBias");
m_AffectsTransparencyProperty = serializedObject.FindProperty("m_AffectsTransparency");
m_Center = serializedObject.FindProperty("m_Offset");
m_IsCropModeEnabledProperty = serializedObject.FindProperty("m_IsCropModeEnabled");
}
private void OnDisable()

}
void OnSceneGUI()
{
{
Handles.matrix = m_DecalProjectorComponent.transform.localToWorldMatrix;
Handles.matrix = m_DecalProjectorComponent.transform.localToWorldMatrix;
Vector3 boundsSizePreviousOS = m_Handle.size;
Vector3 boundsMinPreviousOS = m_Handle.size * -0.5f + m_Handle.center;
// adjust decal transform if handle changed
// Adjust decal transform if handle changed.
Undo.RecordObject(m_DecalProjectorComponent, "Decal Projector Change");
m_DecalProjectorComponent.m_Size = m_Handle.size;
m_DecalProjectorComponent.m_Size = m_Handle.size;
EditorUtility.SetDirty(m_DecalProjectorComponent);
Vector3 boundsSizeCurrentOS = m_Handle.size;
Vector3 boundsMinCurrentOS = m_Handle.size * -0.5f + m_Handle.center;
if (m_DecalProjectorComponent.m_IsCropModeEnabled)
{
// Treat decal projector bounds as a crop tool, rather than a scale tool.
// Compute a new uv scale and bias terms to pin decal projection pixels in world space, irrespective of projector bounds.
m_DecalProjectorComponent.m_UVScale.x *= Mathf.Max(1e-5f, boundsSizeCurrentOS.x) / Mathf.Max(1e-5f, boundsSizePreviousOS.x);
m_DecalProjectorComponent.m_UVScale.y *= Mathf.Max(1e-5f, boundsSizeCurrentOS.z) / Mathf.Max(1e-5f, boundsSizePreviousOS.z);
m_DecalProjectorComponent.m_UVBias.x += (boundsMinCurrentOS.x - boundsMinPreviousOS.x) / Mathf.Max(1e-5f, boundsSizeCurrentOS.x) * m_DecalProjectorComponent.m_UVScale.x;
m_DecalProjectorComponent.m_UVBias.y += (boundsMinCurrentOS.z - boundsMinPreviousOS.z) / Mathf.Max(1e-5f, boundsSizeCurrentOS.z) * m_DecalProjectorComponent.m_UVScale.y;
}
// Automatically recenter our transform component if necessary.
// In order to correctly handle world-space snapping, we only perform this recentering when the user is no longer interacting with the gizmo.
if ((GUIUtility.hotControl == 0) && (m_DecalProjectorComponent.m_Offset != Vector3.zero))
{
// Both the DecalProjectorComponent, and the transform will be modified.
// The undo system will automatically group all RecordObject() calls here into a single action.
Undo.RecordObject(m_DecalProjectorComponent.transform, "Decal Projector Change");
// Re-center the transform to the center of the decal projector bounds,
// while maintaining the world-space coordinates of the decal projector boundings vertices.
m_DecalProjectorComponent.transform.Translate(
Vector3.Scale(m_DecalProjectorComponent.m_Offset, m_DecalProjectorComponent.transform.localScale),
Space.Self
);
m_DecalProjectorComponent.m_Offset = Vector3.zero;
}
Handles.matrix = mat;
Handles.color = col;
}

EditorGUI.BeginChangeCheck();
EditorGUILayout.PropertyField(m_Center);
EditorGUILayout.PropertyField(m_IsCropModeEnabledProperty, new GUIContent("Crop Decal with Gizmo"));
EditorGUILayout.PropertyField(m_Size);
EditorGUILayout.PropertyField(m_MaterialProperty);
EditorGUILayout.PropertyField(m_DrawDistanceProperty);

EditorGUILayout.PropertyField(m_AffectsTransparencyProperty);
if (EditorGUI.EndChangeCheck())
{
serializedObject.ApplyModifiedProperties();
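
The crop-mode block above recomputes the decal's UV scale and bias so that the projected texels stay pinned in world space while the projector bounds are resized. A minimal standalone sketch of that per-axis remapping, with a worked example in the comments (the DecalCropMath helper is illustrative only and not part of this commit):

using UnityEngine;

static class DecalCropMath
{
    // Remaps one UV axis when the projector bounds on that axis change from
    // [minPrev, minPrev + sizePrev] to [minCur, minCur + sizeCur] in object space,
    // mirroring the uvScale/uvBias update performed in OnSceneGUI above.
    public static void RemapAxis(ref float uvScale, ref float uvBias,
                                 float sizePrev, float minPrev,
                                 float sizeCur, float minCur)
    {
        uvScale *= Mathf.Max(1e-5f, sizeCur) / Mathf.Max(1e-5f, sizePrev);
        uvBias += (minCur - minPrev) / Mathf.Max(1e-5f, sizeCur) * uvScale;
    }
}

// Worked example: a projector 2 units wide (min edge at -1, uvScale = 1, uvBias = 0).
// Dragging the -X face inward to a width of 1 (min edge now at 0) yields
// uvScale = 0.5 and uvBias = 0.5, so the cropped box keeps showing the right
// half of the decal texture at the same world-space position.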

1
com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/PlanarReflectionProbeCache.cs


m_ConvolutionTargetTexture.useMipMap = true;
m_ConvolutionTargetTexture.autoGenerateMips = false;
m_ConvolutionTargetTexture.name = CoreUtils.GetRenderTargetAutoName(m_ProbeSize, m_ProbeSize, 1, RenderTextureFormat.ARGBHalf, "PlanarReflectionConvolution", mips: true);
m_ConvolutionTargetTexture.enableRandomWrite = true;
m_ConvolutionTargetTexture.Create();
InitializeProbeBakingStates();

7
com.unity.render-pipelines.high-definition/HDRP/Material/Decal/DecalProjectorComponent.cs


public Vector3 m_Size = new Vector3(1, 1, 1);
private Material m_OldMaterial = null;
private DecalSystem.DecalHandle m_Handle = null;
public bool m_IsCropModeEnabled = false;
public DecalSystem.DecalHandle Handle
{

}
if (m_Handle != null)
{
{
DecalSystem.instance.RemoveDecal(m_Handle);
m_Handle = null;
}

public void OnDisable()
{
if (m_Handle != null)
{
{
DecalSystem.instance.RemoveDecal(m_Handle);
m_Handle = null;
}

public event OnMaterialChangeDelegate OnMaterialChange;
public void OnValidate()
{
{
if (m_Handle != null) // don't do anything if OnEnable hasn't been called yet when scene is loading.
{
Vector4 uvScaleBias = new Vector4(m_UVScale.x, m_UVScale.y, m_UVBias.x, m_UVBias.y);

83
com.unity.render-pipelines.high-definition/HDRP/Material/GGXConvolution/RuntimeFilterIBL.cs


using UnityEngine.Rendering;
using System;
using System.Collections.Generic;
namespace UnityEngine.Experimental.Rendering.HDPipeline

Matrix4x4[] m_faceWorldToViewMatrixMatrices = new Matrix4x4[6];
BufferPyramidProcessor m_BufferPyramidProcessor;
List<RenderTexture> m_PlanarColorMips = new List<RenderTexture>();
MipGenerator m_MipGenerator;
public IBLFilterGGX(RenderPipelineResources renderPipelineResources, BufferPyramidProcessor processor)
public IBLFilterGGX(RenderPipelineResources renderPipelineResources, MipGenerator mipGenerator)
m_BufferPyramidProcessor = processor;
m_MipGenerator = mipGenerator;
}
public bool IsInitialized()

{
CoreUtils.Destroy(m_GgxConvolveMaterial);
CoreUtils.Destroy(m_GgxIblSampleData);
for (var i = 0; i < m_PlanarColorMips.Count; ++i)
m_PlanarColorMips[i].Release();
m_PlanarColorMips.Clear();
}
void FilterCubemapCommon(CommandBuffer cmd,

FilterCubemapCommon(cmd, source, target, m_faceWorldToViewMatrixMatrices);
}
public void FilterPlanarTexture(CommandBuffer cmd, Texture source, RenderTexture target)
public void FilterPlanarTexture(CommandBuffer cmd, RenderTexture source, RenderTexture target)
var lodCount = Mathf.Max(Mathf.FloorToInt(Mathf.Log(Mathf.Min(source.width, source.height), 2f)), 0);
for (var i = 0; i < lodCount - 0; ++i)
{
var width = target.width >> (i + 1);
var height = target.height >> (i + 1);
var rtHash = HashRenderTextureProperties(
width,
height,
target.depth,
target.format,
target.sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear
);
var lodIsMissing = i >= m_PlanarColorMips.Count;
RenderTexture rt = null;
var createRT = lodIsMissing
|| (rt = m_PlanarColorMips[i]) == null
|| rtHash != HashRenderTextureProperties(
rt.width, rt.height, rt.depth, rt.format, rt.sRGB
? RenderTextureReadWrite.sRGB
: RenderTextureReadWrite.Linear
);
if (createRT && rt)
rt.Release();
if (createRT)
{
rt = new RenderTexture(
width,
height,
target.depth,
target.format,
target.sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear
);
rt.enableRandomWrite = true;
rt.name = "Planar Convolution Tmp RT";
rt.hideFlags = HideFlags.HideAndDontSave;
rt.Create();
}
if (lodIsMissing)
m_PlanarColorMips.Add(rt);
else if (createRT)
m_PlanarColorMips[i] = rt;
}
m_BufferPyramidProcessor.RenderColorPyramid(
new RectInt(0, 0, source.width, source.height),
cmd,
source,
target,
m_PlanarColorMips,
lodCount
);
m_MipGenerator.RenderColorGaussianPyramid(cmd, new Vector2Int(source.width, source.height), source, target);
}
// Filters MIP map levels (other than 0) with GGX using multiple importance sampling.

m_GgxConvolveMaterial.SetTexture("_MarginalRowDensities", marginalRowCdf);
FilterCubemapCommon(cmd, source, target, m_faceWorldToViewMatrixMatrices);
}
int HashRenderTextureProperties(
int width,
int height,
int depth,
RenderTextureFormat format,
RenderTextureReadWrite sRGB)
{
return width.GetHashCode()
^ height.GetHashCode()
^ depth.GetHashCode()
^ format.GetHashCode()
^ sRGB.GetHashCode();
}
}
}

2
com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDCustomSamplerId.cs


{
PushGlobalParameters,
CopySetDepthBuffer,
CopyDepthStencilbuffer,
CopyDepthBuffer,
HTileForSSS,
Forward,
RenderSSAO,

114
com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs


Material m_CopyStencilForNoLighting;
Material m_CopyDepth;
GPUCopy m_GPUCopy;
BufferPyramid m_BufferPyramid;
MipGenerator m_MipGenerator;
IBLFilterGGX m_IBLFilterGGX = null;

// 'm_CameraColorBuffer' does not contain diffuse lighting of SSS materials until the SSS pass. It is stored within 'm_CameraSssDiffuseLightingBuffer'.
RTHandleSystem.RTHandle m_CameraColorBuffer;
RTHandleSystem.RTHandle m_CameraColorBufferMipChain;
RTHandleSystem.RTHandle m_CameraDepthBufferCopy;
RTHandleSystem.RTHandle m_CameraDepthBufferMipChain;
RTHandleSystem.RTHandle m_CameraStencilBufferCopy;
RTHandleSystem.RTHandle m_VelocityBuffer;

m_GPUCopy = new GPUCopy(asset.renderPipelineResources.copyChannelCS);
var bufferPyramidProcessor = new BufferPyramidProcessor(
asset.renderPipelineResources.colorPyramidCS,
asset.renderPipelineResources.depthPyramidCS,
m_GPUCopy,
new TexturePadding(asset.renderPipelineResources.texturePaddingCS)
);
m_BufferPyramid = new BufferPyramid(bufferPyramidProcessor);
m_MipGenerator = new MipGenerator(m_Asset);
EncodeBC6H.DefaultInstance = EncodeBC6H.DefaultInstance ?? new EncodeBC6H(asset.renderPipelineResources.encodeBC6HCS);

m_MaterialList.ForEach(material => material.Build(asset));
m_IBLFilterGGX = new IBLFilterGGX(asset.renderPipelineResources, bufferPyramidProcessor);
m_IBLFilterGGX = new IBLFilterGGX(asset.renderPipelineResources, m_MipGenerator);
m_LightLoop.Build(asset, m_ShadowSettings, m_IBLFilterGGX);

m_SSSBufferManager.InitSSSBuffers(m_GbufferManager, m_Asset.renderPipelineSettings);
m_NormalBufferManager.InitNormalBuffers(m_GbufferManager, m_Asset.renderPipelineSettings);
m_CameraColorBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "CameraColor");
m_CameraColorBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, enableRandomWrite: true, enableMSAA: true, useMipMap: false, name: "CameraColor");
m_CameraColorBufferMipChain = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, enableRandomWrite: true, enableMSAA: false, useMipMap: true, autoGenerateMips: false, name: "CameraColorBufferMipChain");
m_CameraDepthStencilBuffer = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencil");
m_CameraDepthStencilBuffer = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth32, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencil");
m_CameraDepthBufferCopy = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencilCopy");
m_CameraDepthBufferMipChain = RTHandles.Alloc(Vector2.one, colorFormat: RenderTextureFormat.RFloat, filterMode: FilterMode.Point, sRGB: false, bindTextureMS: true, enableMSAA: true, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true, name: "CameraDepthBufferMipChain");
}
// Technically we won't need this buffer in some cases, but nothing that we can determine at init time.

{
m_GbufferManager.DestroyBuffers();
m_DbufferManager.DestroyBuffers();
m_BufferPyramid.DestroyBuffers();
m_MipGenerator.Release();
RTHandles.Release(m_CameraColorBufferMipChain);
RTHandles.Release(m_CameraDepthBufferCopy);
RTHandles.Release(m_CameraDepthBufferMipChain);
RTHandles.Release(m_CameraStencilBufferCopy);
RTHandles.Release(m_AmbientOcclusionBuffer);

PushGlobalRTHandle(
cmd,
hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.DepthPyramid),
m_CameraDepthBufferMipChain,
HDShaderIDs._DepthPyramidTexture,
HDShaderIDs._DepthPyramidSize,
HDShaderIDs._DepthPyramidScale

hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorPyramid),
m_CameraColorBufferMipChain,
HDShaderIDs._ColorPyramidTexture,
HDShaderIDs._ColorPyramidSize,
HDShaderIDs._ColorPyramidScale

bool NeedDepthBufferCopy()
{
// For now we consider all console to be able to read from a bound depth buffer.
return !IsConsolePlatform();
// We always need it for SSR since depth textures do not support MIP maps.
return !IsConsolePlatform()
|| m_Asset.GetFrameSettings().enableRoughRefraction
|| m_Asset.GetFrameSettings().enableSSR;
}
bool NeedStencilBufferCopy()

RTHandleSystem.RTHandle GetDepthTexture()
{
return NeedDepthBufferCopy() ? m_CameraDepthBufferCopy : m_CameraDepthStencilBuffer;
return NeedDepthBufferCopy() ? m_CameraDepthBufferMipChain : m_CameraDepthStencilBuffer;
}
void CopyDepthBufferIfNeeded(CommandBuffer cmd)

if (NeedDepthBufferCopy())
{
using (new ProfilingSample(cmd, "Copy depth-stencil buffer", CustomSamplerId.CopyDepthStencilbuffer.GetSampler()))
using (new ProfilingSample(cmd, "Copy depth buffer", CustomSamplerId.CopyDepthBuffer.GetSampler()))
cmd.CopyTexture(m_CameraDepthStencilBuffer, m_CameraDepthBufferCopy);
// TODO: maybe we don't actually need the top MIP level?
// That way we could avoid making the copy, and build the MIP hierarchy directly.
// The downside is that our SSR tracing accuracy would decrease a little bit.
// But since we never render SSR at full resolution, this may be acceptable.
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, m_CameraDepthStencilBuffer, m_CameraDepthBufferMipChain, new RectInt(0, 0, m_CurrentWidth, m_CurrentHeight));
}
}
}

// Depth texture is now ready, bind it (Depth buffer could have been bind before if DBuffer is enable)
cmd.SetGlobalTexture(HDShaderIDs._CameraDepthTexture, GetDepthTexture());
RenderDepthPyramid(hdCamera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
RenderDepthPyramid(hdCamera, cmd, FullScreenDebugMode.DepthPyramid);
// TODO: In the future we will render object velocity at the same time as depth prepass (we need C++ modification for this)
// Once the C++ change is here we will first render all object without motion vector then motion vector object

// Render pre refraction objects
RenderForward(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.PreRefraction);
RenderColorPyramid(hdCamera, cmd, renderContext, true);
RenderColorPyramid(hdCamera, cmd, true);
// Render all type of transparent forward (unlit, lit, complex (hair...)) to keep the sorting between transparent objects.
RenderForward(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.Transparent);

// Fill depth buffer to reduce artifact for transparent object during postprocess
RenderTransparentDepthPostpass(m_CullResults, hdCamera, renderContext, cmd);
RenderColorPyramid(hdCamera, cmd, renderContext, false);
RenderColorPyramid(hdCamera, cmd, false);
RenderDistortion(hdCamera, cmd, m_Asset.renderPipelineResources);
RenderDistortion(hdCamera, cmd);
StopStereoRendering(renderContext, hdCamera);

}
}
void RenderDistortion(HDCamera hdCamera, CommandBuffer cmd, RenderPipelineResources resources)
void RenderDistortion(HDCamera hdCamera, CommandBuffer cmd)
{
if (!hdCamera.frameSettings.enableDistortion)
return;

var colorPyramidRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorPyramid);
var pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, colorPyramidRT);
// Need to account for the fact that the gaussian pyramid is actually rendered inside the camera viewport in a square texture so we mutiply by the PyramidToScreen scale
var size = new Vector4(hdCamera.screenSize.x, hdCamera.screenSize.y, pyramidScale.x / hdCamera.screenSize.x, pyramidScale.y / hdCamera.screenSize.y);
var size = new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._ColorPyramidTexture, colorPyramidRT);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._CameraColorTexture, m_CameraColorBuffer);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._ColorPyramidTexture, m_CameraColorBufferMipChain);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._Destination, m_CameraColorBuffer);
cmd.SetComputeVectorParam(m_applyDistortionCS, HDShaderIDs._Size, size);
cmd.DispatchCompute(m_applyDistortionCS, m_applyDistortionKernel, Mathf.CeilToInt(size.x / x), Mathf.CeilToInt(size.y / y), 1);

}
}
void RenderColorPyramid(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, bool isPreRefraction)
void RenderColorPyramid(HDCamera hdCamera, CommandBuffer cmd, bool isPreRefraction)
{
if (isPreRefraction)
{

return;
}
// TODO: Move allocation in separate method call in start of the render loop
var cameraRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorPyramid)
?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.ColorPyramid, m_BufferPyramid.AllocColorRT);
int lodCount;
using (new ProfilingSample(cmd, "Color Pyramid", CustomSamplerId.ColorPyramid.GetSampler()))
m_BufferPyramid.RenderColorPyramid(hdCamera, cmd, renderContext, m_CameraColorBuffer, cameraRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
PushFullScreenDebugTextureMip(hdCamera, cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
using (new ProfilingSample(cmd, "Color Gaussian MIP Chain", CustomSamplerId.ColorPyramid))
{
var size = new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight);
lodCount = m_MipGenerator.RenderColorGaussianPyramid(cmd, size, m_CameraColorBuffer, m_CameraColorBufferMipChain);
}
float scaleX = hdCamera.actualWidth / (float)m_CameraColorBufferMipChain.rt.width;
float scaleY = hdCamera.actualHeight / (float)m_CameraColorBufferMipChain.rt.height;
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, m_CameraColorBufferMipChain);
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(scaleX, scaleY, lodCount, 0.0f));
PushFullScreenDebugTextureMip(hdCamera, cmd, m_CameraColorBufferMipChain, lodCount, new Vector4(scaleX, scaleY, 0f, 0f), isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
void RenderDepthPyramid(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, FullScreenDebugMode debugMode)
void RenderDepthPyramid(HDCamera hdCamera, CommandBuffer cmd, FullScreenDebugMode debugMode)
if (!hdCamera.frameSettings.enableRoughRefraction)
if (!hdCamera.frameSettings.enableRoughRefraction && !hdCamera.frameSettings.enableSSR)
// TODO: Move allocation in separate method call in start of the render loop
var cameraRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.DepthPyramid)
?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.DepthPyramid, m_BufferPyramid.AllocDepthRT);
int lodCount;
using (new ProfilingSample(cmd, "Depth Pyramid", CustomSamplerId.DepthPyramid.GetSampler()))
m_BufferPyramid.RenderDepthPyramid(hdCamera, cmd, renderContext, GetDepthTexture(), cameraRT);
using (new ProfilingSample(cmd, "Depth Buffer MIP Chain", CustomSamplerId.DepthPyramid))
{
var size = new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight);
lodCount = m_MipGenerator.RenderMinDepthPyramid(cmd, size, m_CameraDepthBufferMipChain);
}
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
PushFullScreenDebugTextureMip(hdCamera, cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), debugMode);
float scaleX = hdCamera.actualWidth / (float)m_CameraDepthBufferMipChain.rt.width;
float scaleY = hdCamera.actualHeight / (float)m_CameraDepthBufferMipChain.rt.height;
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, m_CameraDepthBufferMipChain);
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidScale, new Vector4(scaleX, scaleY, lodCount, 0.0f));
PushFullScreenDebugTextureMip(hdCamera, cmd, m_CameraDepthBufferMipChain, lodCount, new Vector4(scaleX, scaleY, 0f, 0f), debugMode);
}
void RenderPostProcess(HDCamera hdcamera, CommandBuffer cmd, PostProcessLayer layer)

5
com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDStringConstants.cs


public static readonly int _PixelCoordToViewDirWS = Shader.PropertyToID("_PixelCoordToViewDirWS");
public static readonly int _Size = Shader.PropertyToID("_Size");
public static readonly int _Source4 = Shader.PropertyToID("_Source4");
public static readonly int _Result1 = Shader.PropertyToID("_Result1");
public static readonly int _Source = Shader.PropertyToID("_Source");
public static readonly int _Destination = Shader.PropertyToID("_Destination");
public static readonly int _Mip0 = Shader.PropertyToID("_Mip0");
public static readonly int _AtmosphericScatteringType = Shader.PropertyToID("_AtmosphericScatteringType");
public static readonly int _AmbientProbeCoeffs = Shader.PropertyToID("_AmbientProbeCoeffs");

9
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/ApplyDistorsion.compute


#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "HDRP/Material/Builtin/BuiltinData.hlsl"
#include "HDRP/ShaderVariables.hlsl"
TEXTURE2D(_ColorPyramidTexture);
RW_TEXTURE2D(float4, _CameraColorTexture);
RW_TEXTURE2D(float4, _Destination);
float4 _ColorPyramidScale;
CBUFFER_END
#pragma kernel KMain

return;
// Get source pixel for distortion
float2 distordedUV = float2(dispatchThreadId + int2(distortion * _FetchBias)) * _Size.zw;
float2 distordedUV = float2(dispatchThreadId + int2(distortion * _FetchBias)) * _Size.zw * _ScreenToTargetScale.xy;
_CameraColorTexture[dispatchThreadId] = sampled;
_Destination[dispatchThreadId] = sampled;
}

58
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/ColorPyramid.compute


#pragma only_renderers d3d11 ps4 xboxone vulkan metal switch
#pragma kernel KColorGaussian KERNEL_SIZE=8 MAIN_GAUSSIAN=KColorGaussian
#pragma kernel KColorDownsample KERNEL_SIZE=8 MAIN_DOWNSAMPLE=KColorDownsample
#pragma kernel KColorDownsampleCopyMip0 KERNEL_SIZE=8 MAIN_DOWNSAMPLE=KColorDownsampleCopyMip0 COPY_MIP_0
#if COPY_MIP_0
RWTexture2D<float4> _Result;
RWTexture2D<float4> _Mip0;
#else
RWTexture2D<float4> _Source;
#endif
RWTexture2D<float4> _Destination;
float4 _Size;
float4 _Size; // x: src width, y: src height, zw: unused
CBUFFER_END
// 16x16 pixels with an 8x8 center that we will be blurring writing out. Each uint is two color

float4 blurred = BlurPixels(s0, s1, s2, s3, s4, s5, s6, s7, s8);
// Write to the final target
_Result[pixelCoord] = blurred;
_Destination[pixelCoord] = blurred;
#pragma kernel KMain
[numthreads(8, 8, 1)]
void KMain(uint2 groupId : SV_GroupID, uint2 groupThreadId : SV_GroupThreadID, uint2 dispatchThreadId : SV_DispatchThreadID)
[numthreads(KERNEL_SIZE, KERNEL_SIZE, 1)]
void MAIN_GAUSSIAN(uint2 groupId : SV_GroupID, uint2 groupThreadId : SV_GroupThreadID, uint2 dispatchThreadId : SV_DispatchThreadID)
// Downsample the block
float2 offset = float2(threadUL);
float4 p00 = _Source.SampleLevel(sampler_LinearClamp, (offset + 0.5) * _Size.zw, 0.0);
float4 p10 = _Source.SampleLevel(sampler_LinearClamp, (offset + float2(1.0, 0.0) + 0.5) * _Size.zw, 0.0);
float4 p01 = _Source.SampleLevel(sampler_LinearClamp, (offset + float2(0.0, 1.0) + 0.5) * _Size.zw, 0.0);
float4 p11 = _Source.SampleLevel(sampler_LinearClamp, (offset + float2(1.0, 1.0) + 0.5) * _Size.zw, 0.0);
uint2 size = uint2(_Size.xy) - 1u;
float4 p00 = _Source[clamp(threadUL + uint2(0u, 0u), 0u, size)];
float4 p10 = _Source[clamp(threadUL + uint2(1u, 0u), 0u, size)];
float4 p11 = _Source[clamp(threadUL + uint2(1u, 1u), 0u, size)];
float4 p01 = _Source[clamp(threadUL + uint2(0u, 1u), 0u, size)];
// Store the 4 downsampled pixels in LDS
uint destIdx = groupThreadId.x + (groupThreadId.y << 4u);

// Vertically blur the pixels in LDS and write the result to memory
BlurVertically(dispatchThreadId, (groupThreadId.y << 3u) + groupThreadId.x);
}
[numthreads(KERNEL_SIZE, KERNEL_SIZE, 1)]
void MAIN_DOWNSAMPLE(uint2 dispatchThreadId : SV_DispatchThreadID)
{
uint2 offset = dispatchThreadId * 2u;
uint2 size = uint2(_Size.xy) - 1u;
uint2 c00 = clamp(offset + uint2(0u, 0u), 0u, size);
uint2 c10 = clamp(offset + uint2(1u, 0u), 0u, size);
uint2 c11 = clamp(offset + uint2(1u, 1u), 0u, size);
uint2 c01 = clamp(offset + uint2(0u, 1u), 0u, size);
float4 p00 = _Source[c00];
float4 p10 = _Source[c10];
float4 p11 = _Source[c11];
float4 p01 = _Source[c01];
#if COPY_MIP_0
_Mip0[c00] = p00;
_Mip0[c10] = p10;
_Mip0[c11] = p11;
_Mip0[c01] = p01;
#endif
_Destination[dispatchThreadId] = (p00 + p01 + p11 + p10) * 0.25;
}

62
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/DepthPyramid.compute


#include "CoreRP/ShaderLibrary/Common.hlsl"
// ------------------------------------------------
// Algorithm
// ------------------------------------------------
// Downsample a depth texture by taking min value of sampled pixels
// ------------------------------------------------
// Variants
// ------------------------------------------------
#pragma kernel KDepthDownSample8 KERNEL_SIZE=8 KERNEL_NAME=KDepthDownSample8
#pragma kernel KDepthDownSample1 KERNEL_SIZE=1 KERNEL_NAME=KDepthDownSample1
// ------------------------------------------------
// Texture buffers
// ------------------------------------------------
#pragma kernel KDepthDownsample8DualUav KERNEL_SIZE=8 KERNEL_NAME=KDepthDownsample8DualUav
Texture2D<float2> _Source;
RW_TEXTURE2D(float2, _Result);
SamplerState sampler_PointClamp; //TODO: could we use min-sampler instead of using ALU?
RW_TEXTURE2D(float, _Destination);
RW_TEXTURE2D(float, _Source);
// ------------------------------------------------
// Constant buffers
// ------------------------------------------------
float4 _SrcSize;
int2 _RectOffset; // Offset in source texture
float4 _Size; // x: src width, y: src height, zw: unused
// ------------------------------------------------
// Kernel
// ------------------------------------------------
# define MAX_DEPTH(l, r) min(l, r)
# define MAX_DEPTH(l, r) max(l, r)
// Downsample a depth texture by taking the min value of sampled pixels
// The size of the dispatch is (DstMipSize / KernelSize).
void KERNEL_NAME(uint2 groupId : SV_GroupID, uint2 groupThreadId : SV_GroupThreadID, uint2 dispatchThreadId : SV_DispatchThreadID)
void KERNEL_NAME(uint2 dispatchThreadId : SV_DispatchThreadID)
uint2 srcPixelUL = _RectOffset + (dispatchThreadId << 1);
// Offset by 0.5 so sampling get the proper pixels
float2 offset = float2(srcPixelUL) + 0.5;
uint2 srcPixelUL = dispatchThreadId << 1;
#if defined(PLATFORM_SUPPORT_GATHER)
float4 depths = GATHER_RED_TEXTURE2D(_Source, sampler_PointClamp, offset * _SrcSize.zw).wzxy;
#else
float p00 = SAMPLE_TEXTURE2D_LOD(_Source, sampler_PointClamp, (offset) * _SrcSize.zw, 0.0).x;
float p10 = SAMPLE_TEXTURE2D_LOD(_Source, sampler_PointClamp, (offset + float2(1.0, 0.0)) * _SrcSize.zw, 0.0).x;
float p01 = SAMPLE_TEXTURE2D_LOD(_Source, sampler_PointClamp, (offset + float2(0.0, 1.0)) * _SrcSize.zw, 0.0).x;
float p11 = SAMPLE_TEXTURE2D_LOD(_Source, sampler_PointClamp, (offset + float2(1.0, 1.0)) * _SrcSize.zw, 0.0).x;
// '_Source' and '_Destination' are two different MIP levels of the same texture.
// TODO: Use Gather here instead of 4 loads
uint2 size = uint2(_Size.xy) - 1u;
float p00 = _Source[min(srcPixelUL + uint2(0u, 0u), size)];
float p10 = _Source[min(srcPixelUL + uint2(1u, 0u), size)];
float p01 = _Source[min(srcPixelUL + uint2(0u, 1u), size)];
float p11 = _Source[min(srcPixelUL + uint2(1u, 1u), size)];
#endif
float maxDepth = MAX_DEPTH(MAX_DEPTH(depths.x, depths.y), MAX_DEPTH(depths.z, depths.w));
uint2 dstPixel = (_RectOffset >> 1) + dispatchThreadId;
_Result[dstPixel] = float2(minDepth, maxDepth);
_Destination[dispatchThreadId] = minDepth;
#undef MAX_DEPTH
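
The min-depth downsample kernel above reads one mip level of the camera depth mip chain and writes the 2x2 min into the next level, with the dispatch sized to the destination mip. A rough sketch of how such a kernel could be driven per level from C# follows; the method name, the loop structure, and the group-size math are illustrative assumptions rather than the MipGenerator.cs added by this commit (whose diff is not shown on this page):

using UnityEngine;
using UnityEngine.Rendering;

static class DepthPyramidSketch
{
    static readonly int s_Size        = Shader.PropertyToID("_Size");
    static readonly int s_Source      = Shader.PropertyToID("_Source");
    static readonly int s_Destination = Shader.PropertyToID("_Destination");

    // Assumes mip 0 of the RFloat 'mipChain' already holds the copied depth
    // (the diff does that copy with GPUCopy.SampleCopyChannel_xyzw2x).
    public static int RenderMinDepthPyramid(CommandBuffer cmd, ComputeShader cs,
                                            RenderTexture mipChain, Vector2Int size)
    {
        int kernel = cs.FindKernel("KDepthDownsample8DualUav");
        int srcW = size.x, srcH = size.y, lod = 0;
        while (srcW > 1 || srcH > 1)
        {
            int dstW = Mathf.Max(1, srcW >> 1);
            int dstH = Mathf.Max(1, srcH >> 1);
            // _Size.xy is the source mip size; the kernel clamps its 2x2 reads against it.
            cmd.SetComputeVectorParam(cs, s_Size, new Vector4(srcW, srcH, 0f, 0f));
            cmd.SetComputeTextureParam(cs, kernel, s_Source, mipChain, lod);
            cmd.SetComputeTextureParam(cs, kernel, s_Destination, mipChain, lod + 1);
            // The dispatch covers the destination mip with 8x8 thread groups.
            cmd.DispatchCompute(cs, kernel, (dstW + 7) / 8, (dstH + 7) / 8, 1);
            srcW = dstW; srcH = dstH; lod++;
        }
        return lod; // number of mips written below mip 0
    }
}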

20
com.unity.render-pipelines.lightweight/CHANGELOG.md


### Added
- When you have enabled Gizmos, they now appear correctly in the Game view.
- Added requiresDepthPrepass field to RenderingData struct to tell if the runtime platform requires a depth prepass to generate a camera depth texture.
- The _RenderingData_ struct now holds a reference to _CullResults_.
- The `RenderingData` struct now holds a reference to `CullResults`.
- When __HDR__ is enabled in the Camera but disabled in the Asset, an information box in the Camera Inspector informs you about it.
- When __MSAA__ is enabled in the Camera but disabled in the Asset, an information box in the Camera Inspector informs you about it.
- The _RenderingData_ struct is now read-only.
- _ScriptableRenderer_ always perform a Clear before calling _IRendererSetup::Setup_.
- _ScriptableRenderPass::Execute_ no longer takes _CullResults_ as input. Instead, use _RenderingData_ as input, since that references _CullResults_.
- _IRendererSetup::Setup_ no longer takes _ScriptableRenderContext_ and _CullResults_ as input.
- The `RenderingData` struct is now read-only.
- `ScriptableRenderer`always perform a Clear before calling `IRendererSetup::Setup.`
- `ScriptableRenderPass::Execute` no longer takes `CullResults` as input. Instead, use `RenderingData`as input, since that references `CullResults`.
- `IRendererSetup_Setup` no longer takes `ScriptableRenderContext` and `CullResults` as input.
- The tooltip for the the camera __MSAA__ property now appears correctly.
- Add callbacks to LWRP that can be attached to a camera (IBeforeCameraRender, IAfterDepthPrePass, IAfterOpaquePass, IAfterOpaquePostProcess, IAfterSkyboxPass, IAfterTransparentPass, IAfterRender)
- Added callbacks to LWRP that can be attached to a camera (IBeforeCameraRender, IAfterDepthPrePass, IAfterOpaquePass, IAfterOpaquePostProcess, IAfterSkyboxPass, IAfterTransparentPass, IAfterRender)
###Changed
- Clean up LWRP creation of render textures. If we are not going straight to screen ensure that we create both depth and color targets.

### Fixed
- Lightweight Unlit shader UI doesn't throw an error about missing receive shadow property anymore.
### Changed
- Change real-time attenuation to inverse square.
- Change attenuation for baked GI to inverse square, to match real-time attenuation.
- Small optimization in light attenuation shader code.
## [3.2.0-preview]
### Changed

18
com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineLightEditor.cs


{
[CanEditMultipleObjects]
[CustomEditorForRenderPipeline(typeof(Light), typeof(LightweightPipelineAsset))]
class LightweightLightEditor : LightEditor
internal class LightweightRenderPipelineLightEditor : LightEditor
{
AnimBool m_AnimSpotOptions = new AnimBool();
AnimBool m_AnimPointOptions = new AnimBool();

class Styles
{
public readonly GUIContent SpotAngle = new GUIContent("Spot Angle", "Controls the angle in degrees at the base of a Spot light's cone.");
public readonly GUIContent Cookie = new GUIContent("Cookie", "Specifies the Texture mask to cast shadows, create silhouettes, or patterned illumination for the light.");
public readonly GUIContent CookieSize = new GUIContent("Cookie Size", "Controls the size of the cookie mask currently assigned to the light.");
public readonly GUIContent SpotAngle = EditorGUIUtility.TrTextContent("Spot Angle", "Controls the angle in degrees at the base of a Spot light's cone.");
public readonly GUIStyle invisibleButton = "InvisibleButton";
public readonly GUIContent Cookie = EditorGUIUtility.TrTextContent("Cookie", "Specifies the Texture mask to cast shadows, create silhouettes, or patterned illumination for the light.");
public readonly GUIContent CookieSize = EditorGUIUtility.TrTextContent("Cookie Size", "Controls the size of the cookie mask currently assigned to the light.");
public readonly GUIContent BakingWarning = new GUIContent("Light mode is currently overridden to Realtime mode. Enable Baked Global Illumination to use Mixed or Baked light modes.");
public readonly GUIContent CookieWarning = new GUIContent("Cookie textures for spot lights should be set to clamp, not repeat, to avoid artifacts.");
public readonly GUIContent DisabledLightWarning = new GUIContent("Lighting has been disabled in at least one Scene view. Any changes applied to lights in the Scene will not be updated in these views until Lighting has been enabled again.");
public readonly GUIContent BakingWarning = EditorGUIUtility.TrTextContent("Light mode is currently overridden to Realtime mode. Enable Baked Global Illumination to use Mixed or Baked light modes.");
public readonly GUIContent CookieWarning = EditorGUIUtility.TrTextContent("Cookie textures for spot lights should be set to clamp, not repeat, to avoid artifacts.");
public readonly GUIContent DisabledLightWarning = EditorGUIUtility.TrTextContent("Lighting has been disabled in at least one Scene view. Any changes applied to lights in the Scene will not be updated in these views until Lighting has been enabled again.");
public readonly GUIContent ShadowsNotSupportedWarning = new GUIContent("Realtime shadows for point lights are not supported. Either disable shadows or set the light mode to Baked.");
public readonly GUIContent ShadowsNotSupportedWarning = EditorGUIUtility.TrTextContent("Realtime shadows for point lights are not supported. Either disable shadows or set the light mode to Baked.");
}
static Styles s_Styles;

42
com.unity.render-pipelines.lightweight/LWRP/LightweightPipeline.cs


using System;
using System.Collections.Generic;
using Unity.Collections;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.Experimental.Rendering.LightweightPipeline;

using UnityEngine.Experimental.GlobalIllumination;
using Lightmapping = UnityEngine.Experimental.GlobalIllumination.Lightmapping;
namespace UnityEngine.Experimental.Rendering.LightweightPipeline
{

QualitySettings.antiAliasing = m_PipelineSettings.msaaSampleCount;
Shader.globalRenderPipeline = "LightweightPipeline";
Lightmapping.SetDelegate(lightsDelegate);
}
public override void Dispose()

#endif
m_Renderer.Dispose();
Lightmapping.ResetDelegate();
}
public interface IBeforeCameraRender

Matrix4x4 invViewProjMatrix = Matrix4x4.Inverse(viewProjMatrix);
Shader.SetGlobalMatrix(PerCameraBuffer._InvCameraViewProj, invViewProjMatrix);
}
public static Lightmapping.RequestLightsDelegate lightsDelegate = (Light[] requests, NativeArray<LightDataGI> lightsOutput) =>
{
LightDataGI lightData = new LightDataGI();
for (int i = 0; i < requests.Length; i++)
{
Light light = requests[i];
switch (light.type)
{
case LightType.Directional:
DirectionalLight directionalLight = new DirectionalLight();
LightmapperUtils.Extract(light, ref directionalLight); lightData.Init(ref directionalLight);
break;
case LightType.Point:
PointLight pointLight = new PointLight();
LightmapperUtils.Extract(light, ref pointLight); lightData.Init(ref pointLight);
break;
case LightType.Spot:
SpotLight spotLight = new SpotLight();
LightmapperUtils.Extract(light, ref spotLight); lightData.Init(ref spotLight);
break;
case LightType.Area:
RectangleLight rectangleLight = new RectangleLight();
LightmapperUtils.Extract(light, ref rectangleLight); lightData.Init(ref rectangleLight);
break;
default:
lightData.InitNoBake(light.GetInstanceID());
break;
}
lightData.falloff = FalloffType.InverseSquared;
lightsOutput[i] = lightData;
}
};
}
}
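
Together with the lightweight changelog entry above ("Change attenuation for baked GI to inverse square, to match real-time attenuation"), this delegate is what keeps baking consistent with the new real-time falloff: Lightmapping.SetDelegate(lightsDelegate) registers it when the pipeline is created, Dispose() resets it, and every extracted light is tagged with FalloffType.InverseSquared before it is handed to the baking backend.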

76
com.unity.render-pipelines.lightweight/LWRP/Passes/SetupLightweightConstanstPass.cs


public static int _AdditionalLightCount;
public static int _AdditionalLightPosition;
public static int _AdditionalLightColor;
public static int _AdditionalLightDistanceAttenuation;
public static int _AdditionalLightAttenuation;
public static int _AdditionalLightSpotAttenuation;
public static int _LightIndexBuffer;
}

Vector4 k_DefaultLightPosition = new Vector4(0.0f, 0.0f, 1.0f, 0.0f);
Vector4 k_DefaultLightColor = Color.black;
Vector4 k_DefaultLightAttenuation = new Vector4(0.0f, 1.0f, 0.0f, 1.0f);
Vector4 k_DefaultLightAttenuation = new Vector4(1.0f, 0.0f, 0.0f, 1.0f);
Vector4 k_DefaultLightSpotAttenuation = new Vector4(0.0f, 1.0f, 0.0f, 0.0f);
Vector4[] m_LightDistanceAttenuations;
Vector4[] m_LightAttenuations;
Vector4[] m_LightSpotAttenuations;
private int maxVisibleLocalLights { get; set; }
private ComputeBuffer perObjectLightIndices { get; set; }

LightConstantBuffer._AdditionalLightCount = Shader.PropertyToID("_AdditionalLightCount");
LightConstantBuffer._AdditionalLightPosition = Shader.PropertyToID("_AdditionalLightPosition");
LightConstantBuffer._AdditionalLightColor = Shader.PropertyToID("_AdditionalLightColor");
LightConstantBuffer._AdditionalLightDistanceAttenuation = Shader.PropertyToID("_AdditionalLightDistanceAttenuation");
LightConstantBuffer._AdditionalLightAttenuation = Shader.PropertyToID("_AdditionalLightAttenuation");
LightConstantBuffer._AdditionalLightSpotAttenuation = Shader.PropertyToID("_AdditionalLightSpotAttenuation");
m_LightDistanceAttenuations = new Vector4[0];
m_LightAttenuations = new Vector4[0];
m_LightSpotAttenuations = new Vector4[0];
}
public void Setup(int maxVisibleLocalLights, ComputeBuffer perObjectLightIndices)

{
m_LightPositions = new Vector4[maxVisibleLocalLights];
m_LightColors = new Vector4[maxVisibleLocalLights];
m_LightDistanceAttenuations = new Vector4[maxVisibleLocalLights];
m_LightAttenuations = new Vector4[maxVisibleLocalLights];
m_LightSpotAttenuations = new Vector4[maxVisibleLocalLights];
void InitializeLightConstants(List<VisibleLight> lights, int lightIndex, out Vector4 lightPos, out Vector4 lightColor, out Vector4 lightDistanceAttenuation, out Vector4 lightSpotDir,
out Vector4 lightSpotAttenuation)
void InitializeLightConstants(List<VisibleLight> lights, int lightIndex, out Vector4 lightPos, out Vector4 lightColor, out Vector4 lightAttenuation, out Vector4 lightSpotDir)
lightDistanceAttenuation = k_DefaultLightSpotAttenuation;
lightAttenuation = k_DefaultLightAttenuation;
lightSpotAttenuation = k_DefaultLightAttenuation;
float subtractiveMixedLighting = 0.0f;
// When no lights are visible, main light will be set to -1.
// In this case we initialize it to default values and return

else
{
Vector4 pos = lightData.localToWorld.GetColumn(3);
lightPos = new Vector4(pos.x, pos.y, pos.z, 1.0f);
lightPos = new Vector4(pos.x, pos.y, pos.z, 0.0f);
}
// VisibleLight.finalColor already returns color in active color space

if (lightData.lightType != LightType.Directional)
{
// Light attenuation in lightweight matches the unity vanilla one.
// attenuation = 1.0 / 1.0 + distanceToLightSqr * quadraticAttenuation
// then a smooth factor is applied to linearly fade attenuation to light range
// the attenuation smooth factor starts having effect at 80% of light range
// attenuation = 1.0 / distanceToLightSqr
// We offer two different smoothing factors.
// The smoothing factors make sure that the light intensity is zero at the light range limit.
// The first smoothing factor is a linear fade starting at 80 % of the light range.
// The other smoothing factor matches the one used in the Unity lightmapper but is slower than the linear one.
// smoothFactor = (1.0 - saturate((distanceSqr * 1.0 / lightrangeSqr)^2))^2
float quadAtten = 25.0f / lightRangeSqr;
lightDistanceAttenuation = new Vector4(quadAtten, oneOverFadeRangeSqr, lightRangeSqrOverFadeRangeSqr, 1.0f);
float oneOverLightRangeSqr = 1.0f / Mathf.Max(0.0001f, lightData.range * lightData.range);
// On mobile: Use the faster linear smoothing factor.
// On other devices: Use the smoothing factor that matches the GI.
lightAttenuation.x = Application.isMobilePlatform ? oneOverFadeRangeSqr : oneOverLightRangeSqr;
lightAttenuation.y = lightRangeSqrOverFadeRangeSqr;
subtractiveMixedLighting = 1.0f;
}
if (lightData.lightType == LightType.Spot)

float smoothAngleRange = Mathf.Max(0.001f, cosInnerAngle - cosOuterAngle);
float invAngleRange = 1.0f / smoothAngleRange;
float add = -cosOuterAngle * invAngleRange;
lightSpotAttenuation = new Vector4(invAngleRange, add, 0.0f);
lightAttenuation.z = invAngleRange;
lightAttenuation.w = add;
}
Light light = lightData.light;

if (m_MixedLightingSetup == MixedLightingSetup.None && lightData.light.shadows != LightShadows.None)
{
m_MixedLightingSetup = MixedLightingSetup.Subtractive;
lightDistanceAttenuation.w = 0.0f;
subtractiveMixedLighting = 0.0f;
// Use the w component of the light position to indicate subtractive mixed light mode.
// The only directional light is the main light, and the rest are punctual lights.
// The main light will always have w = 0 and the additional lights have w = 1.
lightPos.w = subtractiveMixedLighting;
}
void SetupShaderLightConstants(CommandBuffer cmd, ref LightData lightData)

InitializeLightConstants(lightData.visibleLights, -1, out m_LightPositions[i],
out m_LightColors[i],
out m_LightDistanceAttenuations[i],
out m_LightSpotDirections[i],
out m_LightSpotAttenuations[i]);
out m_LightAttenuations[i],
out m_LightSpotDirections[i]);
m_MixedLightingSetup = MixedLightingSetup.None;

void SetupMainLightConstants(CommandBuffer cmd, ref LightData lightData)
{
Vector4 lightPos, lightColor, lightDistanceAttenuation, lightSpotDir, lightSpotAttenuation;
Vector4 lightPos, lightColor, lightAttenuation, lightSpotDir;
InitializeLightConstants(lightData.visibleLights, lightData.mainLightIndex, out lightPos, out lightColor, out lightDistanceAttenuation, out lightSpotDir, out lightSpotAttenuation);
InitializeLightConstants(lightData.visibleLights, lightData.mainLightIndex, out lightPos, out lightColor, out lightAttenuation, out lightSpotDir);
if (lightData.mainLightIndex >= 0)
{

}
}
cmd.SetGlobalVector(LightConstantBuffer._MainLightPosition, new Vector4(lightPos.x, lightPos.y, lightPos.z, lightDistanceAttenuation.w));
cmd.SetGlobalVector(LightConstantBuffer._MainLightPosition, lightPos);
cmd.SetGlobalVector(LightConstantBuffer._MainLightColor, lightColor);
}

{
InitializeLightConstants(lights, i, out m_LightPositions[localLightsCount],
out m_LightColors[localLightsCount],
out m_LightDistanceAttenuations[localLightsCount],
out m_LightSpotDirections[localLightsCount],
out m_LightSpotAttenuations[localLightsCount]);
out m_LightAttenuations[localLightsCount],
out m_LightSpotDirections[localLightsCount]);
localLightsCount++;
}
}

cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightPosition, m_LightPositions);
cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightColor, m_LightColors);
cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightDistanceAttenuation, m_LightDistanceAttenuations);
cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightAttenuation, m_LightAttenuations);
cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightSpotAttenuation, m_LightSpotAttenuations);
}
void SetShaderKeywords(CommandBuffer cmd, ref CameraData cameraData, ref LightData lightData, ref ShadowData shadowData)
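
For reference, the packed per-light attenuation vector described by the comments in this pass can be reproduced in isolation as below. This is a hedged sketch: the fade-window terms (fadeStartDistanceSqr, fadeRangeSqr) are inferred from the "starts having effect at 80% of the light range" comment rather than shown in the diff, and the class and method names are illustrative:

using UnityEngine;

static class LightAttenuationSketch
{
    // x, y: distance attenuation terms consumed by DistanceAttenuation() in Lighting.hlsl.
    // z, w: spot angle terms consumed by SpotAttenuation().
    public static Vector4 Pack(float range, float cosInnerAngle, float cosOuterAngle, bool useLinearFade)
    {
        float lightRangeSqr = range * range;
        // Linear fade window: starts at 80% of the light range, ends at the range itself.
        float fadeStartDistanceSqr = 0.8f * 0.8f * lightRangeSqr;
        float fadeRangeSqr = fadeStartDistanceSqr - lightRangeSqr;           // negative
        float oneOverFadeRangeSqr = 1.0f / fadeRangeSqr;
        float lightRangeSqrOverFadeRangeSqr = -lightRangeSqr / fadeRangeSqr;
        float oneOverLightRangeSqr = 1.0f / Mathf.Max(0.0001f, lightRangeSqr);

        // Mobile path in the diff: cheap linear fade, smooth = saturate(d2 * x + y).
        // Other platforms: lightmapper-matching fade, smooth = (1 - saturate((d2 * x)^2))^2,
        // which only needs 1 / rangeSqr in x.
        float x = useLinearFade ? oneOverFadeRangeSqr : oneOverLightRangeSqr;
        float y = lightRangeSqrOverFadeRangeSqr;

        // Spot: remap the cosine of the angle to [0, 1] between the outer and inner cone.
        float smoothAngleRange = Mathf.Max(0.001f, cosInnerAngle - cosOuterAngle);
        float invAngleRange = 1.0f / smoothAngleRange;
        float add = -cosOuterAngle * invAngleRange;
        return new Vector4(x, y, invAngleRange, add);
    }
}

// Sanity check for the linear fade with range = 10 (rangeSqr = 100): the fade window
// is [64, 100]; at d2 = 64 the smooth factor is 1, at d2 = 100 it is 0, so the
// 1/d2 attenuation reaches exactly zero at the range limit.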

3
com.unity.render-pipelines.lightweight/LWRP/ShaderLibrary/Input.hlsl


half4 _AdditionalLightCount;
float4 _AdditionalLightPosition[MAX_VISIBLE_LIGHTS];
half4 _AdditionalLightColor[MAX_VISIBLE_LIGHTS];
half4 _AdditionalLightDistanceAttenuation[MAX_VISIBLE_LIGHTS];
half4 _AdditionalLightAttenuation[MAX_VISIBLE_LIGHTS];
half4 _AdditionalLightSpotAttenuation[MAX_VISIBLE_LIGHTS];
CBUFFER_END
#if USE_STRUCTURED_BUFFER_FOR_LIGHT_DATA

43
com.unity.render-pipelines.lightweight/LWRP/ShaderLibrary/Lighting.hlsl


{
float4 position;
half3 color;
half4 distanceAttenuation;
half4 distanceAndSpotAttenuation;
half4 spotAttenuation;
};
// Abstraction over Light shading data.

// Matches Unity Vanila attenuation
// Attenuation smoothly decreases to light range.
half DistanceAttenuation(half distanceSqr, half3 distanceAttenuation)
half DistanceAttenuation(half distanceSqr, half2 distanceAttenuation)
half quadFalloff = distanceAttenuation.x;
half denom = distanceSqr * quadFalloff + 1.0h;
half lightAtten = 1.0h / denom;
half lightAtten = 1.0h / distanceSqr;
#if defined(SHADER_HINT_NICE_QUALITY)
// Use the smoothing factor also used in the Unity lightmapper.
half factor = distanceSqr * distanceAttenuation.x;
half smoothFactor = saturate(1.0h - factor * factor);
smoothFactor = smoothFactor * smoothFactor;
#else
// We need to smoothly fade attenuation to light range. We start fading linearly at 80% of light range
// Therefore:
// fadeDistance = (0.8 * 0.8 * lightRangeSq)

// distanceSqr * distanceAttenuation.y + distanceAttenuation.z
half smoothFactor = saturate(distanceSqr * distanceAttenuation.y + distanceAttenuation.z);
half smoothFactor = saturate(distanceSqr * distanceAttenuation.x + distanceAttenuation.y);
#endif
half SpotAttenuation(half3 spotDirection, half3 lightDirection, half4 spotAttenuation)
half SpotAttenuation(half3 spotDirection, half3 lightDirection, half2 spotAttenuation)
{
// Spot Attenuation with a linear falloff can be defined as
// (SdotL - cosOuterAngle) / (cosInnerAngle - cosOuterAngle)

float distanceSqr = max(dot(posToLightVec, posToLightVec), FLT_MIN);
directionAndAttenuation.xyz = half3(posToLightVec * rsqrt(distanceSqr));
- directionAndAttenuation.w = DistanceAttenuation(distanceSqr, lightInput.distanceAttenuation.xyz);
- directionAndAttenuation.w *= SpotAttenuation(lightInput.spotDirection.xyz, directionAndAttenuation.xyz, lightInput.spotAttenuation);
return directionAndAttenuation;
}
half4 GetMainLightDirectionAndAttenuation(LightInput lightInput, float3 positionWS)
{
half4 directionAndAttenuation = GetLightDirectionAndAttenuation(lightInput, positionWS);
// Cookies disabled for now due to amount of shader variants
//directionAndAttenuation.w *= CookieAttenuation(positionWS);
+ directionAndAttenuation.w = DistanceAttenuation(distanceSqr, lightInput.distanceAndSpotAttenuation.xy);
+ directionAndAttenuation.w *= SpotAttenuation(lightInput.spotDirection.xyz, directionAndAttenuation.xyz, lightInput.distanceAndSpotAttenuation.zw);
return directionAndAttenuation;
}

// dynamic indexing. Ideally we need to configure light data at a cluster of
// objects granularity level. We will only be able to do that when scriptable culling kicks in.
// TODO: Use StructuredBuffer on PC/Console and profile access speed on mobile devices that support it.
- lightInput.position = _AdditionalLightPosition[lightIndex];
+ float4 positionAndSubtractiveLightMode = _AdditionalLightPosition[lightIndex];
+ lightInput.position = float4(positionAndSubtractiveLightMode.xyz, 1.);
- lightInput.distanceAttenuation = _AdditionalLightDistanceAttenuation[lightIndex];
+ lightInput.distanceAndSpotAttenuation = _AdditionalLightAttenuation[lightIndex];
- lightInput.spotAttenuation = _AdditionalLightSpotAttenuation[lightIndex];
half4 directionAndRealtimeAttenuation = GetLightDirectionAndAttenuation(lightInput, positionWS);

light.attenuation = directionAndRealtimeAttenuation.w;
- light.subtractiveModeAttenuation = lightInput.distanceAttenuation.w;
+ light.subtractiveModeAttenuation = positionAndSubtractiveLightMode.w;
light.color = lightInput.color;
return light;
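
To make the new packed layout concrete: _AdditionalLightAttenuation carries the distance fade in .xy and the spot-angle fade in .zw, while the subtractive-lighting flag now rides in _AdditionalLightPosition.w. Below is a minimal C# sketch of how the four attenuation coefficients could be derived from a light's range and spot angles; PackLightAttenuation and its parameter names are illustrative, not the pipeline's actual API.

using UnityEngine;

// Illustrative sketch only: derives the coefficients the shader code above consumes.
// Distance fade: smoothFactor = saturate(distanceSqr * x + y), 1 at 80% of the range, 0 at the range.
// Spot fade:     saturate(SdotL * z + w) == saturate((SdotL - cosOuter) / (cosInner - cosOuter)).
static class LightAttenuationPackingSketch
{
    public static Vector4 PackLightAttenuation(float range, float cosInnerAngle, float cosOuterAngle)
    {
        float lightRangeSqr = range * range;
        float fadeStartDistanceSqr = 0.8f * 0.8f * lightRangeSqr;
        float fadeRangeSqr = fadeStartDistanceSqr - lightRangeSqr;   // negative
        float distanceFadeSlope = 1.0f / fadeRangeSqr;               // x
        float distanceFadeOffset = -lightRangeSqr / fadeRangeSqr;    // y

        float invAngleRange = 1.0f / Mathf.Max(cosInnerAngle - cosOuterAngle, 1e-4f);
        return new Vector4(
            distanceFadeSlope,
            distanceFadeOffset,
            invAngleRange,                  // z
            -cosOuterAngle * invAngleRange  // w
        );
    }
}

For a directional light the distance terms can be set to (0, 1) so the fade evaluates to 1, and a point light can use (0, 1) for the spot terms so the spot factor becomes a no-op.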

999
com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/OSXEditor/Metal/053_UnlitShader.png
The diff for this file is too large to display.

999
com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/WindowsEditor/Direct3D11/053_UnlitShader.png
The diff for this file is too large to display.

173
com.unity.render-pipelines.high-definition/HDRP/Editor/Material/Decal/DecalProjectorComponentHandle.cs


using System;
using UnityEngine;
namespace UnityEditor.IMGUI.Controls
{
public class DecalProjectorComponentHandle : PrimitiveBoundsHandle
{
public DecalProjectorComponentHandle() : base()
{
midpointHandleDrawFunction = DrawHandleMidpoint;
}
public UnityEngine.Vector3 size { get { return GetSize(); } set { SetSize(value); } }
protected override void DrawWireframe()
{
Handles.DrawWireCube(center, size);
DrawArrowDownProjectionDirection();
}
protected void DrawArrowDownProjectionDirection()
{
int controlID = GUIUtility.GetControlID(GetHashCode(), FocusType.Passive);
Quaternion arrowRotation = Quaternion.LookRotation(Vector3.down, Vector3.right);
float arrowSize = size.y * 0.25f;
Handles.ArrowHandleCap(controlID, center, arrowRotation, arrowSize, EventType.Repaint);
}
// Could use a static readonly LUT, but this would require keeping its order in sync with the HandleDirection enum.
protected static Color ColorFromHandleDirection(HandleDirection handleDirection)
{
switch (handleDirection)
{
case HandleDirection.PositiveX:
case HandleDirection.NegativeX:
return Handles.xAxisColor;
case HandleDirection.PositiveY:
case HandleDirection.NegativeY:
return Handles.yAxisColor;
case HandleDirection.PositiveZ:
case HandleDirection.NegativeZ:
return Handles.zAxisColor;
default:
throw new ArgumentOutOfRangeException("handleDirection", "Must be PositiveX, NegativeX, PositiveY, NegativeY, PositiveZ, or NegativeZ");
}
}
protected static void PlaneVerticesFromHandleDirection(ref Vector3[] outVertices, HandleDirection handleDirection, Vector3 boundsSize, Vector3 boundsCenter)
{
Vector3 boundsMin = boundsSize * -0.5f + boundsCenter;
Vector3 boundsMax = boundsSize * 0.5f + boundsCenter;
switch (handleDirection)
{
case HandleDirection.PositiveX:
outVertices[0] = new Vector3(boundsMax.x, boundsMin.y, boundsMin.z);
outVertices[1] = new Vector3(boundsMax.x, boundsMax.y, boundsMin.z);
outVertices[2] = new Vector3(boundsMax.x, boundsMax.y, boundsMax.z);
outVertices[3] = new Vector3(boundsMax.x, boundsMin.y, boundsMax.z);
break;
case HandleDirection.NegativeX:
outVertices[0] = new Vector3(boundsMin.x, boundsMin.y, boundsMin.z);
outVertices[1] = new Vector3(boundsMin.x, boundsMax.y, boundsMin.z);
outVertices[2] = new Vector3(boundsMin.x, boundsMax.y, boundsMax.z);
outVertices[3] = new Vector3(boundsMin.x, boundsMin.y, boundsMax.z);
break;
case HandleDirection.PositiveY:
outVertices[0] = new Vector3(boundsMin.x, boundsMax.y, boundsMin.z);
outVertices[1] = new Vector3(boundsMax.x, boundsMax.y, boundsMin.z);
outVertices[2] = new Vector3(boundsMax.x, boundsMax.y, boundsMax.z);
outVertices[3] = new Vector3(boundsMin.x, boundsMax.y, boundsMax.z);
break;
case HandleDirection.NegativeY:
outVertices[0] = new Vector3(boundsMin.x, boundsMin.y, boundsMin.z);
outVertices[1] = new Vector3(boundsMax.x, boundsMin.y, boundsMin.z);
outVertices[2] = new Vector3(boundsMax.x, boundsMin.y, boundsMax.z);
outVertices[3] = new Vector3(boundsMin.x, boundsMin.y, boundsMax.z);
break;
case HandleDirection.PositiveZ:
outVertices[0] = new Vector3(boundsMin.x, boundsMin.y, boundsMax.z);
outVertices[1] = new Vector3(boundsMax.x, boundsMin.y, boundsMax.z);
outVertices[2] = new Vector3(boundsMax.x, boundsMax.y, boundsMax.z);
outVertices[3] = new Vector3(boundsMin.x, boundsMax.y, boundsMax.z);
break;
case HandleDirection.NegativeZ:
outVertices[0] = new Vector3(boundsMin.x, boundsMin.y, boundsMin.z);
outVertices[1] = new Vector3(boundsMax.x, boundsMin.y, boundsMin.z);
outVertices[2] = new Vector3(boundsMax.x, boundsMax.y, boundsMin.z);
outVertices[3] = new Vector3(boundsMin.x, boundsMax.y, boundsMin.z);
break;
default:
throw new ArgumentOutOfRangeException("handleDirection", "Must be PositiveX, NegativeX, PositiveY, NegativeY, PositiveZ, or NegativeZ");
}
}
// As DrawHandleDirectionPlane() is called every frame during gizmo rendering, we pre-allocate a scratch array for passing along to DrawSolidRectangleWithOutline()
// rather than putting pressure on the garbage collector every frame. Since DrawHandleDirectionPlane() is responsible for drawing handles, we will only ever call
// it from the main thread, so there is no realistic risk of a race condition occurring due to this static allocation.
private static Vector3[] s_PlaneVertices = new Vector3[4];
protected static void DrawHandleDirectionPlane(HandleDirection handleDirection, Vector3 size, Vector3 center)
{
// Set global Handles.color to white to keep global state from interfering with the desired colors set at DrawSolidRectangleWithOutline().
Color handlesColorPrevious = Handles.color;
Handles.color = Color.white;
Color planeColorOutline = ColorFromHandleDirection(handleDirection);
const float planeColorFillAlpha = 0.25f;
Color planeColorFill = planeColorOutline * planeColorFillAlpha;
PlaneVerticesFromHandleDirection(ref s_PlaneVertices, handleDirection, size, center);
Handles.DrawSolidRectangleWithOutline(s_PlaneVertices, planeColorFill, planeColorOutline);
Handles.color = handlesColorPrevious;
}
// Utility function for determining the handle direction (which face) we are currently rendering within DrawHandleMidpoint().
// Ideally, the base class PrimitiveBoundsHandle would expose the reverse lookup: HandleDirectionFromControlID().
// In lieu of an explicit way to handle this look up, we derive the handle direction from the handle rotation.
protected static HandleDirection HandleDirectionFromRotation(Quaternion rotation)
{
if (rotation.x == 0.0f && rotation.y == 0.7071068f && rotation.z == 0.0f && rotation.w == 0.7071068f)
{
return HandleDirection.PositiveX;
}
else if (rotation.x == 0.0f && rotation.y == -0.7071068f && rotation.z == 0.0f && rotation.w == 0.7071068f)
{
return HandleDirection.NegativeX;
}
else if (rotation.x == -0.7071068f && rotation.y == 0.0f && rotation.z == 0.0f && rotation.w == 0.7071068f)
{
return HandleDirection.PositiveY;
}
else if (rotation.x == 0.7071068f && rotation.y == 0.0f && rotation.z == 0.0f && rotation.w == 0.7071068f)
{
return HandleDirection.NegativeY;
}
else if (rotation.x == 0.0f && rotation.y == 0.0f && rotation.z == 0.0f && rotation.w == 1.0f)
{
return HandleDirection.PositiveZ;
}
else if (rotation.x == 0.0f && rotation.y == 1.0f && rotation.z == 0.0f && rotation.w == 0.0f)
{
return HandleDirection.NegativeZ;
}
else
{
throw new ArgumentOutOfRangeException("rotation", "Must point down PositiveX, NegativeX, PositiveY, NegativeY, PositiveZ, or NegativeZ");
}
}
protected void DrawHandleMidpoint(int handleControlID, Vector3 handlePosition, Quaternion handleRotation, float handleSize, EventType eventType)
{
// Highlight the plane we are currently interacting with.
if (handleControlID == GUIUtility.hotControl)
{
HandleDirection handleDirection = HandleDirectionFromRotation(handleRotation);
DrawHandleDirectionPlane(handleDirection, size, center);
}
// Draw the standard PrimitiveBoundsHandle midpoint handle.
Handles.DotHandleCap(handleControlID, handlePosition, handleRotation, handleSize, eventType);
}
}
}
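
The 0.7071068 literals compared against in HandleDirectionFromRotation are the components of axis-aligned quarter-turn quaternions: q = (axis * sin(θ/2), cos(θ/2)), and for θ = 90° both sin(45°) and cos(45°) are approximately 0.7071068. A small illustrative C# check (not part of the handle code) showing which rotation a few of the literals correspond to:

using UnityEngine;

// Illustrative only: reproduces a few of the quaternions HandleDirectionFromRotation compares against.
static class HandleRotationSketch
{
    static void Demo()
    {
        // A 90 degree turn about +Y points the handle's +Z axis down +X: (0, 0.7071068, 0, 0.7071068).
        Quaternion facePositiveX = Quaternion.AngleAxis(90f, Vector3.up);
        // A -90 degree turn about +X points +Z towards +Y: (-0.7071068, 0, 0, 0.7071068).
        Quaternion facePositiveY = Quaternion.AngleAxis(-90f, Vector3.right);
        // A 180 degree turn about +Y flips +Z to -Z: (0, 1, 0, 0).
        Quaternion faceNegativeZ = Quaternion.AngleAxis(180f, Vector3.up);

        Debug.Log(facePositiveX + " " + facePositiveY + " " + faceNegativeZ);
    }
}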

11
com.unity.render-pipelines.high-definition/HDRP/Editor/Material/Decal/DecalProjectorComponentHandle.cs.meta


fileFormatVersion: 2
guid: b29d5aac6308c3a4088143d90a1f1925
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

136
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/MipGenerator.cs


using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
using RTHandle = RTHandleSystem.RTHandle;
public class MipGenerator
{
RTHandle m_TempColorTarget;
ComputeShader m_DepthPyramidCS;
ComputeShader m_ColorPyramidCS;
int m_DepthDownsampleKernel;
int m_ColorDownsampleKernel;
int m_ColorDownsampleKernelCopyMip0;
int m_ColorGaussianKernel;
public MipGenerator(HDRenderPipelineAsset asset)
{
m_DepthPyramidCS = asset.renderPipelineResources.depthPyramidCS;
m_ColorPyramidCS = asset.renderPipelineResources.colorPyramidCS;
m_DepthDownsampleKernel = m_DepthPyramidCS.FindKernel("KDepthDownsample8DualUav");
m_ColorDownsampleKernel = m_ColorPyramidCS.FindKernel("KColorDownsample");
m_ColorDownsampleKernelCopyMip0 = m_ColorPyramidCS.FindKernel("KColorDownsampleCopyMip0");
m_ColorGaussianKernel = m_ColorPyramidCS.FindKernel("KColorGaussian");
}
public void Release()
{
RTHandles.Release(m_TempColorTarget);
m_TempColorTarget = null;
}
// Generates an in-place depth pyramid
// Returns the number of generated mips
// TODO: Mip-mapping depth is problematic for precision at lower mips, generate a packed atlas instead
public int RenderMinDepthPyramid(CommandBuffer cmd, Vector2Int size, RenderTexture texture)
{
var cs = m_DepthPyramidCS;
int kernel = m_DepthDownsampleKernel;
int srcMipLevel = 0;
int srcMipWidth = size.x;
int srcMipHeight = size.y;
// TODO: Do it 1x MIP at a time for now. In the future, do 4x MIPs per pass, or even use a single pass.
// Note: Gather() doesn't take a LOD parameter and we cannot bind an SRV of a MIP level,
// and we don't support Min samplers either. So we are forced to perform 4x loads.
while (srcMipWidth >= 2 || srcMipHeight >= 2)
{
int dstMipWidth = Mathf.Max(1, srcMipWidth >> 1);
int dstMipHeight = Mathf.Max(1, srcMipHeight >> 1);
cmd.SetComputeVectorParam(cs, HDShaderIDs._Size, new Vector4(srcMipWidth, srcMipHeight, 0f, 0f));
cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._Source, texture, srcMipLevel);
cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._Destination, texture, srcMipLevel + 1);
cmd.DispatchCompute(cs, kernel, (dstMipWidth + 7) / 8, (dstMipHeight + 7) / 8, 1);
srcMipLevel++;
srcMipWidth = srcMipWidth >> 1;
srcMipHeight = srcMipHeight >> 1;
}
return srcMipLevel - 1;
}
// Generates the gaussian pyramid of source into destination
// We can't do it in place as the color pyramid has to be read while writing to the color
// buffer in some cases (e.g. refraction, distortion)
// Returns the number of mips
public int RenderColorGaussianPyramid(CommandBuffer cmd, Vector2Int size, Texture source, RenderTexture destination)
{
// Only create the temporary target on-demand in case the game doesn't actually need it
if (m_TempColorTarget == null)
{
m_TempColorTarget = RTHandles.Alloc(
Vector2.one * 0.5f,
filterMode: FilterMode.Bilinear,
colorFormat: RenderTextureFormat.ARGBHalf,
sRGB: false,
enableRandomWrite: true,
useMipMap: false,
enableMSAA: false,
name: "Temp Gaussian Pyramid Target"
);
}
var cs = m_ColorPyramidCS;
int downsampleKernel = m_ColorDownsampleKernel;
int downsampleKernelMip0 = m_ColorDownsampleKernelCopyMip0;
int gaussianKernel = m_ColorGaussianKernel;
int srcMipLevel = 0;
int srcMipWidth = size.x;
int srcMipHeight = size.y;
// Note: smaller mips are excluded as we don't need them and the gaussian compute works
// on 8x8 blocks
// TODO: Could be further optimized by merging the smaller mips to reduce the amount of dispatches
while (srcMipWidth >= 8 || srcMipHeight >= 8)
{
int dstMipWidth = Mathf.Max(1, srcMipWidth >> 1);
int dstMipHeight = Mathf.Max(1, srcMipHeight >> 1);
cmd.SetComputeVectorParam(cs, HDShaderIDs._Size, new Vector4(srcMipWidth, srcMipHeight, 0f, 0f));
// First dispatch also copies src to dst mip0
if (srcMipLevel == 0)
{
cmd.SetComputeTextureParam(cs, downsampleKernelMip0, HDShaderIDs._Source, source, 0);
cmd.SetComputeTextureParam(cs, downsampleKernelMip0, HDShaderIDs._Mip0, destination, 0);
cmd.SetComputeTextureParam(cs, downsampleKernelMip0, HDShaderIDs._Destination, m_TempColorTarget);
cmd.DispatchCompute(cs, downsampleKernelMip0, (dstMipWidth + 7) / 8, (dstMipHeight + 7) / 8, 1);
}
else
{
cmd.SetComputeTextureParam(cs, downsampleKernel, HDShaderIDs._Source, destination, srcMipLevel);
cmd.SetComputeTextureParam(cs, downsampleKernel, HDShaderIDs._Destination, m_TempColorTarget);
cmd.DispatchCompute(cs, downsampleKernel, (dstMipWidth + 7) / 8, (dstMipHeight + 7) / 8, 1);
}
cmd.SetComputeVectorParam(cs, HDShaderIDs._Size, new Vector4(dstMipWidth, dstMipHeight, 0f, 0f));
cmd.SetComputeTextureParam(cs, gaussianKernel, HDShaderIDs._Source, m_TempColorTarget);
cmd.SetComputeTextureParam(cs, gaussianKernel, HDShaderIDs._Destination, destination, srcMipLevel + 1);
cmd.DispatchCompute(cs, gaussianKernel, (dstMipWidth + 7) / 8, (dstMipHeight + 7) / 8, 1);
srcMipLevel++;
srcMipWidth = srcMipWidth >> 1;
srcMipHeight = srcMipHeight >> 1;
}
return srcMipLevel - 1;
}
}
}
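
A sketch of how a caller could drive RenderColorGaussianPyramid through the API above. The asset, source texture, and sizes are placeholders, and it assumes the HDRP RTHandle system has already been initialized by the pipeline, since the class lazily allocates its half-resolution temporary through RTHandles.Alloc.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering.HDPipeline;

// Illustrative sketch only: builds a color pyramid for a camera-sized source.
static class MipGeneratorUsageSketch
{
    static void BuildColorPyramid(HDRenderPipelineAsset hdAsset, Texture cameraColor, int width, int height)
    {
        var mipGen = new MipGenerator(hdAsset);

        // The destination needs mips and UAV access because the compute kernels write each level.
        var pyramid = new RenderTexture(width, height, 0, RenderTextureFormat.ARGBHalf)
        {
            useMipMap = true,
            autoGenerateMips = false,
            enableRandomWrite = true
        };
        pyramid.Create();

        var cmd = new CommandBuffer { name = "Color Gaussian Pyramid" };
        int mipCount = mipGen.RenderColorGaussianPyramid(cmd, new Vector2Int(width, height), cameraColor, pyramid);
        Graphics.ExecuteCommandBuffer(cmd);
        cmd.Release();

        Debug.Log("Generated " + mipCount + " mips");
    }
}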

11
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/MipGenerator.cs.meta


fileFormatVersion: 2
guid: 9fb5527a68d5789439b1caea92b87bdf
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

163
com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineCameraEditor.cs


using System;
using UnityEditor.AnimatedValues;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering.LightweightPipeline;
namespace UnityEditor.Experimental.Rendering.LightweightPipeline
{
[CustomEditorForRenderPipeline(typeof(Camera), typeof(LightweightPipelineAsset))]
[CanEditMultipleObjects]
internal class LightweightRenderPipelineCameraEditor : CameraEditor
{
internal class Styles
{
public readonly GUIContent renderingPathLabel = EditorGUIUtility.TrTextContent("Rendering Path", "Lightweight Render Pipeline only supports Forward rendering path.");
public readonly GUIContent[] renderingPathOptions = { EditorGUIUtility.TrTextContent("Forward") };
public readonly string hdrDisabledWarning = "HDR rendering is disabled in the Lightweight Render Pipeline asset.";
public readonly string mssaDisabledWarning = "Anti-aliasing is disabled in the Lightweight Render Pipeline asset.";
};
public Camera camera { get { return target as Camera; } }
// Animation Properties
public bool isSameClearFlags { get { return !settings.clearFlags.hasMultipleDifferentValues; } }
public bool isSameOrthographic { get { return !settings.orthographic.hasMultipleDifferentValues; } }
static readonly int[] s_RenderingPathValues = {0};
static Styles s_Styles;
LightweightPipelineAsset m_LightweightPipeline;
readonly AnimBool m_ShowBGColorAnim = new AnimBool();
readonly AnimBool m_ShowOrthoAnim = new AnimBool();
readonly AnimBool m_ShowTargetEyeAnim = new AnimBool();
void SetAnimationTarget(AnimBool anim, bool initialize, bool targetValue)
{
if (initialize)
{
anim.value = targetValue;
anim.valueChanged.AddListener(Repaint);
}
else
{
anim.target = targetValue;
}
}
void UpdateAnimationValues(bool initialize)
{
SetAnimationTarget(m_ShowBGColorAnim, initialize, isSameClearFlags && (camera.clearFlags == CameraClearFlags.SolidColor || camera.clearFlags == CameraClearFlags.Skybox));
SetAnimationTarget(m_ShowOrthoAnim, initialize, isSameOrthographic && camera.orthographic);
SetAnimationTarget(m_ShowTargetEyeAnim, initialize, settings.targetEye.intValue != (int)StereoTargetEyeMask.Both || PlayerSettings.virtualRealitySupported);
}
public new void OnEnable()
{
m_LightweightPipeline = GraphicsSettings.renderPipelineAsset as LightweightPipelineAsset;
settings.OnEnable();
UpdateAnimationValues(true);
}
public void OnDisable()
{
m_ShowBGColorAnim.valueChanged.RemoveListener(Repaint);
m_ShowOrthoAnim.valueChanged.RemoveListener(Repaint);
m_ShowTargetEyeAnim.valueChanged.RemoveListener(Repaint);
m_LightweightPipeline = null;
}
public override void OnInspectorGUI()
{
if (s_Styles == null)
s_Styles = new Styles();
settings.Update();
UpdateAnimationValues(false);
settings.DrawClearFlags();
using (var group = new EditorGUILayout.FadeGroupScope(m_ShowBGColorAnim.faded))
if (group.visible) settings.DrawBackgroundColor();
settings.DrawCullingMask();
EditorGUILayout.Space();
settings.DrawProjection();
settings.DrawClippingPlanes();
settings.DrawNormalizedViewPort();
EditorGUILayout.Space();
settings.DrawDepth();
DrawRenderingPath();
DrawTargetTexture();
settings.DrawOcclusionCulling();
DrawHDR();
DrawMSAA();
settings.DrawVR();
settings.DrawMultiDisplay();
using (var group = new EditorGUILayout.FadeGroupScope(m_ShowTargetEyeAnim.faded))
if (group.visible) settings.DrawTargetEye();
EditorGUILayout.Space();
EditorGUILayout.Space();
settings.ApplyModifiedProperties();
}
void DrawRenderingPath()
{
using (new EditorGUI.DisabledScope(true))
{
EditorGUILayout.IntPopup(s_Styles.renderingPathLabel, 0, s_Styles.renderingPathOptions, s_RenderingPathValues);
}
}
void DrawHDR()
{
bool disabled = settings.HDR.boolValue && !m_LightweightPipeline.supportsHDR;
using (new EditorGUI.DisabledScope(disabled))
{
settings.DrawHDR();
}
if (disabled)
EditorGUILayout.HelpBox(s_Styles.hdrDisabledWarning, MessageType.Info);
}
void DrawTargetTexture()
{
EditorGUILayout.PropertyField(settings.targetTexture);
if (!settings.targetTexture.hasMultipleDifferentValues)
{
var texture = settings.targetTexture.objectReferenceValue as RenderTexture;
int pipelineSamplesCount = m_LightweightPipeline.msaaSampleCount;
if (texture && texture.antiAliasing > pipelineSamplesCount)
{
string pipelineMSAACaps = (pipelineSamplesCount > 1)
? String.Format("is set to support {0}x", pipelineSamplesCount)
: "has MSAA disabled";
EditorGUILayout.HelpBox(String.Format("Camera target texture requires {0}x MSAA. Lightweight pipeline {1}.", texture.antiAliasing, pipelineMSAACaps),
MessageType.Warning, true);
}
}
}
void DrawMSAA()
{
bool disabled = settings.allowMSAA.boolValue && m_LightweightPipeline.msaaSampleCount <= 1;
using (new EditorGUI.DisabledScope(disabled))
{
settings.DrawMSAA();
}
if (disabled)
EditorGUILayout.HelpBox(s_Styles.mssaDisabledWarning, MessageType.Info);
}
}
}

22
com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/OSXEditor/Metal/054_Lighting_Attenuation.png

Before / After
Width: 640  |  Height: 360  |  Size: 21 KiB

88
com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/OSXEditor/Metal/054_Lighting_Attenuation.png.meta


fileFormatVersion: 2
guid: 77454810ee6d94b4f8e94a5db6b50f60
TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 7
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0
textureShape: 1
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/WindowsEditor/Direct3D11/054_Lighting_Attenuation.png
The diff for this file is too large to display.

88
com.unity.testing.srp.lightweight/Tests/ReferenceImages/Linear/WindowsEditor/Direct3D11/054_Lighting_Attenuation.png.meta


fileFormatVersion: 2
guid: e07d781d2760434448497ef86cd0d485
TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 7
mipmaps:
mipMapMode: 0
enableMipMap: 0
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 1
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1
nPOTScale: 0
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0
textureShape: 1
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

8
com.unity.testing.srp.lightweight/Tests/Scenes/052_Lighting_Attenuation.meta


fileFormatVersion: 2
guid: 12028400f279f18429395c8b46a8b90b
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

8
com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation.meta


fileFormatVersion: 2
guid: 079312de60075284cb8f893462f9b6f8
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

1001
com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation.unity
The diff for this file is too large to display.

7
com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation.unity.meta


fileFormatVersion: 2
guid: 93a99004f07ca6f4dbbc9ccb319c7698
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

8
com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/LightingData.asset.meta


fileFormatVersion: 2
guid: 56ea575a5cc3efc4f9ffc9f1b53eb566
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 25800000
userData:
assetBundleName:
assetBundleVariant:

88
com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/Lightmap-0_comp_dir.png.meta


fileFormatVersion: 2
guid: 4d3edf7b91d25034c888da1f58ae39e7
TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 7
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 0
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 1
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: 1
aniso: 3
mipBias: 0
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0
textureShape: 1
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 2
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

88
com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/Lightmap-0_comp_light.exr.meta


fileFormatVersion: 2
guid: 3e6496c2664a90341b98b6a16d506002
TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 7
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 1
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: 1
aniso: 3
mipBias: 0
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 0
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 6
textureShape: 1
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 2
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

7
com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/LightingData.asset


(Binary LightingData.asset content, built with 2018.3.0b1; not displayable as text.)

87
com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/Lightmap-0_comp_dir.png

Before / After
Width: 512  |  Height: 512  |  Size: 21 KiB

408
com.unity.testing.srp.lightweight/Tests/Scenes/054_Lighting_Attenuation/Lightmap-0_comp_light.exr
The diff for this file is too large to display.

11
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs.meta


fileFormatVersion: 2
guid: 5555ed542465a3a42aa9f58eccc62c68
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

236
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs


using System.Collections.Generic;
using UnityEngine.Assertions;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
public class BufferPyramidProcessor
{
static readonly int _Size = Shader.PropertyToID("_Size");
static readonly int _Source = Shader.PropertyToID("_Source");
static readonly int _Result = Shader.PropertyToID("_Result");
static readonly int _SrcSize = Shader.PropertyToID("_SrcSize");
const int k_DepthBlockSize = 4;
GPUCopy m_GPUCopy;
TexturePadding m_TexturePadding;
ComputeShader m_ColorPyramidCS;
int m_ColorPyramidKernel;
ComputeShader m_DepthPyramidCS;
int[] m_DepthKernels = null;
int depthKernel8 { get { return m_DepthKernels[0]; } }
int depthKernel1 { get { return m_DepthKernels[1]; } }
List<RenderTexture> m_RenderColorPyramid_CastTmp = new List<RenderTexture>();
public BufferPyramidProcessor(
ComputeShader colorPyramidCS,
ComputeShader depthPyramidCS,
GPUCopy gpuCopy,
TexturePadding texturePadding
)
{
m_ColorPyramidCS = colorPyramidCS;
m_ColorPyramidKernel = m_ColorPyramidCS.FindKernel("KMain");
m_DepthPyramidCS = depthPyramidCS;
m_GPUCopy = gpuCopy;
m_DepthKernels = new int[]
{
m_DepthPyramidCS.FindKernel("KDepthDownSample8"),
m_DepthPyramidCS.FindKernel("KDepthDownSample1")
};
m_TexturePadding = texturePadding;
}
public void RenderDepthPyramid(
int width, int height,
CommandBuffer cmd,
RTHandleSystem.RTHandle sourceTexture,
RTHandleSystem.RTHandle targetTexture,
List<RTHandleSystem.RTHandle> mips,
int lodCount,
Vector2 scale
)
{
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, sourceTexture, targetTexture, new RectInt(0, 0, width, height));
var src = targetTexture;
for (var i = 0; i < lodCount; i++)
{
var dest = mips[i];
var srcMip = new RectInt(0, 0, width >> i, height >> i);
var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
var kernel = depthKernel1;
var kernelSize = 1;
var srcWorkMip = srcMip;
var dstWorkMip = dstMip;
if (dstWorkMip.width >= 8 && dstWorkMip.height >= 8)
{
srcWorkMip.width = Mathf.CeilToInt(srcWorkMip.width / 16.0f) * 16;
srcWorkMip.height = Mathf.CeilToInt(srcWorkMip.height / 16.0f) * 16;
dstWorkMip.width = srcWorkMip.width >> 1;
dstWorkMip.height = srcWorkMip.height >> 1;
m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);
kernel = depthKernel8;
kernelSize = 8;
}
else
{
m_TexturePadding.Pad(cmd, src, srcMip, new RectInt(0, 0, src.rt.width, src.rt.height));
}
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Source, src);
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
// The compute shader work in texture space
// So we must provide the texture's size
cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(
src.rt.width, src.rt.height,
(1.0f / src.rt.width), (1.0f / src.rt.height))
);
cmd.DispatchCompute(
m_DepthPyramidCS,
kernel,
Mathf.CeilToInt(dstWorkMip.width / (float)kernelSize),
Mathf.CeilToInt(dstWorkMip.height / (float)kernelSize),
1
);
var dstMipWidthToCopy = Mathf.Min(Mathf.Min(targetTexture.rt.width >> (i + 1), dstWorkMip.width), mips[i].rt.width);
var dstMipHeightToCopy = Mathf.Min(Mathf.Min(targetTexture.rt.height >> (i + 1), dstWorkMip.height), mips[i].rt.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(mips[i], 0, 0, 0, 0, dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0);
src = dest;
}
}
public void RenderColorPyramid(
HDCamera hdCamera,
CommandBuffer cmd,
RTHandleSystem.RTHandle sourceTexture,
RTHandleSystem.RTHandle targetTexture,
List<RTHandleSystem.RTHandle> mips,
int lodCount,
Vector2 scale
)
{
// Copy mip 0
// Here we blit a "camera space" texture into a square texture but we want to keep the original viewport.
// Other BlitCameraTexture version will setup the viewport based on the destination RT scale (square here) so we need override it here.
HDUtils.BlitCameraTexture(cmd, hdCamera, sourceTexture, targetTexture, new Rect(0.0f, 0.0f, hdCamera.actualWidth, hdCamera.actualHeight));
m_RenderColorPyramid_CastTmp.Clear();
for (var i = 0; i < mips.Count; ++i)
m_RenderColorPyramid_CastTmp.Add(mips[i]);
RenderColorPyramidMips(
new RectInt(0, 0, hdCamera.actualWidth, hdCamera.actualHeight),
cmd,
targetTexture,
m_RenderColorPyramid_CastTmp,
lodCount,
scale
);
}
public void RenderColorPyramid(
RectInt srcRect,
CommandBuffer cmd,
Texture sourceTexture,
RenderTexture targetTexture,
List<RenderTexture> mips,
int lodCount
)
{
Assert.AreEqual(0, srcRect.x, "Offset are not supported");
Assert.AreEqual(0, srcRect.y, "Offset are not supported");
Assert.IsTrue(srcRect.width > 0);
Assert.IsTrue(srcRect.height > 0);
var scale = new Vector2(
sourceTexture.width / (float)srcRect.width,
sourceTexture.height / (float)srcRect.height
);
cmd.Blit(sourceTexture, targetTexture, scale, Vector2.zero);
RenderColorPyramidMips(
srcRect,
cmd,
targetTexture,
mips,
lodCount,
scale
);
}
void RenderColorPyramidMips(
RectInt srcRect,
CommandBuffer cmd,
RenderTexture targetTexture,
List<RenderTexture> mips,
int lodCount,
Vector2 scale
)
{
Assert.AreEqual(0, srcRect.x, "Offset are not supported");
Assert.AreEqual(0, srcRect.y, "Offset are not supported");
Assert.IsTrue(srcRect.width > 0);
Assert.IsTrue(srcRect.height > 0);
var src = targetTexture;
for (var i = 0; i < lodCount; i++)
{
var dest = mips[i];
var srcMip = new RectInt(0, 0, srcRect.width >> i, srcRect.height >> i);
var srcWorkMip = new RectInt(
0,
0,
Mathf.CeilToInt(srcMip.width / 16.0f) * 16,
Mathf.CeilToInt(srcMip.height / 16.0f) * 16
);
var dstWorkMip = new RectInt(0, 0, srcWorkMip.width >> 1, srcWorkMip.height >> 1);
m_TexturePadding.Pad(cmd, src, srcMip, srcWorkMip);
// TODO: Add proper stereo support to the compute job
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Source, src);
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Result, dest);
// _Size is used as a scale inside the whole render target so here we need to keep the full size (and not the scaled size depending on the current camera)
cmd.SetComputeVectorParam(
m_ColorPyramidCS,
_Size,
new Vector4(src.width >> 1, src.height >> 1, 1f / (src.width >> 1), 1f / (src.height >> 1))
);
cmd.DispatchCompute(
m_ColorPyramidCS,
m_ColorPyramidKernel,
dstWorkMip.width / 8,
dstWorkMip.height / 8,
1
);
var dstMipWidthToCopy = Mathf.Min(Mathf.Min(targetTexture.width >> (i + 1), dstWorkMip.width), mips[i].width);
var dstMipHeightToCopy = Mathf.Min(Mathf.Min(targetTexture.height >> (i + 1), dstWorkMip.height), mips[i].height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(
mips[i],
0, 0, 0, 0,
dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0
);
src = dest;
}
}
}
}
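
Both group-count idioms used for the dispatches in this file and in MipGenerator.cs, the (n + 7) / 8 form and the Mathf.CeilToInt(n / 8.0f) form, are integer ceiling division, so a partially filled 8x8 thread group at the edge of a mip is still dispatched. A tiny illustrative check; the helper name is hypothetical.

using UnityEngine;

// Illustrative only: the two ceiling-division idioms agree for any positive size.
static class DispatchMathSketch
{
    static int GroupCount(int size, int groupSize)
    {
        int viaAdd = (size + groupSize - 1) / groupSize;         // e.g. (100 + 7) / 8 == 13
        int viaCeil = Mathf.CeilToInt(size / (float)groupSize);  // also 13
        Debug.Assert(viaAdd == viaCeil);
        return viaAdd;
    }
}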

11
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramid.cs.meta


fileFormatVersion: 2
guid: ea3e7945ee7dc7a479b9e6846a0c544c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

176
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/BufferPyramid.cs


using System;
using System.Collections.Generic;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
class BufferPyramid
{
List<RTHandleSystem.RTHandle> m_ColorPyramidMips = new List<RTHandleSystem.RTHandle>();
List<RTHandleSystem.RTHandle> m_DepthPyramidMips = new List<RTHandleSystem.RTHandle>();
BufferPyramidProcessor m_Processor;
public BufferPyramid(BufferPyramidProcessor processor)
{
m_Processor = processor;
}
float GetXRscale()
{
// for stereo double-wide, each half of the texture will represent a single eye's pyramid
float scale = 1.0f;
//if (m_Asset.renderPipelineSettings.supportsStereo && (desc.dimension != TextureDimension.Tex2DArray))
// scale = 2.0f; // double-wide
return scale;
}
public void DestroyBuffers()
{
foreach (var rth in m_ColorPyramidMips)
RTHandles.Release(rth);
foreach (var rth in m_DepthPyramidMips)
RTHandles.Release(rth);
}
public int GetPyramidLodCount(Vector2Int size)
{
var minSize = Mathf.Min(size.x, size.y);
return Mathf.Max(0, Mathf.FloorToInt(Mathf.Log(minSize, 2f)));
}
Vector2Int CalculatePyramidMipSize(Vector2Int baseMipSize, int mipIndex)
{
return new Vector2Int(baseMipSize.x >> mipIndex, baseMipSize.y >> mipIndex);
}
Vector2Int CalculatePyramidSize(Vector2Int size)
{
// Instead of using the screen size, we round up to the next power of 2 because currently some platforms don't support NPOT Render Texture with mip maps (PS4 for example)
// Then we render in a Screen Sized viewport.
// Note that even if PS4 supported NPOT mips, the buffers would be padded to the next power of 2 anyway (TODO: check with other platforms...)
int pyramidSize = (int)Mathf.NextPowerOfTwo(Mathf.Max(size.x, size.y));
return new Vector2Int((int)(pyramidSize * GetXRscale()), pyramidSize);
}
void UpdatePyramidMips(HDCamera camera, RenderTextureFormat format, List<RTHandleSystem.RTHandle> mipList, int lodCount)
{
int currentLodCount = mipList.Count;
if (lodCount > currentLodCount)
{
for (int i = currentLodCount; i < lodCount; ++i)
{
int mipIndexCopy = i + 1; // Don't remove this copy! It's important for the value to be correctly captured by the lambda.
var newMip = RTHandles.Alloc(size => CalculatePyramidMipSize(CalculatePyramidSize(size), mipIndexCopy), colorFormat: format, sRGB: false, enableRandomWrite: true, useMipMap: false, filterMode: FilterMode.Bilinear, name: string.Format("PyramidMip{0}", i));
mipList.Add(newMip);
}
}
}
public Vector2 GetPyramidToScreenScale(HDCamera camera, RTHandleSystem.RTHandle rth)
{
return new Vector2((float)camera.actualWidth / rth.rt.width, (float)camera.actualHeight / rth.rt.height);
}
public void RenderDepthPyramid(
HDCamera hdCamera,
CommandBuffer cmd,
ScriptableRenderContext renderContext,
RTHandleSystem.RTHandle sourceDepthTexture,
RTHandleSystem.RTHandle targetDepthTexture)
{
int lodCount = Mathf.Min(
GetPyramidLodCount(targetDepthTexture.referenceSize),
GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
);
if (lodCount == 0)
{
Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping DepthPyramid calculation.");
return;
}
UpdatePyramidMips(hdCamera, targetDepthTexture.rt.format, m_DepthPyramidMips, lodCount);
Vector2 scale = GetPyramidToScreenScale(hdCamera, targetDepthTexture);
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));
m_Processor.RenderDepthPyramid(
hdCamera.actualWidth, hdCamera.actualHeight,
cmd,
sourceDepthTexture,
targetDepthTexture,
m_DepthPyramidMips,
lodCount,
scale
);
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, targetDepthTexture);
}
public void RenderColorPyramid(
HDCamera hdCamera,
CommandBuffer cmd,
ScriptableRenderContext renderContext,
RTHandleSystem.RTHandle sourceColorTexture,
RTHandleSystem.RTHandle targetColorTexture)
{
int lodCount = Mathf.Min(
GetPyramidLodCount(targetColorTexture.referenceSize),
GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
);
if (lodCount == 0)
{
Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping ColorPyramid calculation.");
return;
}
UpdatePyramidMips(hdCamera, targetColorTexture.rt.format, m_ColorPyramidMips, lodCount);
Vector2 scale = GetPyramidToScreenScale(hdCamera, targetColorTexture);
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));
m_Processor.RenderColorPyramid(
hdCamera,
cmd,
sourceColorTexture,
targetColorTexture,
m_ColorPyramidMips,
lodCount,
scale
);
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, targetColorTexture);
}
public RTHandleSystem.RTHandle AllocColorRT(string id, int frameIndex, RTHandleSystem rtHandleSystem)
{
return rtHandleSystem.Alloc(
size => CalculatePyramidSize(size),
filterMode: FilterMode.Trilinear,
colorFormat: RenderTextureFormat.ARGBHalf,
sRGB: false,
useMipMap: true,
autoGenerateMips: false,
enableRandomWrite: true,
name: string.Format("ColorPyramid-{0}-{1}", id, frameIndex)
);
}
public RTHandleSystem.RTHandle AllocDepthRT(string id, int frameIndex, RTHandleSystem rtHandleSystem)
{
return rtHandleSystem.Alloc(
size => CalculatePyramidSize(size),
filterMode: FilterMode.Trilinear,
colorFormat: RenderTextureFormat.RGFloat,
sRGB: false,
useMipMap: true,
autoGenerateMips: false,
enableRandomWrite: true, // Need randomReadWrite because we downsample the first mip with a compute shader.
name: string.Format("DepthPyramid-{0}-{1}", id, frameIndex)
);
}
}
}
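
To make the sizing comments above concrete: for a 1920x1080 camera, CalculatePyramidSize pads the target to the next power of two of the larger dimension (2048x2048 before the XR scale), and GetPyramidLodCount yields floor(log2(1080)) = 10 usable mips. A short sketch mirroring that arithmetic; the camera size is only an example.

using UnityEngine;

// Illustrative only: mirrors CalculatePyramidSize / GetPyramidLodCount for an example camera size.
static class PyramidSizeSketch
{
    static void Demo()
    {
        var cameraSize = new Vector2Int(1920, 1080);

        int pyramidSize = Mathf.NextPowerOfTwo(Mathf.Max(cameraSize.x, cameraSize.y));                       // 2048
        int lodCount = Mathf.Max(0, Mathf.FloorToInt(Mathf.Log(Mathf.Min(cameraSize.x, cameraSize.y), 2f))); // 10

        Debug.Log("Pyramid: " + pyramidSize + "x" + pyramidSize + ", LODs: " + lodCount);
    }
}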

173
com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightCameraEditor.cs


using System;
using UnityEditor.AnimatedValues;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering.LightweightPipeline;
namespace UnityEditor
{
[CustomEditorForRenderPipeline(typeof(Camera), typeof(LightweightPipelineAsset))]
[CanEditMultipleObjects]
public class LightweightCameraEditor : CameraEditor
{
public class Styles
{
public readonly GUIContent renderingPathLabel = new GUIContent("Rendering Path");
public readonly GUIContent[] renderingPathOptions = { new GUIContent("Forward") };
public readonly GUIContent renderingPathInfo = new GUIContent("Lightweight Pipeline only supports Forward rendering path.");
public readonly GUIContent fixNow = new GUIContent("Fix now");
public readonly GUIContent additionalCameraDataLabel = new GUIContent("Add Additional Camera Data");
public readonly string mssaDisabledWarning = "Anti Aliasing is disabled in Lightweight Pipeline settings.";
};
public Camera camera { get { return target as Camera; } }
// Animation Properties
public bool isSameClearFlags { get { return !settings.clearFlags.hasMultipleDifferentValues; } }
public bool isSameOrthographic { get { return !settings.orthographic.hasMultipleDifferentValues; } }
static readonly int[] s_RenderingPathValues = {0};
static Styles s_Styles;
LightweightPipelineAsset m_LightweightPipeline;
readonly AnimBool m_ShowBGColorAnim = new AnimBool();
readonly AnimBool m_ShowOrthoAnim = new AnimBool();
readonly AnimBool m_ShowTargetEyeAnim = new AnimBool();
void SetAnimationTarget(AnimBool anim, bool initialize, bool targetValue)
{
if (initialize)
{
anim.value = targetValue;
anim.valueChanged.AddListener(Repaint);
}
else
{
anim.target = targetValue;
}
}
void UpdateAnimationValues(bool initialize)
{
SetAnimationTarget(m_ShowBGColorAnim, initialize, isSameClearFlags && (camera.clearFlags == CameraClearFlags.SolidColor || camera.clearFlags == CameraClearFlags.Skybox));
SetAnimationTarget(m_ShowOrthoAnim, initialize, isSameOrthographic && camera.orthographic);
SetAnimationTarget(m_ShowTargetEyeAnim, initialize, settings.targetEye.intValue != (int)StereoTargetEyeMask.Both || PlayerSettings.virtualRealitySupported);
}
public new void OnEnable()
{
m_LightweightPipeline = GraphicsSettings.renderPipelineAsset as LightweightPipelineAsset;
settings.OnEnable();
UpdateAnimationValues(true);
}
public void OnDisable()
{
m_ShowBGColorAnim.valueChanged.RemoveListener(Repaint);
m_ShowOrthoAnim.valueChanged.RemoveListener(Repaint);
m_ShowTargetEyeAnim.valueChanged.RemoveListener(Repaint);
m_LightweightPipeline = null;
}
public override void OnInspectorGUI()
{
if (s_Styles == null)
s_Styles = new Styles();
settings.Update();
UpdateAnimationValues(false);
settings.DrawClearFlags();
using (var group = new EditorGUILayout.FadeGroupScope(m_ShowBGColorAnim.faded))
if (group.visible) settings.DrawBackgroundColor();
settings.DrawCullingMask();
EditorGUILayout.Space();
settings.DrawProjection();
settings.DrawClippingPlanes();
settings.DrawNormalizedViewPort();
EditorGUILayout.Space();
settings.DrawDepth();
DrawRenderingPath();
DrawTargetTexture();
settings.DrawOcclusionCulling();
DrawHDR();
DrawMSAA();
settings.DrawVR();
settings.DrawMultiDisplay();
using (var group = new EditorGUILayout.FadeGroupScope(m_ShowTargetEyeAnim.faded))
if (group.visible) settings.DrawTargetEye();
EditorGUILayout.Space();
EditorGUILayout.Space();
GameObject gameObject = camera.gameObject;
if (gameObject.GetComponent<LightweightAdditionalCameraData>() == null)
{
if (GUILayout.Button(s_Styles.additionalCameraDataLabel))
{
gameObject.AddComponent<LightweightAdditionalCameraData>();
}
}
settings.ApplyModifiedProperties();
}
void DrawRenderingPath()
{
using (new EditorGUI.DisabledScope(true))
{
EditorGUILayout.IntPopup(s_Styles.renderingPathLabel, 0, s_Styles.renderingPathOptions, s_RenderingPathValues);
}
EditorGUILayout.HelpBox(s_Styles.renderingPathInfo.text, MessageType.Info);
}
void DrawHDR()
{
settings.DrawHDR();
if (settings.HDR.boolValue && !m_LightweightPipeline.supportsHDR)
EditorGUILayout.HelpBox("HDR rendering is disabled in Lightweight Pipeline asset.", MessageType.Warning);
}
void DrawTargetTexture()
{
EditorGUILayout.PropertyField(settings.targetTexture);
if (!settings.targetTexture.hasMultipleDifferentValues)
{
var texture = settings.targetTexture.objectReferenceValue as RenderTexture;
int pipelineSamplesCount = m_LightweightPipeline.msaaSampleCount;
if (texture && texture.antiAliasing > pipelineSamplesCount)
{
string pipelineMSAACaps = (pipelineSamplesCount > 1)
? String.Format("is set to support {0}x", pipelineSamplesCount)
: "has MSAA disabled";
EditorGUILayout.HelpBox(String.Format("Camera target texture requires {0}x MSAA. Lightweight pipeline {1}.", texture.antiAliasing, pipelineMSAACaps),
MessageType.Warning, true);
if (GUILayout.Button(s_Styles.fixNow))
m_LightweightPipeline.msaaSampleCount = texture.antiAliasing;
}
}
}
void DrawMSAA()
{
EditorGUILayout.PropertyField(settings.allowMSAA);
if (settings.allowMSAA.boolValue && m_LightweightPipeline.msaaSampleCount <= 1)
{
EditorGUILayout.HelpBox(s_Styles.mssaDisabledWarning, MessageType.Warning);
if (GUILayout.Button(s_Styles.fixNow))
m_LightweightPipeline.msaaSampleCount = 4;
}
}
}
}

/com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightLightEditor.cs.meta → /com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineLightEditor.cs.meta

/com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightLightEditor.cs → /com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineLightEditor.cs

/com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightCameraEditor.cs.meta → /com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightRenderPipelineCameraEditor.cs.meta
