
Merge branch 'master' into stacklit

Jean-François F Fortin, 7 years ago
Current commit: a2ee3145
19 files changed, with 140 additions and 134 deletions
  1. ScriptableRenderPipeline/Core/CoreRP/Shadow/Shadow.cs (7 changes)
  2. ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md (7 changes)
  3. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDAdditionalCameraData.cs (14 changes)
  4. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (2 changes)
  5. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/HDCameraEditor.Handlers.cs (8 changes)
  6. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/HDCameraEditor.cs (5 changes)
  7. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs (7 changes)
  8. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/FrameSettingsUI.cs (3 changes)
  9. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedFrameSettings.cs (6 changes)
  10. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (5 changes)
  11. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs (7 changes)
  12. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/GlobalIlluminationUtils.cs (35 changes)
  13. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs (64 changes)
  14. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (46 changes)
  15. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs.hlsl (17 changes)
  16. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs (27 changes)
  17. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs (4 changes)
  18. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyManager.cs (6 changes)
  19. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/VisualEnvironment.cs (4 changes)

ScriptableRenderPipeline/Core/CoreRP/Shadow/Shadow.cs (7 changes)


int requestIdx = shadowRequests[i];
VisibleLight vl = lights[requestIdx];
int facecount = 0;
GPUShadowType shadowType = GPUShadowType.Point;
GPUShadowType shadowType = GetShadowLightType( vl.light );
bool add = (distToCam < asd.shadowFadeDistance || vl.lightType == LightType.Directional) && m_ShadowSettings.enabled;
bool add = shadowType != GPUShadowType.Unknown && (distToCam < asd.shadowFadeDistance || vl.lightType == LightType.Directional) && m_ShadowSettings.enabled;
if( add )
{

add = --m_MaxShadows[(int)GPUShadowType.Directional, 0] >= 0;
shadowType = GPUShadowType.Directional;
shadowType = GPUShadowType.Point;
shadowType = GPUShadowType.Spot;
facecount = 1;
break;
}
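
The hunk above replaces the hard-coded GPUShadowType.Point with GetShadowLightType( vl.light ) and skips any request whose type resolves to GPUShadowType.Unknown, which is what the changelog entry about unknown GPUShadowTypes refers to. Below is a minimal sketch of what such a mapping could look like; this is a hypothetical helper for illustration, not the actual Shadow.cs implementation, and GPUShadowType is the enum already used in the hunk.

using UnityEngine;

static class ShadowLightTypeSketch
{
    // Illustrative only: resolve a GPUShadowType from a Unity Light so callers
    // can reject unsupported types instead of defaulting them to Point.
    public static GPUShadowType GetShadowLightType(Light light)
    {
        switch (light.type)
        {
            case LightType.Directional: return GPUShadowType.Directional;
            case LightType.Point:       return GPUShadowType.Point;
            case LightType.Spot:        return GPUShadowType.Spot;
            default:                    return GPUShadowType.Unknown; // e.g. area lights
        }
    }
}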

ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md (7 changes)


- Rename _MainDepthTexture to _CameraDepthTexture
- The VolumetricLightingController has been moved to the Interpolation Volume framework and now functions similarly to the VolumetricFog settings
- Update the UI of cookie, CubeCookie, Reflection probe and planar reflection probe to use a combo box
- Allow enabling/disabling shadows for area lights when they are set to baked.
### Bug fixes
- Fix the ConvertPhysicalLightIntensityToLightIntensity() function used when creating a light from script to match HDLightEditor behavior

- Fix the meta pass with triplanar (the normal was not handled correctly)
- Fix preview when a planar reflection is present
- Fix Camera preview, it is now a Preview cameraType (was a SceneView)
- Fix handling unknown GPUShadowTypes in the shadow manager.
- Fix area light shapes sent as point lights to the baking backends when they are set to baked.
- Fix unnecessary division by PI for baked area lights.
- Fix line lights sent to the lightmappers. The backends don't support this light type.
## [2018.1.0f2]

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDAdditionalCameraData.cs (14 changes)


public Color backgroundColorHDR = new Color(0.025f, 0.07f, 0.19f, 0.0f);
public bool clearDepth = true;
public RenderingPath renderingPath;
public LayerMask volumeLayerMask = -1;
// Physical parameters
public float aperture = 8f;

// We create a runtime copy (m_ActiveFrameSettings) that is actually used; any parameterization is done on the serialized frameSettings.
[SerializeField]
[FormerlySerializedAs("serializedFrameSettings")]
FrameSettings m_FrameSettings = new FrameSettings(); // Serialize frameSettings
// Not serialized, visible only in the debug windows
FrameSettings m_FrameSettingsRuntime = new FrameSettings();

// Used for the debug windows.
// When the camera name changes we need to update the name in the debug windows;
// this is the purpose of the fields below.
bool m_IsDebugRegistered = false;
string m_CameraRegisterName;
// When we are a preview, there is no way inside Unity to make a distinction between a camera preview and a material preview.
// This property allows us to say that we are an editor camera preview when the type is Preview.
public bool isEditorCameraPreview { get; set; }
// This is the function used from outside to access FrameSettings. It returns the current state of FrameSettings for the camera,
// taking into account the customization via the debug menu.
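The comments above describe a split between the serialized m_FrameSettings and a runtime copy (m_FrameSettingsRuntime) that the debug window can override without dirtying the asset. A minimal sketch of the accessor pair referenced here, assuming GetFrameSettings returns the runtime copy and UpdateDirtyFrameSettings pushes serialized values into it (illustrative, not the verbatim HDRP implementation; the parameter names are assumptions):

// Sketch only: callers read the runtime copy, never the serialized field directly.
public FrameSettings GetFrameSettings()
{
    return m_FrameSettingsRuntime;
}

// Sketch only: after the serialized settings change (e.g. via EditorUtility.CopySerialized),
// refresh the runtime copy so the pipeline and the debug window see the new values.
public void UpdateDirtyFrameSettings(bool updateDebugMenu, FrameSettings newFrameSettings)
{
    newFrameSettings.CopyTo(m_FrameSettingsRuntime);
}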

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (2 changes)


s_Cleanup.Clear();
}
// Look for any camera that hasn't been used in the last frame and remove them for the pool.
// Look for any camera that hasn't been used in the last frame and remove them from the pool.
public static void CleanUnused()
{
int frameCheck = Time.frameCount - 1;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/HDCameraEditor.Handlers.cs (8 changes)


EditorUtility.CopySerialized(cameraData, m_PreviewAdditionalCameraData);
var layer = c.GetComponent<PostProcessLayer>() ?? ComponentSingleton<PostProcessLayer>.instance;
EditorUtility.CopySerialized(layer, m_PreviewPostProcessLayer);
m_PreviewCamera.cameraType = CameraType.SceneView; // This is required else if we use Preview the image is flipped... (Unity...)
// We need to call UpdateDirtyFrameSettings to update the dirty flags that were set in the CopySerialized call
m_PreviewAdditionalCameraData.UpdateDirtyFrameSettings(true, cameraData.GetFrameSettings());
// And then to copy the runtime frame settings
// So this includes the runtime frame settings properly
cameraData.GetFrameSettings().CopyTo(m_PreviewAdditionalCameraData.GetFrameSettings());
m_PreviewHDCamera.Update(m_PreviewAdditionalCameraData.GetFrameSettings(), m_PreviewPostProcessLayer, null);
var previewTexture = GetPreviewTextureWithSize((int)previewSize.x, (int)previewSize.y);
m_PreviewCamera.targetTexture = previewTexture;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/HDCameraEditor.cs (5 changes)


Camera m_PreviewCamera;
HDAdditionalCameraData m_PreviewAdditionalCameraData;
PostProcessLayer m_PreviewPostProcessLayer;
HDCamera m_PreviewHDCamera;
void OnEnable()
{

m_PreviewCamera.enabled = false;
m_PreviewCamera.cameraType = CameraType.Preview; // Must be init before adding HDAdditionalCameraData
m_PreviewAdditionalCameraData = m_PreviewCamera.gameObject.AddComponent<HDAdditionalCameraData>();
// Say that we are a camera editor preview and not just a regular preview
m_PreviewAdditionalCameraData.isEditorCameraPreview = true;
m_PreviewHDCamera = new HDCamera(m_PreviewCamera);
m_PreviewHDCamera.Update(m_PreviewAdditionalCameraData.GetFrameSettings(), m_PreviewPostProcessLayer, null);
}
void OnDisable()

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs (7 changes)


void DrawFeatures()
{
bool disabledScope = settings.isCompletelyBaked
|| m_LightShape == LightShape.Line
|| m_LightShape == LightShape.Rectangle;
bool disabledScope = m_LightShape == LightShape.Line || (m_LightShape == LightShape.Rectangle && settings.isRealtime);
using (new EditorGUI.DisabledScope(disabledScope))
{

m_AdditionalLightData.shapeHeight.floatValue = Mathf.Max(m_AdditionalLightData.shapeHeight.floatValue, k_MinAreaWidth);
settings.areaSizeX.floatValue = m_AdditionalLightData.shapeWidth.floatValue;
settings.areaSizeY.floatValue = m_AdditionalLightData.shapeHeight.floatValue;
settings.shadowsType.enumValueIndex = (int)LightShadows.None;
if (settings.isRealtime)
settings.shadowsType.enumValueIndex = (int)LightShadows.None;
break;
case LightShape.Line:

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/FrameSettingsUI.cs (3 changes)


EditorGUILayout.PropertyField(p.enableMotionVectors, _.GetContent("Enable Motion Vectors"));
EditorGUILayout.PropertyField(p.enableObjectMotionVectors, _.GetContent("Enable Object Motion Vectors"));
EditorGUILayout.PropertyField(p.enableDBuffer, _.GetContent("Enable DBuffer"));
EditorGUILayout.PropertyField(p.enableAtmosphericScattering, _.GetContent("Enable Atmospheric Scattering"));
EditorGUILayout.PropertyField(p.enableRoughRefraction, _.GetContent("Enable Rough Refraction"));
EditorGUILayout.PropertyField(p.enableDistortion, _.GetContent("Enable Distortion"));
EditorGUILayout.PropertyField(p.enablePostprocess, _.GetContent("Enable Postprocess"));

EditorGUILayout.PropertyField(p.enableSSAO, _.GetContent("Enable SSAO"));
EditorGUILayout.PropertyField(p.enableSubsurfaceScattering, _.GetContent("Enable Subsurface Scattering"));
EditorGUILayout.PropertyField(p.enableTransmission, _.GetContent("Enable Transmission"));
EditorGUILayout.PropertyField(p.enableAtmosphericScattering, _.GetContent("Enable Atmospheric Scattering"));
EditorGUILayout.PropertyField(p.enableVolumetric, _.GetContent(" Enable Volumetric"));
EditorGUILayout.PropertyField(p.enableShadow, _.GetContent("Enable Shadow"));
EditorGUILayout.PropertyField(p.enableContactShadow, _.GetContent("Enable Contact Shadows"));
EditorGUILayout.PropertyField(p.enableShadowMask, _.GetContent("Enable Shadow Masks"));

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedFrameSettings.cs (6 changes)


public SerializedProperty enableSSAO;
public SerializedProperty enableSubsurfaceScattering;
public SerializedProperty enableTransmission;
public SerializedProperty enableAtmosphericScattering;
public SerializedProperty enableVolumetric;
public SerializedProperty diffuseGlobalDimmer;
public SerializedProperty specularGlobalDimmer;

public SerializedProperty enableMotionVectors;
public SerializedProperty enableObjectMotionVectors;
public SerializedProperty enableDBuffer;
public SerializedProperty enableAtmosphericScattering;
public SerializedProperty enableRoughRefraction;
public SerializedProperty enableTransparentPostpass;
public SerializedProperty enableDistortion;

enableSSAO = root.Find((FrameSettings d) => d.enableSSAO);
enableSubsurfaceScattering = root.Find((FrameSettings d) => d.enableSubsurfaceScattering);
enableTransmission = root.Find((FrameSettings d) => d.enableTransmission);
enableAtmosphericScattering = root.Find((FrameSettings d) => d.enableAtmosphericScattering);
enableVolumetric = root.Find((FrameSettings d) => d.enableVolumetric);
diffuseGlobalDimmer = root.Find((FrameSettings d) => d.diffuseGlobalDimmer);
specularGlobalDimmer = root.Find((FrameSettings d) => d.specularGlobalDimmer);
enableForwardRenderingOnly = root.Find((FrameSettings d) => d.enableForwardRenderingOnly);

enableObjectMotionVectors = root.Find((FrameSettings d) => d.enableObjectMotionVectors);
enableDBuffer = root.Find((FrameSettings d) => d.enableDBuffer);
enableAtmosphericScattering = root.Find((FrameSettings d) => d.enableAtmosphericScattering);
enableRoughRefraction = root.Find((FrameSettings d) => d.enableRoughRefraction);
enableTransparentPostpass = root.Find((FrameSettings d) => d.enableTransparentPostpass);
enableDistortion = root.Find((FrameSettings d) => d.enableDistortion);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (5 changes)


continue;
}
if (camera.cameraType != CameraType.Reflection
// Don't render reflections in Preview; it prevents the previews from displaying
if (camera.cameraType != CameraType.Reflection && camera.cameraType != CameraType.Preview
// Planar probe rendering is not currently supported for orthographic cameras.
// Avoid rendering to prevent error log spamming.
&& !camera.orthographic)

(hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky && !m_SkyManager.IsVisualSkyValid()) ||
// Special handling for Preview: we force a clear with the background color (i.e. black).
// Note that the sky used in this case is the last one set up. If there is no scene or game view, no sky is used as reflection in the preview.
hdCamera.camera.cameraType == CameraType.Preview
HDUtils.IsRegularPreviewCamera(hdCamera.camera)
)
{
Color clearColor = hdCamera.backgroundColorHDR;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs (7 changes)


Vector2 mousePixelCoord = MousePositionDebug.instance.GetMouseClickPosition(camera.screenSize.y);
return new Vector4(mousePixelCoord.x, mousePixelCoord.y, camera.viewportScale.x * mousePixelCoord.x / camera.screenSize.x, camera.viewportScale.y * mousePixelCoord.y / camera.screenSize.y);
}
// This function checks if the camera is a preview camera, then checks whether this preview is a regular preview (i.e. not a preview from the camera editor).
public static bool IsRegularPreviewCamera(Camera camera)
{
var additionalCameraData = camera.GetComponent<HDAdditionalCameraData>();
return camera.cameraType == CameraType.Preview && ((additionalCameraData == null) || (additionalCameraData && !additionalCameraData.isEditorCameraPreview));
}
}
}
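
IsRegularPreviewCamera distinguishes ordinary asset/material preview cameras from the camera editor's own preview, which sets isEditorCameraPreview on its HDAdditionalCameraData. A minimal usage sketch, mirroring the call sites later in this diff (FrameSettings.cs and SkyManager.cs):

// Sketch only: regular previews get expensive features stripped,
// while the camera editor preview keeps its full frame settings.
if (HDUtils.IsRegularPreviewCamera(camera))
{
    aggregate.enableAtmosphericScattering = false;
    aggregate.enableVolumetric = false;
}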

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/GlobalIlluminationUtils.cs (35 changes)


ld.color = add.affectDiffuse ? LinearColor.Convert(l.color, l.intensity) : LinearColor.Black();
ld.indirectColor = add.affectDiffuse ? LightmapperUtils.ExtractIndirect(l) : LinearColor.Black();
// For HDRP we need to divide the analytic light color by PI (HDRP does an explicit PI division for Lambert, but built-in Unity and the GI backends don't).
// We apply it to both direct and indirect as they are separate; it seems direct is not used if we use mixed mode with indirect or shadowmask baking.
ld.color.red /= Mathf.PI;
ld.color.green /= Mathf.PI;
ld.color.blue /= Mathf.PI;
ld.indirectColor.red /= Mathf.PI;
ld.indirectColor.green /= Mathf.PI;
ld.indirectColor.blue /= Mathf.PI;
#if UNITY_EDITOR
#else
ld.mode = LightMode.Realtime;
#endif
// For HDRP we need to divide the analytic light color by PI (HDRP does an explicit PI division for Lambert, but built-in Unity and the GI backends don't for punctual lights).
// We apply it to both direct and indirect as they are separate; it seems direct is not used if we use mixed mode with indirect or shadowmask baking.
ld.color.intensity /= Mathf.PI;
ld.indirectColor.intensity /= Mathf.PI;
switch (l.type)
{
case LightType.Directional:

ld.shape1 = 0.0f;
#endif
// TEMP: for now, if we bake a rectangle type this will disable the light for runtime, need to speak with GI team about it!
// ld.type = UnityEngine.Experimental.GlobalIllumination.LightType.Rectangle;
ld.type = UnityEngine.Experimental.GlobalIllumination.LightType.Point;
ld.type = UnityEngine.Experimental.GlobalIllumination.LightType.Rectangle;
ld.InitNoBake(ld.instanceID);
}
else
{

for (int i = 0; i < requests.Length; i++)
{
Light l = requests[i];
LightDataGIExtract(l, ref ld);
#if UNITY_EDITOR
if (LightmapperUtils.Extract(l.lightmapBakeType) == LightMode.Realtime)
ld.InitNoBake(l.GetInstanceID());
else
LightDataGIExtract(l, ref ld);
#else
ld.InitNoBake(l.GetInstanceID());
#endif
lightsOutput[i] = ld;
}
};
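
The comments above capture the convention mismatch: HDRP's Lambert term divides by PI explicitly, while the built-in GI backends do not, so a light handed to the baker at its HDRP intensity would come back roughly PI times too bright. A small worked sketch of the correction for the punctual path (illustrative only; LinearColor is the UnityEngine.Experimental.GlobalIllumination struct already used in the hunk):

// HDRP real-time diffuse:  lightColor * albedo / PI
// Lightmapper convention:  lightColor * albedo
// => pre-divide the color sent to the baker by PI so both paths agree,
//    e.g. an intensity of ~3.1416 is handed over as ~1.0.
static LinearColor ToBakerConvention(LinearColor c)
{
    c.intensity /= Mathf.PI; // punctual lights carry the scale in 'intensity'
    return c;
}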

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs (64 changes)


#endif
// 1. Allocate the target texture if necessary
var renderCamera = GetRenderCamera();
var hdCamera = HDCamera.Get(renderCamera);
if (hdCamera == null)
{
// Warning: this is a bad design pattern.
// An individual system should not create an HDCamera (which is a shared resource).
hdCamera = HDCamera.Create(renderCamera, null);
}
hdCamera.Update(probe.frameSettings, null, null);
if (!IsRealtimeTextureValid(probe.realtimeTexture, hdCamera))
if (!IsRealtimeTextureValid(probe.realtimeTexture))
{
if (probe.realtimeTexture != null)
probe.realtimeTexture.Release();

m_PlanarReflectionProbe_RequestRealtimeRender.Clear();
// 1. Allocate the target texture if necessary
var camera = GetRenderCamera();
var hdCamera = HDCamera.Get(camera);
if (hdCamera == null)
{
// Warning: this is a bad design pattern.
// An individual system should not create an HDCamera (which is a shared resource).
hdCamera = HDCamera.Create(camera, null);
}
hdCamera.Update(probe.frameSettings, null, null);
if (!IsRealtimeTextureValid(probe.realtimeTexture, hdCamera))
if (!IsRealtimeTextureValid(probe.realtimeTexture))
{
if (probe.realtimeTexture != null)
probe.realtimeTexture.Release();

// }
//}
bool IsRealtimeTextureValid(RenderTexture renderTexture, HDCamera hdCamera)
bool IsRealtimeTextureValid(RenderTexture renderTexture)
{
return renderTexture != null
&& renderTexture.width == m_Parameters.planarReflectionProbeSize

public void Render(PlanarReflectionProbe probe, RenderTexture target, Camera viewerCamera = null)
{
var renderCamera = GetRenderHDCamera(probe);
renderCamera.camera.targetTexture = target;
var renderCamera = GetRenderCamera();
SetupCameraForRender(renderCamera.camera, probe, viewerCamera);
// Copy current frameSettings of this probe to the HDAdditionalData of the render camera
probe.frameSettings.CopyTo(s_RenderCameraData.GetFrameSettings());
renderCamera.targetTexture = target;
SetupCameraForRender(renderCamera, probe, viewerCamera);
renderCamera.camera.Render();
renderCamera.Render();
renderCamera.camera.targetTexture = null;
renderCamera.targetTexture = null;
target.IncrementUpdateCount();
}

var forward = reflectionMatrix.MultiplyVector(viewerCamera.transform.forward);
var up = reflectionMatrix.MultiplyVector(viewerCamera.transform.up);
captureRotation = Quaternion.LookRotation(forward, up);
}
public static HDCamera GetRenderHDCamera(PlanarReflectionProbe probe)
{
var camera = GetRenderCamera();
probe.frameSettings.CopyTo(s_RenderCameraData.GetFrameSettings());
var hdCamera = HDCamera.Get(camera);
if (hdCamera == null)
{
// Warning: this is a bad design pattern.
// An individual system should not create an HDCamera (which is a shared resource).
hdCamera = HDCamera.Create(camera, null);
}
hdCamera.Update(probe.frameSettings, null, null);
return hdCamera;
}
static Camera GetRenderCamera()

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (46 changes)


RTHandleSystem.RTHandle m_DensityBufferHandle;
RTHandleSystem.RTHandle m_LightingBufferHandle;
// Do we support volumetric or not?
bool m_supportVolumetric = false;
if (preset == VolumetricLightingPreset.Off) return;
m_supportVolumetric = asset.renderPipelineSettings.supportVolumetric;
if (!m_supportVolumetric)
return;
m_VolumeVoxelizationCS = asset.renderPipelineResources.volumeVoxelizationCS;
m_VolumetricLightingCS = asset.renderPipelineResources.volumetricLightingCS;

public void InitializePerCameraData(HDCamera hdCamera)
{
if (preset == VolumetricLightingPreset.Off) return;
// Note: here we can't test frameSettings as they are not initialized yet.
// TODO: here we allocate history even for cameras that may not use volumetrics.
if (!m_supportVolumetric)
return;
// Start with the same parameters for both frames. Then update them one by one every frame.
var parameters = ComputeVBufferParameters(hdCamera, true);

// The results are undefined otherwise.
public void UpdatePerCameraData(HDCamera hdCamera)
{
if (preset == VolumetricLightingPreset.Off) return;
if (!hdCamera.frameSettings.enableVolumetric)
return;
var parameters = ComputeVBufferParameters(hdCamera, false);

void DestroyBuffers()
{
RTHandles.Release(m_DensityBufferHandle);
RTHandles.Release(m_LightingBufferHandle);
if (m_DensityBufferHandle != null)
RTHandles.Release(m_DensityBufferHandle);
if (m_LightingBufferHandle != null)
RTHandles.Release(m_LightingBufferHandle);
CoreUtils.SafeRelease(s_VisibleVolumeBoundsBuffer);
CoreUtils.SafeRelease(s_VisibleVolumeDataBuffer);

public void Cleanup()
{
if (preset == VolumetricLightingPreset.Off) return;
// Note: no need to test for volumetric support here; we do a safe release and null assignment.
DestroyBuffers();
m_VolumeVoxelizationCS = null;

public void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
{
if (preset == VolumetricLightingPreset.Off) return;
if (!hdCamera.frameSettings.enableVolumetric)
return;
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();

{
DensityVolumeList densityVolumes = new DensityVolumeList();
if (preset == VolumetricLightingPreset.Off) return densityVolumes;
if (!hdCamera.frameSettings.enableVolumetric)
return densityVolumes;
if (visualEnvironment.fogType != FogType.Volumetric) return densityVolumes;
if (visualEnvironment.fogType != FogType.Volumetric)
return densityVolumes;
using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))
{

public void VolumeVoxelizationPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex, DensityVolumeList densityVolumes)
{
if (preset == VolumetricLightingPreset.Off) return;
if (!hdCamera.frameSettings.enableVolumetric)
return;
if (visualEnvironment.fogType != FogType.Volumetric) return;
if (visualEnvironment.fogType != FogType.Volumetric)
return;
using (new ProfilingSample(cmd, "Volume Voxelization"))
{

public void VolumetricLightingPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
{
if (preset == VolumetricLightingPreset.Off) return;
if (!hdCamera.frameSettings.enableVolumetric)
return;
if (visualEnvironment.fogType != FogType.Volumetric) return;
if (visualEnvironment.fogType != FogType.Volumetric)
return;
using (new ProfilingSample(cmd, "Volumetric Lighting"))
{
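
Each volumetric pass above now carries the same early-outs: the per-camera frameSettings.enableVolumetric toggle and the active fog type from the volume stack (the asset-level supportVolumetric flag is checked once at build/init time). A hedged sketch of that per-pass guard factored into one helper; this is a hypothetical consolidation for illustration, whereas the diff keeps the checks inline in each pass.

// Hypothetical consolidation of the guard repeated in each pass above.
static bool VolumetricPassEnabled(HDCamera hdCamera, VisualEnvironment visualEnvironment)
{
    if (!hdCamera.frameSettings.enableVolumetric)        return false; // per-camera toggle
    if (visualEnvironment.fogType != FogType.Volumetric) return false; // active fog type
    return true;
}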

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs.hlsl (17 changes)


#define DEBUGVIEW_STACKLIT_SURFACEDATA_IOR (1304)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL (1305)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL_VIEW_SPACE (1306)
<<<<<<< HEAD
#define DEBUGVIEW_STACKLIT_SURFACEDATA_GEOMETRIC_NORMAL (1307)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_GEOMETRIC_NORMAL_VIEW_SPACE (1308)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_NORMAL (1309)

#define DEBUGVIEW_STACKLIT_SURFACEDATA_DIFFUSION_PROFILE (1322)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SUBSURFACE_MASK (1323)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_THICKNESS (1324)
=======
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_A (1307)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_B (1308)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_LOBE_MIXING (1309)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_TANGENT (1310)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_ANISOTROPY (1311)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_IRIDESCENCE_IOR (1312)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_IRIDESCENCE_THICKNESS (1313)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_SMOOTHNESS (1314)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_IOR (1315)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_THICKNESS (1316)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_EXTINCTION_COEFFICIENT (1317)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_DIFFUSION_PROFILE (1318)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SUBSURFACE_MASK (1319)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_THICKNESS (1320)
>>>>>>> master
//
// UnityEngine.Experimental.Rendering.HDPipeline.StackLit+BSDFData: static fields

ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs (27 changes)


public bool enableSSAO = true;
public bool enableSubsurfaceScattering = true;
public bool enableTransmission = true; // Caution: this is only for debug, it doesn't save the cost of Transmission execution
public bool enableAtmosphericScattering = true;
public bool enableVolumetric = true;
// Setup by system

public bool enableMotionVectors = true; // Enable/disable whole motion vectors pass (Camera + Object).
public bool enableObjectMotionVectors = true;
public bool enableDBuffer = true;
public bool enableAtmosphericScattering = true;
public bool enableRoughRefraction = true; // Depends on DepthPyramid - if not enabled, just do a copy of the scene color (?) - how to disable rough refraction?
public bool enableTransparentPostpass = true;
public bool enableDistortion = true;

frameSettings.enableSSAO = this.enableSSAO;
frameSettings.enableSubsurfaceScattering = this.enableSubsurfaceScattering;
frameSettings.enableTransmission = this.enableTransmission;
frameSettings.enableAtmosphericScattering = this.enableAtmosphericScattering;
frameSettings.enableVolumetric = this.enableVolumetric;
frameSettings.diffuseGlobalDimmer = this.diffuseGlobalDimmer;
frameSettings.specularGlobalDimmer = this.specularGlobalDimmer;

frameSettings.enableMotionVectors = this.enableMotionVectors;
frameSettings.enableObjectMotionVectors = this.enableObjectMotionVectors;
frameSettings.enableDBuffer = this.enableDBuffer;
frameSettings.enableAtmosphericScattering = this.enableAtmosphericScattering;
frameSettings.enableRoughRefraction = this.enableRoughRefraction;
frameSettings.enableTransparentPostpass = this.enableTransparentPostpass;
frameSettings.enableDistortion = this.enableDistortion;

aggregate.enableSSAO = srcFrameSettings.enableSSAO && renderPipelineSettings.supportSSAO;
aggregate.enableSubsurfaceScattering = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableSubsurfaceScattering && renderPipelineSettings.supportSubsurfaceScattering;
aggregate.enableTransmission = srcFrameSettings.enableTransmission;
aggregate.enableVolumetric = srcFrameSettings.enableVolumetric && renderPipelineSettings.supportVolumetric;
aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
// We must take care of the scene view fog flags in the editor
if (!CoreUtils.IsSceneViewFogEnabled(camera))
aggregate.enableAtmosphericScattering = false;
// Volumetrics are disabled if there is no atmospheric scattering
aggregate.enableVolumetric = srcFrameSettings.enableVolumetric && renderPipelineSettings.supportVolumetric && aggregate.enableAtmosphericScattering;
// TODO: Add support of volumetric in planar reflection
if (camera.cameraType == CameraType.Reflection)

aggregate.enableMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableMotionVectors && renderPipelineSettings.supportMotionVectors;
aggregate.enableObjectMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportMotionVectors;
aggregate.enableDBuffer = srcFrameSettings.enableDBuffer && renderPipelineSettings.supportDBuffer;
aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
// We must take care of the scene view fog flags in the editor
if (!CoreUtils.IsSceneViewFogEnabled(camera))
aggregate.enableAtmosphericScattering = false;
aggregate.enableRoughRefraction = srcFrameSettings.enableRoughRefraction;
aggregate.enableTransparentPostpass = srcFrameSettings.enableTransparentPostpass;
aggregate.enableDistortion = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableDistortion;

aggregate.ConfigureMSAADependentSettings();
aggregate.ConfigureStereoDependentSettings();
if (camera.cameraType == CameraType.Preview)
// Disable various options for the preview, except if we are a camera editor preview
if (HDUtils.IsRegularPreviewCamera(camera))
// remove undesired features in preview
aggregate.enableAtmosphericScattering = false;
aggregate.enableVolumetric = false;
aggregate.enableAtmosphericScattering = false;
aggregate.enableVolumetric = false;
}
LightLoopSettings.InitializeLightLoopSettings(camera, aggregate, renderPipelineSettings, srcFrameSettings, ref aggregate.lightLoopSettings);

new DebugUI.BoolField { displayName = "Enable Motion Vectors", getter = () => frameSettings.enableMotionVectors, setter = value => frameSettings.enableMotionVectors = value },
new DebugUI.BoolField { displayName = "Enable Object Motion Vectors", getter = () => frameSettings.enableObjectMotionVectors, setter = value => frameSettings.enableObjectMotionVectors = value },
new DebugUI.BoolField { displayName = "Enable DBuffer", getter = () => frameSettings.enableDBuffer, setter = value => frameSettings.enableDBuffer = value },
new DebugUI.BoolField { displayName = "Enable Atmospheric Scattering", getter = () => frameSettings.enableAtmosphericScattering, setter = value => frameSettings.enableAtmosphericScattering = value },
new DebugUI.BoolField { displayName = "Enable Rough Refraction", getter = () => frameSettings.enableRoughRefraction, setter = value => frameSettings.enableRoughRefraction = value },
new DebugUI.BoolField { displayName = "Enable Distortion", getter = () => frameSettings.enableDistortion, setter = value => frameSettings.enableDistortion = value },
new DebugUI.BoolField { displayName = "Enable Postprocess", getter = () => frameSettings.enablePostprocess, setter = value => frameSettings.enablePostprocess = value },

new DebugUI.BoolField { displayName = "Enable Shadows", getter = () => frameSettings.enableShadow, setter = value => frameSettings.enableShadow = value },
new DebugUI.BoolField { displayName = "Enable Contact Shadows", getter = () => frameSettings.enableContactShadows, setter = value => frameSettings.enableContactShadows = value },
new DebugUI.BoolField { displayName = "Enable ShadowMask", getter = () => frameSettings.enableShadowMask, setter = value => frameSettings.enableShadowMask = value },
new DebugUI.BoolField { displayName = "Enable Atmospheric Scattering", getter = () => frameSettings.enableAtmosphericScattering, setter = value => frameSettings.enableAtmosphericScattering = value },
new DebugUI.BoolField { displayName = " Enable volumetric", getter = () => frameSettings.enableVolumetric, setter = value => frameSettings.enableVolumetric = value },
}
}
});
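
The aggregation above chains the two flags: atmospheric scattering is dropped when scene-view fog is disabled or for a regular preview camera, and volumetrics require atmospheric scattering on top of the asset-level support flag. A condensed sketch of the effective evaluation follows (illustrative only; the actual code assigns the aggregate fields step by step rather than in two expressions):

// Illustrative condensation of the dependency chain above.
bool atmospheric = srcFrameSettings.enableAtmosphericScattering
                   && CoreUtils.IsSceneViewFogEnabled(camera)
                   && !HDUtils.IsRegularPreviewCamera(camera);

bool volumetric = srcFrameSettings.enableVolumetric
                  && renderPipelineSettings.supportVolumetric
                  && atmospheric;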

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs (4 changes)


public abstract void PushShaderParameters(HDCamera hdCamera, CommandBuffer cmd);
public static void PushNeutralShaderParameters(CommandBuffer cmd)
public static void PushNeutralShaderParameters(HDCamera hdCamera, CommandBuffer cmd)
if (ShaderConfig.s_VolumetricLightingPreset != 0)
if (hdCamera.frameSettings.enableVolumetric)
{
var data = DensityVolumeData.GetNeutralValues();

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyManager.cs (6 changes)


}
}
public void UpdateCurrentSkySettings(HDCamera camera)
public void UpdateCurrentSkySettings(HDCamera hdCamera)
if (camera.camera.cameraType == CameraType.Preview)
if (HDUtils.IsRegularPreviewCamera(hdCamera.camera))
{
m_VisualSky.skySettings = GetDefaultPreviewSkyInstance();
}

// Update needs to happen before testing if the component is active other internal data structure are not properly updated yet.
VolumeManager.instance.Update(m_LightingOverrideVolumeStack, camera.camera.transform, m_LightingOverrideLayerMask);
VolumeManager.instance.Update(m_LightingOverrideVolumeStack, hdCamera.camera.transform, m_LightingOverrideLayerMask);
if(VolumeManager.instance.IsComponentActiveInMask<VisualEnvironment>(m_LightingOverrideLayerMask))
{
SkySettings newSkyOverride = GetSkySetting(m_LightingOverrideVolumeStack);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/VisualEnvironment.cs (4 changes)


{
if (!hdCamera.frameSettings.enableAtmosphericScattering)
{
AtmosphericScattering.PushNeutralShaderParameters(cmd);
AtmosphericScattering.PushNeutralShaderParameters(hdCamera, cmd);
return;
}

{
AtmosphericScattering.PushNeutralShaderParameters(cmd);
AtmosphericScattering.PushNeutralShaderParameters(hdCamera, cmd);
break;
}
case FogType.Linear:
