
Merge pull request #303 from Unity-Technologies/movecs

Motion vectors support
/RenderPassXR_Sandbox
GitHub · 8 years ago
Current commit 36881128
11 files changed, with 234 insertions and 73 deletions
  1. Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs (174 changes)
  2. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs (4 changes)
  3. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/MaterialUtilities.hlsl (2 changes)
  4. Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/HDRenderPipelineResources.asset (1 change)
  5. Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/HDRenderPipelineResources.asset.meta (2 changes)
  6. Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/RenderPipelineResources.cs (4 changes)
  7. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassVelocity.hlsl (4 changes)
  8. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderVariables.hlsl (11 changes)
  9. Assets/ScriptableRenderPipeline/HDRenderPipeline/Utilities.cs (42 changes)
  10. Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader (54 changes)
  11. Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader.meta (9 changes)

Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs (174 changes)


using System.Collections.Generic;
using UnityEngine.Rendering;
using System;
using System.Linq;

}
}
public struct HDCamera
// This holds all the matrix data we need for rendering, including data from the previous frame
// (which is the main reason why we need to keep them around for a minimum of one frame).
// HDCameras are automatically created & updated from a source camera and will be destroyed if
// not used during a frame.
public class HDCamera
public Camera camera;
public Vector4 screenSize;
public Matrix4x4 viewProjectionMatrix;
public Matrix4x4 invViewProjectionMatrix;
public Matrix4x4 invProjectionMatrix;
public Vector4 invProjectionParam;
public readonly Camera camera;
public Vector4 screenSize { get; private set; }
public Matrix4x4 viewProjectionMatrix { get; private set; }
public Matrix4x4 prevViewProjectionMatrix { get; private set; }
public Matrix4x4 invViewProjectionMatrix { get; private set; }
public Matrix4x4 invProjectionMatrix { get; private set; }
public Vector4 invProjectionParam { get; private set; }
// The only way to reliably keep track of a frame change right now is to compare the frame
// count Unity gives us. We need this as a single camera could be rendered several times per
// frame and some matrices only have to be computed once. Realistically this shouldn't
// happen, but you never know...
int m_LastFrameActive;
// Always true for cameras that just got added to the pool - needed for previous matrices to
// avoid one-frame jumps/hiccups with temporal effects (motion blur, TAA...)
bool m_FirstFrame;
public HDCamera(Camera camera)
{
this.camera = camera;
Reset();
}
public void Update()
{
screenSize = new Vector4(camera.pixelWidth, camera.pixelHeight, 1.0f / camera.pixelWidth, 1.0f / camera.pixelHeight);
// The projection matrix used in shaders is massaged a bit so it works across all platforms
// (different Z value ranges, etc.)
var gpuProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, false);
var gpuVP = gpuProj * camera.worldToCameraMatrix;
// A camera could be rendered multiple times per frame, so only update the previous view-projection matrix when needed
if (m_LastFrameActive != Time.frameCount)
{
prevViewProjectionMatrix = !m_FirstFrame
? viewProjectionMatrix
: gpuVP;
m_FirstFrame = false;
}
// Ref: An Efficient Depth Linearization Method for Oblique View Frustums, Eq. 6 (see the note after this method).
var invProjParam = new Vector4(
gpuProj.m20 / (gpuProj.m00 * gpuProj.m23),
gpuProj.m21 / (gpuProj.m11 * gpuProj.m23),
-1.0f / gpuProj.m23,
(-gpuProj.m22
+ gpuProj.m20 * gpuProj.m02 / gpuProj.m00
+ gpuProj.m21 * gpuProj.m12 / gpuProj.m11) / gpuProj.m23
);
viewProjectionMatrix = gpuVP;
invViewProjectionMatrix = gpuVP.inverse;
invProjectionMatrix = gpuProj.inverse;
invProjectionParam = invProjParam;
m_LastFrameActive = Time.frameCount;
}
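The vector packed just above follows Eq. 6 of the cited paper. As a hedged summary (the shader-side consumer is not part of this diff, but this is the relation such a parameter encodes): with (x_ndc, y_ndc) the post-divide clip-space position and z_buf the raw depth-buffer value, linear view-space depth is a single dot product away:

\[ z_{\mathrm{view}} = \bigl|\, (x_{\mathrm{ndc}},\; y_{\mathrm{ndc}},\; z_{\mathrm{buf}},\; 1) \cdot p \,\bigr|^{-1}, \qquad p = \texttt{\_InvProjParam} \]

For a non-oblique projection the first two components of p are zero and this collapses to the familiar 1 / (a·z_buf + b) linearization; the extra terms keep it valid for oblique frustums.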
public void SetupMaterial(Material material)
{
material.SetVector("_ScreenSize", screenSize);
material.SetMatrix("_ViewProjMatrix", viewProjectionMatrix);
material.SetMatrix("_PrevViewProjMatrix", prevViewProjectionMatrix);
material.SetMatrix("_InvViewProjMatrix", invViewProjectionMatrix);
material.SetMatrix("_InvProjMatrix", invProjectionMatrix);
material.SetVector("_InvProjParam", invProjectionParam);
}
public void Reset()
{
m_LastFrameActive = -1;
m_FirstFrame = true;
}
static Dictionary<Camera, HDCamera> m_Cameras = new Dictionary<Camera, HDCamera>();
static List<Camera> m_Cleanup = new List<Camera>(); // Recycled to reduce GC pressure
// Grab the HDCamera tied to a given Camera and update it.
public static HDCamera Get(Camera camera)
{
HDCamera hdcam;
if (!m_Cameras.TryGetValue(camera, out hdcam))
{
hdcam = new HDCamera(camera);
m_Cameras.Add(camera, hdcam);
}
hdcam.Update();
return hdcam;
}
// Look for any camera that hasn't been used during the last frame and remove it from the pool.
public static void CleanUnused()
{
int frameCheck = Time.frameCount - 1;
foreach (var kvp in m_Cameras)
{
if (kvp.Value.m_LastFrameActive != frameCheck)
m_Cleanup.Add(kvp.Key);
}
foreach (var cam in m_Cleanup)
m_Cameras.Remove(cam);
m_Cleanup.Clear();
}
}
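For orientation, a minimal sketch of how the new pool is meant to be driven from the render loop (only HDCamera.Get, SetupMaterial and CleanUnused come from this PR; the method name, loop and material below are illustrative placeholders):

// Illustrative only: the intended per-frame lifecycle of HDCamera.
void RenderAllCameras(Camera[] cameras, Material fullScreenMaterial)
{
    // The PR calls this from Render() whenever Time.frameCount changes,
    // dropping pooled HDCameras whose source camera wasn't rendered last frame.
    HDCamera.CleanUnused();

    foreach (var camera in cameras)
    {
        // Fetch (or lazily create) the per-camera state and refresh its matrices,
        // keeping last frame's view-projection around for motion vectors.
        var hdCamera = HDCamera.Get(camera);

        // Push _ScreenSize, _ViewProjMatrix, _PrevViewProjMatrix, etc. onto a material.
        hdCamera.SetupMaterial(fullScreenMaterial);

        // ... render this camera ...
    }
}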
public class GBufferManager

Material m_FilterSubsurfaceScattering;
// <<< Old SSS Model
Material m_CameraMotionVectorsMaterial;
Material m_DebugViewMaterialGBuffer;
Material m_DebugDisplayLatlong;
Material m_DebugFullScreen;

// Detect when the window size changes
int m_CurrentWidth;
int m_CurrentHeight;
// Used to detect frame changes
int m_FrameCount;
public int GetCurrentShadowCount() { return m_LightLoop.GetCurrentShadowCount(); }
public int GetShadowAtlasCount() { return m_LightLoop.GetShadowAtlasCount(); }

CreateSssMaterials(sssSettings.useDisneySSS);
// <<< Old SSS Model
m_CameraMotionVectorsMaterial = Utilities.CreateEngineMaterial("Hidden/HDRenderPipeline/CameraMotionVectors");
InitializeDebugMaterials();
// Init Gbuffer description

{
var cmd = new CommandBuffer {name = "Push Global Parameters"};
cmd.SetGlobalVector("_ScreenSize", hdCamera.screenSize);
cmd.SetGlobalMatrix("_ViewProjMatrix", hdCamera.viewProjectionMatrix);
cmd.SetGlobalMatrix("_InvViewProjMatrix", hdCamera.invViewProjectionMatrix);
cmd.SetGlobalMatrix("_InvProjMatrix", hdCamera.invProjectionMatrix);
cmd.SetGlobalVector("_InvProjParam", hdCamera.invProjectionParam);
cmd.SetGlobalVector("_ScreenSize", hdCamera.screenSize);
cmd.SetGlobalMatrix("_ViewProjMatrix", hdCamera.viewProjectionMatrix);
cmd.SetGlobalMatrix("_PrevViewProjMatrix", hdCamera.prevViewProjectionMatrix);
cmd.SetGlobalMatrix("_InvViewProjMatrix", hdCamera.invViewProjectionMatrix);
cmd.SetGlobalMatrix("_InvProjMatrix", hdCamera.invProjectionMatrix);
cmd.SetGlobalVector("_InvProjParam", hdCamera.invProjectionParam);
// TODO: cmd.SetGlobalInt() does not exist, so we are forced to use Shader.SetGlobalInt() instead.

SupportedRenderingFeatures.active = s_NeededFeatures;
#endif
if (m_FrameCount != Time.frameCount)
{
HDCamera.CleanUnused();
m_FrameCount = Time.frameCount;
}
GraphicsSettings.lightsUseLinearIntensity = true;
GraphicsSettings.lightsUseColorTemperature = true;

renderContext.SetupCameraProperties(camera);
HDCamera hdCamera = Utilities.GetHDCamera(camera);
var hdCamera = HDCamera.Get(camera);
// TODO: Find a correct place to bind these material textures
// We have to bind the material specific global parameters in this mode

}
else
{
RenderVelocity(cullResults, camera, renderContext); // Note: we may have to render velocity earlier if we do temporal AO, temporal volumetrics, etc. Would that mean forward opaques are not taken into account in the deferred rendering case?
RenderVelocity(cullResults, hdCamera, renderContext); // Note: we may have to render velocity earlier if we do temporal AO, temporal volumetrics, etc. Would that mean forward opaques are not taken into account in the deferred rendering case?
// TODO: Check with VFX team.
// Rendering distortion here will of course have a lot of artifacts.

// Render GBuffer opaque
if (!m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())
{
Utilities.SetupMaterialHDCamera(hdCamera, m_DebugViewMaterialGBuffer);
hdCamera.SetupMaterial(m_DebugViewMaterialGBuffer);
// TODO: Bind depth textures
var cmd = new CommandBuffer { name = "DebugViewMaterialGBuffer" };

}
}
void RenderVelocity(CullResults cullResults, Camera camera, ScriptableRenderContext renderContext)
void RenderVelocity(CullResults cullResults, HDCamera hdcam, ScriptableRenderContext renderContext)
{
using (new Utilities.ProfilingSample("Velocity", renderContext))
{

int w = camera.pixelWidth;
int h = camera.pixelHeight;
// These flags are still required in SRP or the engine won't compute previous model matrices...
// If the flag hasn't been set yet on this camera, motion vectors will skip a frame.
hdcam.camera.depthTextureMode |= DepthTextureMode.MotionVectors | DepthTextureMode.Depth;
int w = (int)hdcam.screenSize.x;
int h = (int)hdcam.screenSize.y;
cmd.Blit(BuiltinRenderTextureType.None, m_VelocityBufferRT, m_CameraMotionVectorsMaterial, 0);
RenderOpaqueRenderList(cullResults, camera, renderContext, "MotionVectors");
RenderOpaqueRenderList(cullResults, hdcam.camera, renderContext, "MotionVectors", RendererConfiguration.PerObjectMotionVectors);
}
}

if (postProcessLayer != null && postProcessLayer.enabled)
{
cmd.SetGlobalTexture("_CameraDepthTexture", GetDepthTexture());
cmd.SetGlobalTexture("_CameraMotionVectorsTexture", m_VelocityBufferRT);
var context = m_PostProcessContext;
context.Reset();

context.camera = camera;
context.sourceFormat = RenderTextureFormat.ARGBHalf; // ?
context.sourceFormat = RenderTextureFormat.ARGBHalf;
context.flip = true;
postProcessLayer.Render(context);

Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs (4 changes)


if (GetFeatureVariantsEnabled())
{
// featureVariants
Utilities.SetupMaterialHDCamera(hdCamera, m_DebugViewTilesMaterial);
hdCamera.SetupMaterial(m_DebugViewTilesMaterial);
m_DebugViewTilesMaterial.SetInt("_NumTiles", numTiles);
m_DebugViewTilesMaterial.SetInt("_ViewTilesFlags", (int)m_TileSettings.tileDebugByCategory);
m_DebugViewTilesMaterial.SetVector("_MousePixelCoord", mousePixelCoord);

else if (m_TileSettings.tileDebugByCategory != TileSettings.TileDebug.None)
{
// lightCategories
Utilities.SetupMaterialHDCamera(hdCamera, m_DebugViewTilesMaterial);
hdCamera.SetupMaterial(m_DebugViewTilesMaterial);
m_DebugViewTilesMaterial.SetInt("_ViewTilesFlags", (int)m_TileSettings.tileDebugByCategory);
m_DebugViewTilesMaterial.SetVector("_MousePixelCoord", mousePixelCoord);
m_DebugViewTilesMaterial.EnableKeyword(bUseClusteredForDeferred ? "USE_CLUSTERED_LIGHTLIST" : "USE_FPTL_LIGHTLIST");

Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/MaterialUtilities.hlsl (2 changes)


positionCS.xy = positionCS.xy / positionCS.w;
previousPositionCS.xy = previousPositionCS.xy / previousPositionCS.w;
return (positionCS.xy - previousPositionCS.xy) * _ForceNoMotion;
return (positionCS.xy - previousPositionCS.xy) * unity_MotionVectorsParams.y;
#else
return float2(0.0, 0.0);
#endif

Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/HDRenderPipelineResources.asset (1 change)


buildMaterialFlagsShader: {fileID: 7200000, guid: fb3eda953cd6e634e877fb777be2cd08,
type: 3}
shadeOpaqueShader: {fileID: 7200000, guid: 0b64f79746d2daf4198eaf6eab9af259, type: 3}
cameraMotionVectors: {fileID: 4800000, guid: 035941b63024d1943af48811c1db20d9, type: 3}
blitCubemap: {fileID: 4800000, guid: d05913e251bed7a4992c921c62e1b647, type: 3}
buildProbabilityTables: {fileID: 7200000, guid: b9f26cf340afe9145a699753531b2a4c,
type: 3}

Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/HDRenderPipelineResources.asset.meta (2 changes)


fileFormatVersion: 2
guid: 42086e81f4f0c724f96f7f09cc995354
timeCreated: 1497363538
timeCreated: 1497525010
licenseType: Pro
NativeFormatImporter:
mainObjectFileID: 11400000

Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/RenderPipelineResources.cs (4 changes)


// instance.drawSssProfile = UnityEditor.AssetDatabase.LoadAssetAtPath<Shader>("Assets/ScriptableRenderPipeline/HDRenderPipeline/SceneSettings/DrawSssProfile.shader");
// instance.drawTransmittanceGraphShader = UnityEditor.AssetDatabase.LoadAssetAtPath<Shader>("Assets/ScriptableRenderPipeline/HDRenderPipeline/SceneSettings/DrawTransmittanceGraph.shader");
instance.cameraMotionVectors = UnityEditor.AssetDatabase.LoadAssetAtPath<Shader>("Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader");
// Sky
instance.blitCubemap = UnityEditor.AssetDatabase.LoadAssetAtPath<Shader>("Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/BlitCubemap.shader");
instance.buildProbabilityTables = UnityEditor.AssetDatabase.LoadAssetAtPath<ComputeShader>("Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/BuildProbabilityTables.compute");

// These shaders don't need to be referenced by RenderPipelineResources as they are not used at runtime (only to draw in the editor)
// public Shader drawSssProfile;
// public Shader drawTransmittanceGraphShader;
public Shader cameraMotionVectors;
// Sky
public Shader blitCubemap;

Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassVelocity.hlsl (4 changes)


// It is not possible to correctly generate motion vectors for tessellated geometry, as tessellation parameters can change
// from one frame to the next (adaptive, LOD) and Unity only gives us information for a single non-tessellated vertex.
// So the motion vector is based on the interpolated previous position at the vertex level instead (see the summary after this function).
varyingsType.vpass.positionCS = mul(_NonJitteredVP, mul(unity_ObjectToWorld, float4(inputMesh.positionOS, 1.0)));
varyingsType.vpass.previousPositionCS = mul(_PreviousVP, mul(_PreviousM, _HasLastPositionData ? float4(inputPass.previousPositionOS, 1.0) : float4(inputMesh.positionOS, 1.0)));
varyingsType.vpass.positionCS = mul(_ViewProjMatrix, mul(unity_ObjectToWorld, float4(inputMesh.positionOS, 1.0)));
varyingsType.vpass.previousPositionCS = mul(_PrevViewProjMatrix, mul(unity_MatrixPreviousM, unity_MotionVectorsParams.x ? float4(inputPass.previousPositionOS, 1.0) : float4(inputMesh.positionOS, 1.0)));
return PackVaryingsType(varyingsType);
}
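Together with the MaterialUtilities.hlsl change earlier in this diff, the per-object velocity now amounts to the following (a hedged reading; the interpretation of the .x and .y components simply follows from which uniforms unity_MotionVectorsParams replaces here):

\[ q_t = VP_t\, M_t\, p, \qquad q_{t-1} = VP_{t-1}\, M_{t-1}\, \hat{p}, \qquad \hat{p} = \begin{cases} p_{\mathrm{prev}} & \text{if } \texttt{unity\_MotionVectorsParams.x} \ne 0 \\ p & \text{otherwise} \end{cases} \]

\[ v = \left( \frac{q_t{.}xy}{q_t{.}w} - \frac{q_{t-1}{.}xy}{q_{t-1}{.}w} \right) \cdot \texttt{unity\_MotionVectorsParams.y} \]

where VP_t and VP_{t-1} are _ViewProjMatrix and _PrevViewProjMatrix as filled in by HDCamera, M_t and M_{t-1} are unity_ObjectToWorld and unity_MatrixPreviousM, and the .y component is a 0-or-1 scale standing in for the old _ForceNoMotion flag.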

Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderVariables.hlsl (11 changes)


SAMPLER3D(samplerunity_ProbeVolumeSH);
CBUFFER_START(UnityVelocityPass)
float4x4 _NonJitteredVP;
float4x4 _PreviousVP;
float4x4 _PreviousM;
bool _HasLastPositionData;
bool _ForceNoMotion;
float _MotionVectorDepthBias;
float4x4 unity_MatrixNonJitteredVP;
float4x4 unity_MatrixPreviousVP;
float4x4 unity_MatrixPreviousM;
float4 unity_MotionVectorsParams;
CBUFFER_END
// ----------------------------------------------------------------------------

float4x4 _ViewProjMatrix; // Looks like using UNITY_MATRIX_VP in a pixel shader doesn't work??? Need to set up our own...
float4x4 _PrevViewProjMatrix;
float4x4 _InvViewProjMatrix;
float4x4 _InvProjMatrix;
float4 _InvProjParam;

Assets/ScriptableRenderPipeline/HDRenderPipeline/Utilities.cs (42 changes)


return gpuVP;
}
public static HDCamera GetHDCamera(Camera camera)
{
HDCamera hdCamera = new HDCamera();
hdCamera.camera = camera;
hdCamera.screenSize = new Vector4(camera.pixelWidth, camera.pixelHeight, 1.0f / camera.pixelWidth, 1.0f / camera.pixelHeight);
// The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
// (different Z value ranges etc.)
var gpuProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, false);
var gpuVP = gpuProj * camera.worldToCameraMatrix;
// Ref: An Efficient Depth Linearization Method for Oblique View Frustums, Eq. 6.
Vector4 invProjectionParam = new Vector4(gpuProj.m20 / (gpuProj.m00 * gpuProj.m23),
gpuProj.m21 / (gpuProj.m11 * gpuProj.m23),
-1.0f / gpuProj.m23,
(-gpuProj.m22
+ gpuProj.m20 * gpuProj.m02 / gpuProj.m00
+ gpuProj.m21 * gpuProj.m12 / gpuProj.m11) / gpuProj.m23);
hdCamera.viewProjectionMatrix = gpuVP;
hdCamera.invViewProjectionMatrix = gpuVP.inverse;
hdCamera.invProjectionMatrix = gpuProj.inverse;
hdCamera.invProjectionParam = invProjectionParam;
return hdCamera;
}
public static void SetupMaterialHDCamera(HDCamera hdCamera, Material material)
{
material.SetVector("_ScreenSize", hdCamera.screenSize);
material.SetMatrix("_ViewProjMatrix", hdCamera.viewProjectionMatrix);
material.SetMatrix("_InvViewProjMatrix", hdCamera.invViewProjectionMatrix);
material.SetMatrix("_InvProjMatrix", hdCamera.invProjectionMatrix);
material.SetVector("_InvProjParam", hdCamera.invProjectionParam);
}
// TEMP: these functions should be implemented on the C++ side; for now, do it in C#
public static void SetMatrixCS(CommandBuffer cmd, ComputeShader shadercs, string name, Matrix4x4 mat)
{

RenderTargetIdentifier colorBuffer,
MaterialPropertyBlock properties = null, int shaderPassID = 0)
{
SetupMaterialHDCamera(camera, material);
camera.SetupMaterial(material);
commandBuffer.SetRenderTarget(colorBuffer);
commandBuffer.DrawProcedural(Matrix4x4.identity, material, shaderPassID, MeshTopology.Triangles, 3, 1, properties);
}

RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthStencilBuffer,
MaterialPropertyBlock properties = null, int shaderPassID = 0)
{
SetupMaterialHDCamera(camera, material);
camera.SetupMaterial(material);
commandBuffer.SetRenderTarget(colorBuffer, depthStencilBuffer);
commandBuffer.DrawProcedural(Matrix4x4.identity, material, shaderPassID, MeshTopology.Triangles, 3, 1, properties);
}

RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthStencilBuffer,
MaterialPropertyBlock properties = null, int shaderPassID = 0)
{
SetupMaterialHDCamera(camera, material);
camera.SetupMaterial(material);
commandBuffer.SetRenderTarget(colorBuffers, depthStencilBuffer);
commandBuffer.DrawProcedural(Matrix4x4.identity, material, shaderPassID, MeshTopology.Triangles, 3, 1, properties);
}

Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader (54 changes)


Shader "Hidden/HDRenderPipeline/CameraMotionVectors"
{
HLSLINCLUDE
#pragma target 4.5
#include "../../ShaderLibrary/Common.hlsl"
#include "../ShaderVariables.hlsl"
#include "../ShaderPass/FragInputs.hlsl"
#include "../ShaderPass/VaryingMesh.hlsl"
#include "../ShaderPass/VertMesh.hlsl"
PackedVaryingsType Vert(AttributesMesh inputMesh)
{
VaryingsType varyingsType;
varyingsType.vmesh = VertMesh(inputMesh);
return PackVaryingsType(varyingsType);
}
float4 Frag(PackedVaryingsToPS packedInput) : SV_Target
{
PositionInputs posInput = GetPositionInput(packedInput.vmesh.positionCS.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float3 vPos = ComputeViewSpacePosition(posInput.positionSS, depth, _InvProjMatrix);
float4 worldPos = mul(unity_CameraToWorld, float4(vPos, 1.0));
float4 prevClipPos = mul(_PrevViewProjMatrix, worldPos);
float4 curClipPos = mul(_ViewProjMatrix, worldPos);
float2 prevHPos = prevClipPos.xy / prevClipPos.w;
float2 curHPos = curClipPos.xy / curClipPos.w;
float2 previousPositionCS = (prevHPos + 1.0) / 2.0;
float2 positionCS = (curHPos + 1.0) / 2.0;
return float4(positionCS - previousPositionCS, 0.0, 1.0);
}
ENDHLSL
SubShader
{
Cull Off ZWrite Off ZTest Always
Pass
{
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag
ENDHLSL
}
}
}
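A compact restatement of the full-screen pass above: it reconstructs each pixel's world-space position from the depth buffer, then reprojects it with the previous and current view-projection matrices, so it only captures camera motion over static geometry (moving objects are covered by the per-object velocity pass earlier in this diff):

\[ w = C_{\mathrm{view}\to\mathrm{world}} \cdot \mathrm{viewPos}(uv, d), \qquad uv_t = \tfrac{1}{2}\!\left( \frac{(VP_t\, w){.}xy}{(VP_t\, w){.}w} + 1 \right), \qquad uv_{t-1} = \tfrac{1}{2}\!\left( \frac{(VP_{t-1}\, w){.}xy}{(VP_{t-1}\, w){.}w} + 1 \right) \]

\[ \mathrm{output} = (uv_t - uv_{t-1},\; 0,\; 1) \]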

Assets/ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader.meta (9 changes)


fileFormatVersion: 2
guid: 035941b63024d1943af48811c1db20d9
timeCreated: 1497432609
licenseType: Pro
ShaderImporter:
defaultTextures: []
userData:
assetBundleName:
assetBundleVariant: