
rough in single pass forward reflections using compute buffers

/main
Filip Iliescu, 8 years ago
Current commit
6d43471b
2 files changed, 248 insertions(+), 51 deletions(-)
  1. 101  Assets/ScriptableRenderPipeline/MobileRenderPipeline/ClassicDeferred/ClassicDeferredPipeline.cs
  2. 198  Assets/ScriptableRenderPipeline/MobileRenderPipeline/ClassicDeferred/UnityStandardForwardMobile.cginc

101  Assets/ScriptableRenderPipeline/MobileRenderPipeline/ClassicDeferred/ClassicDeferredPipeline.cs


[NonSerialized]
private TextureCache2D m_CookieTexArray;
//private TextureCacheCubemap m_CubeCookieTexArray;
//private TextureCacheCubemap m_CubeReflTexArray;
private TextureCacheCubemap m_CubeReflTexArray;
private static ComputeBuffer s_LightDataBuffer;
private static int s_GBufferAlbedo;
private static int s_GBufferSpecRough;

if (m_ReflectionNearClipMaterial) DestroyImmediate (m_ReflectionNearClipMaterial);
if (m_ReflectionNearAndFarClipMaterial) DestroyImmediate (m_ReflectionNearAndFarClipMaterial);
s_LightDataBuffer.Release();
//m_CubeReflTexArray.Release();
m_CubeReflTexArray.Release();
DeinitShadowSystem();
}

m_CookieTexArray = new TextureCache2D();
//m_CubeCookieTexArray = new TextureCacheCubemap();
//m_CubeReflTexArray = new TextureCacheCubemap();
m_CubeReflTexArray = new TextureCacheCubemap();
//m_CubeReflTexArray.AllocTextureArray(64, m_TextureSettings.reflectionCubemapSize, TextureCache.GetPreferredHdrCompressedTextureFormat, true);
m_CubeReflTexArray.AllocTextureArray(64, m_TextureSettings.reflectionCubemapSize, TextureCache.GetPreferredHdrCompressedTextureFormat, true);
// TODO: decide on better max reflection probes
s_LightDataBuffer = new ComputeBuffer(k_MaxLights, System.Runtime.InteropServices.Marshal.SizeOf(typeof(SFiniteLightData)));
//shadows
m_MatWorldToShadow = new Matrix4x4[k_MaxLights * k_MaxShadowmapPerLights];
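On the s_LightDataBuffer allocation above: the stride comes from Marshal.SizeOf on SFiniteLightData, so the managed struct layout has to stay in step with the HLSL mirror included via LightDefinitions.cs.hlsl. A minimal sketch of that pattern, using a hypothetical stand-in struct rather than the real SFiniteLightData definition:

using System.Runtime.InteropServices;
using UnityEngine;

// Hypothetical stand-in for the GPU-visible probe struct; sequential layout keeps
// the managed field order identical to the HLSL StructuredBuffer element.
[StructLayout(LayoutKind.Sequential)]
struct ProbeDataExample
{
    public Vector3 lightPos;          // probe volume center, view space
    public Vector3 lightAxisX;        // probe volume frame, view space
    public Vector3 lightAxisY;
    public Vector3 lightAxisZ;
    public float probeBlendDistance;
    public uint flags;
}

static class ProbeBufferExample
{
    // Element count times marshalled stride, mirroring the allocation above.
    public static ComputeBuffer Create(int maxProbes)
    {
        return new ComputeBuffer(maxProbes, Marshal.SizeOf(typeof(ProbeDataExample)));
    }
}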

// update texture caches
m_CookieTexArray.NewFrame();
//m_CubeCookieTexArray.NewFrame();
//m_CubeReflTexArray.NewFrame();
m_CubeReflTexArray.NewFrame();
}
public void Render(ScriptableRenderContext context, IEnumerable<Camera> cameras)

var w = camera.pixelWidth;
var h = camera.pixelHeight;
Matrix4x4 viewToWorld = CameraToWorld (camera);
var viewToWorld = CameraToWorld (camera);
var worldToView = WorldToCamera(camera);
// camera to screen matrix (and its inverse)
var proj = CameraProjection(camera);

}
}
//var viewDir = viewToWorld.GetColumn(2);
//var viewDirNormalized = -1 * Vector3.Normalize(new Vector3 (viewDir.x, viewDir.y, viewDir.z));
//Plane eyePlane = new Plane ();
//eyePlane.SetNormalAndPosition(viewDirNormalized, camera.transform.position);
int probeCount = cull.visibleReflectionProbes.Length;
var lightData = new SFiniteLightData[probeCount];
for (int i = 0; i < probeCount; ++i) {
var rl = cull.visibleReflectionProbes [i];
// always a box for now
var cubemap = rl.texture;
if (cubemap == null)
continue;
var lgtData = new SFiniteLightData();
lgtData.flags = 0;
var bnds = rl.bounds;
var boxOffset = rl.center; // reflection volume offset relative to cube map capture point
var blendDistance = rl.blendDistance;
var mat = rl.localToWorld;
var boxProj = (rl.boxProjection != 0);
var decodeVals = rl.hdr;
// C is reflection volume center in world space (NOT same as cube map capture point)
var e = bnds.extents; // 0.5f * Vector3.Max(-boxSizes[p], boxSizes[p]);
//Vector3 C = bnds.center; // P + boxOffset;
var C = mat.MultiplyPoint(boxOffset); // same as commented out line above when rot is identity
var combinedExtent = e + new Vector3(blendDistance, blendDistance, blendDistance);
Vector3 vx = mat.GetColumn(0);
Vector3 vy = mat.GetColumn(1);
Vector3 vz = mat.GetColumn(2);
// transform to camera space (becomes a left hand coordinate frame in Unity since Determinant(worldToView)<0)
vx = worldToView.MultiplyVector(vx);
vy = worldToView.MultiplyVector(vy);
vz = worldToView.MultiplyVector(vz);
var Cw = worldToView.MultiplyPoint(C);
if (boxProj) lgtData.flags |= LightDefinitions.IS_BOX_PROJECTED;
lgtData.lightPos = Cw;
lgtData.lightAxisX = vx;
lgtData.lightAxisY = vy;
lgtData.lightAxisZ = vz;
lgtData.localCubeCapturePoint = -boxOffset;
lgtData.probeBlendDistance = blendDistance;
lgtData.lightIntensity = decodeVals.x;
lgtData.decodeExp = decodeVals.y;
lgtData.sliceIndex = m_CubeReflTexArray.FetchSlice(cubemap);
var delta = combinedExtent - e;
lgtData.boxInnerDist = e;
lgtData.boxInvRange.Set(1.0f / delta.x, 1.0f / delta.y, 1.0f / delta.z);
lgtData.lightType = (uint)LightDefinitions.BOX_LIGHT;
lgtData.lightModel = (uint)LightDefinitions.REFLECTION_LIGHT;
lightData [i] = lgtData;
}
s_LightDataBuffer.SetData(lightData);
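The loop above packs each probe's oriented box as a view-space frame: lightPos plus the three axis vectors. The shader later re-expresses the shaded point in that frame with three dot products; the same math, CPU-side for illustration (names here are illustrative):

using UnityEngine;

static class ProbeSpaceExample
{
    // Re-express a view-space point in a probe's local volume frame; the shader does
    // the same thing with dot(vecToSurfPos, lightAxisX/Y/Z) in RenderReflectionList.
    public static Vector3 ToProbeVolumeSpace(Vector3 viewSpacePos, Vector3 lightPos,
                                             Vector3 axisX, Vector3 axisY, Vector3 axisZ)
    {
        Vector3 d = viewSpacePos - lightPos;
        return new Vector3(Vector3.Dot(d, axisX), Vector3.Dot(d, axisY), Vector3.Dot(d, axisZ));
    }
}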
CommandBuffer cmd = new CommandBuffer() {name = "SetupShaderConstants"};
cmd.SetGlobalMatrix("g_mViewToWorld", viewToWorld);

cmd.SetGlobalVectorArray("gLightPos", m_LightPositions);
cmd.SetGlobalMatrixArray("gLightMatrix", m_LightMatrix);
cmd.SetGlobalMatrixArray("gWorldToLightMatrix", m_WorldToLightMatrix);
cmd.SetGlobalVector("gLightData", new Vector4(totalLightCount, 0, 0, 0));
cmd.SetGlobalVector("gLightData", new Vector4(totalLightCount, probeCount, 0, 0));
//cmd.SetGlobalTexture("_reflCubeTextures", m_CubeReflTexArray.GetTexCache());
cmd.SetGlobalTexture("_reflCubeTextures", m_CubeReflTexArray.GetTexCache());
cmd.SetGlobalBuffer("g_vProbeData", s_LightDataBuffer);
var topCube = ReflectionProbe.defaultTexture;
var defdecode = ReflectionProbe.defaultTextureHDRDecodeValues;
cmd.SetGlobalTexture("_reflRootCubeTexture", topCube);
cmd.SetGlobalFloat("_reflRootHdrDecodeMult", defdecode.x);
cmd.SetGlobalFloat("_reflRootHdrDecodeExp", defdecode.y);
context.ExecuteCommandBuffer(cmd);
cmd.Dispose();
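_reflRootHdrDecodeMult and _reflRootHdrDecodeExp come from ReflectionProbe.defaultTextureHDRDecodeValues and feed the hdr argument of Unity_GlossyEnvironment, which passes them to DecodeHDR. A simplified RGBM-style decode, shown only to illustrate what the two constants control (this is not the exact UnityCG DecodeHDR implementation):

using UnityEngine;

static class HdrDecodeExample
{
    // Simplified RGBM-style decode: the multiplier scales intensity and the exponent
    // shapes how the stored range (alpha) is applied. Illustrative only.
    public static Vector3 Decode(Color rgbm, float mult, float exp)
    {
        float scale = mult * Mathf.Pow(rgbm.a, exp);
        return new Vector3(rgbm.r, rgbm.g, rgbm.b) * scale;
    }
}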

198  Assets/ScriptableRenderPipeline/MobileRenderPipeline/ClassicDeferred/UnityStandardForwardMobile.cginc


#include "UnityStandardCore.cginc"
#include "ShaderBase.h"
#include "../../fptl/LightDefinitions.cs.hlsl"
#define MAX_SHADOW_LIGHTS 10
#define MAX_SHADOWMAP_PER_LIGHT 6

half4 ambientOrLightmapUV : TEXCOORD1; // SH or Lightmap UV
half4 tangentToWorldAndParallax[3] : TEXCOORD2; // [3x3:tangentToWorld | 1x3:empty]
float4 posWorld : TEXCOORD8;
float4 posView : TEXCOORD9;
LIGHTING_COORDS(5,6)
UNITY_FOG_COORDS(7)

float4 posWorld = mul(unity_ObjectToWorld, v.vertex);
o.posWorld = posWorld;
o.posView = mul(unity_WorldToCamera, posWorld);
o.pos = UnityObjectToClipPos(v.vertex);
o.tex = TexCoords(v);

static FragmentCommonData gdata;
static float occlusion;
struct LightInput
{
float4 lightData;
half4 pos;
half4 color;
half4 lightDir;
float4x4 lightMat;
float4x4 worldToLightMat;
};

// reflections
UNITY_DECLARE_ABSTRACT_CUBE_ARRAY(_reflCubeTextures);
UNITY_DECLARE_TEXCUBE(_reflRootCubeTexture);
uniform float _reflRootHdrDecodeMult;
uniform float _reflRootHdrDecodeExp;

StructuredBuffer<SFiniteLightData> g_vProbeData;

// ---- Utilities ---- //
void GetCountAndStart(out uint start, out uint nrLights, uint model)
{
start = model==REFLECTION_LIGHT ? g_numLights : 0; // offset by numLights entries
nrLights = model==REFLECTION_LIGHT ? g_numReflectionProbes : g_numLights;
}
float GetLinearZFromSVPosW(float posW)
{
#if USE_LEFTHAND_CAMERASPACE
float linZ = posW;
#else
float linZ = -posW;
#endif
return linZ;
}

// ---- Reflections ---- //
half3 Unity_GlossyEnvironment (UNITY_ARGS_ABSTRACT_CUBE_ARRAY(tex), int sliceIndex, half4 hdr, Unity_GlossyEnvironmentData glossIn);

half3 distanceFromAABB(half3 p, half3 aabbMin, half3 aabbMax)
{
return max(max(p - aabbMax, aabbMin - p), half3(0.0, 0.0, 0.0));
}

float3 EvalIndirectSpecular(UnityLight light, UnityIndirect ind)
{
return occlusion * UNITY_BRDF_PBS(gdata.diffColor, gdata.specColor, gdata.oneMinusReflectivity, gdata.smoothness, gdata.normalWorld, -gdata.eyeVec, light, ind);
}

float3 RenderReflectionList(uint start, uint numReflProbes, float3 vP, float3 vNw, float3 Vworld, float smoothness)
{
float3 worldNormalRefl = reflect(-Vworld, vNw);
float3 vspaceRefl = mul((float3x3) g_mWorldToView, worldNormalRefl).xyz;
float percRoughness = SmoothnessToPerceptualRoughness(smoothness);

UnityLight light;
light.color = 0;
light.dir = 0;

float3 ints = 0;

// root ibl begin
{
Unity_GlossyEnvironmentData g;
g.roughness = percRoughness;
g.reflUVW = worldNormalRefl;

half3 env0 = Unity_GlossyEnvironment(UNITY_PASS_TEXCUBE(_reflRootCubeTexture), float4(_reflRootHdrDecodeMult, _reflRootHdrDecodeExp, 0.0, 0.0), g);
//half3 env0 = Unity_GlossyEnvironment(UNITY_PASS_TEXCUBEARRAY(_reflCubeTextures), _reflRootSliceIndex, float4(_reflRootHdrDecodeMult, _reflRootHdrDecodeExp, 0.0, 0.0), g);

UnityIndirect ind;
ind.diffuse = 0;
ind.specular = env0;// * data.occlusion;

ints = EvalIndirectSpecular(light, ind);
}
// root ibl end

for (int uIndex=0; uIndex<gLightData.y; uIndex++)
{
SFiniteLightData lgtDat = g_vProbeData[uIndex];

float3 vLp = lgtDat.lightPos.xyz;
float3 vecToSurfPos = vP - vLp; // vector from reflection volume to surface position in camera space
float3 posInReflVolumeSpace = float3( dot(vecToSurfPos, lgtDat.lightAxisX), dot(vecToSurfPos, lgtDat.lightAxisY), dot(vecToSurfPos, lgtDat.lightAxisZ) );

float blendDistance = lgtDat.probeBlendDistance;//unity_SpecCube1_ProbePosition.w; // will be set to blend distance for this probe

float3 sampleDir;
if((lgtDat.flags&IS_BOX_PROJECTED)!=0)
{
// For box projection, use expanded bounds as they are rendered; otherwise
// box projection artifacts when outside of the box.
//float4 boxMin = unity_SpecCube0_BoxMin - float4(blendDistance,blendDistance,blendDistance,0);
//float4 boxMax = unity_SpecCube0_BoxMax + float4(blendDistance,blendDistance,blendDistance,0);
//sampleDir = BoxProjectedCubemapDirection (worldNormalRefl, worldPos, unity_SpecCube0_ProbePosition, boxMin, boxMax);

float4 boxOuterDistance = float4( lgtDat.boxInnerDist + float3(blendDistance, blendDistance, blendDistance), 0.0 );
#if 0
// if rotation is NOT supported
sampleDir = BoxProjectedCubemapDirection(worldNormalRefl, posInReflVolumeSpace, float4(lgtDat.localCubeCapturePoint, 1.0), -boxOuterDistance, boxOuterDistance);
#else
float3 volumeSpaceRefl = float3( dot(vspaceRefl, lgtDat.lightAxisX), dot(vspaceRefl, lgtDat.lightAxisY), dot(vspaceRefl, lgtDat.lightAxisZ) );
float3 vPR = BoxProjectedCubemapDirection(volumeSpaceRefl, posInReflVolumeSpace, float4(lgtDat.localCubeCapturePoint, 1.0), -boxOuterDistance, boxOuterDistance); // Volume space corrected reflection vector
sampleDir = mul( (float3x3) g_mViewToWorld, vPR.x*lgtDat.lightAxisX + vPR.y*lgtDat.lightAxisY + vPR.z*lgtDat.lightAxisZ );
#endif
}
else
sampleDir = worldNormalRefl;

Unity_GlossyEnvironmentData g;
g.roughness = percRoughness;
g.reflUVW = sampleDir;

half3 env0 = Unity_GlossyEnvironment(UNITY_PASS_ABSTRACT_CUBE_ARRAY(_reflCubeTextures), lgtDat.sliceIndex, float4(lgtDat.lightIntensity, lgtDat.decodeExp, 0.0, 0.0), g);

UnityIndirect ind;
ind.diffuse = 0;
ind.specular = env0;// * data.occlusion;

//half3 rgb = UNITY_BRDF_PBS(0, data.specularColor, oneMinusReflectivity, data.smoothness, data.normalWorld, vWSpaceVDir, light, ind).rgb;
half3 rgb = EvalIndirectSpecular(light, ind);

// Calculate falloff value, so reflections on the edges of the Volume would gradually blend to previous reflection.
// Also this ensures that pixels not located in the reflection Volume AABB won't
// accidentally pick up reflections from this Volume.
//half3 distance = distanceFromAABB(worldPos, unity_SpecCube0_BoxMin.xyz, unity_SpecCube0_BoxMax.xyz);
half3 distance = distanceFromAABB(posInReflVolumeSpace, -lgtDat.boxInnerDist, lgtDat.boxInnerDist);
half falloff = saturate(1.0 - length(distance)/blendDistance);

ints = lerp(ints, rgb, falloff);
}

return ints;
}
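The tail of the probe loop blends each probe's contribution over the running result with a falloff based on how far the shaded point lies outside the probe's inner box, measured in volume space. The same math, CPU-side for illustration (names hypothetical):

using UnityEngine;

static class ProbeFalloffExample
{
    // Per-axis distance from a point to an axis-aligned box; zero inside the box.
    static Vector3 DistanceFromAABB(Vector3 p, Vector3 aabbMin, Vector3 aabbMax)
    {
        return Vector3.Max(Vector3.Max(p - aabbMax, aabbMin - p), Vector3.zero);
    }

    // 1 inside the inner box, fading to 0 once the point is blendDistance outside it.
    public static float Falloff(Vector3 posInVolumeSpace, Vector3 boxInnerDist, float blendDistance)
    {
        Vector3 d = DistanceFromAABB(posInVolumeSpace, -boxInnerDist, boxInnerDist);
        return Mathf.Clamp01(1.0f - d.magnitude / blendDistance);
    }
}

The probe's contribution is then lerp(ints, rgb, falloff), so each probe overrides the running result inside its volume and fades out across the blend band.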
float3 GetViewPosFromLinDepth(float2 v2ScrPos, float fLinDepth)
{
float fSx = g_mScrProjection[0].x;
//float fCx = g_mScrProjection[2].x;
float fCx = g_mScrProjection[0].z;
float fSy = g_mScrProjection[1].y;
//float fCy = g_mScrProjection[2].y;
float fCy = g_mScrProjection[1].z;

#if USE_LEFTHAND_CAMERASPACE
return fLinDepth*float3( ((v2ScrPos.x-fCx)/fSx), ((v2ScrPos.y-fCy)/fSy), 1.0 );
#else
return fLinDepth*float3( -((v2ScrPos.x+fCx)/fSx), -((v2ScrPos.y+fCy)/fSy), 1.0 );
#endif
}

half3 Unity_GlossyEnvironment (UNITY_ARGS_ABSTRACT_CUBE_ARRAY(tex), int sliceIndex, half4 hdr, Unity_GlossyEnvironmentData glossIn)
{
#if UNITY_GLOSS_MATCHES_MARMOSET_TOOLBAG2 && (SHADER_TARGET >= 30)
// TODO: remove pow, store cubemap mips differently
half perceptualRoughness = pow(glossIn.roughness, 3.0/4.0);
#else
half perceptualRoughness = glossIn.roughness; // MM: switched to this
#endif

//perceptualRoughness = sqrt(sqrt(2/(64.0+2))); // spec power to the square root of real roughness

#if 0
float m = perceptualRoughness*perceptualRoughness; // m is the real roughness parameter
const float fEps = 1.192092896e-07F; // smallest such that 1.0+FLT_EPSILON != 1.0 (+1e-4h is NOT good here. is visibly very wrong)
float n = (2.0/max(fEps, m*m))-2.0; // remap to spec power. See eq. 21 in --> https://dl.dropboxusercontent.com/u/55891920/papers/mm_brdf.pdf
n /= 4; // remap from n_dot_h formulation to n_dot_r. See section "Pre-convolved Cube Maps vs Path Tracers" --> https://s3.amazonaws.com/docs.knaldtech.com/knald/1.0.0/lys_power_drops.html
perceptualRoughness = pow( 2/(n+2), 0.25); // remap back to square root of real roughness
#else
// MM: came up with a surprisingly close approximation to what the #if 0'ed out code above does.
perceptualRoughness = perceptualRoughness*(1.7 - 0.7*perceptualRoughness);
#endif

half mip = perceptualRoughness * UNITY_SPECCUBE_LOD_STEPS;
half4 rgbm = UNITY_SAMPLE_ABSTRACT_CUBE_ARRAY_LOD(tex, float4(glossIn.reflUVW.xyz, sliceIndex), mip);

return DecodeHDR(rgbm, hdr);
}
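The cube array sampler above picks a mip from perceptual roughness: the #if 0 block carries the full spec-power derivation, while the live path uses the quadratic approximation r*(1.7 - 0.7*r) before scaling by the LOD step count. A CPU-side sketch (UNITY_SPECCUBE_LOD_STEPS is assumed to be 6, its usual value):

static class ReflectionMipExample
{
    const float kSpecCubeLodSteps = 6.0f; // assumed value of UNITY_SPECCUBE_LOD_STEPS

    // Remap perceptual roughness with the approximation from the live branch above,
    // then scale into a cubemap mip level.
    public static float RoughnessToMip(float perceptualRoughness)
    {
        float r = perceptualRoughness * (1.7f - 0.7f * perceptualRoughness);
        return r * kSpecCubeLodSteps;
    }
}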
#define INITIALIZE_LIGHT(light, lightIndex) \
light.lightData = gPerLightData[lightIndex]; \
light.pos = gLightPos[lightIndex]; \
light.color = gLightColor[lightIndex]; \
light.lightDir = gLightDirection[lightIndex]; \
light.lightMat = gLightMatrix[lightIndex]; \
light.worldToLightMat = gWorldToLightMatrix[lightIndex];
float3 ExecuteReflectionList(out uint numReflectionProbesProcessed, uint2 pixCoord, float3 vP, float3 vNw, float3 Vworld, float smoothness)
{
uint start = 0, numReflectionProbes = 0;
GetCountAndStart(start, numReflectionProbes, REFLECTION_LIGHT);
numReflectionProbesProcessed = numReflectionProbes; // mainly for debugging/heat maps
return RenderReflectionList(start, numReflectionProbes, vP, vNw, Vworld, smoothness);
}
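ExecuteReflectionList only resolves the probe range and forwards to RenderReflectionList; for box-projected probes the interesting step is BoxProjectedCubemapDirection, which intersects the reflection ray with the probe's box and re-aims the lookup at the capture point. A CPU-side sketch of that intersection (an illustrative helper, not Unity's implementation; it assumes the reflection direction has no zero components):

using UnityEngine;

static class BoxProjectionExample
{
    // Intersect a reflection ray with the probe box and return the direction from
    // the capture point to the hit point; positions and directions share one space.
    public static Vector3 BoxProjectedDirection(Vector3 pos, Vector3 refl,
                                                Vector3 capturePoint, Vector3 boxMin, Vector3 boxMax)
    {
        // Distance along the ray to the box face it is heading towards on each axis.
        Vector3 t = new Vector3(((refl.x > 0f ? boxMax.x : boxMin.x) - pos.x) / refl.x,
                                ((refl.y > 0f ? boxMax.y : boxMin.y) - pos.y) / refl.y,
                                ((refl.z > 0f ? boxMax.z : boxMin.z) - pos.z) / refl.z);
        float dist = Mathf.Min(t.x, Mathf.Min(t.y, t.z));
        return pos + refl * dist - capturePoint;
    }
}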
// ---- Lights ---- //

float3 RenderLightList(uint start, uint numLights, float3 vPw, float3 Vworld)

return ints;
}
float3 ExecuteLightList(out uint numLightsProcessed, uint2 pixCoord, float3 vPw, float3 Vworld)
{
uint start = 0, numLights = 0;

return RenderLightList(start, numLights, vPw, Vworld);
}
// fragment shader main
half4 fragForward(VertexOutputForwardNew i) : SV_Target
{
//float linZ = GetLinearZFromSVPosW(i.pos.w); // matching script side where camera space is right handed.

float3 vP = i.posView;
float3 vPw = i.posWorld;
float3 Vworld = normalize(_WorldSpaceCameraPos.xyz - vPw);

res += ExecuteLightList(numLightsProcessed, pixCoord, vPw, Vworld);
// specular GI
//res += ExecuteReflectionList(numReflectionsProcessed, pixCoord, vP, gdata.normalWorld, Vworld, gdata.smoothness);
res += ExecuteReflectionList(numReflectionsProcessed, pixCoord, vP, gdata.normalWorld, Vworld, gdata.smoothness);
// diffuse GI
res += UNITY_BRDF_PBS (gdata.diffColor, gdata.specColor, gdata.oneMinusReflectivity, gdata.smoothness, gdata.normalWorld, -gdata.eyeVec, gi.light, gi.indirect).xyz;
