
Merge pull request #1373 from Unity-Technologies/stacklit

Stacklit
Branch: main
Committed by GitHub, 7 years ago
Current commit: faac9fcb
8 changed files with 234 additions and 69 deletions
  1. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs (6 changes)
  2. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs.hlsl (38 changes)
  3. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.hlsl (115 changes)
  4. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitData.hlsl (21 changes)
  5. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/AssetPostProcessors.meta (8 changes)
  6. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/AssetPostProcessors/NormalMapVarianceTexturePostprocessor.cs (104 changes)
  7. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/AssetPostProcessors/NormalMapVarianceTexturePostprocessor.cs.meta (11 changes)

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs (6 changes)


[SurfaceDataAttributes(new string[]{"Coat Normal", "Coat Normal View Space"}, true)]
public Vector3 coatNormalWS;
[SurfaceDataAttributes("Average Normal Length A")]
public float averageNormalLengthA;
[SurfaceDataAttributes("Average Normal Length B")]
public float averageNormalLengthB;
[SurfaceDataAttributes("Smoothness A")]
public float perceptualSmoothnessA;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs.hlsl (38 changes)


#define DEBUGVIEW_STACKLIT_SURFACEDATA_GEOMETRIC_NORMAL_VIEW_SPACE (1308)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_NORMAL (1309)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_NORMAL_VIEW_SPACE (1310)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_A (1311)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_B (1312)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_LOBE_MIXING (1313)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_TANGENT (1314)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_ANISOTROPY (1315)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_IRIDESCENCE_IOR (1316)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_IRIDESCENCE_THICKNESS (1317)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_SMOOTHNESS (1318)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_IOR (1319)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_THICKNESS (1320)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_EXTINCTION_COEFFICIENT (1321)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_DIFFUSION_PROFILE (1322)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SUBSURFACE_MASK (1323)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_THICKNESS (1324)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_AVERAGE_NORMAL_LENGTH_A (1311)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_AVERAGE_NORMAL_LENGTH_B (1312)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_A (1313)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_B (1314)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_LOBE_MIXING (1315)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_TANGENT (1316)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_ANISOTROPY (1317)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_IRIDESCENCE_IOR (1318)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_IRIDESCENCE_THICKNESS (1319)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_SMOOTHNESS (1320)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_IOR (1321)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_THICKNESS (1322)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_EXTINCTION_COEFFICIENT (1323)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_DIFFUSION_PROFILE (1324)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SUBSURFACE_MASK (1325)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_THICKNESS (1326)
//
// UnityEngine.Experimental.Rendering.HDPipeline.StackLit+BSDFData: static fields

float3 normalWS;
float3 geomNormalWS;
float3 coatNormalWS;
float averageNormalLengthA;
float averageNormalLengthB;
float perceptualSmoothnessA;
float perceptualSmoothnessB;
float lobeMix;

break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_COAT_NORMAL_VIEW_SPACE:
result = surfacedata.coatNormalWS * 0.5 + 0.5;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_AVERAGE_NORMAL_LENGTH_A:
result = surfacedata.averageNormalLengthA.xxx;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_AVERAGE_NORMAL_LENGTH_B:
result = surfacedata.averageNormalLengthB.xxx;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_A:
result = surfacedata.perceptualSmoothnessA.xxx;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.hlsl (115 changes)


float variance = _SpecularAntiAliasingScreenSpaceVariance * (dot(deltaU, deltaU) + dot(deltaV, deltaV));
float squaredRoughness = saturate(r * r + min(2.0 * variance, _SpecularAntiAliasingThreshold));
return variance;
}
// Based on The Order : 1886 SIGGRAPH course notes implementation.
float AdjustRoughness(float avgNormalLength)
{
if (avgNormalLength < 1.0)
{
float avgNormLen2 = avgNormalLength * avgNormalLength;
float kappa = (3 * avgNormalLength - avgNormalLength * avgNormLen2) / (1 - avgNormLen2);
return 1.0 / (2.0 * kappa);
}
return sqrt(squaredRoughness);
return 0.0f;
float FilterRoughness(float r, float3 geomNormalWS, float averageNormalLength)
{
// Specular AA: NormalCurvatureToRoughness
r = lerp(r, max(NormalCurvatureToRoughness(geomNormalWS), r), _NormalCurvatureToRoughnessEnabled);
// Specular AA: Tokuyoshi Filtering + 1886 normal filtering.
float varianceGeom = FilterRoughness_TOKUYOSHI(geomNormalWS, r);
float varianceNorm = AdjustRoughness(averageNormalLength);
float filteredRoughness = sqrt(saturate(r * r + min(2.0 * (varianceGeom + varianceNorm), _SpecularAntiAliasingThreshold)));
return lerp(r, filteredRoughness, _SpecularAntiAliasingEnabled);
}
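// For reference, a minimal sketch of the normal-length-to-variance step named
// above (The Order: 1886 course notes), assuming the combined FilterRoughness
// path shown in this hunk is the intended final form. The helper name is
// hypothetical; the constants come from AdjustRoughness above.
float NormalLengthToVariance(float avgNormalLength)
{
    if (avgNormalLength >= 1.0)
        return 0.0; // unfiltered texel: averaged normal is unit length, no extra variance
    float len2  = avgNormalLength * avgNormalLength;
    // von Mises-Fisher sharpness fitted from the average normal length:
    float kappa = (3.0 * avgNormalLength - avgNormalLength * len2) / (1.0 - len2);
    return 1.0 / (2.0 * kappa);
}
// Example: avgNormalLength = 0.95 gives kappa ~= 20.4 and variance ~= 0.024, so a
// perfectly smooth input (r = 0) is widened to roughly sqrt(2 * 0.024) ~= 0.22,
// before the _SpecularAntiAliasingThreshold clamp.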
//-----------------------------------------------------------------------------
// conversion function for forward

bsdfData.perceptualRoughnessA = PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothnessA);
bsdfData.perceptualRoughnessB = PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothnessB);
// Specular AA: NormalCurvatureToRoughness
float normalCurvatureToRoughnessA = max(NormalCurvatureToRoughness(bsdfData.normalWS), bsdfData.perceptualRoughnessA);
float normalCurvatureToRoughnessB = max(NormalCurvatureToRoughness(bsdfData.normalWS), bsdfData.perceptualRoughnessB);
bsdfData.perceptualRoughnessA = lerp(bsdfData.perceptualRoughnessA, normalCurvatureToRoughnessA, _NormalCurvatureToRoughnessEnabled);
bsdfData.perceptualRoughnessB = lerp(bsdfData.perceptualRoughnessB, normalCurvatureToRoughnessB, _NormalCurvatureToRoughnessEnabled);
// Specular AA: Tokuyoshi Filtering.
float filterRoughnessA = FilterRoughness_TOKUYOSHI(bsdfData.normalWS, bsdfData.perceptualRoughnessA);
float filterRoughnessB = FilterRoughness_TOKUYOSHI(bsdfData.normalWS, bsdfData.perceptualRoughnessB);
bsdfData.perceptualRoughnessA = lerp(bsdfData.perceptualRoughnessA, filterRoughnessA, _SpecularAntiAliasingEnabled);
bsdfData.perceptualRoughnessB = lerp(bsdfData.perceptualRoughnessB, filterRoughnessB, _SpecularAntiAliasingEnabled);
// Specular AA / Filtering.
bsdfData.perceptualRoughnessA = FilterRoughness(bsdfData.perceptualRoughnessA, bsdfData.geomNormalWS, surfaceData.averageNormalLengthA);
bsdfData.perceptualRoughnessB = FilterRoughness(bsdfData.perceptualRoughnessB, bsdfData.geomNormalWS, surfaceData.averageNormalLengthB);
bsdfData.lobeMix = surfaceData.lobeMix;
// There is no metallic with SSS and specular color mode

struct PreLightData
{
float NdotV[NB_NORMALS]; // Could be negative due to normal mapping, use ClampNdotV()
//float NdotV;
float geomNdotV;
float bottomAngleFGD;
float TdotV; // Stored only when VLAYERED_RECOMPUTE_PERLIGHT
float BdotV;

// slnote dual map
float PreLightData_GetBaseNdotVForFGD(BSDFData bsdfData, PreLightData preLightData, float NdotV[NB_NORMALS])
{
float baseLayerNdotV;

return baseLayerNdotV;
}
// slnote dual map
void PreLightData_SetupNormals(BSDFData bsdfData, inout PreLightData preLightData, float3 V, out float3 N[NB_NORMALS], out float NdotV[NB_NORMALS])
{
N[BASE_NORMAL_IDX] = bsdfData.normalWS;

N[COAT_NORMAL_IDX] = bsdfData.coatNormalWS;
preLightData.NdotV[COAT_NORMAL_IDX] = dot(N[COAT_NORMAL_IDX], V);
NdotV[COAT_NORMAL_IDX] = ClampNdotV(preLightData.NdotV[COAT_NORMAL_IDX]);
preLightData.geomNdotV = dot(bsdfData.geomNormalWS, V);
}
#endif
}

IF_DEBUG( if(_DebugLobeMask.y == 0.0) DV[BASE_LOBEA_IDX] = (float3)0; )
IF_DEBUG( if(_DebugLobeMask.z == 0.0) DV[BASE_LOBEB_IDX] = (float3)0; )
specularLighting = preLightData.vLayerEnergyCoeff[BOTTOM_VLAYER_IDX]
specularLighting = max(0, NdotL[DNLV_BASE_IDX]) * preLightData.vLayerEnergyCoeff[BOTTOM_VLAYER_IDX]
specularLighting += preLightData.vLayerEnergyCoeff[TOP_VLAYER_IDX]
specularLighting += max(0, NdotL[DNLV_COAT_IDX]) * preLightData.vLayerEnergyCoeff[TOP_VLAYER_IDX]
* preLightData.energyCompensationFactor[COAT_LOBE_IDX]
* DV[COAT_LOBE_IDX];

IF_DEBUG( if(_DebugLobeMask.y == 0.0) DV[BASE_LOBEA_IDX] = (float3)0; )
IF_DEBUG( if(_DebugLobeMask.z == 0.0) DV[BASE_LOBEB_IDX] = (float3)0; )
specularLighting = F * lerp(DV[0]*preLightData.energyCompensationFactor[BASE_LOBEA_IDX],
DV[1]*preLightData.energyCompensationFactor[BASE_LOBEB_IDX],
bsdfData.lobeMix);
specularLighting = max(0, NdotL[0]) * F * lerp(DV[0]*preLightData.energyCompensationFactor[BASE_LOBEA_IDX],
DV[1]*preLightData.energyCompensationFactor[BASE_LOBEB_IDX],
bsdfData.lobeMix);
float3 diffuseTerm = Lambert();
float3 diffuseTerm = Lambert() * max(0, NdotL[DNLV_BASE_IDX]);
#ifdef VLAYERED_DIFFUSE_ENERGY_HACKED_TERM
// TODOTODO: Energy when vlayered.

void EvaluateBSDF_GetNormalUnclampedNdotV(BSDFData bsdfData, PreLightData preLightData, float3 V, out float3 N, out float unclampedNdotV)
{
//TODO: This affects transmission and SSS, choose the normal to use when we have
// both. For now, just use the base:
N = bsdfData.normalWS;
unclampedNdotV = preLightData.NdotV[BASE_NORMAL_IDX];
//TODOWIP for now just return geometric normal:
N = bsdfData.geomNormalWS;
unclampedNdotV = dot(N, V);
}
else
#ifdef _STACKLIT_DEBUG
if(_DebugLobeMask.w == 2.0)
{
N = bsdfData.coatNormalWS;
unclampedNdotV = preLightData.NdotV[COAT_NORMAL_IDX];
}
else if(_DebugLobeMask.w == 3.0)
{
N = bsdfData.geomNormalWS;
unclampedNdotV = preLightData.geomNdotV;
}
{
// TODOWIP, for now, preserve previous behavior
N = bsdfData.normalWS;
unclampedNdotV = preLightData.NdotV[BASE_NORMAL_IDX];
#endif // _MATERIAL_FEATURE_COAT_NORMALMAP
}
//-----------------------------------------------------------------------------

DirectLighting lighting;
ZERO_INITIALIZE(DirectLighting, lighting);
//slnote dual map
//float3 N = bsdfData.normalWS;
//float NdotV = ClampNdotV(preLightData.NdotV);
float NdotV = ClampNdotV(unclampedNdotV);
float NdotL = dot(N, L);
float LdotV = dot(L, V);

float attenuation;
EvaluateLight_Directional(lightLoopContext, posInput, lightData, bakeLightingData, N, L, color, attenuation);
float intensity = max(0, attenuation * NdotL); // Warning: attenuation can be greater than 1 due to the inverse square attenuation (when position is close to light)
// For shadow attenuation (ie receiver bias), always use the geometric normal:
EvaluateLight_Directional(lightLoopContext, posInput, lightData, bakeLightingData, bsdfData.geomNormalWS, L, color, attenuation);
// Note: We use NdotL here to early out, but in case of coat this is not correct. But we are ok with this
float intensity = max(0, attenuation); // Warning: attenuation can be greater than 1 due to the inverse square attenuation (when position is close to light)
// Note: the NdotL term is now applied in the BSDF() eval itself to account for different normals.
UNITY_BRANCH if (intensity > 0.0)
{
BSDF(V, L, NdotL, posInput.positionWS, preLightData, bsdfData, lighting.diffuse, lighting.specular);

if (_DebugLightingMode == DEBUGLIGHTINGMODE_LUX_METER)
{
// Only lighting, not BSDF
intensity = max(0, attenuation * NdotL);
lighting.diffuse = color * intensity * lightData.diffuseScale;
}
#endif
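For context, the hunk above moves the NdotL factor out of the light-loop early-out and into BSDF(), because the coat and base lobes can now shade with different normals, while shadow attenuation keeps using the geometric normal. A minimal sketch of the resulting order of operations, using only identifiers that appear in the hunk (surrounding boilerplate elided):

// Receiver bias / shadow attenuation always uses the geometric normal.
EvaluateLight_Directional(lightLoopContext, posInput, lightData, bakeLightingData,
                          bsdfData.geomNormalWS, L, color, attenuation);
// No NdotL folded in here: each lobe applies its own clamped NdotL inside BSDF().
float intensity = max(0, attenuation);
UNITY_BRANCH if (intensity > 0.0)
{
    BSDF(V, L, NdotL, posInput.positionWS, preLightData, bsdfData,
         lighting.diffuse, lighting.specular);
    // lighting.diffuse / lighting.specular are then scaled by color, intensity
    // and the light dimmers as in the original function body.
}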

distances.xyz = float3(dist, distSq, distRcp);
}
//slnote dual map
//float3 N = bsdfData.normalWS;
//float NdotV = ClampNdotV(preLightData.NdotV);
float NdotV = ClampNdotV(unclampedNdotV);
float NdotL = dot(N, L);
float LdotV = dot(L, V);

float3 color;
float attenuation;
EvaluateLight_Punctual(lightLoopContext, posInput, lightData, bakeLightingData, N, L,
// For shadow attenuation (ie receiver bias), always use the geometric normal:
EvaluateLight_Punctual(lightLoopContext, posInput, lightData, bakeLightingData, bsdfData.geomNormalWS, L,
float intensity = max(0, attenuation * NdotL); // Warning: attenuation can be greater than 1 due to the inverse square attenuation (when position is close to light)
float intensity = max(0, attenuation); // Warning: attenuation can be greater than 1 due to the inverse square attenuation (when position is close to light)
// Note: We use NdotL here to early out, but in case of coat this is not correct. But we are ok with this
// Note: the NdotL term is now applied in the BSDF() eval itself to account for different normals.
UNITY_BRANCH if (intensity > 0.0)
{
// Simulate a sphere light with this hack

if (_DebugLightingMode == DEBUGLIGHTINGMODE_LUX_METER)
{
// Only lighting, not BSDF
intensity = max(0, attenuation * NdotL);
lighting.diffuse = color * intensity * lightData.diffuseScale;
}
#endif

if( (i == (0 IF_FEATURE_COAT(+1))) && _DebugEnvLobeMask.y == 0.0) continue;
if( (i == (1 IF_FEATURE_COAT(+1))) && _DebugEnvLobeMask.z == 0.0) continue;
#endif
//slnote dual map
//EvaluateLight_EnvIntersection(positionWS, bsdfData.normalWS, lightData, influenceShapeType, R[i], tempWeight[i]);
EvaluateLight_EnvIntersection(positionWS, influenceNormal, lightData, influenceShapeType, R[i], tempWeight[i]);
// When we are rough, we tend to see outward shifting of the reflection when at the boundary of the projection volume

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitData.hlsl (21 changes)


#endif
}
float3 SampleTexture2DTriplanarNormalScaleBias(TEXTURE2D_ARGS(textureName, samplerName), float textureNameUV, float textureNameUVLocal, float4 textureNameST, float textureNameObjSpace, TextureUVMapping uvMapping, float scale)
float4 SampleTexture2DTriplanarNormalScaleBias(TEXTURE2D_ARGS(textureName, samplerName), float textureNameUV, float textureNameUVLocal, float4 textureNameST, float textureNameObjSpace, TextureUVMapping uvMapping, float scale)
{
if (textureNameObjSpace)
{

// Decompress the normal ourselves
float3 normalOS = SampleTexture2DTriplanarScaleBias(TEXTURE2D_PARAM(textureName, samplerName), textureNameUV, textureNameUVLocal, textureNameST, uvMapping).xyz * 2.0 - 1.0;
// no need to renormalize normalOS for SurfaceGradientFromPerturbedNormal
return SurfaceGradientFromPerturbedNormal(uvMapping.vertexNormalWS, TransformObjectToWorldDir(normalOS));
return float4(SurfaceGradientFromPerturbedNormal(uvMapping.vertexNormalWS, TransformObjectToWorldDir(normalOS)), 1.0);
}
else
{

// Assume derivXplane, derivYPlane and derivZPlane sampled using (z,y), (z,x) and (x,y) respectively.
float3 volumeGrad = float3(derivZPlane.x + derivYPlane.y, derivZPlane.y + derivXplane.y, derivXplane.x + derivYPlane.x);
return SurfaceGradientFromVolumeGradient(uvMapping.vertexNormalWS, volumeGrad);
return float4(SurfaceGradientFromVolumeGradient(uvMapping.vertexNormalWS, volumeGrad), 1.0);
}
#endif

{
return SurfaceGradientFromTBN(deriv, uvMapping.vertexTangentWS[textureNameUV], uvMapping.vertexBitangentWS[textureNameUV]);
return float4(SurfaceGradientFromTBN(deriv, uvMapping.vertexTangentWS[textureNameUV], uvMapping.vertexBitangentWS[textureNameUV]), 1.0);
}
else
{

else if (textureNameUV == TEXCOORD_INDEX_PLANAR_XY)
volumeGrad = float3(deriv.x, deriv.y, 0.0);
return SurfaceGradientFromVolumeGradient(uvMapping.vertexNormalWS, volumeGrad);
return float4(SurfaceGradientFromVolumeGradient(uvMapping.vertexNormalWS, volumeGrad), 1.0f);
}
}
}

// Standard
surfaceData.baseColor = SAMPLE_TEXTURE2D_SCALE_BIAS(_BaseColorMap).rgb * _BaseColor.rgb;
float3 gradient = SAMPLE_TEXTURE2D_NORMAL_SCALE_BIAS(_NormalMap, _NormalScale);
float4 gradient = SAMPLE_TEXTURE2D_NORMAL_SCALE_BIAS(_NormalMap, _NormalScale);
//TODO: bentNormalTS
surfaceData.perceptualSmoothnessA = dot(SAMPLE_TEXTURE2D_SCALE_BIAS(_SmoothnessAMap), _SmoothnessAMapChannelMask);

#endif
surfaceData.tangentWS = normalize(input.worldToTangent[0].xyz); // The tangent is not normalized in worldToTangent for mikkt. TODO: Check with Morten if it is expected that we normalize. Tag: SURFACE_GRADIENT
float3 coatGradient = float3(0.0, 0.0, 0.0);
float4 coatGradient = float4(0.0, 0.0, 0.0, 1.0f);
#ifdef _MATERIAL_FEATURE_COAT
surfaceData.materialFeatures |= MATERIALFEATUREFLAGS_STACK_LIT_COAT;
surfaceData.coatPerceptualSmoothness = dot(SAMPLE_TEXTURE2D_SCALE_BIAS(_CoatSmoothnessMap), _CoatSmoothnessMapChannelMask);

surfaceData.geomNormalWS = input.worldToTangent[2];
// Convert back to world space normal
surfaceData.normalWS = SurfaceGradientResolveNormal(input.worldToTangent[2], gradient);
surfaceData.coatNormalWS = SurfaceGradientResolveNormal(input.worldToTangent[2], coatGradient);
surfaceData.normalWS = SurfaceGradientResolveNormal(input.worldToTangent[2], gradient.xyz);
surfaceData.coatNormalWS = SurfaceGradientResolveNormal(input.worldToTangent[2], coatGradient.xyz);
surfaceData.averageNormalLengthA = gradient.w;
surfaceData.averageNormalLengthB = coatGradient.w;
// TODO: decal etc.
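The surface-gradient form is what lets the base and coat normal maps be resolved independently against the same interpolated vertex normal. As an illustration only (the blend below is not part of the diff; SurfaceGradientResolveNormal and input.worldToTangent come from the hunk above), gradients are additive, so two bump contributions could also be mixed before a single resolve:

// Illustrative only: combine two bump contributions in gradient space, then resolve once.
float3 mixedGradient = gradient.xyz + coatGradient.xyz;
float3 mixedNormalWS = SurfaceGradientResolveNormal(input.worldToTangent[2], mixedGradient);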

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/AssetPostProcessors.meta (8 changes)


fileFormatVersion: 2
guid: 3505be0d02e25a04f8932a6c2243015b
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/AssetPostProcessors/NormalMapVarianceTexturePostprocessor.cs (104 changes)


using System;
using UnityEditor;
using UnityEngine;
public class NormalMapVarianceTexturePostprocessor : AssetPostprocessor
{
void OnPreprocessTexture()
{
if (assetPath.IndexOf("_variance", StringComparison.InvariantCultureIgnoreCase) != -1)
{
// Make sure we don't convert as a normal map.
TextureImporter textureImporter = (TextureImporter)assetImporter;
textureImporter.convertToNormalmap = false;
textureImporter.textureCompression = TextureImporterCompression.CompressedHQ;
textureImporter.linearTexture = true; // Says deprecated but won't work without it.
textureImporter.sRGBTexture = false; // But we're setting the new property just in case it changes later...
}
}
private static Color GetColor(Color[] source, int x, int y, int width, int height)
{
x = (x + width) % width;
y = (y + height) % height;
int index = y * width + x;
var c = source[index];
return c;
}
private static Vector3 GetNormal(Color[] source, int x, int y, int width, int height)
{
Vector3 n = (Vector4)GetColor(source, x, y, width, height);
n = 2.0f * n - Vector3.one;
n.Normalize();
return n;
}
private static Vector3 GetAverageNormal(Color[] source, int x, int y, int width, int height, int texelFootprint)
{
Vector3 averageNormal = new Vector3(0, 0, 0);
// Average the source normals over the texel footprint.
for (int i = 0; i < texelFootprint; ++i)
{
for (int j = 0; j < texelFootprint; ++j)
{
averageNormal += GetNormal(source, x + i, y + j, width, height);
}
}
averageNormal /= (texelFootprint * texelFootprint);
return averageNormal;
}
void OnPostprocessTexture(Texture2D texture)
{
if (assetPath.IndexOf("_variance", StringComparison.InvariantCultureIgnoreCase) != -1)
{
// For mip 0, set the normal length to 1.
{
Color[] c = texture.GetPixels(0);
for (int i = 0; i < c.Length; i++)
{
c[i].a = 1.0f;
}
texture.SetPixels(c, 0);
}
// Based on The Order : 1886 SIGGRAPH course notes implementation. Sample all normal map
// texels from the base mip level that are within the footprint of the current mipmap texel.
Color[] source = texture.GetPixels(0);
for (int m = 1; m < texture.mipmapCount; m++)
{
Color[] c = texture.GetPixels(m);
int mipWidth = Math.Max(1, texture.width >> m);
int mipHeight = Math.Max(1, texture.height >> m);
for (int x = 0; x < mipWidth; ++x)
{
for (int y = 0; y < mipHeight; ++y)
{
int texelFootprint = 1 << m;
Vector3 averageNormal = GetAverageNormal(source, x * texelFootprint, y * texelFootprint,
texture.width, texture.height, texelFootprint);
// Store the normal length for the average normal.
int outputPosition = y * mipWidth + x;
//c[outputPosition].a = averageNormal.magnitude;
float normalLength = Math.Max(0.0f, Math.Min(1.0f, averageNormal.magnitude));
c[outputPosition].a = normalLength;
}
}
texture.SetPixels(c, m);
}
}
}
}
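On the shader side, the average normal length that this postprocessor writes into the alpha channel is presumably what ends up in surfaceData.averageNormalLengthA/B through the gradient's .w component (see the StackLitData.hlsl hunk above). A minimal consumption sketch; the texture/sampler parameters and helper name are assumptions rather than part of the diff:

// Hypothetical helper: read the per-mip average normal length written by the
// importer and feed it to the roughness filter added in StackLit.hlsl.
float FilteredRoughnessFromNormalMap(TEXTURE2D_ARGS(normalMap, normalSampler), float2 uv,
                                     float perceptualRoughness, float3 geomNormalWS)
{
    // The map is imported linear and not converted to a normal map, so .xyz is the
    // packed normal and .a carries the average normal length (1.0 at mip 0).
    float avgNormalLength = SAMPLE_TEXTURE2D(normalMap, normalSampler, uv).a;
    return FilterRoughness(perceptualRoughness, geomNormalWS, avgNormalLength);
}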

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/AssetPostProcessors/NormalMapVarianceTexturePostprocessor.cs.meta (11 changes)


fileFormatVersion: 2
guid: c7467d803a5cf8c479b64fbdac53aa41
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: