
Merge pull request #362 from Unity-Technologies/BasicRenderPipelineXR

Add XR Support to BasicRenderPipeline

Commit f11434e9 on RenderPassXR_Sandbox · 8 years ago
2 files changed, 107 insertions(+), 5 deletions(-)
  1. Assets/ScriptableRenderPipeline/BasicRenderPipeline/BasicRenderPipeline.cs (108 changes)
  2. Assets/ScriptableRenderPipeline/BasicRenderPipeline/BasicRenderPipelineShader.shader (4 changes)

Assets/ScriptableRenderPipeline/BasicRenderPipeline/BasicRenderPipeline.cs (108 changes)


using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;
using UnityEngine.XR;
// Very basic scriptable rendering loop example:
// - Use with BasicRenderPipelineShader.shader (the loop expects "BasicPass" pass type to exist)

[ExecuteInEditMode]
public class BasicRenderPipeline : RenderPipelineAsset
{
    public bool UseIntermediateRenderTargetBlit;

#if UNITY_EDITOR
    [UnityEditor.MenuItem("RenderPipeline/Create BasicRenderPipeline")]
    static void CreateBasicRenderPipeline()
    // ... (menu handler body collapsed in the diff)

    protected override IRenderPipeline InternalCreatePipeline()
    {
        // was: return new BasicRenderPipelineInstance();
        return new BasicRenderPipelineInstance(UseIntermediateRenderTargetBlit);
    }

    // ... (in BasicRenderPipelineInstance, the pipeline instance returned above)

    bool useIntermediateBlit;

    public BasicRenderPipelineInstance()
    {
        useIntermediateBlit = false;
    }

    public BasicRenderPipelineInstance(bool useIntermediate)
    {
        useIntermediateBlit = useIntermediate;
    }

    // in the instance's Render method:
    // was: BasicRendering.Render(renderContext, cameras);
    BasicRendering.Render(renderContext, cameras, useIntermediateBlit);
    }
}
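The diff collapses the body of CreateBasicRenderPipeline. Purely as an illustration of the asset-creation pattern (the asset path and the activation step below are assumptions, not part of this PR, and GraphicsSettings.renderPipelineAsset may differ across Unity versions of this era), a menu handler for a RenderPipelineAsset typically looks like the following sketch:

#if UNITY_EDITOR
using UnityEditor;
using UnityEngine;
using UnityEngine.Rendering;

static class BasicRenderPipelineMenuSketch
{
    [MenuItem("RenderPipeline/Create BasicRenderPipeline (sketch)")]
    static void CreateAsset()
    {
        // Hypothetical asset path; the real handler's path is not shown in the diff.
        var instance = ScriptableObject.CreateInstance<BasicRenderPipeline>();
        AssetDatabase.CreateAsset(instance, "Assets/BasicRenderPipeline.asset");

        // Optional (assumption): make the new asset the active pipeline so the
        // UseIntermediateRenderTargetBlit toggle and the XR path are exercised.
        GraphicsSettings.renderPipelineAsset = instance;
    }
}
#endif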

// In the static BasicRendering class:

    // was: public static void Render(ScriptableRenderContext context, IEnumerable<Camera> cameras)

    static void ConfigureAndBindIntermediateRenderTarget(ScriptableRenderContext context, Camera cam, bool stereoEnabled, out RenderTargetIdentifier intermediateRTID, out bool isRTTexArray)
    {
        var intermediateRT = Shader.PropertyToID("_IntermediateTarget");
        intermediateRTID = new RenderTargetIdentifier(intermediateRT);
        isRTTexArray = false;

        var bindIntermediateRTCmd = CommandBufferPool.Get("Bind intermediate RT");
        if (stereoEnabled)
        {
            // Allocate the intermediate target from the XR device's eye texture descriptor
            RenderTextureDescriptor vrDesc = XRSettings.eyeTextureDesc;
            vrDesc.depthBufferBits = 24;
            if (vrDesc.dimension == TextureDimension.Tex2DArray)
                isRTTexArray = true;
            bindIntermediateRTCmd.GetTemporaryRT(intermediateRT, vrDesc, FilterMode.Point);
        }
        else
        {
            int w = cam.pixelWidth;
            int h = cam.pixelHeight;
            bindIntermediateRTCmd.GetTemporaryRT(intermediateRT, w, h, 24, FilterMode.Point, RenderTextureFormat.Default, RenderTextureReadWrite.Default, 1, true);
        }

        if (isRTTexArray)
            bindIntermediateRTCmd.SetRenderTarget(intermediateRTID, 0, CubemapFace.Unknown, -1); // depthSlice == -1 => bind all slices
        else
            bindIntermediateRTCmd.SetRenderTarget(intermediateRTID);

        context.ExecuteCommandBuffer(bindIntermediateRTCmd);
        CommandBufferPool.Release(bindIntermediateRTCmd);
    }
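For reference, the Tex2DArray branch above corresponds to single-pass instanced or multiview stereo, where the device's eye target carries one slice per eye; double-wide single-pass and multi-pass stereo expose a plain 2D target instead. A small illustrative check (not part of the PR) using the same XRSettings.eyeTextureDesc the method reads:

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.XR;

static class StereoTargetInfo
{
    // Illustrative only: report how the XR device lays out its eye target,
    // which is what drives the isRTTexArray path above.
    public static void LogEyeTargetLayout()
    {
        RenderTextureDescriptor desc = XRSettings.eyeTextureDesc;
        bool isTexArray = desc.dimension == TextureDimension.Tex2DArray;
        Debug.LogFormat("Eye target {0}x{1}, slices={2}, layout={3}",
            desc.width, desc.height, desc.volumeDepth,
            isTexArray ? "Tex2DArray (instanced / multiview)" : "Tex2D (double-wide or multi-pass)");
    }
}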
    static void BlitFromIntermediateToCameraTarget(ScriptableRenderContext context, RenderTargetIdentifier intermediateRTID, bool isRTTexArray)
    {
        var blitIntermediateRTCmd = CommandBufferPool.Get("Copy intermediate RT to default RT");
        if (isRTTexArray)
        {
            // Currently, Blit does not allow specification of a slice in a texture array.
            // It can use the CurrentActive render texture's bound slices, so we use that
            // as a temporary workaround.
            blitIntermediateRTCmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget, 0, CubemapFace.Unknown, -1);
            blitIntermediateRTCmd.Blit(intermediateRTID, BuiltinRenderTextureType.CurrentActive);
        }
        else
        {
            blitIntermediateRTCmd.Blit(intermediateRTID, BuiltinRenderTextureType.CameraTarget);
        }

        context.ExecuteCommandBuffer(blitIntermediateRTCmd);
        CommandBufferPool.Release(blitIntermediateRTCmd);
    }
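As the workaround comment notes, CommandBuffer.Blit cannot address an individual slice of a texture array here. Where the intermediate target and the destination happen to share dimensions and format, a slice-by-slice CommandBuffer.CopyTexture is one possible alternative; the helper below is only a sketch under that assumption, not what the PR does:

    // Sketch: copy each eye slice explicitly instead of relying on the CurrentActive
    // workaround. Requires matching sizes and formats between source and destination,
    // which is not guaranteed in general (hence the Blit path above).
    static void CopyEyeSlices(ScriptableRenderContext context, RenderTargetIdentifier src, RenderTargetIdentifier dst, int sliceCount)
    {
        var cmd = CommandBufferPool.Get("Copy eye slices");
        for (int slice = 0; slice < sliceCount; slice++)
            cmd.CopyTexture(src, slice, dst, slice);   // element index = texture-array slice
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }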
    public static void Render(ScriptableRenderContext context, IEnumerable<Camera> cameras, bool useIntermediateBlitPath)
    {
        bool stereoEnabled = XRSettings.isDeviceActive;

        // ... (per-camera loop and culling-parameter declaration collapsed in the diff)

        // was: if (!CullResults.GetCullingParameters(camera, out cullingParams))
        // Stereo-aware culling parameters are configured to perform a single cull for both eyes
        if (!CullResults.GetCullingParameters(camera, stereoEnabled, out cullingParams))
            continue;
        CullResults cull = new CullResults();
        CullResults.Cull(ref cullingParams, context, ref cull);

        // was: context.SetupCameraProperties(camera);
        // If stereo is enabled, we also configure stereo matrices, viewports, and XR device render targets
        context.SetupCameraProperties(camera, stereoEnabled);

        // Draws in between [Start|Stop]MultiEye are stereo-ized by the engine
        if (stereoEnabled)
            context.StartMultiEye(camera);

        RenderTargetIdentifier intermediateRTID = new RenderTargetIdentifier(BuiltinRenderTextureType.CurrentActive);
        bool isIntermediateRTTexArray = false;
        if (useIntermediateBlitPath)
        {
            ConfigureAndBindIntermediateRenderTarget(context, camera, stereoEnabled, out intermediateRTID, out isIntermediateRTTexArray);
        }

        // clear depth buffer
        var cmd = CommandBufferPool.Get();
        // ... (depth clear, opaque pass, and skybox collapsed in the diff; see the opaque-pass sketch after this excerpt)

        // transparent pass
        settings.sorting.flags = SortFlags.CommonTransparent;
        settings.inputFilter.SetQueuesTransparent();
        context.DrawRenderers(ref settings);

        if (useIntermediateBlitPath)
        {
            BlitFromIntermediateToCameraTarget(context, intermediateRTID, isIntermediateRTTexArray);
        }

        if (stereoEnabled)
        {
            context.StopMultiEye(camera);
            // StereoEndRender will reset state on the camera to pre-Stereo settings,
            // and invoke XR-based events/callbacks.
            context.StereoEndRender(camera);
        }

        context.Submit();
    }
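The collapsed portion of the loop above creates the DrawRendererSettings later reused by the transparent pass, and draws the opaques and skybox first. For context, here is a sketch of that opaque portion in the same style as the visible transparent pass; the exact DrawRendererSettings constructor arguments are assumed from the 2017-era experimental API and are not shown in this diff:

    // Sketch only (assumed constructor signature): opaque pass keyed to the
    // "BasicPass" LightMode expected by BasicRenderPipelineShader.shader.
    static void DrawOpaqueAndSkyboxSketch(ScriptableRenderContext context, Camera camera, CullResults cull)
    {
        var settings = new DrawRendererSettings(cull, camera, new ShaderPassName("BasicPass"));
        settings.sorting.flags = SortFlags.CommonOpaque;   // front-to-back sorting for opaques
        settings.inputFilter.SetQueuesOpaque();            // limit to the opaque render-queue range
        context.DrawRenderers(ref settings);

        // Skybox is drawn between the opaque and transparent passes.
        context.DrawSkybox(camera);
    }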

Assets/ScriptableRenderPipeline/BasicRenderPipeline/BasicRenderPipelineShader.shader (4 changes)


    #pragma target 3.0
    #pragma vertex vert
    #pragma fragment frag
    // Compile stereo variants: double-wide single-pass, instanced, and multiview (plus the non-stereo fallback "_")
    #pragma multi_compile _ UNITY_SINGLE_PASS_STEREO STEREO_INSTANCING_ON STEREO_MULTIVIEW_ON
    #pragma shader_feature _METALLICGLOSSMAP
    #include "UnityCG.cginc"
    #include "UnityStandardBRDF.cginc"

    // ... (start of the v2f struct collapsed in the diff)
        float3 positionWS : TEXCOORD1;
        float3 normalWS : TEXCOORD2;
        float4 hpos : SV_POSITION;
        UNITY_VERTEX_OUTPUT_STEREO // per-eye output data for the stereo variants above
    };
    float4 _MainTex_ST;

    // ... (vertex shader signature collapsed in the diff)
        v2f o;
        UNITY_SETUP_INSTANCE_ID(v);
        UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
        o.uv = TRANSFORM_TEX(v.texcoord, _MainTex);
        o.hpos = UnityObjectToClipPos(v.vertex);
        o.positionWS = mul(unity_ObjectToWorld, v.vertex).xyz;
