A couple of days ago, while porting some feature effects to the URP 14 framework, I noticed that the pipeline's depth texture still doesn't contain transparent objects even when the renderer's Depth Texture Mode is set to After Transparents and the texture is sampled after that point, as shown below:
As you can see, the transparent objects' depth never makes it into the sampled texture. I first brushed up on the render-pipeline workflow with DeepSeek and guessed that the depth sampling fails because transparent objects are drawn with depth write disabled; sure enough, simply enabling ZWrite On in Lit.shader fixed it.
But I don't want to touch URP 14's stock assets, so I wrote my own pass to render the transparent depth instead, as follows:
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

public class DepthTransparentOutputRenderPass : ScriptableRenderPass
{
    private FilteringSettings filterSetting;
    private List<ShaderTagId> shaderTagIdList;
    private ProfilingSampler profileSampler = new ProfilingSampler("DepthTransparent");
    private RTHandle outputHandle;
    private Material depthMat;

    public DepthTransparentOutputRenderPass(RenderPassEvent evt, Material mat)
    {
        renderPassEvent = evt;
        depthMat = mat;
        shaderTagIdList = new List<ShaderTagId>
        {
            new ShaderTagId("UniversalForward"),
            new ShaderTagId("UniversalForwardOnly"),
            new ShaderTagId("LightweightForward"),
            new ShaderTagId("SRPDefaultUnlit")
        };
        // Alternative: include every render queue instead of only transparents.
        //filterSetting = new FilteringSettings(RenderQueueRange.all, ~0);
        // Only the transparent queue, on all layers (~0 is the "Everything" layer mask;
        // LayerMask.NameToLayer returns a single layer index, not a mask).
        filterSetting = new FilteringSettings(RenderQueueRange.transparent, ~0);
    }

    public void SetOutput(ref RTHandle dest)
    {
        outputHandle = dest;
    }

    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        // Render into the externally supplied RTHandle instead of the camera target.
        ConfigureTarget(outputHandle);
    }

    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        if (renderingData.cameraData.camera.cameraType != CameraType.Game)
            return;
        if (depthMat == null)
            return;

        CommandBuffer cmd = CommandBufferPool.Get();
        DrawingSettings drawSetting = CreateDrawingSettings(shaderTagIdList, ref renderingData, renderingData.cameraData.defaultOpaqueSortFlags);
        drawSetting.overrideMaterial = depthMat;
        using (new ProfilingScope(cmd, profileSampler))
        {
            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();
            // Releasing the RT prevents last frame's contents from carrying over.
            outputHandle.rt.Release();
            context.DrawRenderers(renderingData.cullResults, ref drawSetting, ref filterSetting);
        }
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        CommandBufferPool.Release(cmd);
    }
}
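To actually run this in URP 14, the pass still needs a ScriptableRendererFeature that allocates the output RTHandle, hands it over via SetOutput and enqueues the pass; that part isn't shown above, so here is a minimal sketch of the wiring. The feature name DepthTransparentOutputFeature, the texture name _TransparentDepthTexture and the RFloat format are my own assumptions, not part of the original code:

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

// Hypothetical renderer feature that owns the output RTHandle and enqueues the pass.
public class DepthTransparentOutputFeature : ScriptableRendererFeature
{
    public Material depthMaterial; // material using URP/DepthOutputURPUnlitShader
    public RenderPassEvent passEvent = RenderPassEvent.AfterRenderingTransparents;

    private DepthTransparentOutputRenderPass pass;
    private RTHandle outputHandle;

    public override void Create()
    {
        pass = new DepthTransparentOutputRenderPass(passEvent, depthMaterial);
    }

    public override void SetupRenderPasses(ScriptableRenderer renderer, in RenderingData renderingData)
    {
        // Allocate (or re-allocate on resolution change) the transparent-depth target.
        var desc = renderingData.cameraData.cameraTargetDescriptor;
        desc.msaaSamples = 1;
        desc.colorFormat = RenderTextureFormat.RFloat; // one channel is enough for a depth value
        RenderingUtils.ReAllocateIfNeeded(ref outputHandle, desc, FilterMode.Point, TextureWrapMode.Clamp, name: "_TransparentDepthTexture");
        pass.SetOutput(ref outputHandle);
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        if (depthMaterial != null)
            renderer.EnqueuePass(pass);
    }

    protected override void Dispose(bool disposing)
    {
        outputHandle?.Release();
    }
}

In practice one would also expose the result to later effects, for example by calling cmd.SetGlobalTexture with the same name inside the pass, so downstream shaders can declare and sample _TransparentDepthTexture.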
The override material uses the following shader, which computes depth from the homogeneous clip-space position:
Shader "URP/DepthOutputURPUnlitShader"
{
SubShader
{
Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline" }
Pass
{
HLSLPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
struct Attributes
{
float4 positionOS : POSITION;
};
struct Varyings
{
float4 positionHCS : SV_POSITION;
float depth : TEXCOORD0;
};
Varyings vert(Attributes input)
{
Varyings output;
output.positionHCS = TransformObjectToHClip(input.positionOS.xyz);
output.depth = output.positionHCS.z / output.positionHCS.w;
return output;
}
half4 frag(Varyings input) : SV_Target
{
float4 dcol = float4(input.depth,input.depth,input.depth,1);
return dcol;
}
ENDHLSL
}
}
}
The idea is simple: draw the transparent objects with this override pass, compute their depth from the homogeneous clip-space position, and write the result out as a TransparentDepthTexture.
After that, it's just a matter of porting the features from my Built-in and HDRP frameworks over to URP.
I'll keep gradually migrating whatever is useful into the URP 14 framework.