SRP

8.3k words

Desc:

可编程渲染管线（Scriptable Render Pipeline），可以据此自定义修改管线内容，也可以借此学习渲染管线的实现

UniversalRenderPipelineAsset

渲染管线资源
继承自 RenderPipelineAsset

RenderPipelineAsset.CreatePipeline

RP 资产的主要目的是为 Unity 提供一种方法来获取负责渲染的管线对象实例。资产本身只是一个句柄和一个存储设置的地方
该函数返回一个管道对象实例

UniversalRenderPipeline

渲染管线实例
继承自 RenderPipeline

RenderPipeline.Render

此方法是可编写脚本的渲染管道 (SRP) 的入口点。此功能与内置渲染管道不兼容。

Unity 会自动调用此方法。在独立应用程序中,Unity 每帧调用一次此方法来渲染主视图,每帧调用一次Camera.Render。在 Unity 编辑器中,对于每个可见的场景视图或游戏视图,Unity 每帧调用一次此方法,如果场景摄像机预览可见,则每帧调用一次,对于每个摄像机每帧调用一次Camera.Render

URP实现

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
// URP's per-frame entry point. Unity calls this once per frame per view.
// In 2021.1+ the signature takes a List<Camera> and pairs with
// Begin/EndContextRendering; older versions take Camera[] and pair with
// Begin/EndFrameRendering.
// NOTE(review): this is an excerpt — the "..." lines mark code elided from the original source.
#if UNITY_2021_1_OR_NEWER
protected override void Render(ScriptableRenderContext renderContext, List<Camera> cameras)
#else
protected override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
#endif
{
// TODO: Would be better to add Profiling name hooks into RenderPipelineManager.
// C#8 feature, only in >= 2020.2
using var profScope = new ProfilingScope(null, ProfilingSampler.Get(URPProfileId.UniversalRenderTotal));

// Fire the begin-frame/context delegates before any camera renders.
#if UNITY_2021_1_OR_NEWER
using (new ProfilingScope(null, Profiling.Pipeline.beginContextRendering))
{
BeginContextRendering(renderContext, cameras);
}
#else
using(new ProfilingScope(null, Profiling.Pipeline.beginFrameRendering))
{
BeginFrameRendering(renderContext, cameras);
}
#endif

// Per-frame global state: light intensity convention follows the active
// color space, and SRP batching follows the pipeline asset setting.
GraphicsSettings.lightsUseLinearIntensity = (QualitySettings.activeColorSpace == ColorSpace.Linear);
GraphicsSettings.useScriptableRenderPipelineBatching = asset.useSRPBatcher;
SetupPerFrameShaderConstants();

...

SortCameras(cameras);
#if UNITY_2021_1_OR_NEWER
for (int i = 0; i < cameras.Count; ++i)
#else
for (int i = 0; i < cameras.Length; ++i)
#endif
{
var camera = cameras[i];
if (IsGameCamera(camera))
{
// Renders a camera stack. This method calls RenderSingleCamera for each valid camera in the stack.
// The last camera resolves the final target to screen.
RenderCameraStack(renderContext, camera);
}
else
{
// Non-game cameras (e.g. scene view, previews) are rendered individually.
using (new ProfilingScope(null, Profiling.Pipeline.beginCameraRendering))
{
BeginCameraRendering(renderContext, camera);
}
#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
//It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
VFX.VFXManager.PrepareCamera(camera);
#endif
...

RenderSingleCamera(renderContext, camera);

using (new ProfilingScope(null, Profiling.Pipeline.endCameraRendering))
{
EndCameraRendering(renderContext, camera);
}
}
}
// Fire the end-frame/context delegates after all cameras have rendered.
#if UNITY_2021_1_OR_NEWER
using (new ProfilingScope(null, Profiling.Pipeline.endContextRendering))
{
EndContextRendering(renderContext, cameras);
}
#else
using(new ProfilingScope(null, Profiling.Pipeline.endFrameRendering))
{
EndFrameRendering(renderContext, cameras);
}
#endif
}

catlikecoding中记录的方法 Source

1
2
3
4
5
6
7
8
9
10
11
12
13
// Shader pass tag for unlit geometry drawn by this minimal SRP.
static ShaderTagId unlitShaderTagId = new ShaderTagId("SRPDefaultUnlit");
// Legacy built-in-pipeline pass tags; this SRP does not support them, so they
// are drawn with an error material instead (see the UNITY_EDITOR section below).
// NOTE: renamed from 'legacyShaderTagId' to match its usage ('legacyShaderTagIds').
static ShaderTagId[] legacyShaderTagIds = {
new ShaderTagId("Always"),
new ShaderTagId("ForwardBase"),
new ShaderTagId("PrepassBase"),
new ShaderTagId("Vertex"),
new ShaderTagId("VertexLMRGBM"),
new ShaderTagId("VertexLM")
};
// Magenta fallback material (Hidden/InternalErrorShader), created lazily.
static Material errorMaterial;

CullingResults cullingResults; // culling results for the current camera
// FIX: the type is 'CommandBuffer', not 'CommandBuff'.
CommandBuffer buffer = new CommandBuffer();

设置Buffer信息

// Name the buffer after the camera so profiler samples group per camera.
buffer.name = camera.name;

绘制UI

// Editor-only: needed so UI elements show up in the scene view.
if(camera.cameraType==CameraType.SceneView){
// Explicitly add the UI to the world geometry before culling.
ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
}

剔除

// Abort rendering this camera if culling parameters could not be obtained.
if(!Cull()){
return;
}

设置属性

// Set up the view-projection matrix (unity_MatrixVP): combines the camera's
// position and orientation with its perspective or orthographic projection.
context.SetupCameraProperties(camera);

清除标志

// Two cameras can be combined by adjusting the clear flags of the second one.
CameraClearFlags flags = camera.clearFlags;
// Clear the render target so last frame's result doesn't leak into this frame.
buffer.ClearRenderTarget(
flags <= CameraClearFlags.Depth,  // clear depth? (FIX: 'Depth' was misspelled 'Delpth')
flags == CameraClearFlags.Color,  // clear color?
flags == CameraClearFlags.Color ? camera.backgroundColor.linear : Color.clear); // clear color value; written via the Hidden/InternalClear shader (see below)
// Inject a profiler sample so this camera's work is visible in the frame debugger.
buffer.BeginSample(bufferName);
ExecuteBuffer();

//某些任务(如绘制天空盒)可以通过专用方法发出,但其他命令必须通过单独的命令缓冲区间接发出

绘制(通过DrawRenderers使用剔除结果作为参数调用context完成,执行渲染哪些,提供 DrawingSettings、FilteringSettings)

var sortingSettings=new SortingSettings(camera){// the camera decides orthographic vs distance-based sorting
criteria=SortingCriteria.CommonOpaque// force a specific draw order for opaques
};
var drawingSetting=new DrawingSettings(unlitShaderTagId,sortingSettings);// which shader passes are allowed
// Which render queues are allowed (opaque only here).
var filteringSettings=new FilteringSettings(RenderQueueRange.opaque);

绘制不透明物体

context.DrawRenderers(cullingResults,ref drawingSetting,ref filteringSettings);
// Draw the skybox before transparents: transparent objects don't write depth,
// so drawing the skybox after them would overwrite their result.
context.DrawSkybox(camera);

绘制透明物体

// Switch to transparent sorting and queue range, then draw transparents
// (after the skybox, so they composite over it).
sortingSettings.criteria = SortingCriteria.CommonTransparent;
drawingSetting.sortingSettings = sortingSettings; // FIX: the property is 'sortingSettings', not 'sortingSetting'
filteringSettings.renderQueueRange = RenderQueueRange.transparent; // FIX: Unity's field is lowercase 'transparent'
context.DrawRenderers(cullingResults, ref drawingSetting, ref filteringSettings);

绘制错误的着色器

// Editor-only: draw geometry using unsupported built-in pipeline passes with
// Unity's magenta error shader so the problem is clearly visible.
#if UNITY_EDITOR
if (errorMaterial == null) {
// FIX: the shader name must use ASCII quotes (the original had full-width quotes).
errorMaterial = new Material(Shader.Find("Hidden/InternalErrorShader"));
}
// Reuse the drawing settings with the first legacy pass and the error material.
drawingSetting = new DrawingSettings(legacyShaderTagIds[0], new SortingSettings(camera)) {
overrideMaterial = errorMaterial
};
// Register the remaining legacy passes on the same settings.
for (int i = 1; i < legacyShaderTagIds.Length; i++) {
drawingSetting.SetShaderPassName(i, legacyShaderTagIds[i]);
}
filteringSettings = FilteringSettings.defaultValue;
// FIX: FilteringSettings must also be passed by ref.
context.DrawRenderers(cullingResults, ref drawingSetting, ref filteringSettings);
#endif

绘制Gizmos

// Editor-only: draw gizmos in the scene view when they are enabled.
#if UNITY_EDITOR
if(Handles.ShouldRenderGizmos()){
// Gizmos are split into pre- and post-image-effect subsets.
context.DrawGizmos(camera,GizmoSubset.PreImageEffects);
context.DrawGizmos(camera,GizmoSubset.PostImageEffects);
}

#endif

结束

// Close the profiler sample opened by BeginSample (names must match).
// FIX: the variable is 'bufferName', not 'buffName'.
buffer.EndSample(bufferName);
ExecuteBuffer();
// Submit the queued work to the graphics API for execution.
// FIX: was 'Content.submit()' — the call is 'context.Submit()'.
context.Submit();

执行

// Copy the buffered commands into the context's queue, then clear the
// buffer so it can be reused for the next batch of commands.
void ExecuteBuffer(){
// Execute (enqueue) the buffered commands on the context.
context.ExecuteCommandBuffer(buffer);
// Clear so the same buffer can be reused.
buffer.Clear();
}

剔除

// Rather than drawing every object, only objects visible to the camera are
// rendered: everything with a Renderer component is considered, then objects
// outside the camera frustum are culled. Returns false when culling
// parameters could not be obtained (rendering should be aborted).
bool Cull()
{
    if (camera.TryGetCullingParameters(out ScriptableCullingParameters cullingParams))
    {
        cullingResults = context.Cull(ref cullingParams);
        return true;
    }
    return false;
}

引用

Hidden/InternalClear.shader

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
// Vertex stage: transform the vertex to clip space and pass its color through.
v2f vert (appdata_t v)
{
v2f o;
UNITY_SETUP_INSTANCE_ID(v); // GPU-instancing support macro
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o); // single-pass stereo (VR) support macro
o.vertex = UnityObjectToClipPos(v.vertex);
o.color = v.color;
return o;
}
// Fragment stage: output the interpolated vertex color (the clear color).
fixed4 frag (v2f i) : SV_Target
{
return i.color;
}
SubShader {
// Clearing must touch every pixel: always pass the depth test, never cull.
ZTest Always Cull Off
// NOTE(review): each CGPROGRAM block's #pragma/vert/frag lines are elided in this excerpt.
// Pass 0: writes neither color nor depth (clears nothing).
Pass {
ColorMask 0 ZWrite Off
CGPROGRAM
ENDCG
}
// Pass 1: clears color only (depth writes disabled).
Pass {
ZWrite Off
CGPROGRAM
ENDCG
}
// Pass 2: clears depth only (color writes masked).
Pass {
ColorMask 0
CGPROGRAM
ENDCG
}
// Pass 3: clears both color and depth.
Pass {
CGPROGRAM
ENDCG
}
// Passes 4-7: the same four combinations, additionally zeroing the stencil buffer.
Pass {
ColorMask 0 ZWrite Off
Stencil { Comp Always Pass Zero }
CGPROGRAM
ENDCG
}
Pass {
ZWrite Off
Stencil { Comp Always Pass Zero }
CGPROGRAM
ENDCG
}
Pass {
ColorMask 0
Stencil { Comp Always Pass Zero }
CGPROGRAM
ENDCG
}
Pass {
Stencil { Comp Always Pass Zero }
CGPROGRAM
ENDCG
}
}

Internal-ErrorShader.shader

1
2
3
4
// Error-shader fragment stage: solid magenta, Unity's standard
// "shader error" indicator color.
fixed4 frag (v2f i) : SV_Target
{
return fixed4(1,0,1,1);
}

后处理

如果您使用的是通用渲染管线 (URP) 或高清渲染管线 (HDRP)，则可以使用以下委托在渲染流程的特定阶段调用自定义代码：

RenderPipelineManager.beginFrameRendering

在RenderPipeline.Render开始时调用自定义代码的委托

1
2
3
4
5
6
7
8
9
10
11
12
13
14
// Subscribe to the per-frame begin delegate when this component starts.
void Start()
{
RenderPipelineManager.beginFrameRendering += OnBeginFrameRendering;
}

// Called by the pipeline at the start of RenderPipeline.Render,
// before any camera is rendered. Put custom per-frame setup code here.
void OnBeginFrameRendering(ScriptableRenderContext context, Camera[] cameras)
{

}

// Unsubscribe on destruction to avoid a dangling delegate reference (leak).
void OnDestroy()
{
RenderPipelineManager.beginFrameRendering -= OnBeginFrameRendering;
}
RenderPipelineManager.beginCameraRendering

在 Unity 渲染单个相机之前调用自定义代码的委托

RenderPipelineManager.endCameraRendering

在 Unity 渲染单个相机后,您可以使用它来调用自定义代码的委托

RenderPipelineManager.endFrameRendering

可用于在RenderPipeline.Render末尾调用自定义代码的委托

编写自定义 SRP 时，您可以直接在 Render 方法中添加代码，或者调用以下受保护方法来触发对应的 RenderPipelineManager 委托：

RenderPipeline.BeginFrameRendering
RenderPipeline.BeginCameraRendering
RenderPipeline.EndCameraRendering
RenderPipeline.EndFrameRendering
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
// Minimal custom-SRP entry point showing where each RenderPipelineManager
// delegate fires during a frame.
protected override void Render(ScriptableRenderContext context, Camera[] cameras)
{
    // Fires the RenderPipelineManager.beginFrameRendering delegate.
    BeginFrameRendering(context, cameras);

    foreach (var camera in cameras)
    {
        // Fires the RenderPipelineManager.beginCameraRendering delegate.
        BeginCameraRendering(context, camera);

        // Fires the RenderPipelineManager.endCameraRendering delegate.
        EndCameraRendering(context, camera);
    }

    // Fires the RenderPipelineManager.endFrameRendering delegate.
    EndFrameRendering(context, cameras);
}