Unity Custom Post-processing
Preface
Unity ships with a number of built-in post-processing effects, but they are fixed and cannot be modified, which I find limiting. This article shows how to implement a custom post-process effect, a box blur, using a Renderer Feature.
Test Environment
- Unity 2021.3.32
- Win 10
- JetBrains Rider 2023.2
Renderer Feature
What is a Renderer Feature
- A Renderer Feature is similar to a Pass: it lets you customize the render order, the objects being rendered, the materials used, and so on.
- In other words, it lets you hook extra functionality (such as an outline) into any point of Unity's fixed rendering flow, and it can also carry custom data.
Adding a Renderer Feature
First create a "Universal Render Pipeline Asset" (and assign it under "Graphics Settings"). A "Universal Render Pipeline Asset_Renderer" is generated alongside it; on that renderer asset, click "Add Renderer Feature" and then choose "Render Objects".
Of course, our own Renderer Feature has not been written yet, so it cannot be found in that list.
Implementing the Renderer Feature
Creating the Renderer Feature
After creating it you will see two classes: a renderer feature and a render pass. Why does the pass live inside the renderer feature? Because the renderer feature is responsible for adding the pass to the Renderer.
The renderer feature contains two functions that must be overridden: Create() and AddRenderPasses().
- Create(): initializes the renderer feature
- AddRenderPasses(): injects one or more render passes into the Renderer
Renderer feature implementation
    using System;
    using UnityEngine;
    using UnityEngine.Rendering;
    using UnityEngine.Rendering.Universal;

    public class BoxBlurPassFeature : ScriptableRendererFeature
    {
        // Settings exposed in the Inspector
        [System.Serializable]
        public class PassSetting
        {
            // Where the pass is injected in the frame
            public RenderPassEvent m_passEvent = RenderPassEvent.AfterRenderingTransparents;

            // Downsample factor for the temporary render texture
            [Range(1, 4)]
            public int m_sampleWeaken = 1;

            // Blur strength
            [Range(0, 20)]
            public int m_blurIntensity = 5;
        }
    }
In the renderer feature I create a settings class (PassSetting) that holds the data used by the render pass and the shader.
    public PassSetting m_Setting = new PassSetting();
    BoxBlurRenderPass m_HSCPass;    // our custom render pass

    public override void Create()
    {
        // Call the render pass constructor and hand it the settings
        m_HSCPass = new BoxBlurRenderPass(m_Setting);
    }

    // Here you can inject one or multiple render passes in the renderer.
    // This method is called when setting up the renderer once per-camera.
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        // can queue up multiple passes after each other
        renderer.EnqueuePass(m_HSCPass);
    }
Create() calls the render pass constructor, and AddRenderPasses() enqueues the concrete pass into the renderer; multiple passes can be enqueued here.
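Since AddRenderPasses() runs once per camera, it is also a convenient place to decide whether the pass should run for a given camera at all. The snippet below is an optional sketch (not part of the setup above, and it assumes the same m_HSCPass field) that skips preview and reflection-probe cameras:

    // Optional variant of AddRenderPasses(): only enqueue the blur for cameras that need it.
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        CameraType cameraType = renderingData.cameraData.cameraType;

        // Preview thumbnails and reflection probes do not need the post effect
        if (cameraType == CameraType.Preview || cameraType == CameraType.Reflection)
            return;

        renderer.EnqueuePass(m_HSCPass);
    }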
Render pass implementation
The render pass is the heart of the feature; its implementation is as follows.
    class BoxBlurRenderPass : ScriptableRenderPass
    {
        // profiler tag will show up in frame debugger
        private const string m_ProfilerTag = "HSC Pass";

        // Stores the pass settings
        private BoxBlurPassFeature.PassSetting m_passSetting;

        // Target buffer and temporary buffer
        private RenderTargetIdentifier m_TargetBuffer, m_TempBuffer;
        private int m_TempBufferID = Shader.PropertyToID("_TemporaryBuffer");

        private Material m_Material;

        // Shader property ID
        // an int ID performs better than a string because it is precomputed
        private static readonly int m_BlurIntensityProperty = Shader.PropertyToID("_BlurIntensity");
    }
First, define the data the render pass needs.
    // Used to set up the material and its properties
    public BoxBlurRenderPass(BoxBlurPassFeature.PassSetting passSetting)
    {
        this.m_passSetting = passSetting;

        // Where the pass is injected
        renderPassEvent = m_passSetting.m_passEvent;

        // The shader used by this pass
        if (m_Material == null)
            m_Material = CoreUtils.CreateEngineMaterial("Custom/PP_BoxBlur");

        // Set the material properties from the pass settings
        m_Material.SetInt(m_BlurIntensityProperty, m_passSetting.m_blurIntensity);
    }
    // Gets called by the renderer before executing the pass.
    // Can be used to configure render targets and their clearing state.
    // Can be used to create temporary render target textures.
    // If this method is not overridden, the render pass will render to the active camera render target.
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        // camera target descriptor will be used when creating a temporary render texture
        RenderTextureDescriptor descriptor = renderingData.cameraData.cameraTargetDescriptor;

        // Downsample original camera target descriptor
        descriptor.width /= m_passSetting.m_sampleWeaken;
        descriptor.height /= m_passSetting.m_sampleWeaken;

        // Set the number of depth bits we need for temporary render texture
        descriptor.depthBufferBits = 0;

        // Enable these if pass requires access to the CameraDepthTexture or the CameraNormalsTexture.
        // ConfigureInput(ScriptableRenderPassInput.Depth);
        // ConfigureInput(ScriptableRenderPassInput.Normal);

        // Grab the color buffer from the renderer camera color target
        m_TargetBuffer = renderingData.cameraData.renderer.cameraColorTarget;

        // Create a temporary render texture using the descriptor from above
        cmd.GetTemporaryRT(m_TempBufferID, descriptor, FilterMode.Bilinear);
        m_TempBuffer = new RenderTargetIdentifier(m_TempBufferID);
    }
This function controls the resolution of the temporary render texture (the camera target descriptor is divided by m_sampleWeaken, so a value of 2 turns a 1920x1080 target into 960x540) and the precision of its depth buffer, grabs the camera color target, and allocates the temporary render texture.
    // The actual execution of the pass. This is where custom rendering occurs
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        // Grab a command buffer. We put the actual execution of the pass inside of a profiling scope
        CommandBuffer cmd = CommandBufferPool.Get();

        using (new ProfilingScope(cmd, new ProfilingSampler(m_ProfilerTag)))
        {
            // Blit from the color buffer to a temporary buffer and back. This is needed for a two-pass shader
            Blit(cmd, m_TargetBuffer, m_TempBuffer, m_Material, 0);
            Blit(cmd, m_TempBuffer, m_TargetBuffer, m_Material, 1);
        }

        // Execute the command buffer and release it
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }
Execute() defines what the pass actually does; this is where the post-processing happens. The work is wrapped in a profiling scope so it shows up under the profiler tag in the Frame Debugger. Blit() runs the source texture through the material and writes the result to the destination render texture; its last argument selects the shader pass, so the first Blit applies pass 0 (the vertical blur) into the temporary buffer and the second applies pass 1 (the horizontal blur) back into the camera target. Finally the command buffer is executed and released.
    // Called when the camera has finished rendering
    // release/cleanup any allocated resources that were created by this pass
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        if (cmd == null)
            throw new ArgumentNullException("cmd");

        // Since we created a temporary render texture in OnCameraSetup, we need to release the memory here to avoid a leak
        cmd.ReleaseTemporaryRT(m_TempBufferID);
    }
Once the pass has finished, release the memory held by the temporary render texture.
Shader
The shader itself is straightforward:
    Shader "Custom/PP_BoxBlur"
    {
        Properties
        {
            _MainTex("Main Tex", 2D) = "white" {}
            _BlurIntensity("Box Blur Intensity", Int) = 1
        }

        SubShader
        {
            Tags
            {
                "RenderType" = "Opaque"
                "RenderPipeline" = "UniversalPipeline"
            }

            HLSLINCLUDE
            #pragma vertex VS
            #pragma fragment PS

            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"

            CBUFFER_START(UnityPerMaterial)
                float4 _MainTex_ST;
                float4 _MainTex_TexelSize;
                int _BlurIntensity;
            CBUFFER_END

            TEXTURE2D(_MainTex);
            SAMPLER(sampler_MainTex);

            struct VSInput
            {
                float4 positionL : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct PSInput
            {
                float4 positionH : SV_POSITION;
                float2 uv : TEXCOORD0;
            };

            PSInput VS(VSInput vsInput)
            {
                PSInput vsOutput;
                vsOutput.positionH = TransformObjectToHClip(vsInput.positionL.xyz);
                vsOutput.uv = TRANSFORM_TEX(vsInput.uv, _MainTex);
                return vsOutput;
            }
            ENDHLSL

            Pass
            {
                Name "VERTICAL BOX BLUR"

                HLSLPROGRAM
                float4 PS(PSInput psInput) : SV_TARGET
                {
                    float2 texelSize = _MainTex_TexelSize.xy;
                    float4 sum = 0;
                    int sampleRange = 2 * _BlurIntensity + 1;

                    for (int y = 0; y < sampleRange; ++y)
                    {
                        float2 offset = float2(0, y - _BlurIntensity);
                        sum += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv + offset * texelSize);
                    }

                    return sum / sampleRange;
                }
                ENDHLSL
            }

            Pass
            {
                Name "HORIZONTAL BOX BLUR"

                HLSLPROGRAM
                float4 PS(PSInput psInput) : SV_TARGET
                {
                    float2 texelSize = _MainTex_TexelSize.xy;
                    float4 sum = 0;
                    int sampleRange = 2 * _BlurIntensity + 1;

                    for (int x = 0; x < sampleRange; ++x)
                    {
                        float2 offset = float2(x - _BlurIntensity, 0);
                        sum += SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, psInput.uv + offset * texelSize);
                    }

                    return sum / sampleRange;
                }
                ENDHLSL
            }
        }
    }
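For reference, the two passes implement a separable box filter. Ignoring the optional downsampling (m_sampleWeaken = 1), blurring vertically and then blurring that result horizontally is equivalent to a single 2D box blur with N = _BlurIntensity:

$$\text{blur}(u, v) = \frac{1}{(2N+1)^2} \sum_{x=-N}^{N} \sum_{y=-N}^{N} \text{src}\big(u + x\,\Delta u,\; v + y\,\Delta v\big)$$

where Δu and Δv are the texel sizes taken from _MainTex_TexelSize. Splitting the filter into two 1D passes costs 2(2N+1) texture samples per pixel instead of (2N+1)², which is why the blur is done in two passes rather than one.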
Result