Before optimizing 机场 (airport)

This commit is contained in:
CortexCore
2025-03-10 18:06:44 +08:00
parent 350e6d67b2
commit 1f4e20f512
178 changed files with 17534 additions and 821 deletions

View File

@@ -0,0 +1,153 @@
#if !KAMGAM_RENDER_PIPELINE_HDRP && !KAMGAM_RENDER_PIPELINE_URP
using System;
using UnityEngine;
using UnityEngine.Rendering;
namespace Kamgam.UIToolkitBlurredBackground
{
/// <summary>
/// Uses command buffers to hook into the rendering camera and extract a blurred image.
/// </summary>
public class BlurRendererBuiltIn : IBlurRenderer
{
public event Action OnPostRender;
protected BlurredBackgroundBufferBuiltIn _renderBuffer;
public BlurredBackgroundBufferBuiltIn RenderBuffer
{
get
{
if (_renderBuffer == null)
{
_renderBuffer = new BlurredBackgroundBufferBuiltIn(BlurredBackgroundBufferBuiltIn.CameraEventForBlur);
}
return _renderBuffer;
}
}
protected bool _active;
/// <summary>
/// Activate or deactivate the renderer. Disable to save performance (no rendering will be done).
/// </summary>
public bool Active
{
get => _active;
set
{
if (value != _active)
{
_active = value;
if (!_active)
{
RenderBuffer.Active = value;
RenderBuffer.ClearBuffers();
}
else
{
var cam = RenderUtils.GetGameViewCamera();
RenderBuffer.Active = value;
RenderBuffer.AddBuffer(cam);
}
}
}
}
public int Iterations
{
get
{
return RenderBuffer.Iterations;
}
set
{
RenderBuffer.Iterations = value;
}
}
public float Offset
{
get
{
return RenderBuffer.Offset;
}
set
{
RenderBuffer.Offset = value;
}
}
public Vector2Int Resolution
{
get
{
return RenderBuffer.Resolution;
}
set
{
RenderBuffer.Resolution = value;
}
}
public ShaderQuality Quality
{
get
{
return RenderBuffer.Quality;
}
set
{
RenderBuffer.Quality = value;
}
}
/// <summary>
/// The material is used in screen space overlay canvases.
/// </summary>
public Material GetMaterial()
{
return RenderBuffer.Material;
}
public Texture GetBlurredTexture()
{
return RenderBuffer.GetBlurredTexture();
}
protected Color _additiveColor = new Color(0, 0, 0, 0);
public Color AdditiveColor
{
get
{
return _additiveColor;
}
set
{
_additiveColor = value;
RenderBuffer.AdditiveColor = value;
}
}
/// <summary>
/// Called in the Update loop.
/// </summary>
public bool Update()
{
var gameCam = RenderUtils.GetGameViewCamera();
_renderBuffer?.UpdateActiveCamera(gameCam);
OnPostRender?.Invoke();
return true;
}
~BlurRendererBuiltIn()
{
_renderBuffer?.ClearBuffers();
}
}
}
#endif
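
For context, a minimal usage sketch (illustrative only, not part of this commit): the class name BlurDriverExample and the chosen values are hypothetical, and only members visible in this diff (Active, Iterations, Offset, Resolution, Quality, Update, GetBlurredTexture) are used, assuming the built-in render pipeline.

#if !KAMGAM_RENDER_PIPELINE_HDRP && !KAMGAM_RENDER_PIPELINE_URP
using UnityEngine;
using Kamgam.UIToolkitBlurredBackground;

public class BlurDriverExample : MonoBehaviour
{
    IBlurRenderer _renderer;

    void OnEnable()
    {
        _renderer = new BlurRendererBuiltIn();
        _renderer.Quality = ShaderQuality.Medium;        // samples per blur pass
        _renderer.Resolution = new Vector2Int(512, 512); // 2^n sizes recommended
        _renderer.Iterations = 2;                        // blur passes per frame
        _renderer.Offset = 1.5f;                         // sampling distance per pass (example value)
        _renderer.Active = true;                         // hooks the command buffer onto the game view camera
    }

    void Update()
    {
        // The renderer expects to be ticked; for the built-in pipeline this keeps
        // the active camera up to date and fires OnPostRender.
        _renderer.Update();
        var blurred = _renderer.GetBlurredTexture(); // blurred copy of the screen, e.g. for a UI background
    }

    void OnDisable()
    {
        _renderer.Active = false; // removes the command buffer and clears the render targets
    }
}
#endif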

View File

@@ -0,0 +1,18 @@
fileFormatVersion: 2
guid: d8410541ac426104aa8899f96f1b53fb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
AssetOrigin:
serializedVersion: 1
productId: 254328
packageName: UI Toolkit Blurred Background - Fast translucent background image
packageVersion: 1.0.4
assetPath: Assets/Kamgam/UIToolkitBlurredBackground/Runtime/Scripts/Rendering/BlurRendererBuiltIn.cs
uploadId: 644498

View File

@@ -0,0 +1,216 @@
#if KAMGAM_RENDER_PIPELINE_HDRP && !KAMGAM_RENDER_PIPELINE_URP
using UnityEngine;
using UnityEngine.Rendering.HighDefinition;
namespace Kamgam.UIToolkitBlurredBackground
{
public class BlurRendererHDRP : IBlurRenderer
{
protected int _blurIterations;
public int Iterations
{
get
{
if (Pass != null)
{
return Pass.BlurIterations;
}
return _blurIterations;
}
set
{
_blurIterations = value;
if (Pass != null)
{
Pass.BlurIterations = value;
}
}
}
protected float _offset = 1.5f;
/// <summary>
/// This is only used in the performance shader. Default is 1.5f. You can increase this AND reduce the blur strength to improve performance. However, the quality will start to degrade rapidly.
/// </summary>
public float Offset
{
get => _offset;
set
{
_offset = value;
if (Pass != null)
{
Pass.Offset = value;
}
}
}
protected Vector2Int _resolution = new Vector2Int(512, 512);
/// <summary>
/// The texture resolution of the blurred image. Default is 512 x 512. Please use 2^n values like 256, 512, 1024, 2048. Reducing this will increase performance but decrease quality. Every frame your rendered image will be copied, resized and then blurred [BlurStrength] times.
/// </summary>
public Vector2Int Resolution
{
get => _resolution;
set
{
_resolution = value;
if (Pass != null)
{
Pass.Resolution = _resolution;
Pass.UpdateRenderTextureResolutions();
}
}
}
protected ShaderQuality _quality = ShaderQuality.Medium;
/// <summary>
/// The shader variant to use. If you are having performance problems with the Gaussian shader, try the performance one. It is faster but the quality is worse (especially at low blur strengths).
/// </summary>
public ShaderQuality Quality
{
get => _quality;
set
{
_quality = value;
if (Pass != null)
{
Pass.ShaderQuality = value;
}
}
}
protected bool _active;
public bool Active
{
get => _active;
set
{
_active = value;
if (Pass != null)
{
Pass.enabled = value;
}
}
}
protected GameObject _passGameObject;
protected CustomPassVolume _passVolume;
protected BlurredBackgroundPassHDRP _pass;
public BlurredBackgroundPassHDRP Pass
{
get
{
if (_pass == null || _passVolume == null || _passGameObject == null)
{
_pass = null;
_passVolume = null;
_passGameObject = null;
var volumes = Utils.FindRootObjectsByType<CustomPassVolume>(includeInactive: true);
foreach (var volume in volumes)
{
if (volume.isGlobal)
{
var type = typeof(BlurredBackgroundPassHDRP);
var passes = volume.customPasses;
foreach (var pass in volume.customPasses)
{
var uitkPass = pass as BlurredBackgroundPassHDRP;
if (uitkPass != null)
{
_pass = uitkPass;
_passVolume = volume;
_passGameObject = volume.gameObject;
goto EndOfLoop;
}
}
}
}
}
EndOfLoop:
return _pass;
}
}
public BlurRendererHDRP()
{
var cam = RenderUtils.GetGameViewCamera();
createPassIfNecessary(cam);
}
void createPassIfNecessary(Camera cam = null)
{
if (Pass == null)
{
var go = new GameObject("UITK BlurredBackground Custom Pass Volume");
go.hideFlags = HideFlags.DontSave;
Utils.SmartDontDestroyOnLoad(go);
var volume = go.AddComponent<CustomPassVolume>();
volume.injectionPoint = CustomPassInjectionPoint.AfterPostProcess;
volume.priority = 0;
if (cam == null)
{
volume.isGlobal = true;
}
else
{
volume.isGlobal = false;
volume.targetCamera = cam;
}
var pass = volume.AddPassOfType<BlurredBackgroundPassHDRP>();
pass.enabled = true;
pass.targetColorBuffer = CustomPass.TargetBuffer.Camera;
pass.targetDepthBuffer = CustomPass.TargetBuffer.Camera;
pass.clearFlags = UnityEngine.Rendering.ClearFlag.None;
// Important if HideFlags.HideAndDontSave is used or else the object will not be found by Pass.
_pass = pass as BlurredBackgroundPassHDRP;
_passVolume = volume;
_passGameObject = go;
// Init pass variables
_pass.ShaderQuality = Quality;
_pass.Resolution = Resolution;
_pass.Offset = Offset;
_pass.BlurIterations = Iterations;
}
}
public Texture GetBlurredTexture()
{
if (Pass != null)
return Pass.GetBlurredTexture();
else
return null;
}
/// <summary>
/// Creates the pass objects if needed.
/// </summary>
/// <returns>Always false</returns>
public bool Update()
{
// Create render pass if needed.
// TODO: Investigate if adding the pass dynamically is possible in HDRP
// see (URP): https://forum.unity.com/threads/urp-no-way-to-dynamically-access-modify-the-rendererfeatures-list-at-runtime.1342751/#post-8479169
createPassIfNecessary();
// Keep camera up to date (in case camera stacking is used or the active camera changes at runtime).
if ( _passVolume != null
&& !_passVolume.isGlobal
&& (_passVolume.targetCamera == null || !_passVolume.targetCamera.isActiveAndEnabled))
{
_passVolume.targetCamera = RenderUtils.GetGameViewCamera();
}
return false;
}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a08814fffe13b3c4d87f3b2391c2752b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,290 @@
#if KAMGAM_RENDER_PIPELINE_URP
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
using UnityEngine.SceneManagement;
#if UNITY_EDITOR
using UnityEditor.SceneManagement;
#endif
namespace Kamgam.UIToolkitBlurredBackground
{
public class BlurRendererURP : IBlurRenderer
{
public event System.Action OnPostRender;
protected BlurredBackgroundPassURP _screenSpacePass;
public BlurredBackgroundPassURP ScreenSpacePass
{
get
{
if (_screenSpacePass == null)
{
_screenSpacePass = new BlurredBackgroundPassURP();
// NOTICE: This is now overridden in onBeginCameraRendering().
_screenSpacePass.renderPassEvent = RenderPassEvent.AfterRenderingPostProcessing;
_screenSpacePass.OnPostRender += onPostRender;
}
return _screenSpacePass;
}
}
protected bool _active;
/// <summary>
/// Activate or deactivate the renderer. Disable to save performance (no rendering will be done).
/// </summary>
public bool Active
{
get => _active;
set
{
if (value != _active)
{
_active = value;
ScreenSpacePass.Active = value;
}
}
}
protected int _iterations = 1;
public int Iterations
{
get
{
return _iterations;
}
set
{
_iterations = value;
ScreenSpacePass.Iterations = value;
}
}
protected float _offset = 1.5f;
public float Offset
{
get
{
return _offset;
}
set
{
_offset = value;
ScreenSpacePass.Offset = value;
}
}
protected Vector2Int _resolution = new Vector2Int(512, 512);
public Vector2Int Resolution
{
get
{
return _resolution;
}
set
{
_resolution = value;
ScreenSpacePass.Resolution = value;
}
}
protected ShaderQuality _quality = ShaderQuality.Medium;
public ShaderQuality Quality
{
get
{
return _quality;
}
set
{
_quality = value;
ScreenSpacePass.Quality = value;
}
}
protected Color _additiveColor = new Color(0, 0, 0, 0);
public Color AdditiveColor
{
get
{
return _additiveColor;
}
set
{
_additiveColor = value;
ScreenSpacePass.AdditiveColor = value;
}
}
/// <summary>
/// The material is used in screen space overlay canvases.
/// </summary>
public Material GetMaterial(RenderMode renderMode)
{
return ScreenSpacePass.Material;
}
public Texture GetBlurredTexture()
{
return ScreenSpacePass.GetBlurredTexture();
}
public BlurRendererURP()
{
RenderPipelineManager.beginCameraRendering += onBeginCameraRendering;
if (ScreenSpacePass != null)
ScreenSpacePass.OnPostRender += onPostRender;
// Needed to avoid "Render Pipeline error : the XR layout still contains active passes. Executing XRSystem.EndLayout() right" errors in Unity 2023.
// Also needed in normal URP to reset the render textures after play mode.
#if UNITY_EDITOR
UnityEditor.EditorApplication.playModeStateChanged += onPlayModeChanged;
UnityEditor.SceneManagement.EditorSceneManager.sceneOpened += onSceneOpened;
#endif
}
~BlurRendererURP()
{
if (_screenSpacePass != null)
_screenSpacePass.OnPostRender -= onPostRender;
}
protected void clearRenderTargets()
{
_screenSpacePass?.ClearRenderTargets();
}
#if UNITY_EDITOR
void onPlayModeChanged(UnityEditor.PlayModeStateChange obj)
{
if (obj == UnityEditor.PlayModeStateChange.ExitingPlayMode || obj == UnityEditor.PlayModeStateChange.EnteredEditMode)
{
clearRenderTargets();
}
}
void onSceneOpened(Scene scene, OpenSceneMode mode)
{
if (!UnityEditor.EditorApplication.isPlayingOrWillChangePlaymode)
{
clearRenderTargets();
}
}
#endif
const string Renderer2DTypeName = "Renderer2D";
private Camera[] _tmpAllCameras = new Camera[10];
void onBeginCameraRendering(ScriptableRenderContext context, Camera cam)
{
if ( cam == null
|| !cam.isActiveAndEnabled)
return;
// All of this is only to support multiple-camera setups with render textures.
// The blur only needs to be done on one camera (usually the main camera). That's
// why we stop on all other cameras.
var mainCam = Camera.main;
if (mainCam != null)
{
if (cam != mainCam)
return;
}
else
{
// No main camera -> let's check if there are cameras that
// are NOT rendering into render textures.
Camera firstCamWithoutRenderTexture = null;
int camCount = Camera.allCamerasCount;
int maxCamCount = _tmpAllCameras.Length;
// alloc new array if needed
if(camCount > maxCamCount)
{
_tmpAllCameras = new Camera[camCount + 5];
}
Camera.GetAllCameras(_tmpAllCameras);
for (int i = 0; i < maxCamCount; i++)
{
// Null out old references
if(i >= camCount)
{
_tmpAllCameras[i] = null;
continue;
}
var cCam = _tmpAllCameras[i];
if (cCam == null || !cCam.isActiveAndEnabled)
continue;
if (cCam != null && cCam.targetTexture == null)
{
firstCamWithoutRenderTexture = cCam;
break;
}
}
// If there are some then use the first we can find, which means we abort the blur pass on all the others.
if (firstCamWithoutRenderTexture != null && cam != firstCamWithoutRenderTexture)
return;
// If there are only cameras with render textures then we ignore them.
// This means that in setups where all cameras render into textures
// no blur will occur.
if (firstCamWithoutRenderTexture == null)
return;
}
var data = cam.GetUniversalAdditionalCameraData();
if (data == null)
return;
// Turns out the list is always empty and the enqueuing is a per-frame action.
// Check if we are using the 2D renderer (skip check if already using "BeforeRenderingPostProcessing" event).
if (cam.orthographic && ScreenSpacePass.renderPassEvent == RenderPassEvent.AfterRenderingPostProcessing)
{
if (cam.GetUniversalAdditionalCameraData().scriptableRenderer.GetType().Name.EndsWith(Renderer2DTypeName))
{
// If yes then change the event from AfterRenderingPostProcessing to BeforeRenderingPostProcessing.
// Sadly accessing PostPro render results is not supported in URP 2D, see:
// https://forum.unity.com/threads/urp-2d-how-to-access-camera-target-after-post-processing.1465124/
// https://forum.unity.com/threads/7-3-1-renderpassevent-afterrenderingpostprocessing-is-broken.873604/#post-8422710
ScreenSpacePass.renderPassEvent = RenderPassEvent.BeforeRenderingPostProcessing;
}
}
data.scriptableRenderer.EnqueuePass(ScreenSpacePass);
}
protected void onPostRender()
{
OnPostRender?.Invoke();
}
/// <summary>
/// Not needed in SRPs.
/// </summary>
public bool Update()
{
return true;
}
}
}
#endif
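
Side note (illustrative only, not part of this commit): both BlurRendererBuiltIn and BlurRendererURP expose an OnPostRender event that fires once the blur pass has produced a fresh texture. A hypothetical consumer, sketched below, could subscribe to it to refresh a background only when new blurred content is available instead of polling every frame; the class and method names are invented for illustration.

#if KAMGAM_RENDER_PIPELINE_URP
namespace Kamgam.UIToolkitBlurredBackground
{
    // Hypothetical subscriber, illustrative only.
    public static class BlurPostRenderHookExample
    {
        public static void Subscribe(BlurRendererURP renderer)
        {
            renderer.OnPostRender += () =>
            {
                UnityEngine.Texture blurred = renderer.GetBlurredTexture();
                // ... hand 'blurred' to whatever displays the blurred background here ...
            };
        }
    }
}
#endif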

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fa030b7761ae4ff499e9807b82ec3307
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,435 @@
#if !KAMGAM_RENDER_PIPELINE_HDRP && !KAMGAM_RENDER_PIPELINE_URP
using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Rendering;
namespace Kamgam.UIToolkitBlurredBackground
{
/// <summary>
/// Uses command buffers to hook into the rendering camera and extract a blurred image.
/// </summary>
public class BlurredBackgroundBufferBuiltIn
{
public const string ShaderName = "Kamgam/UI Toolkit/BuiltIn/Blur Shader";
public const CameraEvent CameraEventForBlur = CameraEvent.AfterEverything;
protected Camera _camera;
protected CameraEvent _cameraEvent;
protected CommandBuffer _buffer;
protected bool _active;
/// <summary>
/// Activate or deactivate the renderer. Disable to save performance (no rendering will be done).
/// </summary>
public bool Active
{
get => _active;
set
{
if (value != _active)
{
_active = value;
if (!_active)
{
ClearBuffers();
}
else
{
if (_camera != null)
AddBuffer(_camera, _cameraEvent);
}
}
}
}
protected int _iterations = 1;
public int Iterations
{
get => _iterations;
set
{
if (value != _iterations)
{
_iterations = value;
RecreateBuffers();
}
}
}
protected float _offset = 10f;
public float Offset
{
get => _offset;
set
{
_offset = value;
setOffset(value);
}
}
protected Vector2Int _resolution = new Vector2Int(512, 512);
/// <summary>
/// The texture resolution of the blurred image. Default is 512 x 512. Please use 2^n values like 256, 512, 1024, 2048, 4096. Reducing this will increase performance but decrease quality. Every frame your rendered image will be copied, resized and then blurred [BlurStrength] times.
/// </summary>
public Vector2Int Resolution
{
get => _resolution;
set
{
_resolution = value;
updateRenderTextureResolutions();
setOffset(_offset); // We have to update offset here because the _worldMaterial offset depends on _resolution.
}
}
void updateRenderTextureResolutions()
{
if (_renderTargetBlurredA != null)
{
_renderTargetBlurredA.Release();
_renderTargetBlurredA.width = _resolution.x;
_renderTargetBlurredA.height = _resolution.y;
_renderTargetBlurredA.Create();
}
if (_renderTargetBlurredB != null)
{
_renderTargetBlurredB.Release();
_renderTargetBlurredB.width = _resolution.x;
_renderTargetBlurredB.height = _resolution.y;
_renderTargetBlurredB.Create();
}
}
protected Shader _blurShader;
public Shader BlurShader
{
get
{
if (_blurShader == null)
{
_blurShader = Shader.Find(ShaderName);
}
return _blurShader;
}
}
protected ShaderQuality _quality = ShaderQuality.Medium;
public ShaderQuality Quality
{
get => _quality;
set
{
if (_quality != value)
{
_quality = value;
setQualityOfMaterial(_material, _quality);
}
}
}
protected Color _additiveColor = new Color(0f, 0f, 0f, 0f);
public Color AdditiveColor
{
get => _additiveColor;
set
{
_additiveColor = value;
setAdditiveColor(_material, value);
}
}
/// <summary>
/// The material is used in screen space overlay canvases.
/// </summary>
[System.NonSerialized]
protected Material _material;
public Material Material
{
get
{
if (_material == null)
{
// Create material with shader
var shader = Shader.Find(ShaderName);
if (shader != null)
{
_material = new Material(shader);
_material.color = Color.white;
_material.hideFlags = HideFlags.HideAndDontSave;
setQualityOfMaterial(_material, _quality);
setFlipVerticalOfMaterial(_material, shouldFlipInShaderDependingOnProjectionParams());
setAdditiveColor(_material, AdditiveColor);
setOffset(_offset);
}
}
return _material;
}
set
{
_material = value;
}
}
void setQualityOfMaterial(Material material, ShaderQuality quality)
{
if (material == null)
return;
switch (quality)
{
case ShaderQuality.Low:
material.SetKeyword(new LocalKeyword(material.shader, "_SAMPLES_LOW"), true);
material.SetKeyword(new LocalKeyword(material.shader, "_SAMPLES_MEDIUM"), false);
material.SetKeyword(new LocalKeyword(material.shader, "_SAMPLES_HIGH"), false);
break;
case ShaderQuality.Medium:
material.SetKeyword(new LocalKeyword(material.shader, "_SAMPLES_LOW"), false);
material.SetKeyword(new LocalKeyword(material.shader, "_SAMPLES_MEDIUM"), true);
material.SetKeyword(new LocalKeyword(material.shader, "_SAMPLES_HIGH"), false);
break;
case ShaderQuality.High:
material.SetKeyword(new LocalKeyword(material.shader, "_SAMPLES_LOW"), false);
material.SetKeyword(new LocalKeyword(material.shader, "_SAMPLES_MEDIUM"), false);
material.SetKeyword(new LocalKeyword(material.shader, "_SAMPLES_HIGH"), true);
break;
default:
break;
}
}
public BlurredBackgroundBufferBuiltIn(CameraEvent evt)
{
if (evt != CameraEventForBlur)
throw new System.Exception("Only " + CameraEventForBlur + " events are supported.");
_cameraEvent = evt;
}
bool shouldFlipInShaderDependingOnProjectionParams()
{
// If I use DirectX (Win 10 PC) or Vulkan (Win 10 PC) or Metal (on an M1) it is flipped.
// If I use OpenGL it works fine for all events (CameraEvent.AfterEverything and CameraEvent.BeforeForwardAlpha)
// See: https://forum.unity.com/threads/command-buffer-blit-render-texture-result-is-upside-down.1463063/#post-9159080
// If on OpenGL then always enable flipping because OpenGL platforms do the flipping
// correctly via _ProjectionParams in all cases.
if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLCore
#if !UNITY_2023_1_OR_NEWER
|| SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES2
#endif
|| SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3)
{
return true;
}
// On other platforms enable flipping via _ProjectionParams only if the
// event is CameraEvent.AfterEverything (i.e. after post processing)
return _cameraEvent == CameraEvent.AfterEverything;
}
void setFlipVerticalOfMaterial(Material material, bool flip)
{
if (material == null)
return;
material.SetFloat("_FlipVertical", flip ? 1f : 0f);
}
void setAdditiveColor(Material material, Color color)
{
if (material == null)
return;
material.SetColor("_AdditiveColor", color);
}
void setOffset(float value)
{
if (_material != null)
_material.SetVector("_BlurOffset", new Vector4(value, value, 0f, 0f));
}
[System.NonSerialized]
protected RenderTexture _renderTargetBlurredA;
protected RenderTexture renderTargetBlurredA
{
get
{
#if UNITY_EDITOR
releaseTexturesIfInWrongColorSpace();
#endif
if (_renderTargetBlurredA == null)
_renderTargetBlurredA = createRenderTexture();
return _renderTargetBlurredA;
}
}
#if UNITY_EDITOR
protected void releaseTexturesIfInWrongColorSpace()
{
if (_renderTargetBlurredA != null)
{
// If the current sRGB settings does not match the color space then recreate the render textures.
if ((_renderTargetBlurredA.sRGB && QualitySettings.activeColorSpace == ColorSpace.Gamma)
|| (!_renderTargetBlurredA.sRGB && QualitySettings.activeColorSpace == ColorSpace.Linear))
{
_renderTargetBlurredA?.Release();
_renderTargetBlurredA = null;
_renderTargetBlurredB?.Release();
_renderTargetBlurredB = null;
}
}
}
#endif
[System.NonSerialized]
protected RenderTexture _renderTargetBlurredB;
protected RenderTexture renderTargetBlurredB
{
get
{
if (_renderTargetBlurredB == null)
_renderTargetBlurredB = createRenderTexture();
return _renderTargetBlurredB;
}
}
RenderTexture createRenderTexture()
{
var rw = QualitySettings.activeColorSpace == ColorSpace.Linear ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Default;
var texture = new RenderTexture(Resolution.x, Resolution.y, 0, RenderTextureFormat.Default, rw);
texture.filterMode = FilterMode.Bilinear;
texture.wrapMode = TextureWrapMode.Clamp;
return texture;
}
public Texture GetBlurredTexture()
{
// Debugging textures
//#if UNITY_EDITOR
// var settings = UIToolkitBlurredBackgroundSettings.GetOrCreateSettings();
// if (settings.DebugRenderTextureScreen != null && _cameraEvent == CameraEventForBlur)
// {
// if (renderTargetBlurredA.width == settings.DebugRenderTextureScreen.width)
// {
// Graphics.CopyTexture(renderTargetBlurredA, settings.DebugRenderTextureScreen);
// }
// else
// {
// Debug.LogWarning("Debugging render texture width does not match blur render texture width. Debug texture will remain empty.");
// }
// }
//#endif
return renderTargetBlurredA;
}
public void ClearBuffers()
{
if (_camera != null && _buffer != null)
_camera.RemoveCommandBuffer(_cameraEvent, _buffer);
}
public void AddBuffer(Camera cam)
{
AddBuffer(cam, _cameraEvent);
}
public void AddBuffer(Camera cam, CameraEvent evt)
{
if (cam == null)
return;
// Search for old buffers and remove them
var buffers = cam.GetCommandBuffers(evt);
foreach (var buf in buffers)
{
if (buf.name.StartsWith("Kamgam.UGUI Blur"))
{
cam.RemoveCommandBuffer(_cameraEvent, buf);
buf.Dispose();
}
}
// Create buffer if needed
// Debug.Log("Creating Command Buffer on " + cam);
_buffer = createBuffer("Kamgam.UGUI Blur (" + evt + ")");
cam.AddCommandBuffer(evt, _buffer);
// Done to avoid flipped (upside down) render results, see:
// https://forum.unity.com/threads/commandbuffer-rendering-scene-flipped-upside-down-in-forward-rendering.415922/#post-3114571
cam.forceIntoRenderTexture = true;
}
public CommandBuffer createBuffer(string name)
{
CommandBuffer buf = new CommandBuffer();
buf.name = name;
// copy screen into temporary RT
int screenCopyID = Shader.PropertyToID("_ScreenCopyTexture");
var desc = new RenderTextureDescriptor(-1, -1);
desc.depthBufferBits = 0;
desc.useMipMap = false;
desc.autoGenerateMips = false;
desc.colorFormat = RenderTextureFormat.Default;
// Makes sure to properly support linear color space.
desc.sRGB = QualitySettings.activeColorSpace == ColorSpace.Linear;
buf.GetTemporaryRT(screenCopyID, desc, FilterMode.Bilinear);
buf.Blit(BuiltinRenderTextureType.CurrentActive, screenCopyID);
// Copy from source to A (Sets _MainTex and scales the target down to our blur texture size).
buf.Blit(screenCopyID, renderTargetBlurredA);
// 2 pass blur (A > B > A)
int iterations = Iterations * 2 - 1; // Necessary to compensate for the flipping of Material (iterations need
// to be odd or else the image is upside down if shouldFlip() is true).
for (int i = 0; i < iterations; i++)
{
buf.Blit(renderTargetBlurredA, renderTargetBlurredB, Material, 0);
buf.Blit(renderTargetBlurredB, renderTargetBlurredA, Material, 1);
}
buf.ReleaseTemporaryRT(screenCopyID);
return buf;
}
public void UpdateActiveCamera(Camera cam)
{
if (cam != null && _camera != cam)
{
// Debug.Log("Setting new camera: " + cam);
ClearBuffers();
_camera = cam;
AddBuffer(_camera, _cameraEvent);
}
}
public void RecreateBuffers()
{
ClearBuffers();
if (_camera != null)
AddBuffer(_camera);
}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a4abb7b676f12cc4c96ef4b45ef6bb3d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,300 @@
#if !KAMGAM_RENDER_PIPELINE_URP && KAMGAM_RENDER_PIPELINE_HDRP
// Based on: https://github.com/alelievr/HDRP-Custom-Passes/blob/2021.2/Assets/CustomPasses/CopyPass/CopyPass.cs#L67
// as recommended by antoinel_unity in https://forum.unity.com/threads/custom-pass-into-render-texture-into-custom-aov.1146872/#post-7362314
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.HighDefinition;
namespace Kamgam.UIToolkitBlurredBackground
{
public class BlurredBackgroundPassHDRP : CustomPass
{
public const string ShaderName = "Kamgam/UI Toolkit/HDRP/Blur Shader";
[System.NonSerialized]
protected Material _material;
public Material Material
{
get
{
if (_material == null)
{
// Create material with shader
var shader = Shader.Find(ShaderName);
if (shader != null)
{
_material = CoreUtils.CreateEngineMaterial(shader);
_material.color = Color.white;
switch (_shaderQuality)
{
case ShaderQuality.Low:
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_LOW"), true);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_MEDIUM"), false);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_HIGH"), false);
break;
case ShaderQuality.Medium:
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_LOW"), false);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_MEDIUM"), true);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_HIGH"), false);
break;
case ShaderQuality.High:
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_LOW"), false);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_MEDIUM"), false);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_HIGH"), true);
break;
default:
break;
}
setOffset(Offset);
}
}
return _material;
}
set
{
_material = value;
}
}
void setOffset(float value)
{
if (_material != null)
_material.SetVector("_BlurOffset", new Vector4(value, value, 0f, 0f));
}
[System.NonSerialized]
protected int _blurIterations = 0;
public int BlurIterations
{
get => _blurIterations;
set
{
if (_blurIterations != value)
{
_blurIterations = value;
enabled = _blurIterations > 0;
}
}
}
protected float _offset = 1.5f;
/// <summary>
/// This is only used in the performance shader. Default is 1.5f. You can increase this AND reduce the blur strength to improve performance. However, the quality will start to degrade rapidly.
/// </summary>
public float Offset
{
get => _offset;
set
{
_offset = value;
setOffset(value);
}
}
protected ShaderQuality _shaderQuality = ShaderQuality.Medium;
/// <summary>
/// The shader quality to use. The higher the quality, the more performance it will cost.
/// </summary>
public ShaderQuality ShaderQuality
{
get => _shaderQuality;
set
{
_shaderQuality = value;
_material = null;
}
}
/// <summary>
/// The resolution used for the render texture.
/// </summary>
[System.NonSerialized]
public Vector2Int Resolution = new Vector2Int(512, 512);
public void UpdateRenderTextureResolutions()
{
if (_renderTargetBlurredA != null)
{
_renderTargetBlurredA.Release();
_renderTargetBlurredA.width = Resolution.x;
_renderTargetBlurredA.height = Resolution.y;
_renderTargetBlurredA.Create();
}
if (_renderTargetBlurredB != null)
{
_renderTargetBlurredB.Release();
_renderTargetBlurredB.width = Resolution.x;
_renderTargetBlurredB.height = Resolution.y;
_renderTargetBlurredB.Create();
}
}
[System.NonSerialized]
protected RenderTexture _renderTargetBlurredA;
public RenderTexture RenderTargetBlurredA
{
get
{
if (_renderTargetBlurredA == null)
{
_renderTargetBlurredA = createRenderTexture();
if (_renderTargetHandleA != null)
{
_renderTargetHandleA.Release();
_renderTargetHandleA = null;
}
}
return _renderTargetBlurredA;
}
}
[System.NonSerialized]
protected RenderTexture _renderTargetBlurredB;
public RenderTexture RenderTargetBlurredB
{
get
{
if (_renderTargetBlurredB == null)
{
_renderTargetBlurredB = createRenderTexture();
if (_renderTargetHandleB != null)
{
_renderTargetHandleB.Release();
_renderTargetHandleB = null;
}
}
return _renderTargetBlurredB;
}
}
[System.NonSerialized]
protected RTHandle _renderTargetHandleA;
public RTHandle RenderTargetHandleA
{
get
{
if (_renderTargetHandleA == null)
_renderTargetHandleA = RTHandles.Alloc(RenderTargetBlurredA);
return _renderTargetHandleA;
}
}
[System.NonSerialized]
protected RTHandle _renderTargetHandleB;
public RTHandle RenderTargetHandleB
{
get
{
if (_renderTargetHandleB == null)
_renderTargetHandleB = RTHandles.Alloc(RenderTargetBlurredB);
return _renderTargetHandleB;
}
}
RenderTexture createRenderTexture()
{
var texture = new RenderTexture(Resolution.x, Resolution.y, 16);
texture.filterMode = FilterMode.Bilinear;
return texture;
}
[System.NonSerialized]
public bool AreTexturesSwapped;
public Texture GetBlurredTexture()
{
return AreTexturesSwapped ? RenderTargetBlurredB : RenderTargetBlurredA;
}
protected override bool executeInSceneView => false;
protected override void Setup(ScriptableRenderContext renderContext, CommandBuffer cmd)
{
name = "UITK Blurred Background";
}
protected override void Execute(CustomPassContext ctx)
{
if (Material == null || BlurIterations == 0 || Offset == 0)
return;
var source = ctx.cameraColorBuffer;
// First pass is just a copy with the right scale (plus downsampling).
// From: ctx.cmd.Blit(RenderTargetBlurredB, RenderTargetBlurredA, Material);
//
// Sadly the API for copying, scaling AND using a material is not exposed.
//
// TODO: Investigate if this breaks XR compatibility.
// Solution leads: Use 2DArray and SAMPLE_TEXTURE2D_X in the shader and maybe use Blit_Texture() or Blit_Identifier to pass the material.
// See: https://github.com/Unity-Technologies/UnityCsReference/blob/master/Runtime/Export/Graphics/RenderingCommandBuffer.cs#L901
// and: https://github.com/Unity-Technologies/UnityCsReference/blob/master/Runtime/Export/Graphics/RenderingCommandBuffer.bindings.cs#L614
var scale = RTHandles.rtHandleProperties.rtHandleScale;
ctx.cmd.Blit(source, RenderTargetBlurredA, new Vector2(scale.x, scale.y), Vector2.zero, 0, 0);
AreTexturesSwapped = false;
// All other blur passes play ping pong between A and B
for (int i = 0; i < BlurIterations; i++)
{
if (AreTexturesSwapped)
{
ctx.cmd.Blit(RenderTargetBlurredB, RenderTargetBlurredA, Material, 0);
ctx.cmd.Blit(RenderTargetBlurredA, RenderTargetBlurredB, Material, 1);
}
else
{
ctx.cmd.Blit(RenderTargetBlurredA, RenderTargetBlurredB, Material, 0);
ctx.cmd.Blit(RenderTargetBlurredB, RenderTargetBlurredA, Material, 1);
}
}
}
protected override void Cleanup()
{
CoreUtils.Destroy(_material);
if (_renderTargetBlurredA != null)
{
_renderTargetBlurredA.Release();
_renderTargetBlurredA = null;
}
if (_renderTargetBlurredB != null)
{
_renderTargetBlurredB.Release();
_renderTargetBlurredB = null;
}
if (_renderTargetHandleA != null)
{
_renderTargetHandleA.Release();
_renderTargetHandleA = null;
}
if (_renderTargetHandleB != null)
{
_renderTargetHandleB.Release();
_renderTargetHandleB = null;
}
base.Cleanup();
}
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d1931dd95fbdff24f98ab13d7b914752
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,470 @@
#if KAMGAM_RENDER_PIPELINE_URP
using UnityEngine;
using UnityEngine.Rendering;
#if UNITY_6000_0_OR_NEWER
using UnityEngine.Rendering.RenderGraphModule;
#endif
using UnityEngine.Rendering.Universal;
namespace Kamgam.UIToolkitBlurredBackground
{
public class BlurredBackgroundPassURP : ScriptableRenderPass
{
public System.Action OnPostRender;
public bool Active = false;
protected int _iterations;
public int Iterations
{
get => _iterations;
set
{
if (_iterations != value)
{
_iterations = value;
}
}
}
protected float _offset = 1.5f;
public float Offset
{
get => _offset;
set
{
_offset = value;
setOffset(value);
}
}
protected Color _additiveColor = new Color(0f, 0f, 0f, 0f);
public Color AdditiveColor
{
get => _additiveColor;
set
{
_additiveColor = value;
setAdditiveColor(_material, value);
}
}
void setAdditiveColor(Material material, Color color)
{
if (material == null)
return;
material.SetColor("_AdditiveColor", color);
}
protected Vector2Int _resolution = new Vector2Int(512, 512);
/// <summary>
/// The texture resolution of the blurred image. Default is 512 x 512. Please use 2^n values like 256, 512, 1024, 2048. Reducing this will increase performance but decrease quality. Every frame your rendered image will be copied, resized and then blurred [BlurStrength] times.
/// </summary>
public Vector2Int Resolution
{
get => _resolution;
set
{
_resolution = value;
updateRenderTextureResolutions();
}
}
void updateRenderTextureResolutions()
{
if (_renderTargetBlurredA != null)
{
_renderTargetBlurredA.Release();
_renderTargetBlurredA.width = _resolution.x;
_renderTargetBlurredA.height = _resolution.y;
_renderTargetBlurredA.Create();
}
if (_renderTargetBlurredB != null)
{
_renderTargetBlurredB.Release();
_renderTargetBlurredB.width = _resolution.x;
_renderTargetBlurredB.height = _resolution.y;
_renderTargetBlurredB.Create();
}
}
public const string ShaderName = "Kamgam/UI Toolkit/URP/Blur Shader";
protected ShaderQuality _quality = ShaderQuality.Medium;
public ShaderQuality Quality
{
get => _quality;
set
{
_quality = value;
_material = null;
}
}
[System.NonSerialized]
protected Material _material;
public Material Material
{
get
{
if (_material == null)
{
// Create material with shader
var shader = Shader.Find(ShaderName);
if (shader != null)
{
_material = new Material(shader);
_material.color = Color.white;
switch (_quality)
{
case ShaderQuality.Low:
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_LOW"), true);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_MEDIUM"), false);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_HIGH"), false);
break;
case ShaderQuality.Medium:
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_LOW"), false);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_MEDIUM"), true);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_HIGH"), false);
break;
case ShaderQuality.High:
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_LOW"), false);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_MEDIUM"), false);
_material.SetKeyword(new LocalKeyword(shader, "_SAMPLES_HIGH"), true);
break;
default:
break;
}
setOffset(_offset);
setAdditiveColor(_material, AdditiveColor);
}
}
return _material;
}
set
{
_material = value;
}
}
void setOffset(float value)
{
if (_material != null)
_material.SetVector("_BlurOffset", new Vector4(value, value, 0f, 0f));
}
[System.NonSerialized]
protected RenderTexture _renderTargetBlurredA;
public RenderTexture RenderTargetBlurredA
{
get
{
if (_renderTargetBlurredA == null)
{
_renderTargetBlurredA = createRenderTexture();
if (_renderTargetHandleA != null)
{
_renderTargetHandleA.Release();
_renderTargetHandleA = null;
}
}
return _renderTargetBlurredA;
}
}
[System.NonSerialized]
protected RenderTexture _renderTargetBlurredB;
public RenderTexture RenderTargetBlurredB
{
get
{
if (_renderTargetBlurredB == null)
{
_renderTargetBlurredB = createRenderTexture();
if (_renderTargetHandleB != null)
{
_renderTargetHandleB.Release();
_renderTargetHandleB = null;
}
}
return _renderTargetBlurredB;
}
}
[System.NonSerialized]
protected RTHandle _renderTargetHandleA;
public RTHandle RenderTargetHandleA
{
get
{
if (_renderTargetHandleA == null)
_renderTargetHandleA = RTHandles.Alloc(RenderTargetBlurredA);
return _renderTargetHandleA;
}
}
[System.NonSerialized]
protected RTHandle _renderTargetHandleB;
public RTHandle RenderTargetHandleB
{
get
{
if (_renderTargetHandleB == null)
_renderTargetHandleB = RTHandles.Alloc(RenderTargetBlurredB);
return _renderTargetHandleB;
}
}
RenderTexture createRenderTexture()
{
var texture = new RenderTexture(Resolution.x, Resolution.y, 0);
texture.filterMode = FilterMode.Bilinear;
return texture;
}
public void ClearRenderTargets()
{
if (_renderTargetHandleA != null)
{
_renderTargetHandleA.Release();
_renderTargetHandleA = null;
}
if (_renderTargetBlurredA != null)
{
_renderTargetBlurredA.Release();
_renderTargetBlurredA = null;
}
if (_renderTargetHandleB != null)
{
_renderTargetHandleB.Release();
_renderTargetHandleB = null;
}
if (_renderTargetBlurredB != null)
{
_renderTargetBlurredB.Release();
_renderTargetBlurredB = null;
}
}
public Texture GetBlurredTexture()
{
return RenderTargetBlurredA;
}
// Actual Render Pass stuff starts here:
// -------------------------------------------------------------
#region PASS_RENDER_NON_GRAPH_PATH
// Turns out profiling scopes should NOT be mixed with CommandBuffers, see:
// https://forum.unity.com/threads/how-to-use-profilingscope-correctly.1366812/#post-8621289
// ProfilingSampler _profilingSampler = new ProfilingSampler("UGUI Blurred Background Pass");
#if KAMGAM_RENDER_PIPELINE_URP_13
RTHandle _cameraColorTarget;
#endif
#if UNITY_6000_0_OR_NEWER
[System.Obsolete]
#endif
public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
{
ConfigureInput(ScriptableRenderPassInput.Color);
#if KAMGAM_RENDER_PIPELINE_URP_13
_cameraColorTarget = renderingData.cameraData.renderer.cameraColorTargetHandle;
#endif
}
#if UNITY_6000_0_OR_NEWER
[System.Obsolete]
#endif
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
if (!Active || _iterations == 0 || Offset <= 0f)
return;
// Do not render while switching play modes.
#if UNITY_EDITOR
if (EditorPlayState.State != EditorPlayState.PlayState.Playing && EditorPlayState.State != EditorPlayState.PlayState.Editing)
return;
#endif
// Skip rendering in scene view or preview. Why? Because rendering in these
// makes the scene view flicker if not in play mode.
// See: https://forum.unity.com/threads/urp-custom-pass-blit-flickering-in-scene-view.1461932/
#if UNITY_EDITOR
if ( renderingData.cameraData.cameraType == CameraType.SceneView
|| renderingData.cameraData.cameraType == CameraType.Preview)
return;
#endif
#if !KAMGAM_RENDER_PIPELINE_URP_13
var source = renderingData.cameraData.renderer.cameraColorTarget;
#else
var source = renderingData.cameraData.renderer.cameraColorTargetHandle;
// Check if source is null, if yes then try to fetch it from the set target. Otherwise abort.
if (renderingData.cameraData.cameraType != CameraType.Game || source == null)
{
source = _cameraColorTarget;
if (source == null)
{
#if UNITY_EDITOR
// TODO: Investigate: This is happening in URP 14 though it has no effect (everything works).
// Logger.LogWarning("Camera color target source is null. Will skip blur rendering. Please investigate this issue.");
#endif
return;
}
}
#endif
CommandBuffer cmd = CommandBufferPool.Get(name: "UGUI Blurred Background Pass");
cmd.Clear();
// Notice: Do not use cmd.Blit() in SPRs, see:
// https://forum.unity.com/threads/how-to-blit-in-urp-documentation-unity-blog-post-on-every-blit-function.1211508/#post-7735527
// Blit Implementation can be found here:
// https://github.com/Unity-Technologies/Graphics/blob/b57fcac51bb88e1e589b01e32fd610c991f16de9/Packages/com.unity.render-pipelines.core/Runtime/Utilities/Blitter.cs#L221
// First pass scales down the image
Blit(cmd, source, RenderTargetHandleA);
// 2 pass blur A > B, B > A
for (int i = 0; i < Iterations; i++)
{
// Blur horizontal (pass 0)
Blit(cmd, RenderTargetHandleA, RenderTargetHandleB, Material, 0);
// Blur vertical (pass 1)
Blit(cmd, RenderTargetHandleB, RenderTargetHandleA, Material, 1);
}
context.ExecuteCommandBuffer(cmd);
cmd.Clear();
CommandBufferPool.Release(cmd);
OnPostRender?.Invoke();
}
public override void OnCameraCleanup(CommandBuffer cmd)
{
base.OnCameraCleanup(cmd);
}
#endregion
#if UNITY_6000_0_OR_NEWER
#region PASS_RENDER_GRAPH_PATH
// The custom copy color pass data that will be passed at render graph execution to the lambda we set with "SetRenderFunc" during render graph setup
private class CopyPassData
{
public TextureHandle inputTexture;
}
// The custom main pass data that will be passed at render graph execution to the lambda we set with "SetRenderFunc" during render graph setup
private class BlurPassData
{
public Material material;
public TextureHandle inputTexture;
public int pass;
}
RenderTargetInfo getRenderTargetInfo(RenderTexture texture)
{
RenderTargetInfo info = new RenderTargetInfo();
info.format = texture.descriptor.graphicsFormat;
info.width = texture.width;
info.height = texture.height;
info.volumeDepth = texture.volumeDepth;
info.bindMS = texture.bindTextureMS;
return info;
}
// Here you can implement the rendering logic for the render graph path
public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData)
{
// This works
var infoA = getRenderTargetInfo(RenderTargetBlurredA);
var targetA = renderGraph.ImportTexture(RenderTargetHandleA, infoA);
var infoB = getRenderTargetInfo(RenderTargetBlurredB);
var targetB = renderGraph.ImportTexture(RenderTargetHandleB, infoB);
// This does not. Wth?!?
// see: https://forum.unity.com/threads/introduction-of-render-graph-in-the-universal-render-pipeline-urp.1500833/page-7#post-9822162
//var targetA = renderGraph.ImportTexture(RenderTargetHandleA, getRenderTargetInfo(RenderTargetBlurredA));
//var targetB = renderGraph.ImportTexture(RenderTargetHandleB, getRenderTargetInfo(RenderTargetBlurredB));
UniversalResourceData resourcesData = frameData.Get<UniversalResourceData>();
// Color buffer copy pass
// * This pass makes a temporary copy of the active color target for sampling
// * This is needed as GPU graphics pipelines don't allow to sample the texture bound as the active color target
// * This copy can be avoided if you won't need to sample the color target or will only need to render/blend on top of it
using (var builder = renderGraph.AddRasterRenderPass<CopyPassData>("UITKBlurredBackground_CopyColor", out var passData, profilingSampler))
{
passData.inputTexture = resourcesData.activeColorTexture;
builder.UseTexture(resourcesData.activeColorTexture, AccessFlags.Read);
builder.SetRenderAttachment(targetA, 0, AccessFlags.WriteAll);
builder.SetRenderFunc((CopyPassData data, RasterGraphContext context) => ExecuteCopyColorPass(data, context));
}
// Blur horizontal pass
using (var builder = renderGraph.AddRasterRenderPass<BlurPassData>("UITKBlurredBackground_BlurHori", out var passData, profilingSampler))
{
passData.material = Material;
passData.inputTexture = targetA;
passData.pass = 0;
builder.UseTexture(targetA, AccessFlags.Read);
builder.SetRenderAttachment(targetB, 0, AccessFlags.WriteAll);
builder.SetRenderFunc((BlurPassData data, RasterGraphContext context) => ExecuteBlurPass(data, context));
}
// Blur vertical pass
using (var builder = renderGraph.AddRasterRenderPass<BlurPassData>("UITKBlurredBackground_BlurVerti", out var passData, profilingSampler))
{
passData.material = Material;
passData.inputTexture = targetB;
passData.pass = 1;
builder.UseTexture(targetB, AccessFlags.Read);
builder.SetRenderAttachment(targetA, 0, AccessFlags.WriteAll);
builder.SetRenderFunc((BlurPassData data, RasterGraphContext context) => ExecuteBlurPass(data, context));
}
OnPostRender?.Invoke();
}
private static void ExecuteCopyColorPass(CopyPassData data, RasterGraphContext context)
{
Blitter.BlitTexture(context.cmd, data.inputTexture, new Vector4(1, 1, 0, 0), 0.0f, bilinear: true);
}
private static void ExecuteBlurPass(BlurPassData data, RasterGraphContext context)
{
Blitter.BlitTexture(context.cmd, data.inputTexture, new Vector4(1, 1, 0, 0), data.material, data.pass);
}
#endregion
#endif
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 533e2914ca6dfeb499952168ff6d3c93
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,31 @@
using UnityEngine;
namespace Kamgam.UIToolkitBlurredBackground
{
public interface IBlurRenderer
{
/// <summary>
/// Defines how often the blur will be applied. Use with caution.
/// </summary>
int Iterations { get; set; }
/// <summary>
/// Defines how far out the sampling goes and thus the blur strength for each pass.
/// </summary>
float Offset { get; set; }
/// <summary>
/// The square texture resolution of the blurred image. Default is 512 x 512. Please use 2^n values like 256, 512, 1024, 2048. Reducing this will increase performance but decrease quality. Every frame your rendered image will be copied, resized and then blurred [BlurStrength] times.
/// </summary>
Vector2Int Resolution { get; set; }
/// <summary>
/// Defines how many samples are taken per pass. The higher the quality, the more texels will be sampled and the lower the performance will be.
/// </summary>
ShaderQuality Quality { get; set; }
bool Active { get; set; }
Texture GetBlurredTexture();
bool Update();
}
}
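
For reference, a hedged sketch (not part of this commit) of how the matching IBlurRenderer implementation could be chosen at compile time using the same scripting defines that gate the renderer files above; the factory class and method names are hypothetical.

namespace Kamgam.UIToolkitBlurredBackground
{
    // Hypothetical factory, illustrative only.
    public static class BlurRendererFactoryExample
    {
        public static IBlurRenderer CreateBlurRendererExample()
        {
#if KAMGAM_RENDER_PIPELINE_URP
            return new BlurRendererURP();
#elif KAMGAM_RENDER_PIPELINE_HDRP
            return new BlurRendererHDRP();
#else
            return new BlurRendererBuiltIn();
#endif
        }
    }
}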

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 3fa6c55ccce6d3348bbe45a160dadac3
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,64 @@
using UnityEngine;
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace Kamgam.UIToolkitBlurredBackground
{
public static class RenderUtils
{
static Camera _cachedGameViewCam;
static Camera[] _tmpAllCameras = new Camera[10];
public static Camera GetGameViewCamera()
{
var cam = Camera.main;
if (cam == null)
{
// Fetch cameras
int allCamerasCount = Camera.allCamerasCount;
// Alloc new array only if needed
if (allCamerasCount > _tmpAllCameras.Length)
{
_tmpAllCameras = new Camera[allCamerasCount + 5];
}
Camera.GetAllCameras(_tmpAllCameras);
// We pick the highest-depth camera and iterate from the back because we assume
// that among cameras with equal depth the last one takes precedence.
float maxDepth = float.MinValue;
for (int i = _tmpAllCameras.Length - 1; i >= 0; i--)
{
// Null out old references
if (i >= allCamerasCount)
{
_tmpAllCameras[i] = null;
continue;
}
var cCam = _tmpAllCameras[i];
if (!cCam.isActiveAndEnabled)
continue;
// Only take full screen cameras that are not rendering into render textures
if (cCam.depth > maxDepth && cCam.targetTexture == null && cCam.rect.width >= 1f && cCam.rect.height >= 1f)
{
maxDepth = cCam.depth;
cam = cCam;
}
}
}
// cache game view camera
if (cam != null && cam.cameraType == CameraType.Game)
_cachedGameViewCam = cam;
if (cam == null)
return _cachedGameViewCam;
return cam;
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 82b708f16d7af5f4e98cc285c4f83173
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,19 @@
using System;
namespace Kamgam.UIToolkitBlurredBackground
{
public enum ShaderQuality { Low, Medium, High };
public static class ShaderQualityTools
{
public static ShaderQuality FromString(string str)
{
if(Enum.TryParse(str, out ShaderQuality result))
{
return result;
}
return default;
}
}
}
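
A short usage note (illustrative, not part of this commit): Enum.TryParse as used above is case-sensitive, so only exact member names parse; anything else falls back to default(ShaderQuality), which is Low because it is the first enum member.

var high = ShaderQualityTools.FromString("High");  // ShaderQuality.High
var low  = ShaderQualityTools.FromString("Ultra"); // no match -> default(ShaderQuality), i.e. ShaderQuality.Low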

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f6a1f3f8717848e48b3fe3cce70d9b87
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,88 @@
using UnityEngine;
namespace Kamgam.UIToolkitBlurredBackground
{
public enum SquareResolution
{
_32,
_64,
_128,
_256,
_512,
_1024,
_2048,
_4096
};
public static class SquareResolutionsUtils
{
public static Vector2Int ToResolution(this SquareResolution res)
{
switch (res)
{
case SquareResolution._32:
return new Vector2Int(32, 32);
case SquareResolution._64:
return new Vector2Int(64, 64);
case SquareResolution._128:
return new Vector2Int(128, 128);
case SquareResolution._256:
return new Vector2Int(256, 256);
case SquareResolution._512:
return new Vector2Int(512, 512);
case SquareResolution._1024:
return new Vector2Int(1024, 1024);
case SquareResolution._2048:
return new Vector2Int(2048, 2048);
case SquareResolution._4096:
return new Vector2Int(4096, 4096);
default:
return new Vector2Int(512, 512);
}
}
public static SquareResolution FromResolution(this Vector2Int res)
{
if (res.x >= 4096 && res.y >= 4096)
{
return SquareResolution._4096;
}
else if (res.x >= 2048 && res.y >= 2048)
{
return SquareResolution._2048;
}
else if (res.x >= 1024 && res.y >= 1024)
{
return SquareResolution._1024;
}
else if (res.x >= 512 && res.y >= 512)
{
return SquareResolution._512;
}
else if (res.x >= 256 && res.y >= 256)
{
return SquareResolution._256;
}
else if (res.x >= 128 && res.y >= 128)
{
return SquareResolution._128;
}
else if (res.x >= 64 && res.y >= 64)
{
return SquareResolution._64;
}
else
{
return SquareResolution._32;
}
}
}
}
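
A brief usage note (illustrative, not part of this commit): FromResolution maps a size down to the largest square step that both dimensions can cover, so non-square or in-between inputs resolve conservatively.

Vector2Int size = SquareResolution._1024.ToResolution();           // (1024, 1024)
SquareResolution step = new Vector2Int(800, 600).FromResolution(); // SquareResolution._512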

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 1d1c2d493da892c40aff77e085a20f4e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: