This article is also published on my personal blog: https://dragon_boy.gitee.io
A Base Class for Screen Post-Processing Scripts
Screen post-processing takes the screen image obtained after the whole scene has been rendered and processes it to achieve special effects such as depth of field or motion blur.
In Unity we use the OnRenderImage function:
MonoBehaviour.OnRenderImage(RenderTexture src, RenderTexture dest)
When this function is declared, Unity stores the currently rendered image in the source render texture, the first parameter. After a series of operations inside the function, the result written into the destination render texture, the second parameter, is displayed on the screen. Inside OnRenderImage we use Graphics.Blit to process render textures, that is, to perform a bit block transfer (blit).
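For reference, the two Graphics.Blit overloads relied on in this article are declared as follows in the Unity scripting API:
// Simply copies the source texture into the destination render texture.
public static void Blit(Texture source, RenderTexture dest);
// Processes source with the given material; pass = -1 runs every pass of its shader.
public static void Blit(Texture source, RenderTexture dest, Material mat, int pass = -1);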
By default, OnRenderImage is called after all opaque and transparent passes have finished, so the effect influences every object in the scene. If we do not want transparent objects to be affected, we can add the ImageEffectOpaque attribute to the OnRenderImage function.
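A minimal sketch of how the attribute is applied (the material field here is just a placeholder for whatever post-processing material the script holds):
[ImageEffectOpaque]
void OnRenderImage(RenderTexture src, RenderTexture dest)
{
    // With the attribute above, Unity calls this after opaque geometry
    // has been rendered but before the transparent passes.
    Graphics.Blit(src, dest, material);
}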
The typical workflow for a screen post-processing effect in Unity is as follows: first, attach a dedicated post-processing script to the camera. In that script, implement OnRenderImage to obtain the render texture of the current screen, then call Graphics.Blit with a specific Unity Shader to process the image, and finally display the resulting render texture on the screen.
Here is the base class code:
using UnityEngine;
using System.Collections;
[ExecuteInEditMode]
[RequireComponent(typeof(Camera))]
public class PostEffectsBase : MonoBehaviour
{
// Called when the material used by this effect needs to be created
protected Material CheckShaderAndCreateMaterial(Shader shader, Material material)
{
if (shader == null)
{
return null;
}
if (shader.isSupported && material && material.shader == shader)
return material;
if (!shader.isSupported)
{
return null;
}
else
{
material = new Material(shader);
material.hideFlags = HideFlags.DontSave;
if (material)
return material;
else
return null;
}
}
}
Each screen post-processing effect is implemented with a Shader. We check whether a Shader has been assigned and whether it is supported on the current graphics card, and then create a material from it.
Adjusting Brightness, Saturation and Contrast
Next we create a script that adjusts brightness, saturation and contrast; the code is as follows:
using UnityEngine;
using System.Collections;
public class BrightnessSaturationAndContrast : PostEffectsBase
{
public Shader briSatConShader;
private Material briSatConMaterial;
public Material material
{
get
{
briSatConMaterial = CheckShaderAndCreateMaterial(briSatConShader, briSatConMaterial);
return briSatConMaterial;
}
}
[Range(0.0f, 3.0f)]
public float brightness = 1.0f;
[Range(0.0f, 3.0f)]
public float saturation = 1.0f;
[Range(0.0f, 3.0f)]
public float contrast = 1.0f;
void OnRenderImage(RenderTexture src, RenderTexture dest)
{
if (material != null)
{
material.SetFloat("_Brightness", brightness);
material.SetFloat("_Saturation", saturation);
material.SetFloat("_Contrast", contrast);
Graphics.Blit(src, dest, material);
}
else
{
Graphics.Blit(src, dest);
}
}
}
We first create the material from the Shader, then in OnRenderImage assign the script values to the identically named properties of the Shader, and finally call Graphics.Blit with this material to process the source texture and blit the result to the destination. Note that Graphics.Blit passes its first parameter to the _MainTex property of the Shader, so the Shader must declare this property.
The Shader code is as follows:
Shader "Unlit/BSCmaterial"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_Brightness ("Brightness", Float) = 1
_Saturation ("Saturation", Float) = 1
_Contrast ("Contrast", Float) = 1
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
ZTest Always
Cull Off
ZWrite Off
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 pos : SV_POSITION;
};
sampler2D _MainTex;
float4 _MainTex_ST;
half _Brightness;
half _Saturation;
half _Contrast;
v2f vert (appdata v)
{
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
// sample the texture
fixed4 col = tex2D(_MainTex, i.uv);
// brightness
fixed3 finalColor = col.rgb * _Brightness;
// saturation
fixed luminance = 0.2125 * col.r + 0.7154 * col.g + 0.0721 * col.b;
fixed3 luminanceColor = fixed3(luminance, luminance, luminance);
finalColor = lerp(luminanceColor, finalColor, _Saturation);
// contrast
fixed3 avgColor = fixed3(0.5, 0.5, 0.5);
finalColor = lerp(avgColor, finalColor, _Contrast);
return fixed4(finalColor, col.a);
}
ENDCG
}
}
}
We force the pass to always pass the depth test and turn off depth writing so that it does not occlude objects rendered after it.
Edge Detection
Here we use the Sobel operator for edge detection.
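For reference, the Sobel operator convolves the luminance of the image with two 3x3 kernels (written here to match the Gx and Gy arrays used in the Shader below) and combines their absolute responses to approximate the gradient magnitude:

$$G_x = \begin{bmatrix} -1 & -2 & -1 \\ 0 & 0 & 0 \\ 1 & 2 & 1 \end{bmatrix}, \quad G_y = \begin{bmatrix} -1 & 0 & 1 \\ -2 & 0 & 2 \\ -1 & 0 & 1 \end{bmatrix}, \quad G \approx |G_x| + |G_y|$$

The fragment shader computes edge = 1 - |Gx| - |Gy|, so a smaller edge value means the pixel is more likely to lie on an edge, which is why edge is used directly as the lerp factor between the edge color and the original color.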
First, write the corresponding script:
using UnityEngine;
using System.Collections;
public class EdgeDetection : PostEffectsBase
{
public Shader edgeDetectShader;
private Material edgeDetectMaterial = null;
public Material material
{
get
{
edgeDetectMaterial = CheckShaderAndCreateMaterial(edgeDetectShader, edgeDetectMaterial);
return edgeDetectMaterial;
}
}
[Range(0.0f, 1.0f)]
public float edgesOnly = 0.0f;
public Color edgeColor = Color.black;
public Color backgroundColor = Color.white;
void OnRenderImage(RenderTexture src, RenderTexture dest)
{
if (material != null)
{
material.SetFloat("_EdgeOnly", edgesOnly);
material.SetColor("_EdgeColor", edgeColor);
material.SetColor("_BackgroundColor", backgroundColor);
Graphics.Blit(src, dest, material);
}
else
{
Graphics.Blit(src, dest);
}
}
}
The Shader code is as follows:
Shader "Unlit/EdgeDetection"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_EdgeOnly ("Edge Only", Float) = 1.0
_EdgeColor ("Edge Color", Color) = (0,0,0,1)
_BackgroundColor("Background Color", Color) = (1,1,1,1)
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
ZTest Always
Cull Off
ZWrite Off
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
half2 uv[9] : TEXCOORD0;
float4 vertex : SV_POSITION;
};
sampler2D _MainTex;
float4 _MainTex_ST;
half4 _MainTex_TexelSize;
fixed _EdgeOnly;
fixed4 _EdgeColor;
fixed4 _BackgroundColor;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
half2 uv = v.uv;
o.uv[0] = uv + _MainTex_TexelSize.xy * float2(-1, -1);
o.uv[1] = uv + _MainTex_TexelSize.xy * float2(0, -1);
o.uv[2] = uv + _MainTex_TexelSize.xy * float2(1, -1);
o.uv[3] = uv + _MainTex_TexelSize.xy * float2(-1, 0);
o.uv[4] = uv + _MainTex_TexelSize.xy * float2(0, 0);
o.uv[5] = uv + _MainTex_TexelSize.xy * float2(1, 0);
o.uv[6] = uv + _MainTex_TexelSize.xy * float2(-1, 1);
o.uv[7] = uv + _MainTex_TexelSize.xy * float2(0, 1);
o.uv[8] = uv + _MainTex_TexelSize.xy * float2(1, 1);
return o;
}
fixed luminance(fixed4 color)
{
return 0.2125 * color.r + 0.7154 * color.g + 0.0721 * color.b;
}
half Sobel(v2f i)
{
const half Gx[9] = { -1,-2,-1,
0,0,0,
1,2,1 };
const half Gy[9] = { -1,0,1,
-2,0,2,
-1,0,1 };
half texColor;
half edgeX = 0;
half edgeY = 0;
for (int it = 0; it < 9; it++)
{
texColor = luminance(tex2D(_MainTex, i.uv[it]));
edgeX += texColor * Gx[it];
edgeY += texColor * Gy[it];
}
half edge = 1 - abs(edgeX) - abs(edgeY);
return edge;
}
fixed4 frag(v2f i) : SV_Target
{
half edge = Sobel(i);
fixed4 withEdgeColor = lerp(_EdgeColor, tex2D(_MainTex, i.uv[4]), edge);
fixed4 onlyEdgeColor = lerp(_EdgeColor, _BackgroundColor, edge);
return lerp(withEdgeColor, onlyEdgeColor, _EdgeOnly);
}
ENDCG
}
}
}
Gaussian Blur
Here we use a Gaussian kernel to implement the blur. Because the kernel is separable, we blur along one axis at a time: the Shader implements two passes, one for the vertical direction and one for the horizontal direction.
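For reference, the kernel weights come from the two-dimensional Gaussian function, which separates into two one-dimensional filters; this is what allows an N x N blur to be done as two N-tap passes instead of N x N samples per pixel:

$$G(x, y) = \frac{1}{2\pi\sigma^{2}}\, e^{-\frac{x^{2}+y^{2}}{2\sigma^{2}}} = \left(\frac{1}{\sqrt{2\pi}\,\sigma}\, e^{-\frac{x^{2}}{2\sigma^{2}}}\right)\left(\frac{1}{\sqrt{2\pi}\,\sigma}\, e^{-\frac{y^{2}}{2\sigma^{2}}}\right)$$

The weights 0.4026, 0.2442 and 0.0545 used in fragBlur below are the three unique taps of a normalized, symmetric five-element one-dimensional Gaussian kernel (0.0545 + 0.2442 + 0.4026 + 0.2442 + 0.0545 = 1).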
First, the script:
using UnityEngine;
using System.Collections;
public class GaussianBlur : PostEffectsBase
{
public Shader gaussianBlurShader;
private Material gaussianBlurMaterial = null;
public Material material
{
get
{
gaussianBlurMaterial = CheckShaderAndCreateMaterial(gaussianBlurShader, gaussianBlurMaterial);
return gaussianBlurMaterial;
}
}
// Blur iterations - larger number means more blur.
[Range(0, 4)]
public int iterations = 3;
// Blur spread for each iteration - larger value means more blur
[Range(0.2f, 3.0f)]
public float blurSpread = 0.6f;
[Range(1, 8)]
public int downSample = 2;
void OnRenderImage(RenderTexture src, RenderTexture dest)
{
if (material != null)
{
int rtW = src.width / downSample;
int rtH = src.height / downSample;
RenderTexture buffer0 = RenderTexture.GetTemporary(rtW, rtH, 0);
buffer0.filterMode = FilterMode.Bilinear;
Graphics.Blit(src, buffer0);
for (int i = 0; i < iterations; i++)
{
material.SetFloat("_BlurSize", 1.0f + i * blurSpread);
RenderTexture buffer1 = RenderTexture.GetTemporary(rtW, rtH, 0);
// Render the vertical pass
Graphics.Blit(buffer0, buffer1, material, 0);
RenderTexture.ReleaseTemporary(buffer0);
buffer0 = buffer1;
buffer1 = RenderTexture.GetTemporary(rtW, rtH, 0);
// Render the horizontal pass
Graphics.Blit(buffer0, buffer1, material, 1);
RenderTexture.ReleaseTemporary(buffer0);
buffer0 = buffer1;
}
Graphics.Blit(buffer0, dest);
RenderTexture.ReleaseTemporary(buffer0);
}
else
{
Graphics.Blit(src, dest);
}
}
}
The iterations parameter determines how many times the pair of blur passes is applied; each iteration also enlarges _BlurSize, so more iterations yield a stronger blur. Together with downSample, which reduces the resolution of the temporary render textures, this keeps the cost manageable.
The Shader code is as follows:
Shader "Unlit/GaussianBlur"
{
Properties{
_MainTex("Base (RGB)", 2D) = "white" {}
_BlurSize("Blur Size", Float) = 1.0
}
SubShader{
CGINCLUDE
#include "UnityCG.cginc"
sampler2D _MainTex;
half4 _MainTex_TexelSize;
float _BlurSize;
struct v2f {
float4 pos : SV_POSITION;
half2 uv[5]: TEXCOORD0;
};
v2f vertBlurVertical(appdata_img v) {
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
half2 uv = v.texcoord;
o.uv[0] = uv;
o.uv[1] = uv + float2(0.0, _MainTex_TexelSize.y * 1.0) * _BlurSize;
o.uv[2] = uv - float2(0.0, _MainTex_TexelSize.y * 1.0) * _BlurSize;
o.uv[3] = uv + float2(0.0, _MainTex_TexelSize.y * 2.0) * _BlurSize;
o.uv[4] = uv - float2(0.0, _MainTex_TexelSize.y * 2.0) * _BlurSize;
return o;
}
v2f vertBlurHorizontal(appdata_img v) {
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
half2 uv = v.texcoord;
o.uv[0] = uv;
o.uv[1] = uv + float2(_MainTex_TexelSize.x * 1.0, 0.0) * _BlurSize;
o.uv[2] = uv - float2(_MainTex_TexelSize.x * 1.0, 0.0) * _BlurSize;
o.uv[3] = uv + float2(_MainTex_TexelSize.x * 2.0, 0.0) * _BlurSize;
o.uv[4] = uv - float2(_MainTex_TexelSize.x * 2.0, 0.0) * _BlurSize;
return o;
}
fixed4 fragBlur(v2f i) : SV_Target {
float weight[3] = {0.4026, 0.2442, 0.0545};
fixed3 sum = tex2D(_MainTex, i.uv[0]).rgb * weight[0];
for (int it = 1; it < 3; it++) {
sum += tex2D(_MainTex, i.uv[it * 2 - 1]).rgb * weight[it];
sum += tex2D(_MainTex, i.uv[it * 2]).rgb * weight[it];
}
return fixed4(sum, 1.0);
}
ENDCG
ZTest Always Cull Off ZWrite Off
Pass {
NAME "GAUSSIAN_BLUR_VERTICAL"
CGPROGRAM
#pragma vertex vertBlurVertical
#pragma fragment fragBlur
ENDCG
}
Pass {
NAME "GAUSSIAN_BLUR_HORIZONTAL"
CGPROGRAM
#pragma vertex vertBlurHorizontal
#pragma fragment fragBlur
ENDCG
}
}
}
Bloom
Bloom works as follows: first, the brighter regions of the image are extracted according to a threshold and stored in a render texture; this render texture is then blurred with a Gaussian blur to simulate light scattering; finally, the result is blended with the original image to produce the final effect.
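In terms of the Shader below, the brightness-extraction pass computes, for each pixel color c with luminance L(c) and threshold t:

$$c' = c \cdot \mathrm{clamp}\big(L(c) - t,\; 0,\; 1\big)$$

Pixels whose luminance falls below the threshold become black, so they contribute nothing when the blurred bright-pass texture is added back onto the original image in the final pass.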
First, the script:
using UnityEngine;
using System.Collections;
public class Bloom : PostEffectsBase
{
public Shader bloomShader;
private Material bloomMaterial = null;
public Material material
{
get
{
bloomMaterial = CheckShaderAndCreateMaterial(bloomShader, bloomMaterial);
return bloomMaterial;
}
}
// Blur iterations - larger number means more blur.
[Range(0, 4)]
public int iterations = 3;
// Blur spread for each iteration - larger value means more blur
[Range(0.2f, 3.0f)]
public float blurSpread = 0.6f;
[Range(1, 8)]
public int downSample = 2;
[Range(0.0f, 4.0f)]
public float luminanceThreshold = 0.6f;
void OnRenderImage(RenderTexture src, RenderTexture dest)
{
if (material != null)
{
material.SetFloat("_LuminanceThreshold", luminanceThreshold);
int rtW = src.width / downSample;
int rtH = src.height / downSample;
RenderTexture buffer0 = RenderTexture.GetTemporary(rtW, rtH, 0);
buffer0.filterMode = FilterMode.Bilinear;
Graphics.Blit(src, buffer0, material, 0);
for (int i = 0; i < iterations; i++)
{
material.SetFloat("_BlurSize", 1.0f + i * blurSpread);
RenderTexture buffer1 = RenderTexture.GetTemporary(rtW, rtH, 0);
// Render the vertical pass
Graphics.Blit(buffer0, buffer1, material, 1);
RenderTexture.ReleaseTemporary(buffer0);
buffer0 = buffer1;
buffer1 = RenderTexture.GetTemporary(rtW, rtH, 0);
// Render the horizontal pass
Graphics.Blit(buffer0, buffer1, material, 2);
RenderTexture.ReleaseTemporary(buffer0);
buffer0 = buffer1;
}
material.SetTexture("_Bloom", buffer0);
Graphics.Blit(src, dest, material, 3);
RenderTexture.ReleaseTemporary(buffer0);
}
else
{
Graphics.Blit(src, dest);
}
}
}
The Shader code is as follows:
Shader "Unlit/Bloom"
{
Properties{
_MainTex("Base (RGB)", 2D) = "white" {}
_Bloom("Bloom (RGB)", 2D) = "black" {}
_LuminanceThreshold("Luminance Threshold", Float) = 0.5
_BlurSize("Blur Size", Float) = 1.0
}
SubShader{
CGINCLUDE
#include "UnityCG.cginc"
sampler2D _MainTex;
half4 _MainTex_TexelSize;
sampler2D _Bloom;
float _LuminanceThreshold;
float _BlurSize;
struct v2f {
float4 pos : SV_POSITION;
half2 uv : TEXCOORD0;
};
v2f vertExtractBright(appdata_img v) {
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
o.uv = v.texcoord;
return o;
}
fixed luminance(fixed4 color) {
return 0.2125 * color.r + 0.7154 * color.g + 0.0721 * color.b;
}
fixed4 fragExtractBright(v2f i) : SV_Target {
fixed4 c = tex2D(_MainTex, i.uv);
fixed val = clamp(luminance(c) - _LuminanceThreshold, 0.0, 1.0);
return c * val;
}
struct v2fBloom {
float4 pos : SV_POSITION;
half4 uv : TEXCOORD0;
};
v2fBloom vertBloom(appdata_img v) {
v2fBloom o;
o.pos = UnityObjectToClipPos(v.vertex);
o.uv.xy = v.texcoord;
o.uv.zw = v.texcoord;
#if UNITY_UV_STARTS_AT_TOP
if (_MainTex_TexelSize.y < 0.0)
o.uv.w = 1.0 - o.uv.w;
#endif
return o;
}
fixed4 fragBloom(v2fBloom i) : SV_Target {
return tex2D(_MainTex, i.uv.xy) + tex2D(_Bloom, i.uv.zw);
}
ENDCG
ZTest Always Cull Off ZWrite Off
Pass {
CGPROGRAM
#pragma vertex vertExtractBright
#pragma fragment fragExtractBright
ENDCG
}
UsePass "Unlit/GaussianBlur/GAUSSIAN_BLUR_VERTICAL"
UsePass "Unlit/GaussianBlur/GAUSSIAN_BLUR_HORIZONTAL"
Pass {
CGPROGRAM
#pragma vertex vertBloom
#pragma fragment fragBloom
ENDCG
}
}
FallBack Off
}
Motion Blur
Motion blur can be implemented in several ways. One approach uses an accumulation buffer to blend multiple consecutive images: when a fast-moving object produces several images, we average them to obtain the final motion-blurred image, but this method is very expensive. Another widely used approach builds a velocity buffer that stores the current motion velocity of each pixel and then uses that value to decide the direction and amount of blur.
Here we use a simulation that resembles the first approach: we keep the previous rendering result and repeatedly blend the current frame on top of it, which produces the visual impression of a motion trail.
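With the blend mode used in the Shader below (SrcAlpha OneMinusSrcAlpha, the fragment alpha being set to 1 - blurAmount), the accumulation texture becomes an exponential moving average of the screen image:

$$\mathrm{accum}_{t} = (1 - \mathrm{blurAmount}) \cdot \mathrm{frame}_{t} + \mathrm{blurAmount} \cdot \mathrm{accum}_{t-1}$$

A larger blurAmount keeps more of the older frames, so the motion trail persists longer.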
First, the script:
using UnityEngine;
using System.Collections;
public class MotionBlur : PostEffectsBase
{
public Shader motionBlurShader;
private Material motionBlurMaterial = null;
public Material material
{
get
{
motionBlurMaterial = CheckShaderAndCreateMaterial(motionBlurShader, motionBlurMaterial);
return motionBlurMaterial;
}
}
[Range(0.0f, 0.9f)]
public float blurAmount = 0.5f;
private RenderTexture accumulationTexture;
void OnDisable()
{
DestroyImmediate(accumulationTexture);
}
void OnRenderImage(RenderTexture src, RenderTexture dest)
{
if (material != null)
{
// Create the accumulation texture
if (accumulationTexture == null || accumulationTexture.width != src.width || accumulationTexture.height != src.height)
{
DestroyImmediate(accumulationTexture);
accumulationTexture = new RenderTexture(src.width, src.height, 0);
accumulationTexture.hideFlags = HideFlags.HideAndDontSave;
Graphics.Blit(src, accumulationTexture);
}
// We are accumulating motion over frames without clear/discard
// by design, so silence any performance warnings from Unity
accumulationTexture.MarkRestoreExpected();
material.SetFloat("_BlurAmount", 1.0f - blurAmount);
Graphics.Blit(src, accumulationTexture, material);
Graphics.Blit(accumulationTexture, dest);
}
else
{
Graphics.Blit(src, dest);
}
}
}
The Shader code is as follows:
Shader "Unlit/MotionBlur"
{
Properties{
_MainTex("Base (RGB)", 2D) = "white" {}
_BlurAmount("Blur Amount", Float) = 1.0
}
SubShader{
CGINCLUDE
#include "UnityCG.cginc"
sampler2D _MainTex;
fixed _BlurAmount;
struct v2f {
float4 pos : SV_POSITION;
half2 uv : TEXCOORD0;
};
v2f vert(appdata_img v) {
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
o.uv = v.texcoord;
return o;
}
fixed4 fragRGB(v2f i) : SV_Target {
return fixed4(tex2D(_MainTex, i.uv).rgb, _BlurAmount);
}
half4 fragA(v2f i) : SV_Target {
return tex2D(_MainTex, i.uv);
}
ENDCG
ZTest Always Cull Off ZWrite Off
Pass {
Blend SrcAlpha OneMinusSrcAlpha
ColorMask RGB
CGPROGRAM
#pragma vertex vert
#pragma fragment fragRGB
ENDCG
}
Pass {
Blend One Zero
ColorMask A
CGPROGRAM
#pragma vertex vert
#pragma fragment fragA
ENDCG
}
}
}