Yet another post-process effect: the god ray. A god ray is the ring of needle-like shafts of light you see around the sun when you look at it.
First, a set of results. The scene assets in this post come from 淺墨; the effects shown are produced by this post's shaders.


With the HDR and Bloom from the previous post added, the result improves a lot: link



The code in this post is based on a share from the unity聖典 community, with some small improvements by the author: link
Now for the walkthrough. There are two shaders: one generates the rays, the other blends them with the original screen image. The blend is trivial: it simply adds the two images' colors together, with a weight controlling the ray contribution.
Let's focus on the shader that generates the rays.
It is a vertex + fragment shader with four external properties:
_ScreenLightPos: the light's position on screen. This has to be computed in a C# script and passed in; more on that later.
_Density: the sampling density.
_Decay: the decay factor.
_Exposure: the exposure, used to control brightness. As with a camera, the longer the exposure, the brighter the image.
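For reference, the corresponding declarations inside the ray shader look roughly like this (a sketch inferred from the names used below; the exact types and defaults in the original may differ):
sampler2D _MainTex;            // the screen image being filtered
uniform half4 _ScreenLightPos; // light position in 0..1 screen-uv space, set from the C# script
uniform half _Density;         // sampling density
uniform half _Decay;           // per-sample decay factor
uniform half _Exposure;        // overall brightness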
First, the vertex shader:
v2f vert(v2in v)
{
    v2f o;
    o.pos = mul(UNITY_MATRIX_MVP, v.vertex);
    half2 texCoord = v.texcoord;
    // vector from the light's screen position to this uv
    half2 deltaTexCoord = texCoord - _ScreenLightPos.xy;
    // scale it down to the step between successive samples
    deltaTexCoord *= 1.0f / 8 * _Density;
    texCoord -= deltaTexCoord;
    o.uv0 = texCoord;
    texCoord -= deltaTexCoord;
    o.uv1 = texCoord;
    texCoord -= deltaTexCoord;
    o.uv2 = texCoord;
    texCoord -= deltaTexCoord;
    o.uv3 = texCoord;
    texCoord -= deltaTexCoord;
    o.uv4 = texCoord;
    texCoord -= deltaTexCoord;
    o.uv5 = texCoord;
    texCoord -= deltaTexCoord;
    o.uv6 = texCoord;
    texCoord -= deltaTexCoord;
    o.uv7 = texCoord;
    return o;
}
v.texcoord is the current point's uv.
deltaTexCoord starts out as the vector from the light to the current point; its length is the distance between them.
It is then scaled by _Density / 8: the larger _Density is, the larger deltaTexCoord becomes, and as long as _Density stays below 8, deltaTexCoord remains a fraction of the full distance to the light.
The first sample point sits one step from the pixel's own position toward the light (with a small _Density it is almost the original position).
Each subsequent sample moves gradually closer to the light.
The larger _Density is, the larger the spacing between samples.
From uv0 to uv7, the sample points get closer and closer to the light and farther and farther from the current pixel.
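Written out with the names from the code, the k-th sample point is simply
uv_k = v.texcoord - (k + 1) * deltaTexCoord,  k = 0..7
where deltaTexCoord = (v.texcoord - _ScreenLightPos.xy) * _Density / 8.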
Here is our v2f struct; look how many coordinates it stores:
struct v2f {
    float4 pos : POSITION;
    float2 uv0 : TEXCOORD0;
    float2 uv1 : TEXCOORD1;
    float2 uv2 : TEXCOORD2;
    float2 uv3 : TEXCOORD3;
    float2 uv4 : TEXCOORD4;
    float2 uv5 : TEXCOORD5;
    float2 uv6 : TEXCOORD6;
    float2 uv7 : TEXCOORD7;
};
And the input struct v2in:
struct v2in {
    float4 vertex : POSITION;
    float2 texcoord : TEXCOORD0;
};
So we end up with the coordinates of eight points along the line from the current point to the light, ready for the fragment shader to sample and blend.
This step could of course also be done in the fragment shader (see the loop sketch after the fragment shader below), but it is less efficient there: here the offsets are computed once per vertex and interpolated, instead of being recomputed for every pixel.
Now the fragment shader:
half4 frag(v2f i) : COLOR
{
    // running weight: each successive sample counts a little less
    half illuminationDecay = 1.0f;
    half4 color = tex2D(_MainTex, i.uv0) * illuminationDecay;
    illuminationDecay *= _Decay;
    color += tex2D(_MainTex, i.uv1) * illuminationDecay;
    illuminationDecay *= _Decay;
    color += tex2D(_MainTex, i.uv2) * illuminationDecay;
    illuminationDecay *= _Decay;
    color += tex2D(_MainTex, i.uv3) * illuminationDecay;
    illuminationDecay *= _Decay;
    color += tex2D(_MainTex, i.uv4) * illuminationDecay;
    illuminationDecay *= _Decay;
    color += tex2D(_MainTex, i.uv5) * illuminationDecay;
    illuminationDecay *= _Decay;
    color += tex2D(_MainTex, i.uv6) * illuminationDecay;
    illuminationDecay *= _Decay;
    color += tex2D(_MainTex, i.uv7) * illuminationDecay;
    // average the eight samples and apply the exposure
    color /= 8;
    return half4(color.xyz * _Exposure, 1);
}
illuminationDecay is the running light falloff; _Decay is the externally controllable decay factor.
_Exposure scales the overall brightness.
The weighting is chosen so that the farther a sample is from the current pixel, which also means the closer it is to the light, the more it is attenuated; with _Decay = 0.5, for example, the eight samples get weights 1, 0.5, 0.25, ..., 1/128. Why do it this way? Because we still want most of the result to come from the current pixel's own color: if the other samples were weighted too heavily, the pixel's color would become inaccurate and the image would just look badly smeared.
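Putting the two stages together, the whole ray pass boils down to the following loop (a simplified sketch for readability only, and roughly what the per-pixel alternative mentioned earlier would look like; the shader actually used in this post unrolls it across the vertex and fragment stages as shown above):
// sketch only: equivalent single loop, assuming the same uniforms as above
half4 fragGodRay(float2 uv : TEXCOORD0) : COLOR
{
    // step from this pixel toward the light, _Density/8 of the distance per sample
    half2 deltaTexCoord = (uv - _ScreenLightPos.xy) * _Density / 8;
    half illuminationDecay = 1.0f;
    half4 color = half4(0, 0, 0, 0);
    for (int s = 0; s < 8; s++)
    {
        uv -= deltaTexCoord;                              // move one step closer to the light
        color += tex2D(_MainTex, uv) * illuminationDecay; // samples nearer the light weigh less
        illuminationDecay *= _Decay;
    }
    color /= 8;
    return half4(color.xyz * _Exposure, 1);
}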
The samples are then mixed together; the underlying idea is basically that countless rays are shot out from the light source, which is one way to picture it.
That's the ray generated. Next we need to blend the rays with the original screen image. This step is fairly simple, so I'll just give the source code and let it speak for itself.
Shader "Custom/god ray 2 blend" {
Properties{
_MainTex("Base (RGB)", 2D) = "" {}
_GodRayTex ("God (RGB)", 2D) = ""{}
_Alpha("_Alpha", Float) = 0.5
}
// Shader code pasted into all further CGPROGRAM blocks
CGINCLUDE
#include "UnityCG.cginc"
struct v2in {
float4 vertex : POSITION;
float2 texcoord : TEXCOORD0;
};
struct v2f {
float4 pos : POSITION;
float2 uv : TEXCOORD0;
};
sampler2D _MainTex;
sampler2D _GodRayTex;
uniform float _Alpha;
v2f vert(v2in v)
{
v2f o;
o.pos = mul(UNITY_MATRIX_MVP, v.vertex);
o.uv = v.texcoord;
return o;
}
half4 frag(v2f i) : COLOR
{
half4 color = tex2D(_MainTex, i.uv) + tex2D(_GodRayTex, i.uv)*_Alpha;
//half4 color = tex2D(_MainTex, i.uv);
return color;
}
ENDCG
Subshader{
Tags{ "Queue" = "Transparent" }
Pass{
ZWrite Off
BindChannels
{
Bind "Vertex", vertex
Bind "texcoord", texcoord0
Bind "texcoord1", texcoord1
}
Fog{ Mode off }
CGPROGRAM
#pragma fragmentoption ARB_precision_hint_fastest
#pragma vertex vert
#pragma fragment frag
ENDCG
}
}
Fallback off
} // shader
Then comes the final, and very important, step: driving it all from a script and getting it onto the screen.
The key point here is to work out the light's position on screen.
The Camera class has a function that converts a world-space position into screen space:
Camera.WorldToScreenPoint(position)
The official documentation says:
Transforms position from world space into screen space.
Screenspace is defined in pixels. The bottom-left of the screen is (0,0); the right-top is (pixelWidth,pixelHeight). The z position is in world units from the camera.
So the bottom-left corner is the origin of screen space and the top-right corner is its maximum extent. Whenever the light falls outside that range we skip god ray rendering entirely; without this check the effect is rendered incorrectly and keeps flickering even though the light is off screen.
We pass the light's Transform into the script and then test the light's position.
Another important point is to check whether the light is in front of or behind the camera. If we only test whether it is inside the screen, the god ray will still be rendered when the camera turns around and the light ends up behind it. The fix: the z value returned by WorldToScreenPoint is the light's signed distance from the camera along the view direction, in world units, so its sign tells front from back. If z is positive the light is in front of the camera and the god ray can be rendered; if it is negative the light is behind the camera and the god ray should not be rendered.
if (lightScreenPos.z > 0 && lightScreenPos.x > 0 && lightScreenPos.x < camera.pixelWidth && lightScreenPos.y >0 && lightScreenPos.y < camera.pixelHeight)
Rendering just once like this does work, but the result is poor: the god ray turns into "god points". The reason follows from the analysis above: the shader only takes eight sample points between each pixel and the light, so the rendered result shows lots of discrete, clearly separated points, because only those eight samples were blended. The fix is to render several passes; with enough points, the points turn into lines.
So, to improve the result we render multiple times.
Create two RenderTextures, tempRtA and tempRtB, to pass results back and forth:
Graphics.Blit(sourceTexture, tempRtA, material);
The first filtered result is stored in tempRtA
and fed into the next pass as _MainTex:
Graphics.Blit(tempRtA, tempRtB, material);
tempRtB is then fed into a third pass, which writes back into tempRtA, and so on...
Graphics.Blit(tempRtB, tempRtA, material);
Graphics.Blit(tempRtA, tempRtB, material);
Graphics.Blit(tempRtB, tempRtA, material);
Finally comes the blend: pass the ray texture to the blend shader as _GodRayTex to get the final result.
materialBlend.SetTexture("_GodRayTex", tempRtA);
Graphics.Blit(sourceTexture, destTexture, materialBlend, 0);
The full script:
using UnityEngine;
using System.Collections;

[ExecuteInEditMode]
public class godRay2 : MonoBehaviour
{
    public Transform lightpos;
    public Shader curShader;
    public Shader curShaderblend;
    private Material curMaterial;
    private Material curMateriaBlend;
    public Vector4 ScreenLightPos = new Vector4(0, 0, 0, 0);
    public float Density = 0.01f;
    public float Decay = 0.5f;
    public float Exposure = 0.5f;
    public float Alpha = 1;
    public RenderTexture tempRtA = null;
    public RenderTexture tempRtB = null;
    private Vector3 lightScreenPos;

    #region Properties
    Material material
    {
        get
        {
            if (curMaterial == null)
            {
                curMaterial = new Material(curShader);
                curMaterial.hideFlags = HideFlags.HideAndDontSave;
            }
            return curMaterial;
        }
    }
    Material materialBlend
    {
        get
        {
            if (curMateriaBlend == null)
            {
                curMateriaBlend = new Material(curShaderblend);
                curMateriaBlend.hideFlags = HideFlags.HideAndDontSave;
            }
            return curMateriaBlend;
        }
    }
    #endregion
    void Start()
    {
        if (!SystemInfo.supportsImageEffects)
        {
            enabled = false;
            return;
        }
        if (!curShader || !curShader.isSupported)
        {
            enabled = false;
        }
    }
    void OnRenderImage(RenderTexture sourceTexture, RenderTexture destTexture)
    {
        if (curShader != null)
        {
            lightScreenPos = Camera.main.WorldToScreenPoint(lightpos.position);
            if (lightScreenPos.z > 0 && lightScreenPos.x > 0 && lightScreenPos.x < camera.pixelWidth && lightScreenPos.y > 0 && lightScreenPos.y < camera.pixelHeight)
            {
                // property names must match the ones declared in the shaders
                material.SetVector("_ScreenLightPos", new Vector4(lightScreenPos.x / camera.pixelWidth, lightScreenPos.y / camera.pixelHeight, 0, 0));
                // material.SetVector("_ScreenLightPos", ScreenLightPos);
                material.SetFloat("_Density", Density);
                material.SetFloat("_Decay", Decay);
                material.SetFloat("_Exposure", Exposure);
                materialBlend.SetFloat("_Alpha", Alpha);
                CreateBuffers();
                Graphics.Blit(sourceTexture, tempRtA, material);
                Graphics.Blit(tempRtA, tempRtB, material);
                Graphics.Blit(tempRtB, tempRtA, material);
                Graphics.Blit(tempRtA, tempRtB, material);
                Graphics.Blit(tempRtB, tempRtA, material);
                materialBlend.SetTexture("_GodRayTex", tempRtA);
                Graphics.Blit(sourceTexture, destTexture, materialBlend, 0);
                // Graphics.Blit(tempRtA, destTexture, material, 0);
            }
            else
            {
                Graphics.Blit(sourceTexture, destTexture);
            }
        }
        else
        {
            Graphics.Blit(sourceTexture, destTexture);
        }
    }
    void CreateBuffers()
    {
        if (!tempRtA)
        {
            tempRtA = new RenderTexture(Screen.width / 4, Screen.height / 4, 0);
            tempRtA.hideFlags = HideFlags.DontSave;
        }
        if (!tempRtB)
        {
            tempRtB = new RenderTexture(Screen.width / 4, Screen.height / 4, 0);
            tempRtB.hideFlags = HideFlags.DontSave;
        }
    }

    void OnDisable()
    {
        if (curMaterial)
        {
            DestroyImmediate(curMaterial);
        }
    }
}
This shader has a few drawbacks. Don't use it in fairly dark scenes: the area around the light isn't bright, so the effect looks poor. The ray quality is also not high; as the examples show, the rays are not very crisp. Compare it with the Sun Shafts effect in Unity's built-in Image Effects.
Finally, two more sets of results.


Light shining through the forest




------ by wolf96
