主要参考来自 https://github.com/SebLague/Ray-Marching
优化渲染物体
之前的 raymarching 使用的是固定的步长,而物体的边界是连续的,固定步长无法精确落在物体表面上,所以最后渲染的结果会模糊
可以优化下,在步进(marching)的时候,如果计算出当前距离内没有物体,那么直接将射线的原点前进当前的步长,跳过很多无效的计算,可以快速遍历场景
还原物体颜色
现在的最终渲染的场景还是黑白,可以将物体的颜色传入 shader,就得到了还原物体颜色的场景
最后按照链接中的方法加上光照
其中通过 estimateNormal 方法估算物体表面的法线,原理是获得光线和物体表面的交点,然后沿着轴的相反方向各延长一小段距离,重新计算场景深度之后相减,相当于计算轴方向的梯度,近似的计算出法线方向
使用 lambert 光照模型计算出的光照场景
添加光照之后的全部代码
using UnityEngine;
[ExecuteInEditMode]
public class SDF : MonoBehaviour
{
    Camera currentCamera;
    [SerializeField]
    Material effectMaterial;
    // Scene proxies whose transforms/colors are mirrored into the SDF shader.
    public GameObject cube;
    public GameObject sphere;
    public GameObject capsule;
    // Maximum raymarching iterations pushed to the shader as _StepCount.
    public int stepCount;
    // Pushed as _StepSize (currently unused by the sphere-tracing shader).
    public float stepSize;

    private void Awake()
    {
        currentCamera = GetComponent<Camera>();
        // The fragment shader reconstructs world position from the depth texture.
        currentCamera.depthTextureMode = DepthTextureMode.Depth;
    }

    private void OnValidate()
    {
        // Editor-only callback: gives immediate feedback when tweaking values
        // in the inspector. The same globals are re-pushed every frame in
        // OnRenderImage because OnValidate never runs in a player build.
        Shader.SetGlobalFloat("_StepCount", stepCount);
        Shader.SetGlobalFloat("_StepSize", stepSize);
    }

    void OnRenderImage(RenderTexture src, RenderTexture dest)
    {
        // Guard against missing references: [ExecuteInEditMode] calls this
        // constantly in the editor, including before fields are assigned.
        if (effectMaterial == null || cube == null || sphere == null || capsule == null)
        {
            Graphics.Blit(src, dest);
            return;
        }
        // Awake may not have run after a domain reload in edit mode.
        if (currentCamera == null)
        {
            currentCamera = GetComponent<Camera>();
            currentCamera.depthTextureMode = DepthTextureMode.Depth;
        }

        // Fix: previously these were only set in OnValidate (editor-only),
        // so builds marched zero steps and rendered black.
        Shader.SetGlobalFloat("_StepCount", stepCount);
        Shader.SetGlobalFloat("_StepSize", stepSize);

        // Inverse view-projection lets the shader turn (uv, depth) back into
        // a world-space position.
        Matrix4x4 viewMat = currentCamera.worldToCameraMatrix;
        Matrix4x4 projMat = GL.GetGPUProjectionMatrix(currentCamera.projectionMatrix, false);
        Matrix4x4 viewProjMat = (projMat * viewMat);
        Shader.SetGlobalMatrix("_InverseVPMatrix", viewProjMat.inverse);

        Shader.SetGlobalVector("_CubePosInfo", cube.transform.position);
        Shader.SetGlobalVector("_CubeScaleInfo", cube.transform.localScale);
        Shader.SetGlobalColor("_CubeColorInfo", cube.GetComponent<MeshRenderer>().sharedMaterial.color);

        // xyz = position, w = diameter (uniform scale assumed: only x is read).
        Vector3 t = sphere.transform.position;
        Vector4 sphereInfo = new Vector4(t.x, t.y, t.z, sphere.transform.localScale.x);
        Shader.SetGlobalVector("_SphereInfo", sphereInfo);
        Shader.SetGlobalColor("_SphereColorInfo", sphere.GetComponent<MeshRenderer>().sharedMaterial.color);

        Vector3 tc = capsule.transform.position;
        Vector4 capsuleInfo = new Vector4(tc.x, tc.y, tc.z, capsule.transform.localScale.x);
        Shader.SetGlobalVector("_CapsuleInfo", capsuleInfo);
        Shader.SetGlobalColor("_CapsuleColorInfo", capsule.GetComponent<MeshRenderer>().sharedMaterial.color);

        Graphics.Blit(src, dest, effectMaterial);
    }
}
Shader "Unlit/SDF"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
// Vertex input: full-screen quad fed by Graphics.Blit.
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
// Vertex-to-fragment interpolants.
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
// Source image from Graphics.Blit.
sampler2D _MainTex;
float4 _MainTex_ST;
// Camera depth texture (enabled from C# via depthTextureMode).
sampler2D _CameraDepthTexture;
// Inverse view-projection matrix; reconstructs world position from (uv, depth).
float4x4 _InverseVPMatrix;
// Maximum raymarching iterations (set as a global from the C# side).
float _StepCount;
// NOTE(review): set from C# but never read here — sphere tracing steps by the
// SDF value rather than a fixed step size.
float _StepSize;
// xyz = world position, w = diameter.
float4 _SphereInfo;
float4 _SphereColorInfo;
// Cube world position / full scale / color.
float4 _CubePosInfo;
float4 _CubeScaleInfo;
float4 _CubeColorInfo;
// xyz = world position, w = diameter.
float4 _CapsuleInfo;
float4 _CapsuleColorInfo;
// Hit threshold and normal-estimation offset for the raymarcher.
#define EPSILON 0.001
// Standard image-effect vertex shader: transform the blit quad to clip space
// and forward the (tiling/offset-adjusted) texture coordinates.
v2f vert (appdata v)
{
v2f output;
output.uv = TRANSFORM_TEX(v.uv, _MainTex);
output.vertex = UnityObjectToClipPos(v.vertex);
return output;
}
// Signed distance from `eye` to a sphere of `radius` centered at `center`:
// negative inside, zero on the surface, positive outside.
float sdSphere(float3 eye, float3 center, float radius)
{
float3 offset = eye - center;
return length(offset) - radius;
}
// Signed distance to an axis-aligned box centered at `center` with
// half-extents `size`. Outside: distance to the nearest face/edge/corner;
// inside: negative of the smallest per-axis penetration.
float sdCube(float3 eye, float3 center, float3 size)
{
float3 q = abs(eye - center) - size;
float outsideDist = length(max(q, 0));
float insideDist = min(max(q.x, max(q.y, q.z)), 0);
return outsideDist + insideDist;
}
// Signed distance to a capsule of `radius` whose segment endpoints `a` and
// `b` are given relative to `center`: distance from `eye` to the closest
// point on segment [a, b], minus the radius.
float sdCapsule(float3 eye, float3 center, float3 a, float3 b, float radius)
{
float3 p = eye - center;
float3 ap = p - a;
float3 ab = b - a;
float t = clamp(dot(ap, ab) / dot(ab, ab), 0.0, 1.0);
float3 closest = a + ab * t;
return length(p - closest) - radius;
}
// Signed distance to a capped cylinder centered at `center`, aligned with
// the y axis, with half-height `h` and radius `r`.
// Fix: the original subtracted float2(h, r), pairing the half-height with the
// radial distance length(p.xz) and the radius with |p.y|; the components must
// be (r, h) to match iq's capped-cylinder SDF.
float sdCylinder(float3 eye, float3 center, float h, float r)
{
float3 p = eye - center;
float2 d = abs(float2(length(p.xz), p.y)) - float2(r, h);
return min(max(d.x, d.y), 0.0) + length(max(d, 0.0));
}
// Combined scene SDF. Returns rgb = color of the nearest primitive and
// w = signed distance to it. The uniforms' .w channels carry diameters,
// hence the * 0.5 to get radii/half-extents.
float4 sdfScene(float3 eye)
{
float ds = sdSphere(eye, _SphereInfo.xyz, _SphereInfo.w * 0.5);
float dc = sdCube(eye, _CubePosInfo.xyz, _CubeScaleInfo.xyz * 0.5);
float3 cc = _CapsuleInfo.xyz;
// Fix: sdCapsule already subtracts `center`, so the segment endpoints must
// be RELATIVE to it. The original passed world-space endpoints (cc ± 0.5y)
// as well, subtracting the capsule position twice and placing the capsule
// at 2x its actual position whenever it was not at the origin.
float dcapsule = sdCapsule(eye, cc, float3(0, 0.5, 0), float3(0, -0.5, 0), _CapsuleInfo.w * 0.5);
// Keep the color of whichever primitive is strictly nearest.
float3 color = _SphereColorInfo.xyz;
float d = ds;
if (d > dc) {
color = _CubeColorInfo.xyz;
d = dc;
}
if (d > dcapsule) {
color = _CapsuleColorInfo.xyz;
d = dcapsule;
}
return float4(color, d);
}
// Approximate the surface normal at `p` as the numeric gradient of the scene
// SDF: central differences of EPSILON along each axis, then normalized.
float3 estimateNormal(float3 p) {
float2 offset = float2(EPSILON, 0);
float3 gradient = float3(
sdfScene(p + offset.xyy).w - sdfScene(p - offset.xyy).w,
sdfScene(p + offset.yxy).w - sdfScene(p - offset.yxy).w,
sdfScene(p + offset.yyx).w - sdfScene(p - offset.yyx).w);
return normalize(gradient);
}
// Sphere-trace the scene from `position` along (normalized) `direction`.
// Each iteration advances by the current SDF value — the largest step that
// provably cannot overshoot a surface. On a hit (distance <= EPSILON) the
// surface is shaded with a Lambert diffuse term using the primitive's color;
// rays that exhaust _StepCount iterations return black.
float3 raymarching(float3 position, float3 direction)
{
float3 samplePoint = position;
float3 lightDir = _WorldSpaceLightPos0.xyz;
for (int iter = 0; iter < _StepCount; iter++)
{
float4 scene = sdfScene(samplePoint);
float dist = scene.w;
if (dist > EPSILON)
{
// Not close enough yet: take a safe step forward.
samplePoint += direction * dist;
continue;
}
// Hit: estimate the normal just off the surface to avoid sampling the
// gradient exactly at (or under) the zero isosurface.
float3 surfacePoint = samplePoint + direction * dist;
float3 normal = estimateNormal(surfacePoint - direction * EPSILON);
float diffuse = saturate(dot(normal, lightDir));
return diffuse * scene.xyz;
}
return float3(0, 0, 0);
}
// Reconstruct the world-space position behind each pixel from the camera
// depth texture, then raymarch the SDF scene from the camera through it.
float4 frag (v2f i) : SV_Target
{
float depth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, i.uv);
// uv in [0,1] -> NDC xy in [-1,1]; the sampled depth is already NDC z.
float4 clipPos = float4(i.uv * 2 - 1, depth, 1);
float4 worldPos = mul(_InverseVPMatrix, clipPos);
worldPos /= worldPos.w; // perspective divide
float3 rayOrigin = _WorldSpaceCameraPos;
float3 rayDir = normalize(worldPos.xyz - rayOrigin);
float3 color = raymarching(rayOrigin, rayDir);
return float4(color, 1);
}
ENDCG
}
}
}