Why Use Ray Marching

Honestly, consider not using Ray Marching at all (unless it's SDF Ray Marching): it takes a lot of samples and is hard to debug, although it is quick to write (adding a binary-search refinement makes it complicated again). As mentioned in the previous post, Ray Marching volumetric fog can only run as a post-process, so it shows some artifacts on transparent objects that don't write depth.

For the volumetric fog background, just refer to the previous post; this one is only a supplement covering an alternative method.

Code and Notes

To distinguish this from the 3D-texture volumetric fog, every name in the code here is prefixed with RM (Ray Marching).

RMVolumetricFog.cs

This script's parameters are almost identical to the 3D-texture version's; the only addition is step, which controls the number of Ray Marching steps.

using System;

namespace UnityEngine.Rendering.Universal
{
    [Serializable, VolumeComponentMenu("Post-processing/RM Volumetric Fog")]
    public class RMVolumetricFog : VolumeComponent, IPostProcessComponent
    {
        [Tooltip("是否启用体积雾")]
        public BoolParameter enabled = new BoolParameter(false);
        [Tooltip("整体控制体积雾强度")]
        public ClampedFloatParameter intensity = new ClampedFloatParameter(1.0f, 0f, 1.0f);
        [Tooltip("体积雾最大的透明程度(用于和天空混合)")]
        public ClampedFloatParameter maxTransmittance = new ClampedFloatParameter(1.0f, 0f, 1.0f);

        [Tooltip("体积雾的颜色倾向,目前强度为0.03")]
        public ColorParameter fogTint = new ColorParameter(Color.white);
        [Tooltip("体积雾距离相机最近的距离")]
        public ClampedFloatParameter fogNear = new ClampedFloatParameter(0.1f, 0.01f, 10f);
        [Tooltip("体积雾距离相机最远的距离")]
        public ClampedFloatParameter fogFar = new ClampedFloatParameter(100f, 1.0f, 1000.0f);

        [Tooltip("体积雾的密度,越密效果越明显")]
        public ClampedFloatParameter density = new ClampedFloatParameter(3.0f, 0f, 10.0f);
        [Tooltip("体积雾受光的各向异性程度")]
        public ClampedFloatParameter phase = new ClampedFloatParameter(0.0f, -0.9f, 0.9f);
        [Tooltip("Ray Marching的次数")]
        public ClampedFloatParameter step = new ClampedFloatParameter(20.0f, 10.0f, 200.0f);

        public bool IsActive() => (enabled.value && (density.value > 0.0f) && (intensity.value > 0.0f));

        public bool IsTileCompatible() => false;
    }
}
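
Before moving on, a minimal usage sketch (not part of the original setup): since this is a regular Volume override, you can also drive it from script through a VolumeProfile that already contains the component. The class name RMVolumetricFogToggle and the chosen values are illustrative only.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

// Minimal sketch, assuming 'profile' already contains the RMVolumetricFog override.
public class RMVolumetricFogToggle : MonoBehaviour
{
    public VolumeProfile profile;

    void OnEnable()
    {
        if (profile != null && profile.TryGet<RMVolumetricFog>(out var fog))
        {
            fog.enabled.Override(true);
            fog.density.Override(5.0f);  // stays within the 0-10 clamp
            fog.step.Override(64.0f);    // more steps, less noise
        }
    }
}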

RMVolumetricFogRendererFeature.cs

Identical to the 3D-texture version except for the naming.

namespace UnityEngine.Rendering.Universal
{
    public class RMVolumetricFogRendererFeature : ScriptableRendererFeature
    {
        [System.Serializable]
        public class RMVolumetricFogSetting
        {
            public RenderPassEvent renderPassEvent = RenderPassEvent.BeforeRenderingPostProcessing;
            public ComputeShader volumetricFogComputeShader;
        }

        public RMVolumetricFogRenderPass volumetricFogRenderPass;
        public RMVolumetricFogSetting settings = new RMVolumetricFogSetting();

        public override void Create()
        {
            volumetricFogRenderPass = new RMVolumetricFogRenderPass(settings);
        }

        public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
        {
            RMVolumetricFog volumetricFog = VolumeManager.instance.stack.GetComponent<RMVolumetricFog>();
            if(volumetricFog && volumetricFog.IsActive())
            {
                if(renderingData.cameraData.cameraType == CameraType.Game)
                {
                    volumetricFogRenderPass.Setup(volumetricFog);
                    renderer.EnqueuePass(volumetricFogRenderPass);
                }
            }
        }
    }
}
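
As with the 3D-texture version, this is hooked up by adding RMVolumetricFogRendererFeature to the active URP Renderer asset and assigning the compute shader below to volumetricFogComputeShader in its settings.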

RMVolumetricFogRenderPass.cs

Compared with the 3D-texture volumetric fog, the Ray Marching version only has to compute the fog and composite it onto the camera's final Render Texture, so it doesn't need a pile of temporary Render Textures. The trade-off is that there is nowhere to do spatial or temporal blending inside the pass; you can only hope a later TAA pass cleans up the noise, or simply raise the Ray Marching step count.

namespace UnityEngine.Rendering.Universal
{
    public class RMVolumetricFogRenderPass : ScriptableRenderPass
    {
        private const string profilerTag = "Ray Marched Volumetric Fog Rendering Pass";
        private RMVolumetricFogRendererFeature.RMVolumetricFogSetting settings;
        private ProfilingSampler profilingSampler;

        RenderTargetIdentifier cameraColorIden;
        RenderTargetHandle cameraColorHandle;
        RenderTargetIdentifier cameraDepthIden;
        RenderTargetHandle cameraDepthHandle;

        static string volumetricFogName = "_VolumetricFogBuffer";
        static int volumetricFogID = Shader.PropertyToID(volumetricFogName);
        RenderTargetIdentifier volumetricFogIden;
        RenderTargetHandle volumetricFogHandle;

        private RMVolumetricFog volumetricFog;
        private ComputeShader volumetricFogComputeShader;
        private Vector2 textureSize;

        public RMVolumetricFogRenderPass(RMVolumetricFogRendererFeature.RMVolumetricFogSetting settings)
        {
            this.settings = settings;

            profilingSampler = new ProfilingSampler(profilerTag);
            renderPassEvent = settings.renderPassEvent;
            volumetricFogComputeShader = settings.volumetricFogComputeShader;

            cameraColorHandle.Init("_CameraColorTexture");
            cameraColorIden = cameraColorHandle.Identifier();
            cameraDepthHandle.Init("_CameraDepthAttachment");
            cameraDepthIden = cameraDepthHandle.Identifier();

            volumetricFogHandle.Init(volumetricFogName);
            volumetricFogIden = volumetricFogHandle.Identifier();
        }

        public void Setup(RMVolumetricFog volumetricFog)
        {
            this.volumetricFog = volumetricFog;
        }

        public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
        {
            RenderTextureDescriptor desc = cameraTextureDescriptor;
            desc.enableRandomWrite = true;
            desc.graphicsFormat = Experimental.Rendering.GraphicsFormat.R16G16B16A16_SFloat;
            textureSize = new Vector2(desc.width, desc.height);

            cmd.GetTemporaryRT(volumetricFogID, desc);
        }

        private void DoVolumetricFog(CommandBuffer cmd, Camera camera, RenderTargetIdentifier colorid, RenderTargetIdentifier depthid, RenderTargetIdentifier volid, ComputeShader computeShader)
        {
            int volumetricFogKernel = computeShader.FindKernel("VolumetricFogMain");
            computeShader.GetKernelThreadGroupSizes(volumetricFogKernel, out uint x, out uint y, out uint z);

            cmd.SetComputeTextureParam(computeShader, volumetricFogKernel, "_ColorTexture", colorid);
            cmd.SetComputeTextureParam(computeShader, volumetricFogKernel, "_DepthTexture", depthid);
            cmd.SetComputeTextureParam(computeShader, volumetricFogKernel, "_RW_VolTexture", volid);
            cmd.SetComputeFloatParam(computeShader, "_StepCount", volumetricFog.step.value);
            cmd.SetComputeVectorParam(computeShader, "_TextureSize", new Vector4(textureSize.x, textureSize.y, 1.0f / textureSize.x, 1.0f / textureSize.y));
            // The tint strength is fixed at 0.03 (see the fogTint tooltip); pass it through
            // the alpha channel directly instead of mutating the blended stack component.
            Color fogTint = volumetricFog.fogTint.value;
            cmd.SetComputeVectorParam(computeShader, "_FogTint", new Vector4(fogTint.r, fogTint.g, fogTint.b, 0.03f));
            cmd.SetComputeVectorParam(computeShader, "_NearFar", 
                new Vector4(camera.nearClipPlane,
                            camera.farClipPlane,
                            volumetricFog.fogNear.value,
                            volumetricFog.fogFar.value));
            cmd.SetComputeVectorParam(computeShader, "_VolumetricFogParams",
                new Vector4(volumetricFog.phase.value,
                            volumetricFog.density.value,
                            volumetricFog.intensity.value,
                            volumetricFog.maxTransmittance.value));
            cmd.DispatchCompute(computeShader, volumetricFogKernel,
                Mathf.CeilToInt(textureSize.x / x),
                Mathf.CeilToInt(textureSize.y / y),
                1);
        }

        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            CommandBuffer cmd = CommandBufferPool.Get(profilerTag);
            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();

            using (new ProfilingScope(cmd, profilingSampler))
            {
                DoVolumetricFog(cmd, renderingData.cameraData.camera, cameraColorIden, cameraDepthIden, volumetricFogIden, volumetricFogComputeShader);
                cmd.Blit(volumetricFogIden, cameraColorIden);
            }

            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();
            CommandBufferPool.Release(cmd);
        }

        public override void FrameCleanup(CommandBuffer cmd)
        {
            cmd.ReleaseTemporaryRT(volumetricFogID);
        }
    }
}

RMVolumetricFogComputeShader.compute

A single kernel is enough to compute the fog. Comparing with the earlier 3D-texture volumetric fog, it's easy to see that Ray Marching does in one kernel what took the 3D-texture version three. The overall flow, and even the individual functions, are exactly the same.

#pragma kernel VolumetricFogMain

#define _MAIN_LIGHT_SHADOWS
#define _MAIN_LIGHT_SHADOWS_CASCADE
#define _SHADOWS_SOFT

#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"

Texture2D<float4> _ColorTexture;
Texture2D<float> _DepthTexture;
RWTexture2D<float4> _RW_VolTexture;

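// Sub-pixel jitter for TAA. The render pass above never sets this, so it defaults to zero.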
float4 _TAAOffsets;

float4 _TextureSize;
float _StepCount;
float4 _NearFar;
// x: cam near, y: cam far, z: fog near, w: fog far
float4 _FogTint;
float4 _VolumetricFogParams;

#define _Phase _VolumetricFogParams.x
#define _Density _VolumetricFogParams.y
#define _Intensity _VolumetricFogParams.z
#define _MaxTransmittance _VolumetricFogParams.w

float3 NDCToWorld(float3 ndc)
{
    ndc.xy = 2.0f * ndc.xy - 1.0f;
    ndc.y = -ndc.y;
    float4 positionWS = mul(UNITY_MATRIX_I_VP, float4(ndc, 1.0f));
    return positionWS.xyz / positionWS.w;
}

float Linear01DepthToRawDepth(float z, float4 zBufferParams)
{
    return (rcp(z) - zBufferParams.y) / zBufferParams.x;
}

float LinearEyeToRawDepth(float depth, float4 zBufferParams)
{
    return (1.0f / depth - zBufferParams.w) / zBufferParams.z;
}

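// Distributes samples exponentially between the fog near/far planes:
// depth(ratio) = fogNear * (fogFar / fogNear)^ratio, so samples cluster near the camera.
// camNearFar is unused here; it only mirrors the debug script's signature.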
float GetDepth(float2 camNearFar, float2 vfNearFar, float ratio)
{
    float valLeft = log(vfNearFar.x);
    float valRight = log(vfNearFar.y);
    float val = lerp(valLeft, valRight, ratio);
    float depthVal = exp(val);
    return depthVal;
}

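// Henyey-Greenstein phase function; uses 0.25 instead of the normalized 1/(4 * PI).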
float HGPhaseFunction(float g, float cosTheta)
{
    float g2 = g * g;
    float denominator = 1.0f + g2 - 2 * g * cosTheta;
    return 0.25 * (1.0f - g2) * rsqrt(denominator * denominator * denominator);
}

float3 GetFogColor(float3 color, float3 lightDir, float3 viewDir, float g)
{
    float cosVal = dot(-lightDir, viewDir);
    return color * HGPhaseFunction(g, cosVal);
}

float Hash13(float3 p)
{
    p = frac(p * 0.1031);
    p += dot(p, p.zyx + 31.32);
    return frac((p.x + p.y) * p.z);
}

[numthreads(8,8,1)]
void VolumetricFogMain(uint3 id : SV_DispatchThreadID)
{
    float4 colorTex = _ColorTexture.Load(int3(id.xy, 0));
    float depthTex = _DepthTexture.Load(int3(id.xy, 0));

    float2 texcoord = (id.xy + 0.5f) * _TextureSize.zw;
    texcoord = texcoord + 0.5f * _TAAOffsets.xy;
    float3 nearPlaneNDC = float3(texcoord, 1.0f);
    float3 nearPlaneWS = NDCToWorld(nearPlaneNDC);

    float3 targetNDC = float3(texcoord, depthTex);
    float3 targetWS = NDCToWorld(targetNDC);
    float4 targetShadowCoord = TransformWorldToShadowCoord(targetWS);
    Light targetLight = GetMainLight(targetShadowCoord);

    float3 toTarget = targetWS - nearPlaneWS;
    float3 rayDir = normalize(nearPlaneWS - _WorldSpaceCameraPos);
    float3 viewDir = -rayDir;

    float totalStep = _StepCount;
    float lastStepDepthVal = _NearFar.z;

    float jitter = Hash13(float3(texcoord, _Time.y));
    float3 accumScatter = float3(0.0f, 0.0f, 0.0f);
    float accumTrans = 1.0f;
    for (int s = 0; s < totalStep; s++)
    {
        jitter = Hash13(float3(texcoord, jitter));
        float ratio = (s + jitter) / totalStep;
        float depthVal = GetDepth(_NearFar.xy, _NearFar.zw, ratio);
        float rawDepth = LinearEyeToRawDepth(depthVal, _ZBufferParams);
        float stepSize = depthVal - lastStepDepthVal;
        lastStepDepthVal = depthVal;

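        // Reversed-Z: a smaller raw depth means farther away, so stop once the
        // sample goes past the scene depth.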
        if (rawDepth < depthTex) break;

        float3 tempNDC = float3(texcoord, rawDepth);
        float3 tempPosWS = NDCToWorld(tempNDC);
        float4 shadowCoord = TransformWorldToShadowCoord(tempPosWS);
        Light mainLight = GetMainLight(shadowCoord);

        float3 lightColor = mainLight.color * mainLight.shadowAttenuation;
        float3 lightDir = mainLight.direction;

        float3 fogColor = GetFogColor(lightColor, lightDir, viewDir, _Phase);
        fogColor += _FogTint.rgb * _FogTint.a;
        float density = _Density;

        float transmittance = exp(-density * stepSize * 0.01f);
        float3 scatter = fogColor * (1.0f - transmittance);

        accumScatter += scatter * accumTrans;
        accumTrans *= transmittance;
    }

    accumTrans = max(1.0f - _MaxTransmittance, accumTrans);

    float3 finalColor = colorTex.rgb * accumTrans + accumScatter;
    finalColor = lerp(colorTex.rgb, finalColor, _Intensity);

    _RW_VolTexture[id.xy] = float4(finalColor, 1.0f);
}
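
A note on _TAAOffsets: the kernel reads it to jitter the sample UVs, but the render pass above never sets it, so it stays at zero. If your pipeline exposes its per-frame TAA jitter, you could forward it inside DoVolumetricFog; a minimal sketch, where myTAAJitterPixels is a hypothetical source of the current jitter in pixels:

// Hypothetical addition to DoVolumetricFog: feed the TAA jitter to the kernel.
// 'myTAAJitterPixels' stands in for wherever your TAA pass keeps its offsets.
Vector2 jitterUV = new Vector2(myTAAJitterPixels.x / textureSize.x,
                               myTAAJitterPixels.y / textureSize.y);
cmd.SetComputeVectorParam(computeShader, "_TAAOffsets",
    new Vector4(jitterUV.x, jitterUV.y, 0.0f, 0.0f));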

RayMarchingDebug.cs

This script visualizes the Ray Marching sample positions, which makes it easier to see how GetDepth shapes the step sizes: the distribution is exponential (depth(r) = fogNear * (fogFar / fogNear)^r), so samples are dense near the camera and grow sparse with distance, which the gizmo spheres make obvious. Attach it to any GameObject, enable drawDebug, and use xCoord/yCoord to pick a screen position.

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
using Unity.Mathematics;

[ExecuteInEditMode]
public class RayMarchingDebug : MonoBehaviour
{
    public Camera cam;
    public RMVolumetricFog volumetricFog;
    public bool drawDebug = false;

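    // Note: logBase has no real effect; the log()/exp() pair in GetDepth cancels it out,
    // so the result is always fogNear * (fogFar / fogNear)^ratio regardless of the base.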
    [Range(1.0f, 5.0f)]
    public float logBase = 2.0f;

    [Range(-1.0f, 1.0f)]
    public float xCoord;
    [Range(-1.0f, 1.0f)]
    public float yCoord;

    void OnEnable()
    {
        cam = Camera.main;
        volumetricFog = VolumeManager.instance.stack.GetComponent<RMVolumetricFog>();
    }

    float log(float val)
    {
        return math.log(val) / math.log(logBase);
    }

    float exp(float val)
    {
        return math.exp(val * math.log(logBase)); 
    }

    float GetDepth(Camera cam, RMVolumetricFog vf, float ratio)
    {
        float2 camNearFar = new float2(cam.nearClipPlane, cam.farClipPlane);
        float2 vfNearFar = new float2(vf.fogNear.value, vf.fogFar.value);
        float baseVal = log(camNearFar.y / camNearFar.x);
        float valLeft = log(vfNearFar.x / camNearFar.x);
        float valRight = log(vfNearFar.y / camNearFar.x);
        float val = math.lerp(valLeft, valRight, ratio);
        float depthVal = camNearFar.x * exp(val);
        return depthVal;
    }

    float LinearEyeToRawDepth(float depth, float4 zBufferParams)
    {
        return (1.0f / depth - zBufferParams.w) / zBufferParams.z;
    }

    private void OnDrawGizmos()
    {
        if (!drawDebug) return;
        if (!cam || !volumetricFog) return;
        Color originalColor = Gizmos.color;
        Gizmos.color = Color.red;

        float2 camNearFar = new float2(cam.nearClipPlane, cam.farClipPlane);
        // Reversed-Z _ZBufferParams: x = far/near - 1, y = 1, z = x/far, w = 1/far
        float4 zBufferParams = new float4(
            camNearFar.y / camNearFar.x - 1.0f,
            1.0f,
            (camNearFar.y / camNearFar.x - 1.0f) / camNearFar.y,
            1.0f / camNearFar.y
            );

        float4x4 projMat = GL.GetGPUProjectionMatrix(cam.projectionMatrix, false);
        float4x4 invProj = math.inverse(projMat);
        // Use the actual view matrix (the camera looks down -Z in view space);
        // transform.worldToLocalMatrix looks down +Z and would place points behind the camera.
        float4x4 viewMat = cam.worldToCameraMatrix;
        float4x4 invView = math.inverse(viewMat);

        float ratio = 0.0f;
        int slice = (int)volumetricFog.step.value;
        for (int i = 0; i < slice; i++)
        {
            ratio = (i + 0.5f) / slice;
            float depthVal = GetDepth(cam, volumetricFog, ratio);
            float rawDepth = LinearEyeToRawDepth(depthVal, zBufferParams);
            float4 ndc = new float4(xCoord, -yCoord, rawDepth, 1.0f);
            float4 positionVS = math.mul(invProj, ndc);
            float4 positionWS = math.mul(invView, positionVS);
            positionWS /= positionWS.w;

            Gizmos.DrawSphere(positionWS.xyz, 0.1f);
        }
        Gizmos.color = originalColor;
    }
}

Postscript

Huh, did I really just pad out a new blog post that easily?