Unity Shader Basics (4): Reconstructing World Position from the Depth Texture

Post-processing often needs the world-space position of what is on screen, but an Image Effect by itself contains no such information: screen-space post-processing actually renders a quad that exactly fills the screen (its four corners map to the four corners of the near clip plane) with a specific material. This article covers several ways to reconstruct the world position in an Image Effect shader. The topic is also described in 《Shader入門精要》; here I focus more on the points that confused me.

Two other articles related to this one:
Unity Shader Basics (3): Acquiring the Depth Texture
Unity Shader Basics (2): Image Effects

1. Inverse View-Projection Matrix

Although an Image Effect has no real vertex information for the scene, each fragment does carry a texture UV coordinate and can sample depth. From the UV we can derive the NDC x and y coordinates:
\[x_{ndc} = 2 * uv.x -1 \\ y_{ndc} = 2 * uv.y - 1\]
Adding the depth sampled from the depth texture (or the depth+normals texture) gives the NDC z, so the full NDC position can be transformed back to world space (followed by a divide by the resulting w):
\[P_{world} = M_{view}^{-1} \, M_{projection}^{-1} \, P_{ndc}\]
For details, see Unity Answers: Reconstructing world pos from depth, and GPU Gems 3, Chapter 27: Motion Blur as a Post-Processing Effect.
Pixel shader code (the inverse view-projection matrix has to be passed in from C#):

// uniforms assumed by this excerpt
sampler2D _MainTex;
sampler2D _CameraDepthTexture;
float4x4 _Matrix_vp_inverse;

fixed4 frag(uoutput o) : COLOR
{
    fixed4 col = tex2D(_MainTex, o.uv);   // scene color (unused in this visualization)
    float depth = UNITY_SAMPLE_DEPTH(tex2D(_CameraDepthTexture, o.uv_depth));
    // NDC position; depending on the platform's depth convention the z may
    // need remapping to [-1,1] (depth * 2 - 1) to match the C#-side matrix
    float4 ndcPos = float4(o.uv.x * 2 - 1, o.uv.y * 2 - 1, depth, 1);

    // _Matrix_vp_inverse is passed in from C#:
    // Matrix4x4 temp = mCam.projectionMatrix * mCam.worldToCameraMatrix;
    // temp = temp.inverse;
    // mMat.SetMatrix("_Matrix_vp_inverse", temp);
    float4 worldHPos = mul(_Matrix_vp_inverse, ndcPos);
    float4 worldPos  = worldHPos / worldHPos.w;   // perspective divide

    // encode the normalized position as a color for visualization
    float dis = length(worldPos.xyz);
    float3 worldPos2 = worldPos.xyz / dis;
    worldPos2 = worldPos2 * 0.5 + 0.5;
    return fixed4(worldPos2, 1);
}
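
For completeness, here is a minimal sketch of the C# side (the class and field names are my own; the matrix setup follows the comments in the shader above):

using UnityEngine;

// Minimal sketch: attach to the camera rendering the effect.
[RequireComponent(typeof(Camera))]
public class WorldPosFromDepth : MonoBehaviour
{
    public Material mMat;   // material using the shader above
    private Camera mCam;

    void OnEnable()
    {
        mCam = GetComponent<Camera>();
        // make sure _CameraDepthTexture is generated
        mCam.depthTextureMode |= DepthTextureMode.Depth;
    }

    void OnRenderImage(RenderTexture src, RenderTexture dst)
    {
        Matrix4x4 temp = mCam.projectionMatrix * mCam.worldToCameraMatrix;
        mMat.SetMatrix("_Matrix_vp_inverse", temp.inverse);
        Graphics.Blit(src, dst, mMat);
    }
}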

(Figure: the reconstructed world position visualized as color)

2. Far Clip Plane Interpolation

Two points to understand first:

  1. An Image Effect is one form of post-processing: roughly, the color buffer output is used as a texture, and a quad the same size as the screen is rendered with a specific material (the quad's four corners correspond to the four corners of the near clip plane).
  2. Data written by the vertex shader is interpolated during rasterization before it reaches the pixel shader, so the four rays toward the far clip plane corners arrive at the pixel shader already interpolated, as in the figure below:
    (Figure: the four camera-to-far-plane corner rays interpolated across the screen)

Based on these two points: interpolating the camera rays that start at the camera origin and pass through the clip plane gives a per-pixel view direction, and combining that direction with the depth value yields the pixel's world position.

Take one of the rays:
(Figure: a single ray, with the yellow/green segments along the depth axis and the black/blue vectors along the ray)
From the proportions in the figure:
\[\frac{YellowL}{YellowL + GreenL} = \frac{|\vec{BlackV}|}{|\vec{BlackV} + \vec{BlueV}|}\]
Because \(YellowL + GreenL = 1\), it follows that:
\[\vec{BlackV} = YellowL \cdot (\vec{BlackV} + \vec{BlueV})\]
where \(YellowL\) is the 0–1 value from the depth map, and \(\vec{BlackV} + \vec{BlueV}\) is the interpolated far-plane corner vector \(\vec{interpolatedRay}\). Therefore:
\[wPos = camWPos + \vec{BlackV} = camWPos + YellowL \cdot \vec{interpolatedRay}\]

In the implementation, the vertex shader computes the per-corner ray and the pixel shader works with the interpolated result:

// uniforms assumed by this excerpt (besides Unity built-ins)
sampler2D _MainTex;
float4 _MainTex_TexelSize;
sampler2D _CameraDepthTexture;
float4x4 _ClipToWorld;   // set from C#, see below

struct uoutput
{
    float4 pos : SV_POSITION;
    half2 uv : TEXCOORD0;
    float4 uv_depth : TEXCOORD1;
    float4 interpolatedRay : TEXCOORD2;   // used by the variant in section 3
    float3 cameraToFarPlane : TEXCOORD3;
};

uoutput far_ray_vert(uinput i)
{
    uoutput o;
    o.pos = mul(UNITY_MATRIX_MVP, i.pos);
    o.uv = i.uv;
    o.uv_depth = float4(o.uv, 0, 0);
    #if UNITY_UV_STARTS_AT_TOP
    if (_MainTex_TexelSize.y < 0)
        o.uv_depth.y = 1 - o.uv_depth.y;
    #endif
    o.interpolatedRay = 0;   // not used by this variant

    // Compute the vector from the camera to this vertex's far plane corner.
    // Clip-space x and y of the vertex
    float2 clipXY = o.pos.xy / o.pos.w;

    // Position on the far plane in clip space
    float4 farPlaneClip = float4(clipXY, 1, 1);

    // Flip y on platforms with a flipped projection (_ProjectionParams.x is -1 there)
    farPlaneClip *= float4(1, _ProjectionParams.x, 1, 1);

    // Back to homogeneous world space, then divide by w to get the
    // world position on the far plane
    float4 farPlaneWorld4 = mul(_ClipToWorld, farPlaneClip);
    float3 farPlaneWorld = farPlaneWorld4.xyz / farPlaneWorld4.w;

    // Vector from the camera to the far plane
    o.cameraToFarPlane = farPlaneWorld - _WorldSpaceCameraPos;

    return o;
}


fixed4 far_ray_frag(uoutput o) : COLOR
{
    float linearDepth = Linear01Depth(SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, o.uv_depth.xy));

    // 0-1 linear depth scales the camera-to-far-plane vector directly
    float3 worldPos = _WorldSpaceCameraPos + linearDepth * o.cameraToFarPlane;

    // encode the normalized position as a color for visualization
    float dis = length(worldPos.xyz);
    float3 worldPos2 = worldPos.xyz / dis;
    worldPos2 = worldPos2 * 0.5 + 0.5;
    return fixed4(worldPos2, 1);
}

C# code. Note that the clip-to-world matrix is the inverse of projection × world-to-camera, consistent with section 1:

mMat.SetMatrix("_ClipToWorld", (mCam.projectionMatrix * mCam.worldToCameraMatrix).inverse);

(Figure: the reconstructed world position visualized as color)

3. Near Clip Plane Ray Interpolation

The principle is similar to the above; for the detailed derivation, see 《Shader入門精要》 or the original linked reference (test code is downloadable from the link at the end of this article). The key step is sketched below.
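
Condensed from the book's derivation (notation mine, not the book's): GetFrustumCorners below builds the camera-to-corner vector on the near plane, \(\vec{corner} = \vec{forward} \cdot near \pm \vec{toTop} \pm \vec{toRight}\), normalizes it, and multiplies by \(scale = |\vec{corner}|/near\). The two operations combine into:
\[\vec{ray} = \frac{\vec{corner}}{|\vec{corner}|} \cdot \frac{|\vec{corner}|}{near} = \frac{\vec{corner}}{near}, \qquad \vec{ray} \cdot \vec{forward} = 1\]
Every corner ray therefore has a forward (view-axis) component of exactly 1, and since rasterization interpolates the four rays affinely, the interpolated ray keeps this property. Scaling it by the linear eye depth \(d\) lands exactly at eye depth \(d\):
\[P_{world} = P_{cam} + d \cdot \vec{interpolatedRay}, \qquad d = LinearEyeDepth(depth)\]
This is also why this variant samples LinearEyeDepth, while the far-plane variant in section 2 uses Linear01Depth.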

Compute the vectors from the camera to the four near clip plane corners:

Matrix4x4 GetFrustumCorners()
{
    Matrix4x4 frustumCorners = Matrix4x4.identity;
    Camera camera = mCam;
    Transform cameraTransform = mCam.gameObject.transform;

    float fov = camera.fieldOfView;
    float near = camera.nearClipPlane;
    float aspect = camera.aspect;

    float halfHeight = near * Mathf.Tan(fov * 0.5f * Mathf.Deg2Rad);
    Vector3 toRight = cameraTransform.right * halfHeight * aspect;
    Vector3 toTop = cameraTransform.up * halfHeight;

    Vector3 topLeft = cameraTransform.forward * near + toTop - toRight;
    // scale turns a normalized corner direction into a vector whose
    // forward component is 1 (see the derivation above)
    float scale = topLeft.magnitude / near;

    topLeft.Normalize();
    topLeft *= scale;

    Vector3 topRight = cameraTransform.forward * near + toRight + toTop;
    topRight.Normalize();
    topRight *= scale;

    Vector3 bottomLeft = cameraTransform.forward * near - toTop - toRight;
    bottomLeft.Normalize();
    bottomLeft *= scale;

    Vector3 bottomRight = cameraTransform.forward * near + toRight - toTop;
    bottomRight.Normalize();
    bottomRight *= scale;

    frustumCorners.SetRow(0, bottomLeft);
    frustumCorners.SetRow(1, bottomRight);
    frustumCorners.SetRow(2, topRight);
    frustumCorners.SetRow(3, topLeft);

    return frustumCorners;
}

// assign to the material
mMat.SetMatrix("_FrustumCornersWS", GetFrustumCorners());
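
The per-frame wiring presumably follows the same pattern as in section 1; a minimal sketch (assumed, not from the original source):

// assumed wiring: refresh the corners each frame so camera movement is tracked
void OnRenderImage(RenderTexture src, RenderTexture dst)
{
    mMat.SetMatrix("_FrustumCornersWS", GetFrustumCorners());
    Graphics.Blit(src, dst, mMat);
}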

Shader:

// uniforms assumed by this excerpt (besides Unity built-ins)
sampler2D _MainTex;
float4 _MainTex_TexelSize;
sampler2D _CameraDepthTexture;
float4x4 _FrustumCornersWS;

struct uinput
{
    float4 pos : POSITION;
    half2 uv : TEXCOORD0;
};

struct uoutput
{
    float4 pos : SV_POSITION;
    half2 uv : TEXCOORD0;
    float4 uv_depth : TEXCOORD1;
    float4 interpolatedRay : TEXCOORD2;
    float4 cameraToFarPlane : TEXCOORD3;   // unused in this variant
};
            
uoutput near_ray_vert(uinput i)
{
    uoutput o;
    o.pos = mul(UNITY_MATRIX_MVP, i.pos);
    o.uv = i.uv;
    o.uv_depth = float4(o.uv, 0, 0);
    #if UNITY_UV_STARTS_AT_TOP
    if (_MainTex_TexelSize.y < 0)
        o.uv_depth.y = 1 - o.uv_depth.y;
    #endif
    o.cameraToFarPlane = 0;   // not used by this variant

    // pick the frustum corner matching this vertex's screen quadrant:
    // 0 = bottom-left, 1 = bottom-right, 2 = top-right, 3 = top-left
    int index = 0;
    if (i.uv.x < 0.5 && i.uv.y < 0.5)
    {
        index = 0;
    }
    else if (i.uv.x > 0.5 && i.uv.y < 0.5)
    {
        index = 1;
    }
    else if (i.uv.x > 0.5 && i.uv.y > 0.5)
    {
        index = 2;
    }
    else
    {
        index = 3;
    }
    o.interpolatedRay = _FrustumCornersWS[index];
    return o;
}
fixed4 near_ray_frag(uoutput o) : COLOR
{
    float linearDepth = LinearEyeDepth(SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, o.uv_depth.xy));
    float3 worldPos = _WorldSpaceCameraPos + linearDepth * o.interpolatedRay.xyz;

    // encode the normalized position as a color for visualization
    float dis = length(worldPos.xyz);
    float3 worldPos2 = worldPos.xyz / dis;
    worldPos2 = worldPos2 * 0.5 + 0.5;
    return fixed4(worldPos2, 1);
}

(Figure: the reconstructed world position visualized as color)

4. Summary

The derivations above draw on a few references as well as Jim's blog; Unity's built-in effects (Global Fog and Motion Blur) use a similar approach. I still don't quite understand the interpolation used in Global Fog, which uses pos.z as the ray index; I haven't figured out why that z can serve as an index.

Test source code for this article: http://pan.baidu.com/s/1c2rHVf6
