// PerCameraData.bslinc
// Shared per-camera shader data and helper functions. Mixed into any shader
// that needs access to the active camera's view/projection state or needs to
// convert positions between NDC, device-depth, view and world space.
mixin PerCameraData
{
	code
	{
		// Constant buffer filled out once per camera by the renderer.
		// NOTE(review): member order defines the GPU constant-buffer layout and
		// must match the engine-side structure — do not reorder.
		cbuffer PerCamera
		{
			float3 gViewDir;       // Camera view direction (presumably world space, normalized — confirm against engine)
			float3 gViewOrigin;    // Camera position (presumably world space — confirm against engine)
			float4x4 gMatViewProj;    // Combined view-projection matrix
			float4x4 gMatView;        // View matrix
			float4x4 gMatProj;        // Projection matrix
			float4x4 gMatInvProj;     // Inverse of the projection matrix
			float4x4 gMatInvViewProj; // Inverse of the combined view-projection matrix
			// Special inverse view-projection matrix that had projection entries that affect z and w eliminated.
			// Used to transform a vector (clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in clip space,
			// and view_z/view_w in view space, into world space.
			float4x4 gMatScreenToWorld;
			// Converts device Z to world Z using this formula: worldZ = (1 / (deviceZ + y)) * x
			float2 gDeviceZToWorldZ;
			// Conversion factors between NDC Z and world Z, used as: worldZ = x / (ndcZ + y)
			// (see convertToNDCZ below, which applies the inverse mapping)
			float2 gNDCZToWorldZ;
			// Conversion factors between NDC Z and device Z, used as: deviceZ = (ndcZ + y) * x
			// (see NDCZToDeviceZ/DeviceZToNDCZ below)
			float2 gNDCZToDeviceZ;
			// x - near plane distance, y - far plane distance
			float2 gNearFar;
			// xy - Viewport offset in pixels
			// zw - Viewport width & height in pixels
			int4 gViewportRectangle;
			// xy - (Viewport size in pixels / 2) / Target size in pixels
			// zw - (Viewport offset in pixels + (Viewport size in pixels / 2) + Optional pixel center offset) / Target size in pixels
			float4 gClipToUVScaleOffset;
			// Scale factor applied to ambient lighting contribution.
			float gAmbientFactor;
		}

		/** Converts Z value in range [0,1] into Z value in view space. */
		// NOTE(review): despite the "WorldZ" naming of the constants, the comment
		// above says the result is view-space depth — the two names refer to the
		// same quantity here; confirm against the engine-side code that fills
		// gDeviceZToWorldZ.
		float convertFromDeviceZ(float deviceZ)
		{
			return (1.0f / (deviceZ + gDeviceZToWorldZ.y)) * gDeviceZToWorldZ.x;
		}

		/** Converts Z value from view space to NDC space. */
		float convertToNDCZ(float viewZ)
		{
			// Inverse of the worldZ = x / (ndcZ + y) mapping: ndcZ = x / viewZ - y
			return -gNDCZToWorldZ.y + (gNDCZToWorldZ.x / viewZ);
		}

		/** Converts Z value from NDC space to device Z value in range [0, 1]. */
		float NDCZToDeviceZ(float ndcZ)
		{
			return (ndcZ + gNDCZToDeviceZ.y) * gNDCZToDeviceZ.x;
		}

		/** Converts Z value from device range ([0, 1]) to NDC space. */
		float DeviceZToNDCZ(float deviceZ)
		{
			// Exact algebraic inverse of NDCZToDeviceZ above.
			return deviceZ / gNDCZToDeviceZ.x - gNDCZToDeviceZ.y;
		}

		/** Converts position in NDC to UV coordinates mapped to the screen rectangle. */
		float2 NDCToUV(float2 ndcPos)
		{
			return ndcPos.xy * gClipToUVScaleOffset.xy + gClipToUVScaleOffset.zw;
		}

		/** Converts position in UV coordinates mapped to the screen, to screen coordinates in pixels. */
		uint2 UVToScreen(float2 uv)
		{
			// uv * viewport size gives target-relative pixels; subtracting the
			// viewport offset (plus 0.5 half-pixel bias) yields viewport-relative pixels.
			return (uint2)(uv * (float2)gViewportRectangle.zw - ((float2)gViewportRectangle.xy + 0.5f));
		}

		/** Converts position in NDC to screen coordinates in pixels. */
		uint2 NDCToScreen(float2 ndcPos)
		{
			float2 uv = NDCToUV(ndcPos);
			return UVToScreen(uv);
		}

		/** Converts position in NDC to world space. */
		float3 NDCToWorld(float2 ndcPos, float depth)
		{
			// x, y are now in clip space, z, w are in view space.
			// We multiply them by a special inverse view-projection matrix, that had the projection entries that affect
			// z, w eliminated (since they are already in view space).
			// Note: Multiply by depth should be avoided if using orthographic projection.
			float4 mixedSpacePos = float4(ndcPos.xy * -depth, depth, 1);
			float4 worldPosition4D = mul(gMatScreenToWorld, mixedSpacePos);
			// Perspective divide to drop back to 3D world coordinates.
			return worldPosition4D.xyz / worldPosition4D.w;
		}
	};
};