ExponentialShadowmappingResolve.glslp

// Copyright (C) 2009-2020, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE
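
// Resolve pass of the exponential shadowmapping: reads the shadowmap depth, linearizes it for perspective
// projections, optionally applies a 3x3 blur and writes the result into a viewport of the output image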

#pragma anki input const UVec2 INPUT_TEXTURE_SIZE

#pragma anki start comp
#include <shaders/GaussianBlurCommon.glsl>
#include <shaders/Functions.glsl>

// One invocation per output texel, dispatched in 8x8 workgroups
layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;

// Neighbor-tap distance in texels for the 3x3 blur below
const F32 OFFSET = 1.25;
layout(push_constant, std430) uniform pc_
{
    Vec2 u_uvScale;
    Vec2 u_uvTranslation;
    F32 u_near;
    F32 u_far;
    U32 u_renderingTechnique; // 0: perspective+blur, 1: perspective, 2: ortho+blur, 3: ortho
    U32 u_padding;
    UVec4 u_viewport;
};
layout(set = 0, binding = 0) uniform sampler u_linearAnyClampSampler;
layout(set = 0, binding = 1) uniform texture2D u_inputTex;
layout(set = 0, binding = 2) uniform writeonly image2D u_outImg;
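
// Perspective projections store non-linear depth, so linearize it using the camera's near/far planes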
F32 sampleLinearDepthPerspective(Vec2 uv)
{
    return linearizeDepth(textureLod(u_inputTex, u_linearAnyClampSampler, uv, 0.0).r, u_near, u_far);
}
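
// Orthographic projections already store linear depth, so return the value as is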
F32 sampleLinearDepthOrthographic(Vec2 uv)
{
    return textureLod(u_inputTex, u_linearAnyClampSampler, uv, 0.0).r;
}

void main()
{
    if(gl_GlobalInvocationID.x >= u_viewport.z || gl_GlobalInvocationID.y >= u_viewport.w)
    {
        // Skip if it's out of bounds
        return;
    }

    // Compute the read UV
    Vec2 uv = (Vec2(gl_GlobalInvocationID.xy) + 0.5) / Vec2(u_viewport.zw);
    uv = uv * u_uvScale + u_uvTranslation;
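    // The UV scale/translation map the viewport-local UV into a sub-region of the input texture (e.g. an atlas tile)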

    // Compute the UV limits. We can't sample beyond those
    const Vec2 TEXEL_SIZE = 1.0 / Vec2(INPUT_TEXTURE_SIZE);
    const Vec2 HALF_TEXEL_SIZE = TEXEL_SIZE / 2.0;
    const Vec2 maxUv = (Vec2(1.0) * u_uvScale + u_uvTranslation) - HALF_TEXEL_SIZE;
    const Vec2 minUv = (Vec2(0.0) * u_uvScale + u_uvTranslation) + HALF_TEXEL_SIZE;
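    // The half-texel inset keeps the bilinear filter from fetching texels outside that sub-region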

    // Sample
    const Vec2 UV_OFFSET = OFFSET * TEXEL_SIZE;
    const F32 w0 = BOX_WEIGHTS[0u];
    const F32 w1 = BOX_WEIGHTS[1u];
    const F32 w2 = BOX_WEIGHTS[2u];
    F32 outDepth;
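    // 3x3 kernel: w0 weights the center tap, w1 the 4 axis-aligned neighbors, w2 the 4 diagonal neighbors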
    switch(u_renderingTechnique)
    {
    case 0u:
        // Perspective + blur
        outDepth = sampleLinearDepthPerspective(uv) * w0;
        outDepth += sampleLinearDepthPerspective(clamp(uv + Vec2(UV_OFFSET.x, 0.0), minUv, maxUv)) * w1;
        outDepth += sampleLinearDepthPerspective(clamp(uv + Vec2(-UV_OFFSET.x, 0.0), minUv, maxUv)) * w1;
        outDepth += sampleLinearDepthPerspective(clamp(uv + Vec2(0.0, UV_OFFSET.y), minUv, maxUv)) * w1;
        outDepth += sampleLinearDepthPerspective(clamp(uv + Vec2(0.0, -UV_OFFSET.y), minUv, maxUv)) * w1;
        outDepth += sampleLinearDepthPerspective(clamp(uv + Vec2(UV_OFFSET.x, UV_OFFSET.y), minUv, maxUv)) * w2;
        outDepth += sampleLinearDepthPerspective(clamp(uv + Vec2(-UV_OFFSET.x, UV_OFFSET.y), minUv, maxUv)) * w2;
        outDepth += sampleLinearDepthPerspective(clamp(uv + Vec2(UV_OFFSET.x, -UV_OFFSET.y), minUv, maxUv)) * w2;
        outDepth += sampleLinearDepthPerspective(clamp(uv + Vec2(-UV_OFFSET.x, -UV_OFFSET.y), minUv, maxUv)) * w2;
        break;
    case 1u:
        // Perspective, no blur
        outDepth = sampleLinearDepthPerspective(uv);
        break;
    case 2u:
        // Orthographic + blur
        outDepth = sampleLinearDepthOrthographic(uv) * w0;
        outDepth += sampleLinearDepthOrthographic(clamp(uv + Vec2(UV_OFFSET.x, 0.0), minUv, maxUv)) * w1;
        outDepth += sampleLinearDepthOrthographic(clamp(uv + Vec2(-UV_OFFSET.x, 0.0), minUv, maxUv)) * w1;
        outDepth += sampleLinearDepthOrthographic(clamp(uv + Vec2(0.0, UV_OFFSET.y), minUv, maxUv)) * w1;
        outDepth += sampleLinearDepthOrthographic(clamp(uv + Vec2(0.0, -UV_OFFSET.y), minUv, maxUv)) * w1;
        outDepth += sampleLinearDepthOrthographic(clamp(uv + Vec2(UV_OFFSET.x, UV_OFFSET.y), minUv, maxUv)) * w2;
        outDepth += sampleLinearDepthOrthographic(clamp(uv + Vec2(-UV_OFFSET.x, UV_OFFSET.y), minUv, maxUv)) * w2;
        outDepth += sampleLinearDepthOrthographic(clamp(uv + Vec2(UV_OFFSET.x, -UV_OFFSET.y), minUv, maxUv)) * w2;
        outDepth += sampleLinearDepthOrthographic(clamp(uv + Vec2(-UV_OFFSET.x, -UV_OFFSET.y), minUv, maxUv)) * w2;
        break;
    default:
        // Orthographic, no blur
        outDepth = sampleLinearDepthOrthographic(uv);
    }

    // Write the results
    imageStore(u_outImg, IVec2(gl_GlobalInvocationID.xy) + IVec2(u_viewport.xy), Vec4(outDepth));
}
#pragma anki end