// HBAOShader.js
  1. import {
  2. Matrix4,
  3. Vector2,
  4. Vector4,
  5. } from 'three';
/**
 * References:
 * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.577.2286&rep=rep1&type=pdf
 * https://ceur-ws.org/Vol-3027/paper5.pdf
 * http://www.derschmale.com/2013/12/20/an-alternative-implementation-for-hbao-2
 * https://github.com/N8python/n8ao
 * https://github.com/0beqz/realism-effects
 * https://github.com/scanberg/hbao/blob/master/resources/shaders/hbao_frag.glsl
 * https://github.com/nvpro-samples/gl_ssao/blob/master/hbao.frag.glsl
 */
/**
 * Screen-space Horizon-Based Ambient Occlusion (HBAO) shader.
 *
 * For each fragment, samples a hemisphere oriented around the view-space
 * surface normal and accumulates an occlusion term by comparing the
 * reconstructed view depth of each sample against the depth buffer.
 * The AO value is written as `1 - occlusion` (1 = unoccluded).
 */
const HBAOShader = {

	name: 'HBAOShader',

	defines: {
		// 1 = perspective camera, 0 = orthographic (selects the depth -> viewZ conversion).
		'PERSPECTIVE_CAMERA': 1,
		// Number of AO samples taken per fragment; must match the kernel size below.
		'SAMPLES': 16,
		// GLSL `vec4[SAMPLES](...)` initializer for the constant sample kernel.
		// (Function declarations are hoisted, so calling it before its definition is valid.)
		'SAMPLE_VECTORS': generateHaboSampleKernelInitializer( 16 ),
		// 2 = tNormal holds raw view-space normals, 1 = RGB-packed normals ([0,1] -> [-1,1]),
		// 0 = reconstruct the normal from the depth buffer.
		'NORMAL_VECTOR_TYPE': 1,
		// 1 = read depth from the alpha channel of tDepth, 0 = from the red channel.
		'DEPTH_VALUE_SOURCE': 0,
		// 1 = cosine-weighted hemisphere sampling driven by noise, 0 = precomputed spiral kernel.
		'COSINE_SAMPLE_HEMISPHERE': 0,
	},

	uniforms: {
		'tNormal': { value: null }, // normal buffer; interpretation depends on NORMAL_VECTOR_TYPE
		'tDepth': { value: null }, // depth texture; channel selected by DEPTH_VALUE_SOURCE
		'tNoise': { value: null }, // tiled noise texture used to randomize the kernel per fragment
		'resolution': { value: new Vector2() }, // render target resolution in pixels
		'cameraNear': { value: null },
		'cameraFar': { value: null },
		'cameraProjectionMatrix': { value: new Matrix4() },
		'cameraProjectionMatrixInverse': { value: new Matrix4() },
		'radius': { value: 2. }, // view-space AO sampling radius
		'distanceExponent': { value: 1. }, // shapes the distribution of sample distances within the radius
		'bias': { value: 0.01 }, // depth bias to reduce self-occlusion artifacts
	},

	vertexShader: /* glsl */`

		varying vec2 vUv;

		void main() {

			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	fragmentShader: /* glsl */`

		varying vec2 vUv;
		uniform sampler2D tNormal;
		uniform sampler2D tDepth;
		uniform sampler2D tNoise;
		uniform vec2 resolution;
		uniform float cameraNear;
		uniform float cameraFar;
		uniform mat4 cameraProjectionMatrix;
		uniform mat4 cameraProjectionMatrixInverse;
		uniform float radius;
		uniform float distanceExponent;
		uniform float bias;

		#include <common>
		#include <packing>

		#ifndef FRAGMENT_OUTPUT
		#define FRAGMENT_OUTPUT vec4(vec3(ao), 1.)
		#endif

		const vec4 sampleKernel[SAMPLES] = SAMPLE_VECTORS;

		// Reconstructs the view-space position from screen UV and non-linear depth.
		vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
			vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
			vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;
			return viewSpacePosition.xyz / viewSpacePosition.w;
		}

		float getDepth(const vec2 uv) {
			#if DEPTH_VALUE_SOURCE == 1
			return textureLod(tDepth, uv.xy, 0.0).a;
			#else
			return textureLod(tDepth, uv.xy, 0.0).r;
			#endif
		}

		float fetchDepth(const ivec2 uv) {
			#if DEPTH_VALUE_SOURCE == 1
			return texelFetch(tDepth, uv.xy, 0).a;
			#else
			return texelFetch(tDepth, uv.xy, 0).r;
			#endif
		}

		float getViewZ(const in float depth) {
			#if PERSPECTIVE_CAMERA == 1
			return perspectiveDepthToViewZ(depth, cameraNear, cameraFar);
			#else
			return orthographicDepthToViewZ(depth, cameraNear, cameraFar);
			#endif
		}

		// Derives a view-space normal from depth-buffer differences, picking the
		// smoother of the left/right and bottom/top neighbors to avoid edge artifacts.
		vec3 computeNormalFromDepth(const vec2 uv) {
			vec2 size = vec2(textureSize(tDepth, 0));
			ivec2 p = ivec2(uv * size);
			float c0 = fetchDepth(p);
			float l2 = fetchDepth(p - ivec2(2, 0));
			float l1 = fetchDepth(p - ivec2(1, 0));
			float r1 = fetchDepth(p + ivec2(1, 0));
			float r2 = fetchDepth(p + ivec2(2, 0));
			float b2 = fetchDepth(p - ivec2(0, 2));
			float b1 = fetchDepth(p - ivec2(0, 1));
			float t1 = fetchDepth(p + ivec2(0, 1));
			float t2 = fetchDepth(p + ivec2(0, 2));
			float dl = abs((2.0 * l1 - l2) - c0);
			float dr = abs((2.0 * r1 - r2) - c0);
			float db = abs((2.0 * b1 - b2) - c0);
			float dt = abs((2.0 * t1 - t2) - c0);
			vec3 ce = getViewPosition(uv, c0).xyz;
			vec3 dpdx = (dl < dr) ? ce - getViewPosition((uv - vec2(1.0 / size.x, 0.0)), l1).xyz
								: -ce + getViewPosition((uv + vec2(1.0 / size.x, 0.0)), r1).xyz;
			vec3 dpdy = (db < dt) ? ce - getViewPosition((uv - vec2(0.0, 1.0 / size.y)), b1).xyz
								: -ce + getViewPosition((uv + vec2(0.0, 1.0 / size.y)), t1).xyz;
			return normalize(cross(dpdx, dpdy));
		}

		vec3 getViewNormal(const vec2 uv) {
			#if NORMAL_VECTOR_TYPE == 2
			return normalize(textureLod(tNormal, uv, 0.).rgb);
			#elif NORMAL_VECTOR_TYPE == 1
			return unpackRGBToNormal(textureLod(tNormal, uv, 0.).rgb);
			#else
			return computeNormalFromDepth(uv);
			#endif
		}

		// source: https://www.shadertoy.com/view/cll3R4
		vec3 cosineSampleHemisphere(const vec3 n, const vec2 u) {
			float r = sqrt(u.x);
			float theta = 2.0 * PI * u.y;
			vec3 b = normalize(cross(n, vec3(0.0, 1.0, 1.0)));
			vec3 t = cross(b, n);
			return normalize(r * sin(theta) * b + sqrt(1.0 - u.x) * n + r * cos(theta) * t);
		}

		// Occlusion contribution of one sample direction; also accumulates the
		// normal-alignment weight into totalWeight for later normalization.
		float getOcclusion(const vec2 uv, const vec3 viewPos, const vec3 viewNormal, const float depth, const vec4 sampleViewDir, inout float totalWeight) {
			// sampleViewDir.w staggers the sample distance inside the radius.
			vec3 sampleViewPos = viewPos + sampleViewDir.xyz * radius * pow(sampleViewDir.w, distanceExponent);
			vec4 sampleClipPos = cameraProjectionMatrix * vec4(sampleViewPos, 1.);
			vec2 sampleUv = sampleClipPos.xy / sampleClipPos.w * 0.5 + 0.5;
			float sampleDepth = getDepth(sampleUv);
			float distSample = abs(getViewZ(sampleDepth));
			float distWorld = abs(sampleViewPos.z);
			float distanceFalloffToUse = radius;
			// Fade out samples whose depth difference exceeds the falloff range.
			float rangeCheck = smoothstep(0.0, 1.0, distanceFalloffToUse / (abs(distSample - distWorld)));
			float weight = dot(viewNormal, sampleViewDir.xyz);
			vec2 diff = (uv - sampleUv) * resolution;
			// Reject samples that re-project outside the [0,1] screen bounds.
			vec2 clipRangeCheck = step(0., sampleUv) * step(sampleUv, vec2(1.));
			// step(0.707, ...) ignores samples landing on (almost) the same pixel.
			float occlusion = rangeCheck * weight * step(distSample + bias, distWorld) * step(0.707, dot(diff, diff)) * clipRangeCheck.x * clipRangeCheck.y;
			totalWeight += weight;
			return occlusion;
		}

		void main() {
			float depth = getDepth(vUv.xy);
			if (depth == 1.0) {
				// Far-plane depth: background pixel, nothing to occlude.
				discard;
				return;
			}
			vec3 viewPos = getViewPosition(vUv, depth);
			vec3 viewNormal = getViewNormal(vUv);
			// Tile the noise texture over the screen and build a TBN basis that
			// rotates the sample kernel per fragment.
			vec2 noiseResolution = vec2(textureSize(tNoise, 0));
			vec2 noiseUv = vUv * resolution / noiseResolution;
			vec4 noiseTexel = textureLod(tNoise, noiseUv, 0.0);
			vec3 randomVec = noiseTexel.xyz * 2.0 - 1.0;
			vec3 tangent = normalize(randomVec - viewNormal * dot(randomVec, viewNormal));
			vec3 bitangent = cross(viewNormal, tangent);
			mat3 kernelMatrix = mat3(tangent, bitangent, viewNormal);
			float ao = 0.0, totalWeight = 0.0;
			for (int i = 0; i < SAMPLES; i++) {
				#if COSINE_SAMPLE_HEMISPHERE == 1
				// Derive per-sample noise from the texel with golden-ratio-based
				// low-discrepancy offsets, mirrored back into [0,1].
				vec4 sampleNoise = noiseTexel;
				if (i != 0) {
					const vec4 hn = vec4(0.618033988749895, 0.3247179572447458, 0.2207440846057596, 0.1673039782614187);
					sampleNoise = fract(sampleNoise + hn * float(i));
					sampleNoise = mix(sampleNoise, 1.0 - sampleNoise, step(0.5, sampleNoise)) * 2.0;
				}
				vec3 hemisphereDir = cosineSampleHemisphere( viewNormal, sampleNoise.rg );
				vec4 sampleViewDir = vec4(hemisphereDir, sampleNoise.b );
				#else
				vec4 sampleViewDir = sampleKernel[i];
				sampleViewDir.xyz = normalize(kernelMatrix * sampleViewDir.xyz);
				#endif
				float occlusion = getOcclusion(vUv, viewPos, viewNormal, depth, sampleViewDir, totalWeight);
				ao += occlusion;
			}
			if (totalWeight > 0.) {
				ao /= totalWeight;
			}
			ao = clamp(1. - ao, 0., 1.);
			gl_FragColor = FRAGMENT_OUTPUT;
		}`

};
/**
 * Helper shader that renders the depth buffer as a grayscale image
 * (near = white, far = black), e.g. for visualizing the HBAO depth input.
 */
const HBAODepthShader = {

	name: 'HBAODepthShader',

	defines: {
		// 1 = perspective depth (linearized before display), 0 = orthographic depth (already linear).
		'PERSPECTIVE_CAMERA': 1
	},

	uniforms: {
		'tDepth': { value: null }, // depth texture to visualize
		'cameraNear': { value: null },
		'cameraFar': { value: null },
	},

	vertexShader: /* glsl */`

		varying vec2 vUv;

		void main() {

			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	fragmentShader: /* glsl */`

		uniform sampler2D tDepth;
		uniform float cameraNear;
		uniform float cameraFar;
		varying vec2 vUv;

		#include <packing>

		// Returns a linear [0,1] depth value for the given screen position.
		float getLinearDepth( const in vec2 screenPosition ) {
			#if PERSPECTIVE_CAMERA == 1
			float fragCoordZ = texture2D( tDepth, screenPosition ).x;
			float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );
			return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );
			#else
			return texture2D( tDepth, screenPosition ).x;
			#endif
		}

		void main() {
			float depth = getLinearDepth( vUv );
			gl_FragColor = vec4( vec3( 1.0 - depth ), 1.0 );
		}`

};
  222. function generateHaboSampleKernelInitializer( samples ) {
  223. const poissonDisk = generateHaboSamples( samples );
  224. let glslCode = 'vec4[SAMPLES](';
  225. for ( let i = 0; i < samples; i ++ ) {
  226. const sample = poissonDisk[ i ];
  227. glslCode += `vec4(${sample.x}, ${sample.y}, ${sample.z}, ${sample.w})`;
  228. if ( i < samples - 1 ) {
  229. glslCode += ',';
  230. }
  231. }
  232. glslCode += ')';
  233. return glslCode;
  234. }
  235. function generateHaboSamples( samples ) {
  236. const kernel = [];
  237. for ( let kernelIndex = 0; kernelIndex < samples; kernelIndex ++ ) {
  238. const spiralAngle = kernelIndex * Math.PI * ( 3 - Math.sqrt( 5 ) );
  239. const z = Math.sqrt( 0.99 - ( kernelIndex / ( samples - 1 ) ) * 0.98 );
  240. const radius = Math.sqrt( 1 - z * z );
  241. const x = Math.cos( spiralAngle ) * radius;
  242. const y = Math.sin( spiralAngle ) * radius;
  243. const scaleStep = 8;
  244. const scaleRange = Math.floor( samples / scaleStep );
  245. const scaleIndex =
  246. Math.floor( kernelIndex / scaleStep ) +
  247. ( kernelIndex % scaleStep ) * scaleRange;
  248. let scale = 1 - scaleIndex / samples;
  249. scale = 0.1 + 0.9 * scale;
  250. kernel.push( new Vector4( x, y, z, scale ) );
  251. }
  252. return kernel;
  253. }
// The kernel initializer is exported alongside the shaders (presumably so callers
// can regenerate SAMPLE_VECTORS for a custom SAMPLES count — verify against usage).
export { generateHaboSampleKernelInitializer, HBAOShader, HBAODepthShader };