// HBAOShader.js
import {
	Matrix4,
	Vector2,
	Vector4,
} from 'three';
/**
 * References:
 * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.577.2286&rep=rep1&type=pdf
 * https://ceur-ws.org/Vol-3027/paper5.pdf
 * http://www.derschmale.com/2013/12/20/an-alternative-implementation-for-hbao-2
 * https://github.com/N8python/n8ao
 * https://github.com/0beqz/realism-effects
 * https://github.com/scanberg/hbao/blob/master/resources/shaders/hbao_frag.glsl
 * https://github.com/nvpro-samples/gl_ssao/blob/master/hbao.frag.glsl
 */
/**
 * Horizon-Based Ambient Occlusion shader.
 *
 * Renders a per-fragment AO factor by sampling a hemisphere kernel
 * around the reconstructed view-space position/normal and comparing
 * each sample against the depth buffer.
 */
const HBAOShader = {

	name: 'HBAOShader',

	defines: {
		'PERSPECTIVE_CAMERA': 1, // 1: perspective depth linearization, 0: orthographic
		'SAMPLES': 16, // number of kernel samples per fragment
		'SAMPLE_VECTORS': generateHaboSampleKernelInitializer( 16 ), // GLSL initializer; count must match SAMPLES
		'NORMAL_VECTOR_TYPE': 2, // NOTE: see getViewNormal() — 2: raw normals, 1: RGB-packed, else reconstruct from depth
		'DEPTH_VALUE_SOURCE': 0, // 1: depth stored in alpha channel of tDepth, 0: in red channel
		'SAMPLING_FROM_NOISE': 0, // 1: derive sample vectors from tNoise at run time instead of the static kernel
	},

	uniforms: {
		'tNormal': { value: null }, // normal buffer (interpretation depends on NORMAL_VECTOR_TYPE)
		'tDepth': { value: null }, // depth buffer (channel depends on DEPTH_VALUE_SOURCE)
		'tNoise': { value: null }, // tiling random-vector texture used to rotate the kernel
		'resolution': { value: new Vector2() }, // render target size in pixels
		'cameraNear': { value: null },
		'cameraFar': { value: null },
		'cameraProjectionMatrix': { value: new Matrix4() },
		'cameraProjectionMatrixInverse': { value: new Matrix4() },
		'radius': { value: 2. }, // view-space sampling radius
		'distanceExponent': { value: 1. }, // shapes the distribution of sample distances within the radius
		'bias': { value: 0.01 }, // depth comparison offset to reduce self-occlusion artifacts
	},

	vertexShader: /* glsl */`

		varying vec2 vUv;

		void main() {

			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	fragmentShader: /* glsl */`

		varying vec2 vUv;
		uniform highp sampler2D tNormal;
		uniform highp sampler2D tDepth;
		uniform sampler2D tNoise;
		uniform vec2 resolution;
		uniform float cameraNear;
		uniform float cameraFar;
		uniform mat4 cameraProjectionMatrix;
		uniform mat4 cameraProjectionMatrixInverse;
		uniform float radius;
		uniform float distanceExponent;
		uniform float bias;

		#include <common>
		#include <packing>

		// Lets a composing pass redefine how the scalar 'ao' is written out.
		#ifndef FRAGMENT_OUTPUT
		#define FRAGMENT_OUTPUT vec4(vec3(ao), 1.)
		#endif

		const vec4 sampleKernel[SAMPLES] = SAMPLE_VECTORS;

		// Unprojects a [0,1] screen position + depth back to view space.
		vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
			vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
			vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;
			return viewSpacePosition.xyz / viewSpacePosition.w;
		}

		// Depth lookup by UV (filtered fetch at LOD 0).
		float getDepth(const vec2 uv) {
			#if DEPTH_VALUE_SOURCE == 1
				return textureLod(tDepth, uv.xy, 0.0).a;
			#else
				return textureLod(tDepth, uv.xy, 0.0).r;
			#endif
		}

		// Depth lookup by integer texel coordinate (unfiltered).
		float fetchDepth(const ivec2 uv) {
			#if DEPTH_VALUE_SOURCE == 1
				return texelFetch(tDepth, uv.xy, 0).a;
			#else
				return texelFetch(tDepth, uv.xy, 0).r;
			#endif
		}

		float getViewZ(const in float depth) {
			#if PERSPECTIVE_CAMERA == 1
				return perspectiveDepthToViewZ(depth, cameraNear, cameraFar);
			#else
				return orthographicDepthToViewZ(depth, cameraNear, cameraFar);
			#endif
		}

		// Reconstructs the view-space normal from depth derivatives, picking
		// the smoother side in each axis to avoid artifacts at depth edges.
		vec3 computeNormalFromDepth(const vec2 uv) {
			vec2 size = vec2(textureSize(tDepth, 0));
			ivec2 p = ivec2(uv * size);
			float c0 = fetchDepth(p);
			float l2 = fetchDepth(p - ivec2(2, 0));
			float l1 = fetchDepth(p - ivec2(1, 0));
			float r1 = fetchDepth(p + ivec2(1, 0));
			float r2 = fetchDepth(p + ivec2(2, 0));
			float b2 = fetchDepth(p - ivec2(0, 2));
			float b1 = fetchDepth(p - ivec2(0, 1));
			float t1 = fetchDepth(p + ivec2(0, 1));
			float t2 = fetchDepth(p + ivec2(0, 2));
			// Second-order extrapolation error on each side; the smaller error
			// indicates the side that lies on the same surface as the center.
			float dl = abs((2.0 * l1 - l2) - c0);
			float dr = abs((2.0 * r1 - r2) - c0);
			float db = abs((2.0 * b1 - b2) - c0);
			float dt = abs((2.0 * t1 - t2) - c0);
			vec3 ce = getViewPosition(uv, c0).xyz;
			vec3 dpdx = (dl < dr) ? ce - getViewPosition((uv - vec2(1.0 / size.x, 0.0)), l1).xyz
									: -ce + getViewPosition((uv + vec2(1.0 / size.x, 0.0)), r1).xyz;
			vec3 dpdy = (db < dt) ? ce - getViewPosition((uv - vec2(0.0, 1.0 / size.y)), b1).xyz
									: -ce + getViewPosition((uv + vec2(0.0, 1.0 / size.y)), t1).xyz;
			return normalize(cross(dpdx, dpdy));
		}

		vec3 getViewNormal(const vec2 uv) {
			#if NORMAL_VECTOR_TYPE == 2
				return normalize(textureLod(tNormal, uv, 0.).rgb);
			#elif NORMAL_VECTOR_TYPE == 1
				return unpackRGBToNormal(textureLod(tNormal, uv, 0.).rgb);
			#else
				return computeNormalFromDepth(uv);
			#endif
		}

		// Returns the occlusion contribution of one hemisphere sample and
		// accumulates its normal-alignment weight into totalWeight.
		// NOTE(review): totalWeight is incremented even when the sample is
		// rejected by the range/clip checks below — confirm this matches the
		// intended normalization.
		float getOcclusion(const vec2 uv, const vec3 viewPos, const vec3 viewNormal, const float depth, const vec4 sampleViewDir, inout float totalWeight) {
			// sampleViewDir.w scales the distance along the sample direction.
			vec3 sampleViewPos = viewPos + sampleViewDir.xyz * radius * pow(sampleViewDir.w, distanceExponent);
			vec4 sampleClipPos = cameraProjectionMatrix * vec4(sampleViewPos, 1.);
			vec2 sampleUv = sampleClipPos.xy / sampleClipPos.w * 0.5 + 0.5;
			float sampleDepth = getDepth(sampleUv);
			float distSample = abs(getViewZ(sampleDepth));
			float distWorld = abs(sampleViewPos.z);
			// Attenuate occluders far outside the sampling radius.
			float distanceFalloffToUse = radius;
			float rangeCheck = smoothstep(0.0, 1.0, distanceFalloffToUse / (abs(distSample - distWorld)));
			float weight = dot(viewNormal, sampleViewDir.xyz);
			vec2 diff = (uv - sampleUv) * resolution;
			// Zero out samples that project outside the [0,1] UV range.
			vec2 clipRangeCheck = step(0., sampleUv) * step(sampleUv, vec2(1.));
			// step(0.707, dot(diff, diff)) discards samples landing (nearly) on the center texel.
			float occlusion = rangeCheck * weight * step(distSample + bias, distWorld) * step(0.707, dot(diff, diff)) * clipRangeCheck.x * clipRangeCheck.y;
			totalWeight += weight;
			return occlusion;
		}

		void main() {
			float depth = getDepth(vUv.xy);
			// Depth of exactly 1.0 means nothing was rendered here — skip AO.
			if (depth == 1.0) {
				discard;
				return;
			}
			vec3 viewPos = getViewPosition(vUv, depth);
			vec3 viewNormal = getViewNormal(vUv);
			// Tile the noise texture across the screen to rotate the kernel per pixel.
			vec2 noiseResolution = vec2(textureSize(tNoise, 0));
			vec2 noiseUv = vUv * resolution / noiseResolution;
			vec4 noiseTexel = textureLod(tNoise, noiseUv, 0.0);
			vec3 randomVec = noiseTexel.xyz * 2.0 - 1.0;
			// Gram-Schmidt: build a tangent frame aligned to the surface normal.
			vec3 tangent = normalize(randomVec - viewNormal * dot(randomVec, viewNormal));
			vec3 bitangent = cross(viewNormal, tangent);
			mat3 kernelMatrix = mat3(tangent, bitangent, viewNormal);
			float ao = 0.0, totalWeight = 0.0;
			for (int i = 0; i < SAMPLES; i++) {
				#if SAMPLING_FROM_NOISE == 1
					// Derive per-iteration sample vectors from the noise texel via an
					// R4 low-discrepancy sequence (generalized golden-ratio offsets).
					vec4 sampleNoise = noiseTexel;
					if (i != 0) {
						const vec4 hn = vec4(0.618033988749895, 0.3247179572447458, 0.2207440846057596, 0.1673039782614187);
						sampleNoise = fract(sampleNoise + hn * float(i));
						sampleNoise = mix(sampleNoise, 1.0 - sampleNoise, step(0.5, sampleNoise)) * 2.0;
					}
					vec3 hemisphereDir = normalize(kernelMatrix * vec3(sampleNoise.xy * 2. - 1., sampleNoise.z));
					vec4 sampleViewDir = vec4(hemisphereDir, sampleNoise.a);
				#else
					// Static precomputed kernel, rotated into the surface frame.
					vec4 sampleViewDir = sampleKernel[i];
					sampleViewDir.xyz = normalize(kernelMatrix * sampleViewDir.xyz);
				#endif
				float occlusion = getOcclusion(vUv, viewPos, viewNormal, depth, sampleViewDir, totalWeight);
				ao += occlusion;
			}
			// Normalize by accumulated weights, then invert so 1 = unoccluded.
			if (totalWeight > 0.) {
				ao /= totalWeight;
			}
			ao = clamp(1. - ao, 0., 1.);
			gl_FragColor = FRAGMENT_OUTPUT;
		}`

};
/**
 * Helper shader that visualizes the depth buffer as a linear grayscale
 * image (white = near plane, black = far plane).
 */
const HBAODepthShader = {

	name: 'HBAODepthShader',

	defines: {
		'PERSPECTIVE_CAMERA': 1 // 1: linearize non-linear perspective depth, 0: depth is already linear
	},

	uniforms: {
		'tDepth': { value: null }, // depth texture, value read from the red channel
		'cameraNear': { value: null },
		'cameraFar': { value: null },
	},

	vertexShader: /* glsl */`

		varying vec2 vUv;

		void main() {

			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	fragmentShader: /* glsl */`

		uniform sampler2D tDepth;
		uniform float cameraNear;
		uniform float cameraFar;

		varying vec2 vUv;

		#include <packing>

		// Returns linear [0,1] depth. For a perspective camera the stored
		// depth is non-linear, so convert via view-space z first.
		float getLinearDepth( const in vec2 screenPosition ) {

			#if PERSPECTIVE_CAMERA == 1

				float fragCoordZ = texture2D( tDepth, screenPosition ).x;
				float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );
				return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );

			#else

				return texture2D( tDepth, screenPosition ).x;

			#endif

		}

		void main() {

			float depth = getLinearDepth( vUv );
			// Invert so near surfaces render bright.
			gl_FragColor = vec4( vec3( 1.0 - depth ), 1.0 );

		}`

};
  214. function generateHaboSampleKernelInitializer( samples ) {
  215. const poissonDisk = generateHaboSamples( samples );
  216. let glslCode = 'vec4[SAMPLES](';
  217. for ( let i = 0; i < samples; i ++ ) {
  218. const sample = poissonDisk[ i ];
  219. glslCode += `vec4(${sample.x}, ${sample.y}, ${sample.z}, ${sample.w})`;
  220. if ( i < samples - 1 ) {
  221. glslCode += ',';
  222. }
  223. }
  224. glslCode += ')';
  225. return glslCode;
  226. }
  227. function generateHaboSamples( samples ) {
  228. const kernel = [];
  229. for ( let kernelIndex = 0; kernelIndex < samples; kernelIndex ++ ) {
  230. const spiralAngle = kernelIndex * Math.PI * ( 3 - Math.sqrt( 5 ) );
  231. const z = Math.sqrt( 0.99 - ( kernelIndex / ( samples - 1 ) ) * 0.98 );
  232. const radius = Math.sqrt( 1 - z * z );
  233. const x = Math.cos( spiralAngle ) * radius;
  234. const y = Math.sin( spiralAngle ) * radius;
  235. const scaleStep = 8;
  236. const scaleRange = Math.floor( samples / scaleStep );
  237. const scaleIndex =
  238. Math.floor( kernelIndex / scaleStep ) +
  239. ( kernelIndex % scaleStep ) * scaleRange;
  240. let scale = 1 - scaleIndex / samples;
  241. scale = 0.1 + 0.9 * scale;
  242. kernel.push( new Vector4( x, y, z, scale ) );
  243. }
  244. return kernel;
  245. }
// Public API: the two shader definitions, plus the kernel initializer so
// external passes can regenerate SAMPLE_VECTORS for a different SAMPLES count.
export { generateHaboSampleKernelInitializer, HBAOShader, HBAODepthShader };