// GTAOShader.js
// three.js core types used for uniform defaults and the noise texture.
import {
	DataTexture,
	Matrix4,
	RepeatWrapping,
	Vector2,
	Vector3,
} from 'three';
/**
 * References:
 * - implemented algorithm - GTAO
 *   - https://iryoku.com/downloads/Practical-Realtime-Strategies-for-Accurate-Indirect-Occlusion.pdf
 *   - https://github.com/Patapom/GodComplex/blob/master/Tests/TestHBIL/2018%20Mayaux%20-%20Horizon-Based%20Indirect%20Lighting%20(HBIL).pdf
 *
 * - other AO algorithms that are not implemented here:
 *   - Screen Space Ambient Occlusion (SSAO), see also SSAOShader.js
 *     - http://john-chapman-graphics.blogspot.com/2013/01/ssao-tutorial.html
 *     - https://learnopengl.com/Advanced-Lighting/SSAO
 *     - https://creativecoding.soe.ucsc.edu/courses/cmpm164/_schedule/AmbientOcclusion.pdf
 *     - https://drive.google.com/file/d/1SyagcEVplIm2KkRD3WQYSO9O0Iyi1hfy/edit
 *   - Scalable Ambient Occlusion (SAO), see also SAOShader.js
 *     - https://casual-effects.com/research/McGuire2012SAO/index.html
 *     - https://research.nvidia.com/sites/default/files/pubs/2012-06_Scalable-Ambient-Obscurance/McGuire12SAO.pdf
 *   - N8AO
 *     - https://github.com/N8python/n8ao
 *   - Horizon Based Ambient Occlusion (HBAO)
 *     - http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.577.2286&rep=rep1&type=pdf
 *     - https://www.derschmale.com/2013/12/20/an-alternative-implementation-for-hbao-2/
 *
 * - further reading
 *   - https://ceur-ws.org/Vol-3027/paper5.pdf
 *   - https://www.comp.nus.edu.sg/~lowkl/publications/mssao_visual_computer_2012.pdf
 *   - https://web.ics.purdue.edu/~tmcgraw/papers/mcgraw-ao-2008.pdf
 *   - https://www.activision.com/cdn/research/Practical_Real_Time_Strategies_for_Accurate_Indirect_Occlusion_NEW%20VERSION_COLOR.pdf
 *   - https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.390.2463&rep=rep1&type=pdf
 *   - https://www.intel.com/content/www/us/en/developer/articles/technical/adaptive-screen-space-ambient-occlusion.html
 */
// Ground Truth Ambient Occlusion (GTAO) full-screen shader.
// Reconstructs view-space position from the depth buffer, scans a set of
// slice directions for horizon angles and integrates them into an ambient
// occlusion term, written out through the FRAGMENT_OUTPUT macro.
const GTAOShader = {

	name: 'GTAOShader',

	// Compile-time configuration; changing any of these requires a recompile.
	defines: {
		PERSPECTIVE_CAMERA: 1, // 1 = perspective depth reconstruction, 0 = orthographic
		SAMPLES: 16, // total sample budget, split across slice directions in main()
		NORMAL_VECTOR_TYPE: 1, // 2 = raw normal texture, 1 = RGB-packed normals, else reconstruct from depth
		DEPTH_SWIZZLING: 'x', // component of tDepth that holds depth
		SCREEN_SPACE_RADIUS: 0, // 1 = interpret the radius in screen pixels instead of view units
		SCREEN_SPACE_RADIUS_SCALE: 100.0, // pixel radius used when SCREEN_SPACE_RADIUS == 1
		SCENE_CLIP_BOX: 0, // 1 = limit the AO effect to a world-space box
	},

	uniforms: {
		tNormal: { value: null }, // normals, interpreted per NORMAL_VECTOR_TYPE
		tDepth: { value: null }, // scene depth buffer
		tNoise: { value: null }, // tiling rotation noise (e.g. generateMagicSquareNoise())
		resolution: { value: new Vector2() }, // render target size in pixels
		cameraNear: { value: null },
		cameraFar: { value: null },
		cameraProjectionMatrix: { value: new Matrix4() },
		cameraProjectionMatrixInverse: { value: new Matrix4() },
		cameraWorldMatrix: { value: new Matrix4() }, // needed for SCENE_CLIP_BOX == 1
		radius: { value: 0.25 }, // sampling radius (view units, or pixels with SCREEN_SPACE_RADIUS)
		distanceExponent: { value: 1. }, // skews the per-step sample distances
		thickness: { value: 1. }, // assumed occluder thickness along view z
		distanceFallOff: { value: 1. }, // 0..1, attenuates horizon updates from later steps
		scale: { value: 1. }, // final AO term is raised to this power
		sceneBoxMin: { value: new Vector3( - 1, - 1, - 1 ) }, // world-space clip box, SCENE_CLIP_BOX == 1
		sceneBoxMax: { value: new Vector3( 1, 1, 1 ) },
	},

	vertexShader: /* glsl */`
		varying vec2 vUv;

		void main() {
			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
		}`,

	fragmentShader: /* glsl */`
		varying vec2 vUv;
		uniform highp sampler2D tNormal;
		uniform highp sampler2D tDepth;
		uniform sampler2D tNoise;
		uniform vec2 resolution;
		uniform float cameraNear;
		uniform float cameraFar;
		uniform mat4 cameraProjectionMatrix;
		uniform mat4 cameraProjectionMatrixInverse;
		uniform mat4 cameraWorldMatrix;
		uniform float radius;
		uniform float distanceExponent;
		uniform float thickness;
		uniform float distanceFallOff;
		uniform float scale;
		#if SCENE_CLIP_BOX == 1
			uniform vec3 sceneBoxMin;
			uniform vec3 sceneBoxMax;
		#endif

		#include <common>
		#include <packing>

		#ifndef FRAGMENT_OUTPUT
		#define FRAGMENT_OUTPUT vec4(vec3(ao), 1.)
		#endif

		// Unprojects a screen-space UV + depth into view space.
		vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
			vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
			vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;
			return viewSpacePosition.xyz / viewSpacePosition.w;
		}

		float getDepth(const vec2 uv) {
			return textureLod(tDepth, uv.xy, 0.0).DEPTH_SWIZZLING;
		}

		float fetchDepth(const ivec2 uv) {
			return texelFetch(tDepth, uv.xy, 0).DEPTH_SWIZZLING;
		}

		// NOTE(review): getViewZ is not referenced below — presumably kept for
		// external FRAGMENT_OUTPUT overrides; confirm before removing.
		float getViewZ(const in float depth) {
			#if PERSPECTIVE_CAMERA == 1
				return perspectiveDepthToViewZ(depth, cameraNear, cameraFar);
			#else
				return orthographicDepthToViewZ(depth, cameraNear, cameraFar);
			#endif
		}

		// Reconstructs a view-space normal from depth alone, picking the
		// better-conditioned horizontal/vertical neighbor on each axis to
		// avoid artifacts at depth discontinuities.
		vec3 computeNormalFromDepth(const vec2 uv) {
			vec2 size = vec2(textureSize(tDepth, 0));
			ivec2 p = ivec2(uv * size);
			float c0 = fetchDepth(p);
			float l2 = fetchDepth(p - ivec2(2, 0));
			float l1 = fetchDepth(p - ivec2(1, 0));
			float r1 = fetchDepth(p + ivec2(1, 0));
			float r2 = fetchDepth(p + ivec2(2, 0));
			float b2 = fetchDepth(p - ivec2(0, 2));
			float b1 = fetchDepth(p - ivec2(0, 1));
			float t1 = fetchDepth(p + ivec2(0, 1));
			float t2 = fetchDepth(p + ivec2(0, 2));
			float dl = abs((2.0 * l1 - l2) - c0);
			float dr = abs((2.0 * r1 - r2) - c0);
			float db = abs((2.0 * b1 - b2) - c0);
			float dt = abs((2.0 * t1 - t2) - c0);
			vec3 ce = getViewPosition(uv, c0).xyz;
			vec3 dpdx = (dl < dr) ? ce - getViewPosition((uv - vec2(1.0 / size.x, 0.0)), l1).xyz : -ce + getViewPosition((uv + vec2(1.0 / size.x, 0.0)), r1).xyz;
			vec3 dpdy = (db < dt) ? ce - getViewPosition((uv - vec2(0.0, 1.0 / size.y)), b1).xyz : -ce + getViewPosition((uv + vec2(0.0, 1.0 / size.y)), t1).xyz;
			return normalize(cross(dpdx, dpdy));
		}

		vec3 getViewNormal(const vec2 uv) {
			#if NORMAL_VECTOR_TYPE == 2
				return normalize(textureLod(tNormal, uv, 0.).rgb);
			#elif NORMAL_VECTOR_TYPE == 1
				return unpackRGBToNormal(textureLod(tNormal, uv, 0.).rgb);
			#else
				return computeNormalFromDepth(uv);
			#endif
		}

		// Projects a view-space position back to screen UV and samples the
		// scene depth there; returns (uv.x, uv.y, sceneDepth).
		vec3 getSceneUvAndDepth(vec3 sampleViewPos) {
			vec4 sampleClipPos = cameraProjectionMatrix * vec4(sampleViewPos, 1.);
			vec2 sampleUv = sampleClipPos.xy / sampleClipPos.w * 0.5 + 0.5;
			float sampleSceneDepth = getDepth(sampleUv);
			return vec3(sampleUv, sampleSceneDepth);
		}

		void main() {
			float depth = getDepth(vUv.xy);
			if (depth >= 1.0) {
				// Background pixel — nothing to occlude.
				discard;
				return;
			}
			vec3 viewPos = getViewPosition(vUv, depth);
			vec3 viewNormal = getViewNormal(vUv);
			float radiusToUse = radius;
			// NOTE(review): distanceFalloffToUse is initialized from thickness
			// and scaled below, but never read — the loops use thickness
			// directly. Looks like a leftover; confirm against upstream.
			float distanceFalloffToUse = thickness;
			#if SCREEN_SPACE_RADIUS == 1
				float radiusScale = getViewPosition(vec2(0.5 + float(SCREEN_SPACE_RADIUS_SCALE) / resolution.x, 0.0), depth).x;
				radiusToUse *= radiusScale;
				distanceFalloffToUse *= radiusScale;
			#endif
			#if SCENE_CLIP_BOX == 1
				vec3 worldPos = (cameraWorldMatrix * vec4(viewPos, 1.0)).xyz;
				float boxDistance = length(max(vec3(0.0), max(sceneBoxMin - worldPos, worldPos - sceneBoxMax)));
				if (boxDistance > radiusToUse) {
					discard;
					return;
				}
			#endif
			// Per-pixel random rotation of the sampling kernel, tiled from tNoise.
			vec2 noiseResolution = vec2(textureSize(tNoise, 0));
			vec2 noiseUv = vUv * resolution / noiseResolution;
			vec4 noiseTexel = textureLod(tNoise, noiseUv, 0.0);
			vec3 randomVec = noiseTexel.xyz * 2.0 - 1.0;
			vec3 tangent = normalize(vec3(randomVec.xy, 0.));
			vec3 bitangent = vec3(-tangent.y, tangent.x, 0.);
			mat3 kernelMatrix = mat3(tangent, bitangent, vec3(0., 0., 1.));
			// Split the sample budget into horizon-scan slices.
			const int DIRECTIONS = SAMPLES < 30 ? 3 : 5;
			const int STEPS = (SAMPLES + DIRECTIONS - 1) / DIRECTIONS;
			// NOTE(review): totalWeight is declared but never used here.
			float ao = 0.0, totalWeight = 0.0;
			for (int i = 0; i < DIRECTIONS; ++i) {
				float angle = float(i) / float(DIRECTIONS) * PI;
				// w holds a per-pixel random radius jitter in [0.5, 1].
				vec4 sampleDir = vec4(cos(angle), sin(angle), 0., 0.5 + 0.5 * noiseTexel.w);
				sampleDir.xyz = normalize(kernelMatrix * sampleDir.xyz);
				vec3 viewDir = normalize(-viewPos.xyz);
				vec3 sliceBitangent = normalize(cross(sampleDir.xyz, viewDir));
				vec3 sliceTangent = cross(sliceBitangent, viewDir);
				// Projection of the surface normal into the slice plane.
				vec3 normalInSlice = normalize(viewNormal - sliceBitangent * dot(viewNormal, sliceBitangent));
				vec3 tangentToNormalInSlice = cross(normalInSlice, sliceBitangent);
				// Initial horizon cosines on both sides of the slice.
				vec2 cosHorizons = vec2(dot(viewDir, tangentToNormalInSlice), dot(viewDir, -tangentToNormalInSlice));
				for (int j = 0; j < STEPS; ++j) {
					vec3 sampleViewOffset = sampleDir.xyz * radiusToUse * sampleDir.w * pow(float(j + 1) / float(STEPS), distanceExponent);
					// March in the positive slice direction and raise the horizon.
					vec3 sampleSceneUvDepth = getSceneUvAndDepth(viewPos + sampleViewOffset);
					vec3 sampleSceneViewPos = getViewPosition(sampleSceneUvDepth.xy, sampleSceneUvDepth.z);
					vec3 viewDelta = sampleSceneViewPos - viewPos;
					if (abs(viewDelta.z) < thickness) {
						float sampleCosHorizon = dot(viewDir, normalize(viewDelta));
						cosHorizons.x += max(0., (sampleCosHorizon - cosHorizons.x) * mix(1., 2. / float(j + 2), distanceFallOff));
					}
					// Same march in the negative slice direction.
					sampleSceneUvDepth = getSceneUvAndDepth(viewPos - sampleViewOffset);
					sampleSceneViewPos = getViewPosition(sampleSceneUvDepth.xy, sampleSceneUvDepth.z);
					viewDelta = sampleSceneViewPos - viewPos;
					if (abs(viewDelta.z) < thickness) {
						float sampleCosHorizon = dot(viewDir, normalize(viewDelta));
						cosHorizons.y += max(0., (sampleCosHorizon - cosHorizons.y) * mix(1., 2. / float(j + 2), distanceFallOff));
					}
				}
				// Analytic integration of the visible arc between both horizons.
				vec2 sinHorizons = sqrt(1. - cosHorizons * cosHorizons);
				float nx = dot(normalInSlice, sliceTangent);
				float ny = dot(normalInSlice, viewDir);
				float nxb = 1. / 2. * (acos(cosHorizons.y) - acos(cosHorizons.x) + sinHorizons.x * cosHorizons.x - sinHorizons.y * cosHorizons.y);
				float nyb = 1. / 2. * (2. - cosHorizons.x * cosHorizons.x - cosHorizons.y * cosHorizons.y);
				float occlusion = nx * nxb + ny * nyb;
				ao += occlusion;
			}
			ao = clamp(ao / float(DIRECTIONS), 0., 1.);
			#if SCENE_CLIP_BOX == 1
				// Fade the effect out toward the clip-box boundary.
				ao = mix(ao, 1., smoothstep(0., radiusToUse, boxDistance));
			#endif
			ao = pow(ao, scale);
			gl_FragColor = FRAGMENT_OUTPUT;
		}`

};
// Debug helper: renders the depth buffer as inverted linear grayscale
// (near = white, far = black), for inspecting the AO pass inputs.
const GTAODepthShader = {

	name: 'GTAODepthShader',

	defines: {
		PERSPECTIVE_CAMERA: 1 // 1 = linearize perspective depth, 0 = depth is already linear
	},

	uniforms: {
		tDepth: { value: null }, // depth texture to visualize
		cameraNear: { value: null },
		cameraFar: { value: null },
	},

	vertexShader: /* glsl */`
		varying vec2 vUv;
		void main() {
			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
		}`,

	fragmentShader: /* glsl */`
		uniform sampler2D tDepth;
		uniform float cameraNear;
		uniform float cameraFar;
		varying vec2 vUv;

		#include <packing>

		// Depth linearized to [0, 1] between the near and far planes.
		float getLinearDepth( const in vec2 screenPosition ) {
			#if PERSPECTIVE_CAMERA == 1
				float fragCoordZ = texture2D( tDepth, screenPosition ).x;
				float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );
				return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );
			#else
				return texture2D( tDepth, screenPosition ).x;
			#endif
		}

		void main() {
			float depth = getLinearDepth( vUv );
			gl_FragColor = vec4( vec3( 1.0 - depth ), 1.0 );
		}`

};
// Composite pass: blends the AO render toward white by intensity, so
// intensity = 0 leaves the frame untouched and 1 applies the full AO term.
const GTAOBlendShader = {

	name: 'GTAOBlendShader',

	uniforms: {
		tDiffuse: { value: null }, // AO texture to blend in
		intensity: { value: 1.0 } // 0..1 effect strength
	},

	vertexShader: /* glsl */`
		varying vec2 vUv;
		void main() {
			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
		}`,

	fragmentShader: /* glsl */`
		uniform float intensity;
		uniform sampler2D tDiffuse;
		varying vec2 vUv;
		void main() {
			vec4 texel = texture2D( tDiffuse, vUv );
			// Fade from white (no occlusion) toward the sampled AO value.
			gl_FragColor = vec4(mix(vec3(1.), texel.rgb, intensity), texel.a);
		}`

};
  284. function generateMagicSquareNoise( size = 5 ) {
  285. const noiseSize = Math.floor( size ) % 2 === 0 ? Math.floor( size ) + 1 : Math.floor( size );
  286. const magicSquare = generateMagicSquare( noiseSize );
  287. const noiseSquareSize = magicSquare.length;
  288. const data = new Uint8Array( noiseSquareSize * 4 );
  289. for ( let inx = 0; inx < noiseSquareSize; ++ inx ) {
  290. const iAng = magicSquare[ inx ];
  291. const angle = ( 2 * Math.PI * iAng ) / noiseSquareSize;
  292. const randomVec = new Vector3(
  293. Math.cos( angle ),
  294. Math.sin( angle ),
  295. 0
  296. ).normalize();
  297. data[ inx * 4 ] = ( randomVec.x * 0.5 + 0.5 ) * 255;
  298. data[ inx * 4 + 1 ] = ( randomVec.y * 0.5 + 0.5 ) * 255;
  299. data[ inx * 4 + 2 ] = 127;
  300. data[ inx * 4 + 3 ] = 255;
  301. }
  302. const noiseTexture = new DataTexture( data, noiseSize, noiseSize );
  303. noiseTexture.wrapS = RepeatWrapping;
  304. noiseTexture.wrapT = RepeatWrapping;
  305. noiseTexture.needsUpdate = true;
  306. return noiseTexture;
  307. }
  308. function generateMagicSquare( size ) {
  309. const noiseSize = Math.floor( size ) % 2 === 0 ? Math.floor( size ) + 1 : Math.floor( size );
  310. const noiseSquareSize = noiseSize * noiseSize;
  311. const magicSquare = Array( noiseSquareSize ).fill( 0 );
  312. let i = Math.floor( noiseSize / 2 );
  313. let j = noiseSize - 1;
  314. for ( let num = 1; num <= noiseSquareSize; ) {
  315. if ( i === - 1 && j === noiseSize ) {
  316. j = noiseSize - 2;
  317. i = 0;
  318. } else {
  319. if ( j === noiseSize ) {
  320. j = 0;
  321. }
  322. if ( i < 0 ) {
  323. i = noiseSize - 1;
  324. }
  325. }
  326. if ( magicSquare[ i * noiseSize + j ] !== 0 ) {
  327. j -= 2;
  328. i ++;
  329. continue;
  330. } else {
  331. magicSquare[ i * noiseSize + j ] = num ++;
  332. }
  333. j ++;
  334. i --;
  335. }
  336. return magicSquare;
  337. }
// Public API: the AO shader, debug/blend helpers, and the noise-texture generator.
export { generateMagicSquareNoise, GTAOShader, GTAODepthShader, GTAOBlendShader };