// PMREMGenerator.js

/**
 * @author Emmett Lalish / elalish
 *
 * This class generates a Prefiltered, Mipmapped Radiance Environment Map
 * (PMREM) from a cubeMap environment texture. This allows different levels of
 * blur to be quickly accessed based on material roughness. It is packed into a
 * special CubeUV format that allows us to perform custom interpolation so that
 * we can support nonlinear formats such as RGBE. Unlike a traditional mipmap
 * chain, it only goes down to the LOD_MIN level (below), and then creates extra
 * even more filtered 'mips' at the same LOD_MIN resolution, associated with
 * higher roughness levels. In this way we maintain resolution to smoothly
 * interpolate diffuse lighting while limiting sampling computation.
 */
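
/**
 * Typical usage (a minimal sketch, not part of this file; the loader, material
 * and texture names below are only assumptions for illustration):
 *
 *   var generator = new THREE.PMREMGenerator( renderer );
 *
 *   new THREE.RGBELoader().load( 'environment.hdr', function ( hdrTexture ) {
 *
 *     var cubeUVRenderTarget = generator.fromEquirectangular( hdrTexture );
 *     material.envMap = cubeUVRenderTarget.texture;
 *     material.needsUpdate = true;
 *
 *     // The caller owns the returned render target and should dispose of it
 *     // (cubeUVRenderTarget.dispose()) once it is no longer needed.
 *
 *   } );
 */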
THREE.PMREMGenerator = ( function () {

	const LOD_MIN = 4;
	const LOD_MAX = 8;
	const SIZE_MAX = Math.pow( 2, LOD_MAX );
	// The standard deviations (radians) associated with the extra mips. These are
	// chosen to approximate a Trowbridge-Reitz distribution function times the
	// geometric shadowing function.
	const EXTRA_LOD_SIGMA = [ 0.125, 0.215, 0.35, 0.446, 0.526, 0.582 ];
	const TOTAL_LODS = LOD_MAX - LOD_MIN + 1 + EXTRA_LOD_SIGMA.length;
	// The maximum length of the blur for loop. Smaller sigmas will use fewer
	// samples and exit early, but not recompile the shader.
	const MAX_SAMPLES = 20;
	// Integer codes passed to the shader below; they must stay in sync with the
	// if/else chains in inputTexelToLinear() and linearToOutputTexel().
	const ENCODINGS = {
		[ THREE.LinearEncoding ]: 0,
		[ THREE.sRGBEncoding ]: 1,
		[ THREE.RGBEEncoding ]: 2,
		[ THREE.RGBM7Encoding ]: 3,
		[ THREE.RGBM16Encoding ]: 4,
		[ THREE.RGBDEncoding ]: 5,
		[ THREE.GammaEncoding ]: 6
	};

	var _renderer = null; // set by the constructor, shared by the helper functions below
	var _flatCamera = new THREE.OrthographicCamera();
	var _blurMaterial = getShader( MAX_SAMPLES );
	var { _lodPlanes, _sizeLods, _sigmas } = createPlanes();
	var _pingPongRenderTarget = null;
	// Golden Ratio
	const PHI = ( 1 + Math.sqrt( 5 ) ) / 2;
	const INV_PHI = 1 / PHI;
	// Vertices of a dodecahedron (except the opposites, which represent the
	// same axis), used as axis directions evenly spread on a sphere.
	var _axisDirections = [
		new THREE.Vector3( 1, 1, 1 ),
		new THREE.Vector3( - 1, 1, 1 ),
		new THREE.Vector3( 1, 1, - 1 ),
		new THREE.Vector3( - 1, 1, - 1 ),
		new THREE.Vector3( 0, PHI, INV_PHI ),
		new THREE.Vector3( 0, PHI, - INV_PHI ),
		new THREE.Vector3( INV_PHI, 0, PHI ),
		new THREE.Vector3( - INV_PHI, 0, PHI ),
		new THREE.Vector3( PHI, INV_PHI, 0 ),
		new THREE.Vector3( - PHI, INV_PHI, 0 ) ];
	var PMREMGenerator = function ( renderer ) {

		this.renderer = renderer;
		// Also keep the renderer at module scope so the helper functions below can
		// use it without relying on their 'this' binding.
		_renderer = renderer;

	};
	PMREMGenerator.prototype = {

		constructor: PMREMGenerator,

		/**
		 * Generates a PMREM from a supplied Scene, which can be faster than using an
		 * image if networking bandwidth is low. Optional near and far planes ensure
		 * the scene is rendered in its entirety (the cubeCamera is placed at the
		 * origin).
		 */
		fromScene: function ( scene, near = 0.1, far = 100 ) {

			const dpr = this.renderer.getPixelRatio();
			this.renderer.setPixelRatio( 1 );

			const cubeUVRenderTarget = allocateTargets();
			sceneToCubeUV( scene, near, far, cubeUVRenderTarget );
			applyPMREM( cubeUVRenderTarget );
			_pingPongRenderTarget.dispose();

			this.renderer.setPixelRatio( dpr );
			return cubeUVRenderTarget;

		},

		/**
		 * Generates a PMREM from an equirectangular texture, which can be either LDR
		 * (RGBFormat) or HDR (RGBEFormat).
		 */
		fromEquirectangular: function ( equirectangular ) {

			const dpr = this.renderer.getPixelRatio();
			this.renderer.setPixelRatio( 1 );

			equirectangular.magFilter = THREE.NearestFilter;
			equirectangular.minFilter = THREE.NearestFilter;
			equirectangular.generateMipmaps = false;

			const cubeUVRenderTarget = allocateTargets( equirectangular );
			equirectangularToCubeUV( equirectangular, cubeUVRenderTarget );
			applyPMREM( cubeUVRenderTarget );
			_pingPongRenderTarget.dispose();

			this.renderer.setPixelRatio( dpr );
			return cubeUVRenderTarget;

		},

	};
	function createPlanes() {

		var _lodPlanes = [];
		var _sizeLods = [];
		var _sigmas = [];

		let lod = LOD_MAX;
		for ( let i = 0; i < TOTAL_LODS; i ++ ) {

			const sizeLod = Math.pow( 2, lod );
			_sizeLods.push( sizeLod );
			let sigma = 1.0 / sizeLod;
			if ( i > LOD_MAX - LOD_MIN ) {

				sigma = EXTRA_LOD_SIGMA[ i - LOD_MAX + LOD_MIN - 1 ];

			} else if ( i == 0 ) {

				sigma = 0;

			}
			_sigmas.push( sigma );

			const texelSize = 1.0 / ( sizeLod - 1 );
			const min = - texelSize / 2;
			const max = 1 + texelSize / 2;
			const uv1 = [ min, min, max, min, max, max, min, min, max, max, min, max ];

			const cubeFaces = 6;
			const vertices = 6;
			const positionSize = 3;
			const uvSize = 2;
			const faceIndexSize = 1;

			const position = new Float32Array( positionSize * vertices * cubeFaces );
			const uv = new Float32Array( uvSize * vertices * cubeFaces );
			const faceIndex = new Float32Array( faceIndexSize * vertices * cubeFaces );

			for ( let face = 0; face < cubeFaces; face ++ ) {

				// Lay the six faces out as quads in a 3 x 2 grid of NDC space:
				// faces 0-2 fill the bottom row, faces 3-5 the top row.
				const x = ( face % 3 ) * 2 / 3 - 1;
				const y = face > 2 ? 0 : - 1;
				const coordinates = [
					[ x, y, 0 ],
					[ x + 2 / 3, y, 0 ],
					[ x + 2 / 3, y + 1, 0 ],
					[ x, y, 0 ],
					[ x + 2 / 3, y + 1, 0 ],
					[ x, y + 1, 0 ]
				];
				position.set( [].concat( ...coordinates ),
					positionSize * vertices * face );
				uv.set( uv1, uvSize * vertices * face );
				const fill = [ face, face, face, face, face, face ];
				faceIndex.set( fill, faceIndexSize * vertices * face );

			}

			const planes = new THREE.BufferGeometry();
			planes.addAttribute(
				'position', new THREE.BufferAttribute( position, positionSize ) );
			planes.addAttribute( 'uv', new THREE.BufferAttribute( uv, uvSize ) );
			planes.addAttribute(
				'faceIndex', new THREE.BufferAttribute( faceIndex, faceIndexSize ) );
			_lodPlanes.push( planes );

			if ( lod > LOD_MIN ) {

				lod --;

			}

		}

		return { _lodPlanes, _sizeLods, _sigmas };

	}
	function allocateTargets( equirectangular ) {

		const params = {
			magFilter: THREE.NearestFilter,
			minFilter: THREE.NearestFilter,
			generateMipmaps: false,
			type: equirectangular ? equirectangular.type : THREE.UnsignedByteType,
			format: equirectangular ? equirectangular.format : THREE.RGBEFormat,
			encoding: equirectangular ? equirectangular.encoding : THREE.RGBEEncoding,
			depthBuffer: false,
			stencilBuffer: false
		};
		const cubeUVRenderTarget = createRenderTarget(
			{ ...params, depthBuffer: ( equirectangular ? false : true ) } );
		_pingPongRenderTarget = createRenderTarget( params );
		return cubeUVRenderTarget;

	}
	function sceneToCubeUV( scene, near, far, cubeUVRenderTarget ) {

		const fov = 90;
		const aspect = 1;
		const cubeCamera = new THREE.PerspectiveCamera( fov, aspect, near, far );
		const upSign = [ 1, 1, 1, 1, - 1, 1 ];
		const forwardSign = [ 1, 1, - 1, - 1, - 1, 1 ];

		const gammaOutput = _renderer.gammaOutput;
		const toneMapping = _renderer.toneMapping;
		const toneMappingExposure = _renderer.toneMappingExposure;

		_renderer.toneMapping = THREE.LinearToneMapping;
		_renderer.toneMappingExposure = 1.0;
		_renderer.gammaOutput = false;
		scene.scale.z *= - 1;

		_renderer.setRenderTarget( cubeUVRenderTarget );
		for ( let i = 0; i < 6; i ++ ) {

			const col = i % 3;
			if ( col == 0 ) {

				cubeCamera.up.set( 0, upSign[ i ], 0 );
				cubeCamera.lookAt( forwardSign[ i ], 0, 0 );

			} else if ( col == 1 ) {

				cubeCamera.up.set( 0, 0, upSign[ i ] );
				cubeCamera.lookAt( 0, forwardSign[ i ], 0 );

			} else {

				cubeCamera.up.set( 0, upSign[ i ], 0 );
				cubeCamera.lookAt( 0, 0, forwardSign[ i ] );

			}
			_renderer.setViewport(
				col * SIZE_MAX, i > 2 ? SIZE_MAX : 0, SIZE_MAX, SIZE_MAX );
			_renderer.render( scene, cubeCamera );

		}

		// Restore the renderer state changed above.
		_renderer.toneMapping = toneMapping;
		_renderer.toneMappingExposure = toneMappingExposure;
		_renderer.gammaOutput = gammaOutput;
		scene.scale.z *= - 1;

	}
	function equirectangularToCubeUV( equirectangular, cubeUVRenderTarget ) {

		const scene = new THREE.Scene();
		scene.add( new THREE.Mesh( _lodPlanes[ 0 ], _blurMaterial ) );
		const uniforms = _blurMaterial.uniforms;

		uniforms[ 'envMap' ].value = equirectangular;
		uniforms[ 'copyEquirectangular' ].value = true;
		uniforms[ 'texelSize' ].value.set(
			1.0 / equirectangular.image.width, 1.0 / equirectangular.image.height );
		uniforms[ 'inputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];
		uniforms[ 'outputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];

		_renderer.setRenderTarget( cubeUVRenderTarget );
		_renderer.setViewport( 0, 0, 3 * SIZE_MAX, 2 * SIZE_MAX );
		_renderer.render( scene, _flatCamera );

	}
	function createRenderTarget( params ) {

		const cubeUVRenderTarget =
			new THREE.WebGLRenderTarget( 3 * SIZE_MAX, 3 * SIZE_MAX, params );
		cubeUVRenderTarget.texture.mapping = THREE.CubeUVReflectionMapping;
		cubeUVRenderTarget.texture.name = 'PMREM.cubeUv';
		return cubeUVRenderTarget;

	}
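
	// Layout of the flat CubeUV atlas allocated above (3 * SIZE_MAX wide, 3 * SIZE_MAX
	// tall), as implied by the plane positions in createPlanes() and the viewport math
	// in sceneToCubeUV() and halfBlur(): the six LOD_MAX faces fill the bottom two
	// thirds as a 3 x 2 grid, each smaller LOD is written into the top third from left
	// to right, and the extra LOD_MIN-sized mips stack upwards at the right edge.
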
	function applyPMREM( cubeUVRenderTarget ) {

		for ( let i = 1; i < TOTAL_LODS; i ++ ) {

			const sigma = Math.sqrt(
				_sigmas[ i ] * _sigmas[ i ] -
				_sigmas[ i - 1 ] * _sigmas[ i - 1 ] );
			const poleAxis =
				_axisDirections[ ( i - 1 ) % _axisDirections.length ];
			blur( cubeUVRenderTarget, i - 1, i, sigma, poleAxis );

		}

	}
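
	// Note on the incremental sigma above: successive Gaussian blurs compose by adding
	// variances, so blurring level i - 1 (already blurred to _sigmas[ i - 1 ]) by
	// sqrt( _sigmas[ i ]^2 - _sigmas[ i - 1 ]^2 ) leaves level i with a total blur of _sigmas[ i ].
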
	/**
	 * This is a two-pass Gaussian blur for a cubemap. Normally this is done
	 * vertically and horizontally, but this breaks down on a cube. Here we apply
	 * the blur latitudinally (around the poles), and then longitudinally (towards
	 * the poles) to approximate the orthogonally-separable blur. It is least
	 * accurate at the poles, but still does a decent job.
	 */
	function blur( cubeUVRenderTarget, lodIn, lodOut, sigma, poleAxis ) {

		halfBlur(
			cubeUVRenderTarget,
			_pingPongRenderTarget,
			lodIn,
			lodOut,
			sigma,
			'latitudinal',
			poleAxis );

		halfBlur(
			_pingPongRenderTarget,
			cubeUVRenderTarget,
			lodOut,
			lodOut,
			sigma,
			'longitudinal',
			poleAxis );

	}
	function halfBlur( targetIn, targetOut, lodIn, lodOut, sigmaRadians, direction, poleAxis ) {

		if ( direction !== 'latitudinal' && direction !== 'longitudinal' ) {

			console.error(
				'blur direction must be either latitudinal or longitudinal!' );

		}

		// Number of standard deviations at which to cut off the discrete approximation.
		const STANDARD_DEVIATIONS = 3;

		const blurScene = new THREE.Scene();
		blurScene.add( new THREE.Mesh( _lodPlanes[ lodOut ], _blurMaterial ) );
		const blurUniforms = _blurMaterial.uniforms;

		const pixels = _sizeLods[ lodIn ] - 1;
		const radiansPerPixel = isFinite( sigmaRadians ) ?
			Math.PI / ( 2 * pixels ) :
			2 * Math.PI / ( 2 * MAX_SAMPLES - 1 );
		const sigmaPixels = sigmaRadians / radiansPerPixel;
		const samples = isFinite( sigmaRadians ) ?
			1 + Math.floor( STANDARD_DEVIATIONS * sigmaPixels ) :
			MAX_SAMPLES;

		if ( samples > MAX_SAMPLES ) {

			console.warn( `sigmaRadians, ${sigmaRadians}, is too large and will clip, as it requested ${samples} samples when the maximum is set to ${MAX_SAMPLES}` );

		}

		let weights = [];
		let sum = 0;
		for ( let i = 0; i < MAX_SAMPLES; ++ i ) {

			const x = i / sigmaPixels;
			const weight = Math.exp( - x * x / 2 );
			weights.push( weight );

			if ( i == 0 ) {

				sum += weight;

			} else if ( i < samples ) {

				sum += 2 * weight;

			}

		}
		weights = weights.map( w => w / sum );

		blurUniforms[ 'envMap' ].value = targetIn.texture;
		blurUniforms[ 'copyEquirectangular' ].value = false;
		blurUniforms[ 'samples' ].value = samples;
		blurUniforms[ 'weights' ].value = weights;
		blurUniforms[ 'latitudinal' ].value = direction === 'latitudinal';
		if ( poleAxis ) {

			blurUniforms[ 'poleAxis' ].value = poleAxis;

		}
		blurUniforms[ 'dTheta' ].value = radiansPerPixel;
		blurUniforms[ 'mipInt' ].value = LOD_MAX - lodIn;
		blurUniforms[ 'inputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];
		blurUniforms[ 'outputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];

		const outputSize = _sizeLods[ lodOut ];
		const x = 3 * Math.max( 0, SIZE_MAX - 2 * outputSize );
		const y = ( lodOut === 0 ? 0 : 2 * SIZE_MAX ) +
			2 * outputSize *
			( lodOut > LOD_MAX - LOD_MIN ? lodOut - LOD_MAX + LOD_MIN : 0 );

		_renderer.autoClear = false;
		_renderer.setRenderTarget( targetOut );
		_renderer.setViewport( x, y, 3 * outputSize, 2 * outputSize );
		_renderer.render( blurScene, _flatCamera );

	}
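
	// The weights built in halfBlur() above form half of a normalized Gaussian kernel:
	// index 0 is the center tap (counted once) and indices 1 .. samples - 1 are mirrored
	// by the dir = -1 / +1 loop in the fragment shader (counted twice). An equivalent
	// standalone sketch of the same computation (the helper name is only illustrative),
	// assuming sigmaPixels and samples as computed above:
	//
	//   function gaussianHalfKernel( sigmaPixels, samples ) {
	//     const weights = [];
	//     let sum = 0;
	//     for ( let i = 0; i < MAX_SAMPLES; i ++ ) {
	//       const w = Math.exp( - ( i * i ) / ( 2 * sigmaPixels * sigmaPixels ) );
	//       weights.push( w );
	//       sum += ( i === 0 ? 1 : ( i < samples ? 2 : 0 ) ) * w;
	//     }
	//     return weights.map( ( w ) => w / sum );
	//   }
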
	function getShader( maxSamples ) {

		const weights = new Float32Array( maxSamples );
		const texelSize = new THREE.Vector2( 1, 1 );
		const poleAxis = new THREE.Vector3( 0, 1, 0 );

		var shaderMaterial = new THREE.RawShaderMaterial( {

			defines: { 'n': maxSamples },

			uniforms: {
				'envMap': { value: null },
				'copyEquirectangular': { value: false },
				'texelSize': { value: texelSize },
				'samples': { value: 1 },
				'weights': { value: weights },
				'latitudinal': { value: false },
				'dTheta': { value: 0 },
				'mipInt': { value: 0 },
				'poleAxis': { value: poleAxis },
				'inputEncoding': { value: ENCODINGS[ THREE.LinearEncoding ] },
				'outputEncoding': { value: ENCODINGS[ THREE.LinearEncoding ] }
			},

			vertexShader: `
precision mediump float;
precision mediump int;
attribute vec3 position;
attribute vec2 uv;
attribute float faceIndex;
varying vec2 vUv;
varying float vFaceIndex;
void main() {
	vUv = uv;
	vFaceIndex = faceIndex;
	gl_Position = vec4( position, 1.0 );
}
`,

			fragmentShader: `
precision mediump float;
precision mediump int;
varying vec2 vUv;
varying float vFaceIndex;
uniform sampler2D envMap;
uniform bool copyEquirectangular;
uniform vec2 texelSize;
uniform int samples;
uniform float weights[n];
uniform bool latitudinal;
uniform float dTheta;
uniform float mipInt;
uniform vec3 poleAxis;
uniform int inputEncoding;
uniform int outputEncoding;

#include <encodings_pars_fragment>

vec4 inputTexelToLinear(vec4 value){
	if(inputEncoding == 0){
		return value;
	}else if(inputEncoding == 1){
		return sRGBToLinear(value);
	}else if(inputEncoding == 2){
		return RGBEToLinear(value);
	}else if(inputEncoding == 3){
		return RGBMToLinear(value, 7.0);
	}else if(inputEncoding == 4){
		return RGBMToLinear(value, 16.0);
	}else if(inputEncoding == 5){
		return RGBDToLinear(value, 256.0);
	}else{
		return GammaToLinear(value, 2.2);
	}
}

vec4 linearToOutputTexel(vec4 value){
	if(outputEncoding == 0){
		return value;
	}else if(outputEncoding == 1){
		return LinearTosRGB(value);
	}else if(outputEncoding == 2){
		return LinearToRGBE(value);
	}else if(outputEncoding == 3){
		return LinearToRGBM(value, 7.0);
	}else if(outputEncoding == 4){
		return LinearToRGBM(value, 16.0);
	}else if(outputEncoding == 5){
		return LinearToRGBD(value, 256.0);
	}else{
		return LinearToGamma(value, 2.2);
	}
}

vec4 envMapTexelToLinear(vec4 color) {
	return inputTexelToLinear(color);
}

#define ENVMAP_TYPE_CUBE_UV
#include <cube_uv_reflection_fragment>

#define RECIPROCAL_PI 0.31830988618
#define RECIPROCAL_PI2 0.15915494

void main() {
	gl_FragColor = vec4(0.0);
	// getDirection() and bilinearCubeUV() come from the cube_uv_reflection_fragment include above.
	vec3 outputDirection = getDirection(vUv, vFaceIndex);
	if (copyEquirectangular) {
		vec3 direction = normalize(outputDirection);
		vec2 uv;
		uv.y = asin(clamp(direction.y, -1.0, 1.0)) * RECIPROCAL_PI + 0.5;
		uv.x = atan(direction.z, direction.x) * RECIPROCAL_PI2 + 0.5;
		// Manual bilinear filtering of the equirectangular source so that nonlinear
		// encodings (for example RGBE) are decoded before interpolation.
		vec2 f = fract(uv / texelSize - 0.5);
		uv -= f * texelSize;
		vec3 tl = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
		uv.x += texelSize.x;
		vec3 tr = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
		uv.y += texelSize.y;
		vec3 br = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
		uv.x -= texelSize.x;
		vec3 bl = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
		vec3 tm = mix(tl, tr, f.x);
		vec3 bm = mix(bl, br, f.x);
		gl_FragColor.rgb = mix(tm, bm, f.y);
	} else {
		for (int i = 0; i < n; i++) {
			if (i >= samples)
				break;
			for (int dir = -1; dir < 2; dir += 2) {
				if (i == 0 && dir == 1)
					continue;
				vec3 axis = latitudinal ? poleAxis : cross(poleAxis, outputDirection);
				if (all(equal(axis, vec3(0.0))))
					axis = cross(vec3(0.0, 1.0, 0.0), outputDirection);
				axis = normalize(axis);
				float theta = dTheta * float(dir * i);
				float cosTheta = cos(theta);
				// Rodrigues' axis-angle rotation
				vec3 sampleDirection = outputDirection * cosTheta
					+ cross(axis, outputDirection) * sin(theta)
					+ axis * dot(axis, outputDirection) * (1.0 - cosTheta);
				gl_FragColor.rgb +=
					weights[i] * bilinearCubeUV(envMap, sampleDirection, mipInt);
			}
		}
	}
	gl_FragColor = linearToOutputTexel(gl_FragColor);
}
`,

			blending: THREE.NoBlending,
			depthTest: false,
			depthWrite: false

		} );

		shaderMaterial.type = 'SphericalGaussianBlur';

		return shaderMaterial;

	}

	return PMREMGenerator;

} )();