/**
 * @author Emmett Lalish / elalish
 *
 * This class generates a Prefiltered, Mipmapped Radiance Environment Map
 * (PMREM) from a cubeMap environment texture. This allows different levels of
 * blur to be quickly accessed based on material roughness. It is packed into a
 * special CubeUV format that allows us to perform custom interpolation so that
 * we can support nonlinear formats such as RGBE. Unlike a traditional mipmap
 * chain, it only goes down to the LOD_MIN level (defined below), and then
 * creates extra even more filtered 'mips' at the same LOD_MIN resolution,
 * associated with higher roughness levels. In this way we maintain resolution
 * to smoothly interpolate diffuse lighting while limiting sampling computation.
 */
THREE.PMREMGenerator = ( function () {

	const LOD_MIN = 4;
	const LOD_MAX = 8;
	const SIZE_MAX = Math.pow( 2, LOD_MAX );
	// The standard deviations (radians) associated with the extra mips. These are
	// chosen to approximate a Trowbridge-Reitz distribution function times the
	// geometric shadowing function.
	const EXTRA_LOD_SIGMA = [ 0.125, 0.215, 0.35, 0.446, 0.526, 0.582 ];
	const TOTAL_LODS = LOD_MAX - LOD_MIN + 1 + EXTRA_LOD_SIGMA.length;
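	// With the defaults above this gives ( 8 - 4 ) + 1 + 6 = 11 total LOD levels:
	// five mip levels from 256x256 down to 16x16, plus six extra 16x16 levels for
	// the EXTRA_LOD_SIGMA roughness values.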
	// The maximum length of the blur for loop, chosen to equal the number needed
	// for GENERATED_SIGMA. Smaller _sigmas will use fewer samples and exit early,
	// but not recompile the shader.
	const MAX_SAMPLES = 20;

	const ENCODINGS = {
		[ THREE.LinearEncoding ]: 0,
		[ THREE.sRGBEncoding ]: 1,
		[ THREE.RGBEEncoding ]: 2,
		[ THREE.RGBM7Encoding ]: 3,
		[ THREE.RGBM16Encoding ]: 4,
		[ THREE.RGBDEncoding ]: 5,
		[ THREE.GammaEncoding ]: 6
	};
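	// These integer codes are passed to the blur shader as its 'inputEncoding' and
	// 'outputEncoding' uniforms, where they select the matching decode/encode
	// branches in inputTexelToLinear() and linearToOutputTexel() below.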
	var _flatCamera = new THREE.OrthographicCamera();
	var _blurMaterial = _getShader( MAX_SAMPLES );
	var { _lodPlanes, _sizeLods, _sigmas } = _createPlanes();
	var _pingPongRenderTarget = null;
	// Set by the PMREMGenerator constructor and used by the private helper
	// functions below, which are plain functions and therefore have no `this`.
	var _renderer = null;
	// Golden Ratio
	const PHI = ( 1 + Math.sqrt( 5 ) ) / 2;
	const INV_PHI = 1 / PHI;

	// Vertices of a dodecahedron (except the opposites, which represent the
	// same axis), used as axis directions evenly spread on a sphere.
	var _axisDirections = [
		new THREE.Vector3( 1, 1, 1 ),
		new THREE.Vector3( - 1, 1, 1 ),
		new THREE.Vector3( 1, 1, - 1 ),
		new THREE.Vector3( - 1, 1, - 1 ),
		new THREE.Vector3( 0, PHI, INV_PHI ),
		new THREE.Vector3( 0, PHI, - INV_PHI ),
		new THREE.Vector3( INV_PHI, 0, PHI ),
		new THREE.Vector3( - INV_PHI, 0, PHI ),
		new THREE.Vector3( PHI, INV_PHI, 0 ),
		new THREE.Vector3( - PHI, INV_PHI, 0 ) ];
	var PMREMGenerator = function ( renderer ) {

		this.renderer = renderer;
		// Keep a module-scoped reference for the private helper functions.
		_renderer = renderer;

	};
	PMREMGenerator.prototype = {

		constructor: PMREMGenerator,

		/**
		 * Generates a PMREM from a supplied Scene, which can be faster than using an
		 * image if networking bandwidth is low. Optional sigma specifies a blur radius
		 * in radians to be applied to the scene before PMREM generation. Optional near
		 * and far planes ensure the scene is rendered in its entirety (the cubeCamera
		 * is placed at the origin).
		 */
		fromScene: function ( scene, sigma = 0, near = 0.1, far = 100 ) {

			const cubeUVRenderTarget = _allocateTargets();
			_sceneToCubeUV( scene, near, far, cubeUVRenderTarget );
			if ( sigma > 0 ) {

				_blur( cubeUVRenderTarget, 0, 0, sigma );

			}
			_applyPMREM( cubeUVRenderTarget );
			_pingPongRenderTarget.dispose();
			return cubeUVRenderTarget;

		},

		/**
		 * Generates a PMREM from an equirectangular texture, which can be either LDR
		 * (RGBFormat) or HDR (RGBEFormat).
		 */
		fromEquirectangular: function ( equirectangular ) {

			equirectangular.magFilter = THREE.NearestFilter;
			equirectangular.minFilter = THREE.NearestFilter;
			equirectangular.generateMipmaps = false;
			const cubeUVRenderTarget = _allocateTargets( equirectangular );
			_equirectangularToCubeUV( equirectangular, cubeUVRenderTarget );
			_applyPMREM( cubeUVRenderTarget );
			_pingPongRenderTarget.dispose();
			return cubeUVRenderTarget;

		},

	};
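	// Example usage (a minimal sketch; assumes an existing THREE.WebGLRenderer
	// called `renderer` and an equirectangular HDR texture called `hdrTexture`,
	// e.g. one loaded with THREE.RGBELoader):
	//
	//   var pmremGenerator = new THREE.PMREMGenerator( renderer );
	//   var envRenderTarget = pmremGenerator.fromEquirectangular( hdrTexture );
	//   material.envMap = envRenderTarget.texture; // uses CubeUVReflectionMapping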
	function _createPlanes() {

		var _lodPlanes = [];
		var _sizeLods = [];
		var _sigmas = [];

		let lod = LOD_MAX;
		for ( let i = 0; i < TOTAL_LODS; i ++ ) {

			const sizeLod = Math.pow( 2, lod );
			_sizeLods.push( sizeLod );
			let sigma = 1.0 / sizeLod;
			if ( i > LOD_MAX - LOD_MIN ) {

				sigma = EXTRA_LOD_SIGMA[ i - LOD_MAX + LOD_MIN - 1 ];

			} else if ( i == 0 ) {

				sigma = 0;

			}
			_sigmas.push( sigma );

			const texelSize = 1.0 / ( sizeLod - 1 );
			const min = - texelSize / 2;
			const max = 1 + texelSize / 2;
			const uv1 = [ min, min, max, min, max, max, min, min, max, max, min, max ];

			const cubeFaces = 6;
			const vertices = 6;
			const positionSize = 3;
			const uvSize = 2;
			const faceIndexSize = 1;

			const position = new Float32Array( positionSize * vertices * cubeFaces );
			const uv = new Float32Array( uvSize * vertices * cubeFaces );
			const faceIndex = new Float32Array( faceIndexSize * vertices * cubeFaces );

			for ( let face = 0; face < cubeFaces; face ++ ) {

				const x = ( face % 3 ) * 2 / 3 - 1;
				const y = face > 2 ? 0 : - 1;
				const coordinates = [
					[ x, y, 0 ],
					[ x + 2 / 3, y, 0 ],
					[ x + 2 / 3, y + 1, 0 ],
					[ x, y, 0 ],
					[ x + 2 / 3, y + 1, 0 ],
					[ x, y + 1, 0 ]
				];
				// Flatten the nested vertex coordinates before writing them into the
				// Float32Array (Array has no static concat() method).
				position.set( [].concat( ...coordinates ),
					positionSize * vertices * face );
				uv.set( uv1, uvSize * vertices * face );
				const fill = [ face, face, face, face, face, face ];
				faceIndex.set( fill, faceIndexSize * vertices * face );

			}

			const planes = new THREE.BufferGeometry();
			planes.addAttribute(
				'position', new THREE.BufferAttribute( position, positionSize ) );
			planes.addAttribute( 'uv', new THREE.BufferAttribute( uv, uvSize ) );
			planes.addAttribute(
				'faceIndex', new THREE.BufferAttribute( faceIndex, faceIndexSize ) );
			_lodPlanes.push( planes );

			if ( lod > LOD_MIN ) {

				lod --;

			}

		}

		return { _lodPlanes, _sizeLods, _sigmas };

	}
	function _allocateTargets( equirectangular ) {

		const params = {
			magFilter: THREE.NearestFilter,
			minFilter: THREE.NearestFilter,
			generateMipmaps: false,
			type: equirectangular ? equirectangular.type : THREE.UnsignedByteType,
			format: equirectangular ? equirectangular.format : THREE.RGBEFormat,
			encoding: equirectangular ? equirectangular.encoding : THREE.RGBEEncoding,
			depthBuffer: false,
			stencilBuffer: false
		};
		const cubeUVRenderTarget = _createRenderTarget(
			{ ...params, depthBuffer: ( equirectangular ? false : true ) } );
		_pingPongRenderTarget = _createRenderTarget( params );
		return cubeUVRenderTarget;

	}
	function _sceneToCubeUV(
		scene, near, far,
		cubeUVRenderTarget ) {

		const fov = 90;
		const aspect = 1;
		const cubeCamera = new THREE.PerspectiveCamera( fov, aspect, near, far );
		const upSign = [ 1, 1, 1, 1, - 1, 1 ];
		const forwardSign = [ 1, 1, - 1, - 1, - 1, 1 ];

		const gammaOutput = _renderer.gammaOutput;
		const toneMapping = _renderer.toneMapping;
		const toneMappingExposure = _renderer.toneMappingExposure;

		_renderer.toneMapping = THREE.LinearToneMapping;
		_renderer.toneMappingExposure = 1.0;
		_renderer.gammaOutput = false;
		scene.scale.z *= - 1;

		_renderer.setRenderTarget( cubeUVRenderTarget );
		for ( let i = 0; i < 6; i ++ ) {

			const col = i % 3;
			if ( col == 0 ) {

				cubeCamera.up.set( 0, upSign[ i ], 0 );
				cubeCamera.lookAt( forwardSign[ i ], 0, 0 );

			} else if ( col == 1 ) {

				cubeCamera.up.set( 0, 0, upSign[ i ] );
				cubeCamera.lookAt( 0, forwardSign[ i ], 0 );

			} else {

				cubeCamera.up.set( 0, upSign[ i ], 0 );
				cubeCamera.lookAt( 0, 0, forwardSign[ i ] );

			}
			_setViewport(
				col * SIZE_MAX, i > 2 ? SIZE_MAX : 0, SIZE_MAX, SIZE_MAX );
			_renderer.render( scene, cubeCamera );

		}

		_renderer.toneMapping = toneMapping;
		_renderer.toneMappingExposure = toneMappingExposure;
		_renderer.gammaOutput = gammaOutput;
		scene.scale.z *= - 1;

	}
	function _equirectangularToCubeUV(
		equirectangular, cubeUVRenderTarget ) {

		const scene = new THREE.Scene();
		scene.add( new THREE.Mesh( _lodPlanes[ 0 ], _blurMaterial ) );
		const uniforms = _blurMaterial.uniforms;

		uniforms[ 'envMap' ].value = equirectangular;
		uniforms[ 'copyEquirectangular' ].value = true;
		uniforms[ 'texelSize' ].value.set(
			1.0 / equirectangular.image.width, 1.0 / equirectangular.image.height );
		uniforms[ 'inputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];
		uniforms[ 'outputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];

		_renderer.setRenderTarget( cubeUVRenderTarget );
		_setViewport( 0, 0, 3 * SIZE_MAX, 2 * SIZE_MAX );
		_renderer.render( scene, _flatCamera );

	}
	function _createRenderTarget( params ) {

		const cubeUVRenderTarget =
			new THREE.WebGLRenderTarget( 3 * SIZE_MAX, 3 * SIZE_MAX, params );
		cubeUVRenderTarget.texture.mapping = THREE.CubeUVReflectionMapping;
		cubeUVRenderTarget.texture.name = 'PMREM.cubeUv';
		return cubeUVRenderTarget;

	}
	function _setViewport( x, y, width, height ) {

		const dpr = _renderer.getPixelRatio();
		_renderer.setViewport( x / dpr, y / dpr, width / dpr, height / dpr );

	}
	function _applyPMREM( cubeUVRenderTarget ) {

		for ( let i = 1; i < TOTAL_LODS; i ++ ) {

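			// Gaussian blurs compose by adding variances, so an image that has already
			// been blurred by _sigmas[ i - 1 ] only needs an incremental blur of
			// sqrt( sigma_i^2 - sigma_(i-1)^2 ) to reach a total blur of _sigmas[ i ].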
			const sigma = Math.sqrt(
				_sigmas[ i ] * _sigmas[ i ] -
				_sigmas[ i - 1 ] * _sigmas[ i - 1 ] );
			const poleAxis =
				_axisDirections[ ( i - 1 ) % _axisDirections.length ];
			_blur( cubeUVRenderTarget, i - 1, i, sigma, poleAxis );

		}

	}
	/**
	 * This is a two-pass Gaussian blur for a cubemap. Normally this is done
	 * vertically and horizontally, but this breaks down on a cube. Here we apply
	 * the blur latitudinally (around the poles), and then longitudinally (towards
	 * the poles) to approximate the orthogonally-separable blur. It is least
	 * accurate at the poles, but still does a decent job.
	 */
	function _blur(
		cubeUVRenderTarget, lodIn, lodOut,
		sigma, poleAxis ) {

		_halfBlur(
			cubeUVRenderTarget,
			_pingPongRenderTarget,
			lodIn,
			lodOut,
			sigma,
			'latitudinal',
			poleAxis );

		_halfBlur(
			_pingPongRenderTarget,
			cubeUVRenderTarget,
			lodOut,
			lodOut,
			sigma,
			'longitudinal',
			poleAxis );

	}
	function _halfBlur(
		targetIn, targetOut, lodIn,
		lodOut, sigmaRadians, direction,
		poleAxis ) {

		if ( direction !== 'latitudinal' && direction !== 'longitudinal' ) {

			console.error(
				'blur direction must be either latitudinal or longitudinal!' );

		}

		// Number of standard deviations at which to cut off the discrete approximation.
		const STANDARD_DEVIATIONS = 3;

		const blurScene = new THREE.Scene();
		blurScene.add( new THREE.Mesh( _lodPlanes[ lodOut ], _blurMaterial ) );
		const blurUniforms = _blurMaterial.uniforms;

		const pixels = _sizeLods[ lodIn ] - 1;
		const radiansPerPixel = isFinite( sigmaRadians ) ? Math.PI / ( 2 * pixels ) : 2 * Math.PI / ( 2 * MAX_SAMPLES - 1 );
		const sigmaPixels = sigmaRadians / radiansPerPixel;
		const samples = isFinite( sigmaRadians ) ? 1 + Math.floor( STANDARD_DEVIATIONS * sigmaPixels ) : MAX_SAMPLES;

		if ( samples > MAX_SAMPLES ) {

			console.warn( `sigmaRadians, ${
				sigmaRadians}, is too large and will clip, as it requested ${
				samples} samples when the maximum is set to ${MAX_SAMPLES}` );

		}
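		// Build a discrete, normalized Gaussian kernel. weights[ 0 ] is the center
		// tap; every other tap is applied twice by the shader (once per direction),
		// which is why it contributes 2 * weight to the normalization sum.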
		let weights = [];
		let sum = 0;
		for ( let i = 0; i < MAX_SAMPLES; ++ i ) {

			const x = i / sigmaPixels;
			const weight = Math.exp( - x * x / 2 );
			weights.push( weight );
			if ( i == 0 ) {

				sum += weight;

			} else if ( i < samples ) {

				sum += 2 * weight;

			}

		}
		weights = weights.map( w => w / sum );

		blurUniforms[ 'envMap' ].value = targetIn.texture;
		blurUniforms[ 'copyEquirectangular' ].value = false;
		blurUniforms[ 'samples' ].value = samples;
		blurUniforms[ 'weights' ].value = weights;
		blurUniforms[ 'latitudinal' ].value = direction === 'latitudinal';
		if ( poleAxis ) {

			blurUniforms[ 'poleAxis' ].value = poleAxis;

		}
		blurUniforms[ 'dTheta' ].value = radiansPerPixel;
		blurUniforms[ 'mipInt' ].value = LOD_MAX - lodIn;
		blurUniforms[ 'inputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];
		blurUniforms[ 'outputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];

		const outputSize = _sizeLods[ lodOut ];
		const x = 3 * Math.max( 0, SIZE_MAX - 2 * outputSize );
		const y = ( lodOut === 0 ? 0 : 2 * SIZE_MAX ) +
			2 * outputSize *
			( lodOut > LOD_MAX - LOD_MIN ? lodOut - LOD_MAX + LOD_MIN : 0 );
		_renderer.autoClear = false;
		_renderer.setRenderTarget( targetOut );
		_setViewport( x, y, 3 * outputSize, 2 * outputSize );
		_renderer.render( blurScene, _flatCamera );

	}
	function _getShader( maxSamples ) {

		const weights = new Float32Array( maxSamples );
		const texelSize = new THREE.Vector2( 1, 1 );
		const poleAxis = new THREE.Vector3( 0, 1, 0 );

		var shaderMaterial = new THREE.RawShaderMaterial( {

			defines: { 'n': maxSamples },

			uniforms: {
				'envMap': { value: null },
				'copyEquirectangular': { value: false },
				'texelSize': { value: texelSize },
				'samples': { value: 1 },
				'weights': { value: weights },
				'latitudinal': { value: false },
				'dTheta': { value: 0 },
				'mipInt': { value: 0 },
				'poleAxis': { value: poleAxis },
				'inputEncoding': { value: ENCODINGS[ THREE.LinearEncoding ] },
				'outputEncoding': { value: ENCODINGS[ THREE.LinearEncoding ] }
			},

			vertexShader: `
precision mediump float;
precision mediump int;
attribute vec3 position;
attribute vec2 uv;
attribute float faceIndex;
varying vec2 vUv;
varying float vFaceIndex;
void main() {
	vUv = uv;
	vFaceIndex = faceIndex;
	gl_Position = vec4( position, 1.0 );
}
`,

			fragmentShader: `
precision mediump float;
precision mediump int;
varying vec2 vUv;
varying float vFaceIndex;
uniform sampler2D envMap;
uniform bool copyEquirectangular;
uniform vec2 texelSize;
uniform int samples;
uniform float weights[n];
uniform bool latitudinal;
uniform float dTheta;
uniform float mipInt;
uniform vec3 poleAxis;
uniform int inputEncoding;
uniform int outputEncoding;

#include <encodings_pars_fragment>

vec4 inputTexelToLinear(vec4 value){
	if(inputEncoding == 0){
		return value;
	}else if(inputEncoding == 1){
		return sRGBToLinear(value);
	}else if(inputEncoding == 2){
		return RGBEToLinear(value);
	}else if(inputEncoding == 3){
		return RGBMToLinear(value, 7.0);
	}else if(inputEncoding == 4){
		return RGBMToLinear(value, 16.0);
	}else if(inputEncoding == 5){
		return RGBDToLinear(value, 256.0);
	}else{
		return GammaToLinear(value, 2.2);
	}
}

vec4 linearToOutputTexel(vec4 value){
	if(outputEncoding == 0){
		return value;
	}else if(outputEncoding == 1){
		return LinearTosRGB(value);
	}else if(outputEncoding == 2){
		return LinearToRGBE(value);
	}else if(outputEncoding == 3){
		return LinearToRGBM(value, 7.0);
	}else if(outputEncoding == 4){
		return LinearToRGBM(value, 16.0);
	}else if(outputEncoding == 5){
		return LinearToRGBD(value, 256.0);
	}else{
		return LinearToGamma(value, 2.2);
	}
}

vec4 envMapTexelToLinear(vec4 color) {
	return inputTexelToLinear(color);
}

#define ENVMAP_TYPE_CUBE_UV
#include <cube_uv_reflection_fragment>
#define RECIPROCAL_PI 0.31830988618
#define RECIPROCAL_PI2 0.15915494

void main() {
	gl_FragColor = vec4(0.0);
	vec3 outputDirection = getDirection(vUv, vFaceIndex);
	if (copyEquirectangular) {
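		// Convert the output direction to equirectangular UVs and do a manual
		// bilinear fetch of the four surrounding texels: the source texture is
		// sampled with NearestFilter, so filtering happens here, after each texel
		// has been decoded to linear (important for nonlinear encodings like RGBE).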
		vec3 direction = normalize(outputDirection);
		vec2 uv;
		uv.y = asin(clamp(direction.y, -1.0, 1.0)) * RECIPROCAL_PI + 0.5;
		uv.x = atan(direction.z, direction.x) * RECIPROCAL_PI2 + 0.5;
		vec2 f = fract(uv / texelSize - 0.5);
		uv -= f * texelSize;
		vec3 tl = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
		uv.x += texelSize.x;
		vec3 tr = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
		uv.y += texelSize.y;
		vec3 br = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
		uv.x -= texelSize.x;
		vec3 bl = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
		vec3 tm = mix(tl, tr, f.x);
		vec3 bm = mix(bl, br, f.x);
		gl_FragColor.rgb = mix(tm, bm, f.y);
	} else {
		for (int i = 0; i < n; i++) {
			if (i >= samples)
				break;
			for (int dir = -1; dir < 2; dir += 2) {
				if (i == 0 && dir == 1)
					continue;
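				// Latitudinal passes rotate samples around poleAxis itself; longitudinal
				// passes rotate around an axis perpendicular to both poleAxis and the
				// output direction (with a fallback axis when the two are parallel).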
				vec3 axis = latitudinal ? poleAxis : cross(poleAxis, outputDirection);
				if (all(equal(axis, vec3(0.0))))
					axis = cross(vec3(0.0, 1.0, 0.0), outputDirection);
				axis = normalize(axis);
				float theta = dTheta * float(dir * i);
				float cosTheta = cos(theta);
				// Rodrigues' axis-angle rotation
				vec3 sampleDirection = outputDirection * cosTheta
					+ cross(axis, outputDirection) * sin(theta)
					+ axis * dot(axis, outputDirection) * (1.0 - cosTheta);
				gl_FragColor.rgb +=
					weights[i] * bilinearCubeUV(envMap, sampleDirection, mipInt);
			}
		}
	}
	gl_FragColor = linearToOutputTexel(gl_FragColor);
}
`,

			blending: THREE.NoBlending,
			depthTest: false,
			depthWrite: false

		} );

		shaderMaterial.type = 'SphericalGaussianBlur';

		return shaderMaterial;

	}

	return PMREMGenerator;

} )();