// PMREMGenerator.js

/**
 * @author Emmett Lalish / elalish
 *
 * This class generates a Prefiltered, Mipmapped Radiance Environment Map
 * (PMREM) from a cubeMap environment texture. This allows different levels of
 * blur to be quickly accessed based on material roughness. It is packed into a
 * special CubeUV format that allows us to perform custom interpolation so that
 * we can support nonlinear formats such as RGBE. Unlike a traditional mipmap
 * chain, it only goes down to the LOD_MIN level (defined below), and then
 * creates extra, even more heavily filtered 'mips' at the same LOD_MIN
 * resolution, associated with higher roughness levels. In this way we maintain
 * resolution to smoothly interpolate diffuse lighting while limiting sampling
 * computation.
 */
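// Usage sketch (illustrative; `renderer`, `backgroundScene` and `material` are
// placeholder names): the generator wraps an existing WebGLRenderer and returns
// a render target whose texture can be assigned as an environment map.
//
//   var pmremGenerator = new THREE.PMREMGenerator( renderer );
//   var envTarget = pmremGenerator.fromScene( backgroundScene );
//   material.envMap = envTarget.texture;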
THREE.PMREMGenerator = ( function () {

	var LOD_MIN = 4;
	var LOD_MAX = 8;
	var SIZE_MAX = Math.pow( 2, LOD_MAX );
	// The standard deviations (radians) associated with the extra mips. These are
	// chosen to approximate a Trowbridge-Reitz distribution function times the
	// geometric shadowing function. These sigma values squared must match the
	// variance #defines in cube_uv_reflection_fragment.glsl.js.
	var EXTRA_LOD_SIGMA = [ 0.125, 0.215, 0.35, 0.446, 0.526, 0.582 ];
	var TOTAL_LODS = LOD_MAX - LOD_MIN + 1 + EXTRA_LOD_SIGMA.length;
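	// For reference: with LOD_MAX = 8 and LOD_MIN = 4 this gives SIZE_MAX = 2^8 = 256
	// texels per face at the top level, and TOTAL_LODS = ( 8 - 4 + 1 ) + 6 = 11.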
	// The maximum length of the blur for loop, chosen to equal the number needed
	// for GENERATED_SIGMA. Smaller _sigmas will use fewer samples and exit early,
	// but not recompile the shader.
	var MAX_SAMPLES = 20;

	var ENCODINGS = {
		[ THREE.LinearEncoding ]: 0,
		[ THREE.sRGBEncoding ]: 1,
		[ THREE.RGBEEncoding ]: 2,
		[ THREE.RGBM7Encoding ]: 3,
		[ THREE.RGBM16Encoding ]: 4,
		[ THREE.RGBDEncoding ]: 5,
		[ THREE.GammaEncoding ]: 6
	};

	var _flatCamera = new THREE.OrthographicCamera();
	var _blurMaterial = _getShader( MAX_SAMPLES );
	var { _lodPlanes, _sizeLods, _sigmas } = _createPlanes();
	var _pingPongRenderTarget = null;
	var _renderer = null;

	// Golden Ratio
	var PHI = ( 1 + Math.sqrt( 5 ) ) / 2;
	var INV_PHI = 1 / PHI;

	// Vertices of a dodecahedron (except the opposites, which represent the
	// same axis), used as axis directions evenly spread on a sphere.
	var _axisDirections = [
		new THREE.Vector3( 1, 1, 1 ),
		new THREE.Vector3( - 1, 1, 1 ),
		new THREE.Vector3( 1, 1, - 1 ),
		new THREE.Vector3( - 1, 1, - 1 ),
		new THREE.Vector3( 0, PHI, INV_PHI ),
		new THREE.Vector3( 0, PHI, - INV_PHI ),
		new THREE.Vector3( INV_PHI, 0, PHI ),
		new THREE.Vector3( - INV_PHI, 0, PHI ),
		new THREE.Vector3( PHI, INV_PHI, 0 ),
		new THREE.Vector3( - PHI, INV_PHI, 0 ) ];

	var PMREMGenerator = function ( renderer ) {

		_renderer = renderer;

	};
	PMREMGenerator.prototype = {

		constructor: PMREMGenerator,

		/**
		 * Generates a PMREM from a supplied Scene, which can be faster than using an
		 * image if networking bandwidth is low. Optional sigma specifies a blur radius
		 * in radians to be applied to the scene before PMREM generation. Optional near
		 * and far planes ensure the scene is rendered in its entirety (the cubeCamera
		 * is placed at the origin).
		 */
		fromScene: function ( scene, sigma = 0, near = 0.1, far = 100 ) {

			var cubeUVRenderTarget = _allocateTargets();
			_sceneToCubeUV( scene, near, far, cubeUVRenderTarget );

			if ( sigma > 0 ) {

				_blur( cubeUVRenderTarget, 0, 0, sigma );

			}

			_applyPMREM( cubeUVRenderTarget );
			_cleanup();

			return cubeUVRenderTarget;

		},
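		// Example sketch (placeholder names): pre-blur a procedurally built room
		// scene by 0.04 radians before PMREM generation.
		//
		//   var envTarget = pmremGenerator.fromScene( roomScene, 0.04, 0.1, 100 );
		//   material.envMap = envTarget.texture;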
		/**
		 * Generates a PMREM from an equirectangular texture, which can be either LDR
		 * (RGBFormat) or HDR (RGBEFormat).
		 */
		fromEquirectangular: function ( equirectangular ) {

			equirectangular.magFilter = THREE.NearestFilter;
			equirectangular.minFilter = THREE.NearestFilter;
			equirectangular.generateMipmaps = false;

			var cubeUVRenderTarget = _allocateTargets( equirectangular );
			_equirectangularToCubeUV( equirectangular, cubeUVRenderTarget );
			_applyPMREM( cubeUVRenderTarget );
			_cleanup();

			return cubeUVRenderTarget;

		},

	};
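	// Example sketch, assuming THREE.RGBELoader from the three.js examples is
	// available (the path is a placeholder): generate a PMREM from an HDR
	// equirectangular image.
	//
	//   new THREE.RGBELoader().load( 'path/to/environment.hdr', function ( hdrTexture ) {
	//
	//   	var envTarget = pmremGenerator.fromEquirectangular( hdrTexture );
	//   	material.envMap = envTarget.texture;
	//
	//   } );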
	function _createPlanes() {

		var _lodPlanes = [];
		var _sizeLods = [];
		var _sigmas = [];

		var lod = LOD_MAX;

		for ( var i = 0; i < TOTAL_LODS; i ++ ) {

			var sizeLod = Math.pow( 2, lod );
			_sizeLods.push( sizeLod );
			var sigma = 1.0 / sizeLod;

			if ( i > LOD_MAX - LOD_MIN ) {

				sigma = EXTRA_LOD_SIGMA[ i - LOD_MAX + LOD_MIN - 1 ];

			} else if ( i == 0 ) {

				sigma = 0;

			}

			_sigmas.push( sigma );

			var texelSize = 1.0 / ( sizeLod - 1 );
			var min = - texelSize / 2;
			var max = 1 + texelSize / 2;
			var uv1 = [ min, min, max, min, max, max, min, min, max, max, min, max ];

			var cubeFaces = 6;
			var vertices = 6;
			var positionSize = 3;
			var uvSize = 2;
			var faceIndexSize = 1;

			var position = new Float32Array( positionSize * vertices * cubeFaces );
			var uv = new Float32Array( uvSize * vertices * cubeFaces );
			var faceIndex = new Float32Array( faceIndexSize * vertices * cubeFaces );

			for ( var face = 0; face < cubeFaces; face ++ ) {

				// Lay the six face quads out in clip space as two rows of three:
				// faces 0-2 fill the bottom half, faces 3-5 the top half.
				var x = ( face % 3 ) * 2 / 3 - 1;
				var y = face > 2 ? 0 : - 1;
				var coordinates = [
					[ x, y, 0 ],
					[ x + 2 / 3, y, 0 ],
					[ x + 2 / 3, y + 1, 0 ],
					[ x, y, 0 ],
					[ x + 2 / 3, y + 1, 0 ],
					[ x, y + 1, 0 ]
				];
				position.set( [].concat( ...coordinates ),
					positionSize * vertices * face );
				uv.set( uv1, uvSize * vertices * face );
				var fill = [ face, face, face, face, face, face ];
				faceIndex.set( fill, faceIndexSize * vertices * face );

			}

			var planes = new THREE.BufferGeometry();
			planes.addAttribute(
				'position', new THREE.BufferAttribute( position, positionSize ) );
			planes.addAttribute( 'uv', new THREE.BufferAttribute( uv, uvSize ) );
			planes.addAttribute(
				'faceIndex', new THREE.BufferAttribute( faceIndex, faceIndexSize ) );
			_lodPlanes.push( planes );

			if ( lod > LOD_MIN ) {

				lod --;

			}

		}

		return { _lodPlanes, _sizeLods, _sigmas };

	}
	function _allocateTargets( equirectangular ) {

		var params = {
			magFilter: THREE.NearestFilter,
			minFilter: THREE.NearestFilter,
			generateMipmaps: false,
			type: equirectangular ? equirectangular.type : THREE.UnsignedByteType,
			format: equirectangular ? equirectangular.format : THREE.RGBEFormat,
			encoding: equirectangular ? equirectangular.encoding : THREE.RGBEEncoding,
			depthBuffer: false,
			stencilBuffer: false
		};

		var cubeUVRenderTarget = _createRenderTarget(
			{ ...params, depthBuffer: ( equirectangular ? false : true ) } );
		_pingPongRenderTarget = _createRenderTarget( params );

		return cubeUVRenderTarget;

	}
	function _cleanup() {

		_pingPongRenderTarget.dispose();
		_renderer.setRenderTarget( null );

		// WebGLRenderer.getSize() expects a target vector.
		var size = _renderer.getSize( new THREE.Vector2() );
		_renderer.setViewport( 0, 0, size.x, size.y );

	}
	function _sceneToCubeUV( scene, near, far, cubeUVRenderTarget ) {

		var fov = 90;
		var aspect = 1;
		var cubeCamera = new THREE.PerspectiveCamera( fov, aspect, near, far );
		var upSign = [ 1, 1, 1, 1, - 1, 1 ];
		var forwardSign = [ 1, 1, - 1, - 1, - 1, 1 ];

		var gammaOutput = _renderer.gammaOutput;
		var toneMapping = _renderer.toneMapping;
		var toneMappingExposure = _renderer.toneMappingExposure;

		_renderer.toneMapping = THREE.LinearToneMapping;
		_renderer.toneMappingExposure = 1.0;
		_renderer.gammaOutput = false;
		scene.scale.z *= - 1;

		_renderer.setRenderTarget( cubeUVRenderTarget );

		for ( var i = 0; i < 6; i ++ ) {

			var col = i % 3;

			if ( col == 0 ) {

				cubeCamera.up.set( 0, upSign[ i ], 0 );
				cubeCamera.lookAt( forwardSign[ i ], 0, 0 );

			} else if ( col == 1 ) {

				cubeCamera.up.set( 0, 0, upSign[ i ] );
				cubeCamera.lookAt( 0, forwardSign[ i ], 0 );

			} else {

				cubeCamera.up.set( 0, upSign[ i ], 0 );
				cubeCamera.lookAt( 0, 0, forwardSign[ i ] );

			}

			_setViewport(
				col * SIZE_MAX, i > 2 ? SIZE_MAX : 0, SIZE_MAX, SIZE_MAX );
			_renderer.render( scene, cubeCamera );

		}

		_renderer.toneMapping = toneMapping;
		_renderer.toneMappingExposure = toneMappingExposure;
		_renderer.gammaOutput = gammaOutput;

		scene.scale.z *= - 1;

	}
	function _equirectangularToCubeUV( equirectangular, cubeUVRenderTarget ) {

		var scene = new THREE.Scene();
		scene.add( new THREE.Mesh( _lodPlanes[ 0 ], _blurMaterial ) );
		var uniforms = _blurMaterial.uniforms;

		uniforms[ 'envMap' ].value = equirectangular;
		uniforms[ 'copyEquirectangular' ].value = true;
		uniforms[ 'texelSize' ].value.set(
			1.0 / equirectangular.image.width, 1.0 / equirectangular.image.height );
		uniforms[ 'inputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];
		uniforms[ 'outputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];

		_renderer.setRenderTarget( cubeUVRenderTarget );
		_setViewport( 0, 0, 3 * SIZE_MAX, 2 * SIZE_MAX );
		_renderer.render( scene, _flatCamera );

	}
	function _createRenderTarget( params ) {

		var cubeUVRenderTarget =
			new THREE.WebGLRenderTarget( 3 * SIZE_MAX, 3 * SIZE_MAX, params );
		cubeUVRenderTarget.texture.mapping = THREE.CubeUVReflectionMapping;
		cubeUVRenderTarget.texture.name = 'PMREM.cubeUv';

		return cubeUVRenderTarget;

	}

	function _setViewport( x, y, width, height ) {

		var dpr = _renderer.getPixelRatio();
		_renderer.setViewport( x / dpr, y / dpr, width / dpr, height / dpr );

	}
	function _applyPMREM( cubeUVRenderTarget ) {

		var autoClear = _renderer.autoClear;
		_renderer.autoClear = false;

		for ( var i = 1; i < TOTAL_LODS; i ++ ) {

			var sigma = Math.sqrt(
				_sigmas[ i ] * _sigmas[ i ] -
				_sigmas[ i - 1 ] * _sigmas[ i - 1 ] );
			var poleAxis =
				_axisDirections[ ( i - 1 ) % _axisDirections.length ];
			_blur( cubeUVRenderTarget, i - 1, i, sigma, poleAxis );

		}

		_renderer.autoClear = autoClear;

	}
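	// Note on the incremental sigma in _applyPMREM above: convolving two Gaussian
	// blurs adds their variances, sigmaTotal^2 = sigmaA^2 + sigmaB^2. Blurring LOD
	// i - 1 by sqrt( sigma_i^2 - sigma_(i-1)^2 ) therefore yields a total blur of
	// sigma_i relative to the unblurred input.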
	/**
	 * This is a two-pass Gaussian blur for a cubemap. Normally this is done
	 * vertically and horizontally, but this breaks down on a cube. Here we apply
	 * the blur latitudinally (around the poles), and then longitudinally (towards
	 * the poles) to approximate the orthogonally-separable blur. It is least
	 * accurate at the poles, but still does a decent job.
	 */
	function _blur( cubeUVRenderTarget, lodIn, lodOut, sigma, poleAxis ) {

		_halfBlur(
			cubeUVRenderTarget,
			_pingPongRenderTarget,
			lodIn,
			lodOut,
			sigma,
			'latitudinal',
			poleAxis );

		_halfBlur(
			_pingPongRenderTarget,
			cubeUVRenderTarget,
			lodOut,
			lodOut,
			sigma,
			'longitudinal',
			poleAxis );

	}
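	// Each half pass uses the full sigma rather than sigma / sqrt( 2 ), because a
	// separable 2D Gaussian is the product of two 1D Gaussians with the same
	// sigma: G( x, y; sigma ) = G( x; sigma ) * G( y; sigma ).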
	function _halfBlur( targetIn, targetOut, lodIn, lodOut, sigmaRadians, direction, poleAxis ) {

		if ( direction !== 'latitudinal' && direction !== 'longitudinal' ) {

			console.error(
				'blur direction must be either latitudinal or longitudinal!' );

		}

		// Number of standard deviations at which to cut off the discrete approximation.
		var STANDARD_DEVIATIONS = 3;

		var blurScene = new THREE.Scene();
		blurScene.add( new THREE.Mesh( _lodPlanes[ lodOut ], _blurMaterial ) );
		var blurUniforms = _blurMaterial.uniforms;

		var pixels = _sizeLods[ lodIn ] - 1;
		var radiansPerPixel = isFinite( sigmaRadians ) ?
			Math.PI / ( 2 * pixels ) :
			2 * Math.PI / ( 2 * MAX_SAMPLES - 1 );
		var sigmaPixels = sigmaRadians / radiansPerPixel;
		var samples = isFinite( sigmaRadians ) ?
			1 + Math.floor( STANDARD_DEVIATIONS * sigmaPixels ) :
			MAX_SAMPLES;

		if ( samples > MAX_SAMPLES ) {

			console.warn( `sigmaRadians, ${sigmaRadians}, is too large and will clip, as it requested ${samples} samples when the maximum is set to ${MAX_SAMPLES}` );

		}

		var weights = [];
		var sum = 0;

		for ( var i = 0; i < MAX_SAMPLES; ++ i ) {

			var x = i / sigmaPixels;
			var weight = Math.exp( - x * x / 2 );
			weights.push( weight );

			if ( i == 0 ) {

				sum += weight;

			} else if ( i < samples ) {

				sum += 2 * weight;

			}

		}

		weights = weights.map( w => w / sum );

		blurUniforms[ 'envMap' ].value = targetIn.texture;
		blurUniforms[ 'copyEquirectangular' ].value = false;
		blurUniforms[ 'samples' ].value = samples;
		blurUniforms[ 'weights' ].value = weights;
		blurUniforms[ 'latitudinal' ].value = direction === 'latitudinal';

		if ( poleAxis ) {

			blurUniforms[ 'poleAxis' ].value = poleAxis;

		}

		blurUniforms[ 'dTheta' ].value = radiansPerPixel;
		blurUniforms[ 'mipInt' ].value = LOD_MAX - lodIn;
		blurUniforms[ 'inputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];
		blurUniforms[ 'outputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];

		var outputSize = _sizeLods[ lodOut ];
		var x = 3 * Math.max( 0, SIZE_MAX - 2 * outputSize );
		var y = ( lodOut === 0 ? 0 : 2 * SIZE_MAX ) +
			2 * outputSize *
			( lodOut > LOD_MAX - LOD_MIN ? lodOut - LOD_MAX + LOD_MIN : 0 );

		_renderer.setRenderTarget( targetOut );
		_setViewport( x, y, 3 * outputSize, 2 * outputSize );
		_renderer.render( blurScene, _flatCamera );

	}
	function _getShader( maxSamples ) {

		var weights = new Float32Array( maxSamples );
		var texelSize = new THREE.Vector2( 1, 1 );
		var poleAxis = new THREE.Vector3( 0, 1, 0 );

		var shaderMaterial = new THREE.RawShaderMaterial( {

			defines: { 'n': maxSamples },

			uniforms: {
				'envMap': { value: null },
				'copyEquirectangular': { value: false },
				'texelSize': { value: texelSize },
				'samples': { value: 1 },
				'weights': { value: weights },
				'latitudinal': { value: false },
				'dTheta': { value: 0 },
				'mipInt': { value: 0 },
				'poleAxis': { value: poleAxis },
				'inputEncoding': { value: ENCODINGS[ THREE.LinearEncoding ] },
				'outputEncoding': { value: ENCODINGS[ THREE.LinearEncoding ] }
			},
			vertexShader: `
				precision mediump float;
				precision mediump int;

				attribute vec3 position;
				attribute vec2 uv;
				attribute float faceIndex;

				varying vec2 vUv;
				varying float vFaceIndex;

				void main() {
					vUv = uv;
					vFaceIndex = faceIndex;
					gl_Position = vec4( position, 1.0 );
				}
			`,
			fragmentShader: `
				precision mediump float;
				precision mediump int;

				varying vec2 vUv;
				varying float vFaceIndex;

				uniform sampler2D envMap;
				uniform bool copyEquirectangular;
				uniform vec2 texelSize;
				uniform int samples;
				uniform float weights[n];
				uniform bool latitudinal;
				uniform float dTheta;
				uniform float mipInt;
				uniform vec3 poleAxis;
				uniform int inputEncoding;
				uniform int outputEncoding;

				#include <encodings_pars_fragment>

				vec4 inputTexelToLinear( vec4 value ) {
					if ( inputEncoding == 0 ) {
						return value;
					} else if ( inputEncoding == 1 ) {
						return sRGBToLinear( value );
					} else if ( inputEncoding == 2 ) {
						return RGBEToLinear( value );
					} else if ( inputEncoding == 3 ) {
						return RGBMToLinear( value, 7.0 );
					} else if ( inputEncoding == 4 ) {
						return RGBMToLinear( value, 16.0 );
					} else if ( inputEncoding == 5 ) {
						return RGBDToLinear( value, 256.0 );
					} else {
						return GammaToLinear( value, 2.2 );
					}
				}

				vec4 linearToOutputTexel( vec4 value ) {
					if ( outputEncoding == 0 ) {
						return value;
					} else if ( outputEncoding == 1 ) {
						return LinearTosRGB( value );
					} else if ( outputEncoding == 2 ) {
						return LinearToRGBE( value );
					} else if ( outputEncoding == 3 ) {
						return LinearToRGBM( value, 7.0 );
					} else if ( outputEncoding == 4 ) {
						return LinearToRGBM( value, 16.0 );
					} else if ( outputEncoding == 5 ) {
						return LinearToRGBD( value, 256.0 );
					} else {
						return LinearToGamma( value, 2.2 );
					}
				}

				vec4 envMapTexelToLinear( vec4 color ) {
					return inputTexelToLinear( color );
				}

				#define ENVMAP_TYPE_CUBE_UV
				#include <cube_uv_reflection_fragment>
				#define RECIPROCAL_PI 0.31830988618
				#define RECIPROCAL_PI2 0.15915494

				void main() {
					gl_FragColor = vec4( 0.0 );
					vec3 outputDirection = getDirection( vUv, vFaceIndex );

					if ( copyEquirectangular ) {
						vec3 direction = normalize( outputDirection );
						vec2 uv;
						uv.y = asin( clamp( direction.y, -1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
						uv.x = atan( direction.z, direction.x ) * RECIPROCAL_PI2 + 0.5;
						// Manual bilinear filtering of the equirectangular source (its
						// filters are set to NearestFilter): sample the four nearest
						// texels and mix them in linear space.
						vec2 f = fract( uv / texelSize - 0.5 );
						uv -= f * texelSize;
						vec3 tl = envMapTexelToLinear( texture2D( envMap, uv ) ).rgb;
						uv.x += texelSize.x;
						vec3 tr = envMapTexelToLinear( texture2D( envMap, uv ) ).rgb;
						uv.y += texelSize.y;
						vec3 br = envMapTexelToLinear( texture2D( envMap, uv ) ).rgb;
						uv.x -= texelSize.x;
						vec3 bl = envMapTexelToLinear( texture2D( envMap, uv ) ).rgb;
						vec3 tm = mix( tl, tr, f.x );
						vec3 bm = mix( bl, br, f.x );
						gl_FragColor.rgb = mix( tm, bm, f.y );
					} else {
						for ( int i = 0; i < n; i++ ) {
							if ( i >= samples )
								break;
							for ( int dir = -1; dir < 2; dir += 2 ) {
								if ( i == 0 && dir == 1 )
									continue;
								vec3 axis = latitudinal ? poleAxis : cross( poleAxis, outputDirection );
								if ( all( equal( axis, vec3( 0.0 ) ) ) )
									axis = cross( vec3( 0.0, 1.0, 0.0 ), outputDirection );
								axis = normalize( axis );
								float theta = dTheta * float( dir * i );
								float cosTheta = cos( theta );
								// Rodrigues' axis-angle rotation
								vec3 sampleDirection = outputDirection * cosTheta
									+ cross( axis, outputDirection ) * sin( theta )
									+ axis * dot( axis, outputDirection ) * ( 1.0 - cosTheta );
								gl_FragColor.rgb +=
									weights[i] * bilinearCubeUV( envMap, sampleDirection, mipInt );
							}
						}
					}

					gl_FragColor = linearToOutputTexel( gl_FragColor );
				}
			`,
			blending: THREE.NoBlending,
			depthTest: false,
			depthWrite: false

		} );

		shaderMaterial.type = 'SphericalGaussianBlur';

		return shaderMaterial;

	}

	return PMREMGenerator;

} )();