
PMREM works

Emmett Lalish, 5 years ago
commit 42bc0a9b9e

File diff suppressed because it is too large
+ 0 - 0
build/three.js


File diff suppressed because it is too large
+ 0 - 1
build/three.min.js


File diff suppressed because it is too large
+ 0 - 0
build/three.module.js


+ 38 - 24
examples/js/pmrem/PMREMGenerator.js

@@ -41,6 +41,7 @@ THREE.PMREMGenerator = ( function () {
 
 	var { _lodPlanes, _sizeLods, _sigmas } = _createPlanes();
 	var _pingPongRenderTarget = null;
+	var _renderer = null;
 
 	// Golden Ratio
 	const PHI = ( 1 + Math.sqrt( 5 ) ) / 2;
@@ -61,7 +62,7 @@ THREE.PMREMGenerator = ( function () {
 
 	var PMREMGenerator = function ( renderer ) {
 
-		this.renderer = renderer;
+		_renderer = renderer;
 
 	};
 
@@ -86,8 +87,8 @@ THREE.PMREMGenerator = ( function () {
 
 			}
 			_applyPMREM( cubeUVRenderTarget );
+			_cleanUp();
 
-			_pingPongRenderTarget.dispose();
 			return cubeUVRenderTarget;
 
 		},
@@ -105,8 +106,8 @@ THREE.PMREMGenerator = ( function () {
 			const cubeUVRenderTarget = _allocateTargets( equirectangular );
 			_equirectangularToCubeUV( equirectangular, cubeUVRenderTarget );
 			_applyPMREM( cubeUVRenderTarget );
+			_cleanUp();
 
-			_pingPongRenderTarget.dispose();
 			return cubeUVRenderTarget;
 
 		},
@@ -163,7 +164,7 @@ THREE.PMREMGenerator = ( function () {
 					[ x + 2 / 3, y + 1, 0 ],
 					[ x, y + 1, 0 ]
 				];
-				position.set( Array.concat( ...coordinates ),
+				position.set( [].concat( ...coordinates ),
 					positionSize * vertices * face );
 				uv.set( uv1, uvSize * vertices * face );
 				const fill = [ face, face, face, face, face, face ];
@@ -208,6 +209,15 @@ THREE.PMREMGenerator = ( function () {
 
 	}
 
+	function _cleanUp() {
+
+		_pingPongRenderTarget.dispose();
+		_renderer.setRenderTarget( null );
+		var size = _renderer.getSize();
+		_renderer.setViewport( 0, 0, size.x, size.y );
+
+	}
+
 	function _sceneToCubeUV(
 		scene, near, far,
 		cubeUVRenderTarget ) {
@@ -218,16 +228,16 @@ THREE.PMREMGenerator = ( function () {
 	  const upSign = [ 1, 1, 1, 1, - 1, 1 ];
 	  const forwardSign = [ 1, 1, - 1, - 1, - 1, 1 ];
 
-	  const gammaOutput = this.renderer.gammaOutput;
-	  const toneMapping = this.renderer.toneMapping;
-	  const toneMappingExposure = this.renderer.toneMappingExposure;
+	  const gammaOutput = _renderer.gammaOutput;
+	  const toneMapping = _renderer.toneMapping;
+	  const toneMappingExposure = _renderer.toneMappingExposure;
 
-	  this.renderer.toneMapping = THREE.LinearToneMapping;
-	  this.renderer.toneMappingExposure = 1.0;
-	  this.renderer.gammaOutput = false;
+	  _renderer.toneMapping = THREE.LinearToneMapping;
+	  _renderer.toneMappingExposure = 1.0;
+	  _renderer.gammaOutput = false;
 	  scene.scale.z *= - 1;
 
-	  this.renderer.setRenderTarget( cubeUVRenderTarget );
+	  _renderer.setRenderTarget( cubeUVRenderTarget );
 	  for ( let i = 0; i < 6; i ++ ) {
 
 			const col = i % 3;
@@ -249,13 +259,13 @@ THREE.PMREMGenerator = ( function () {
 			}
 			_setViewport(
 				col * SIZE_MAX, i > 2 ? SIZE_MAX : 0, SIZE_MAX, SIZE_MAX );
-			this.renderer.render( scene, cubeCamera );
+			_renderer.render( scene, cubeCamera );
 
 		}
 
-	  this.renderer.toneMapping = toneMapping;
-	  this.renderer.toneMappingExposure = toneMappingExposure;
-	  this.renderer.gammaOutput = gammaOutput;
+	  _renderer.toneMapping = toneMapping;
+	  _renderer.toneMappingExposure = toneMappingExposure;
+	  _renderer.gammaOutput = gammaOutput;
 	  scene.scale.z *= - 1;
 
 	}
@@ -274,9 +284,9 @@ THREE.PMREMGenerator = ( function () {
 	  uniforms[ 'inputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];
 	  uniforms[ 'outputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];
 
-	  this.renderer.setRenderTarget( cubeUVRenderTarget );
+	  _renderer.setRenderTarget( cubeUVRenderTarget );
 	  _setViewport( 0, 0, 3 * SIZE_MAX, 2 * SIZE_MAX );
-	  this.renderer.render( scene, _flatCamera );
+	  _renderer.render( scene, _flatCamera );
 
 	}
 
@@ -292,14 +302,17 @@ THREE.PMREMGenerator = ( function () {
 
 	function _setViewport( x, y, width, height ) {
 
-		const dpr = this.threeRenderer.getPixelRatio();
-		this.threeRenderer.setViewport( x / dpr, y / dpr, width / dpr, height / dpr );
+		const dpr = _renderer.getPixelRatio();
+		_renderer.setViewport( x / dpr, y / dpr, width / dpr, height / dpr );
 
 	}
 
 	function _applyPMREM( cubeUVRenderTarget ) {
 
-	  for ( let i = 1; i < TOTAL_LODS; i ++ ) {
+		var autoClear = _renderer.autoClear;
+		_renderer.autoClear = false;
+
+	  	for ( let i = 1; i < TOTAL_LODS; i ++ ) {
 
 			const sigma = Math.sqrt(
 				_sigmas[ i ] * _sigmas[ i ] -
@@ -310,6 +323,8 @@ THREE.PMREMGenerator = ( function () {
 
 		}
 
+		_renderer.autoClear = autoClear;
+
 	}
 
 	/**
@@ -415,11 +430,10 @@ THREE.PMREMGenerator = ( function () {
 		const y = ( lodOut === 0 ? 0 : 2 * SIZE_MAX ) +
 	  2 * outputSize *
 		  ( lodOut > LOD_MAX - LOD_MIN ? lodOut - LOD_MAX + LOD_MIN : 0 );
-		this.renderer.autoClear = false;
 
-		this.renderer.setRenderTarget( targetOut );
+		_renderer.setRenderTarget( targetOut );
 		_setViewport( x, y, 3 * outputSize, 2 * outputSize );
-		this.renderer.render( blurScene, _flatCamera );
+		_renderer.render( blurScene, _flatCamera );
 
 	}
 
@@ -528,7 +542,7 @@ vec4 envMapTexelToLinear(vec4 color) {
 
 void main() {
   gl_FragColor = vec4(0.0);
-  outputDirection = getDirection(vUv, vFaceIndex);
+  vec3 outputDirection = getDirection(vUv, vFaceIndex);
   if (copyEquirectangular) {
     vec3 direction = normalize(outputDirection);
     vec2 uv;

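For context (not part of this diff): the hunks above change the constructor to take a renderer and route all state through the module-level _renderer, with fromScene/fromEquirectangular returning a CubeUV-packed render target. A minimal usage sketch of that new API, assuming an already-created WebGLRenderer named renderer and an equirectangular texture named hdrTexture (both names are placeholders, not from this commit):

	// Usage sketch only; mirrors the API introduced in this commit.
	var pmremGenerator = new THREE.PMREMGenerator( renderer );
	var cubeUVRenderTarget = pmremGenerator.fromEquirectangular( hdrTexture );
	var envMap = cubeUVRenderTarget.texture; // CubeUV-mapped texture, usable as an environment map
	// pmremGenerator.fromScene( scene, 0.04 ) works similarly when starting from a Scene.
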
+ 0 - 11
examples/jsm/pmrem/PMREMCubeUVPacker.d.ts

@@ -1,11 +0,0 @@
-import { CubeTexture, Renderer, WebGLRenderTarget } from '../../../src/Three';
-
-export class PMREMCubeUVPacker {
-
-	CubeUVRenderTarget:WebGLRenderTarget;
-
-	constructor( cubeTextureLods: CubeTexture[] );
-	update( renderer:Renderer ): void;
-	dispose(): void;
-
-}

+ 0 - 253
examples/jsm/pmrem/PMREMCubeUVPacker.js

@@ -1,253 +0,0 @@
-/**
- * @author Prashant Sharma / spidersharma03
- * @author Ben Houston / bhouston, https://clara.io
- *
- * This class takes the cube lods(corresponding to different roughness values), and creates a single cubeUV
- * Texture. The format for a given roughness set of faces is simply::
- * +X+Y+Z
- * -X-Y-Z
- * For every roughness a mip map chain is also saved, which is essential to remove the texture artifacts due to
- * minification.
- * Right now for every face a PlaneMesh is drawn, which leads to a lot of geometry draw calls, but can be replaced
- * later by drawing a single buffer and by sending the appropriate faceIndex via vertex attributes.
- * The arrangement of the faces is fixed, as assuming this arrangement, the sampling function has been written.
- */
-
-import {
-	BackSide,
-	CubeUVReflectionMapping,
-	LinearFilter,
-	LinearToneMapping,
-	Mesh,
-	NoBlending,
-	OrthographicCamera,
-	PlaneBufferGeometry,
-	RGBEEncoding,
-	RGBM16Encoding,
-	Scene,
-	ShaderMaterial,
-	Vector2,
-	Vector3,
-	WebGLRenderTarget
-} from "../../../build/three.module.js";
-
-var PMREMCubeUVPacker = ( function () {
-
-	var camera = new OrthographicCamera();
-	var scene = new Scene();
-	var shader = getShader();
-
-	var PMREMCubeUVPacker = function ( cubeTextureLods ) {
-
-		this.cubeLods = cubeTextureLods;
-		var size = cubeTextureLods[ 0 ].width * 4;
-
-		var sourceTexture = cubeTextureLods[ 0 ].texture;
-		var params = {
-			format: sourceTexture.format,
-			magFilter: sourceTexture.magFilter,
-			minFilter: sourceTexture.minFilter,
-			type: sourceTexture.type,
-			generateMipmaps: sourceTexture.generateMipmaps,
-			anisotropy: sourceTexture.anisotropy,
-			encoding: ( sourceTexture.encoding === RGBEEncoding ) ? RGBM16Encoding : sourceTexture.encoding
-		};
-
-		if ( params.encoding === RGBM16Encoding ) {
-
-			params.magFilter = LinearFilter;
-			params.minFilter = LinearFilter;
-
-		}
-
-		this.CubeUVRenderTarget = new WebGLRenderTarget( size, size, params );
-		this.CubeUVRenderTarget.texture.name = "PMREMCubeUVPacker.cubeUv";
-		this.CubeUVRenderTarget.texture.mapping = CubeUVReflectionMapping;
-
-		this.objects = [];
-
-		var geometry = new PlaneBufferGeometry( 1, 1 );
-
-		var faceOffsets = [];
-		faceOffsets.push( new Vector2( 0, 0 ) );
-		faceOffsets.push( new Vector2( 1, 0 ) );
-		faceOffsets.push( new Vector2( 2, 0 ) );
-		faceOffsets.push( new Vector2( 0, 1 ) );
-		faceOffsets.push( new Vector2( 1, 1 ) );
-		faceOffsets.push( new Vector2( 2, 1 ) );
-
-		var textureResolution = size;
-		size = cubeTextureLods[ 0 ].width;
-
-		var offset2 = 0;
-		var c = 4.0;
-		this.numLods = Math.log( cubeTextureLods[ 0 ].width ) / Math.log( 2 ) - 2; // IE11 doesn't support Math.log2
-		for ( var i = 0; i < this.numLods; i ++ ) {
-
-			var offset1 = ( textureResolution - textureResolution / c ) * 0.5;
-			if ( size > 16 ) c *= 2;
-			var nMips = size > 16 ? 6 : 1;
-			var mipOffsetX = 0;
-			var mipOffsetY = 0;
-			var mipSize = size;
-
-			for ( var j = 0; j < nMips; j ++ ) {
-
-				// Mip Maps
-				for ( var k = 0; k < 6; k ++ ) {
-
-					// 6 Cube Faces
-					var material = shader.clone();
-					material.uniforms[ 'envMap' ].value = this.cubeLods[ i ].texture;
-					material.envMap = this.cubeLods[ i ].texture;
-					material.uniforms[ 'faceIndex' ].value = k;
-					material.uniforms[ 'mapSize' ].value = mipSize;
-
-					var planeMesh = new Mesh( geometry, material );
-					planeMesh.position.x = faceOffsets[ k ].x * mipSize - offset1 + mipOffsetX;
-					planeMesh.position.y = faceOffsets[ k ].y * mipSize - offset1 + offset2 + mipOffsetY;
-					planeMesh.material.side = BackSide;
-					planeMesh.scale.setScalar( mipSize );
-					this.objects.push( planeMesh );
-
-				}
-				mipOffsetY += 1.75 * mipSize;
-				mipOffsetX += 1.25 * mipSize;
-				mipSize /= 2;
-
-			}
-			offset2 += 2 * size;
-			if ( size > 16 ) size /= 2;
-
-		}
-
-	};
-
-	PMREMCubeUVPacker.prototype = {
-
-		constructor: PMREMCubeUVPacker,
-
-		update: function ( renderer ) {
-
-			var size = this.cubeLods[ 0 ].width * 4;
-			// top and bottom are swapped for some reason?
-			camera.left = - size * 0.5;
-			camera.right = size * 0.5;
-			camera.top = - size * 0.5;
-			camera.bottom = size * 0.5;
-			camera.near = 0;
-			camera.far = 1;
-			camera.updateProjectionMatrix();
-
-			for ( var i = 0; i < this.objects.length; i ++ ) {
-
-				scene.add( this.objects[ i ] );
-
-			}
-
-			var gammaInput = renderer.gammaInput;
-			var gammaOutput = renderer.gammaOutput;
-			var toneMapping = renderer.toneMapping;
-			var toneMappingExposure = renderer.toneMappingExposure;
-			var currentRenderTarget = renderer.getRenderTarget();
-
-			renderer.gammaInput = false;
-			renderer.gammaOutput = false;
-			renderer.toneMapping = LinearToneMapping;
-			renderer.toneMappingExposure = 1.0;
-			renderer.setRenderTarget( this.CubeUVRenderTarget );
-			renderer.render( scene, camera );
-
-			renderer.setRenderTarget( currentRenderTarget );
-			renderer.toneMapping = toneMapping;
-			renderer.toneMappingExposure = toneMappingExposure;
-			renderer.gammaInput = gammaInput;
-			renderer.gammaOutput = gammaOutput;
-
-			for ( var i = 0; i < this.objects.length; i ++ ) {
-
-				scene.remove( this.objects[ i ] );
-
-			}
-
-		},
-
-		dispose: function () {
-
-			for ( var i = 0, l = this.objects.length; i < l; i ++ ) {
-
-				this.objects[ i ].material.dispose();
-
-			}
-
-			this.objects[ 0 ].geometry.dispose();
-
-		}
-
-	};
-
-	function getShader() {
-
-		var shaderMaterial = new ShaderMaterial( {
-
-			uniforms: {
-				"faceIndex": { value: 0 },
-				"mapSize": { value: 0 },
-				"envMap": { value: null },
-				"testColor": { value: new Vector3( 1, 1, 1 ) }
-			},
-
-			vertexShader:
-        "precision highp float;\
-        varying vec2 vUv;\
-        void main() {\
-          vUv = uv;\
-          gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\
-        }",
-
-			fragmentShader:
-        "precision highp float;\
-        varying vec2 vUv;\
-        uniform samplerCube envMap;\
-        uniform float mapSize;\
-        uniform vec3 testColor;\
-        uniform int faceIndex;\
-        \
-        void main() {\
-          vec3 sampleDirection;\
-          vec2 uv = vUv;\
-          uv = uv * 2.0 - 1.0;\
-          uv.y *= -1.0;\
-          if(faceIndex == 0) {\
-            sampleDirection = normalize(vec3(1.0, uv.y, -uv.x));\
-          } else if(faceIndex == 1) {\
-            sampleDirection = normalize(vec3(uv.x, 1.0, uv.y));\
-          } else if(faceIndex == 2) {\
-            sampleDirection = normalize(vec3(uv.x, uv.y, 1.0));\
-          } else if(faceIndex == 3) {\
-            sampleDirection = normalize(vec3(-1.0, uv.y, uv.x));\
-          } else if(faceIndex == 4) {\
-            sampleDirection = normalize(vec3(uv.x, -1.0, -uv.y));\
-          } else {\
-            sampleDirection = normalize(vec3(-uv.x, uv.y, -1.0));\
-          }\
-          vec4 color = envMapTexelToLinear( textureCube( envMap, sampleDirection ) );\
-          gl_FragColor = linearToOutputTexel( color );\
-        }",
-
-			blending: NoBlending
-
-		} );
-
-		shaderMaterial.type = 'PMREMCubeUVPacker';
-
-		return shaderMaterial;
-
-	}
-
-
-	return PMREMCubeUVPacker;
-
-} )();
-
-export { PMREMCubeUVPacker };

+ 556 - 237
examples/jsm/pmrem/PMREMGenerator.js

@@ -1,305 +1,624 @@
 /**
- * @author Prashant Sharma / spidersharma03
- * @author Ben Houston / bhouston, https://clara.io
+ * @author Emmett Lalish / elalish
  *
- * To avoid cube map seams, I create an extra pixel around each face. This way when the cube map is
- * sampled by an application later(with a little care by sampling the centre of the texel), the extra 1 border
- *	of pixels makes sure that there is no seams artifacts present. This works perfectly for cubeUV format as
- *	well where the 6 faces can be arranged in any manner whatsoever.
- * Code in the beginning of fragment shader's main function does this job for a given resolution.
- *	Run Scene_PMREM_Test.html in the examples directory to see the sampling from the cube lods generated
- *	by this class.
+ * This class generates a Prefiltered, Mipmapped Radiance Environment Map
+ * (PMREM) from a cubeMap environment texture. This allows different levels of
+ * blur to be quickly accessed based on material roughness. It is packed into a
+ * special CubeUV format that allows us to perform custom interpolation so that
+ * we can support nonlinear formats such as RGBE. Unlike a traditional mipmap
+ * chain, it only goes down to the LOD_MIN level (above), and then creates extra
+ * even more filtered 'mips' at the same LOD_MIN resolution, associated with
+ * higher roughness levels. In this way we maintain resolution to smoothly
+ * interpolate diffuse lighting while limiting sampling computation.
  */
 
 import {
-	DoubleSide,
+	BufferAttribute,
+	BufferGeometry,
+	CubeUVReflectionMapping,
 	GammaEncoding,
 	LinearEncoding,
-	LinearFilter,
 	LinearToneMapping,
 	Mesh,
 	NearestFilter,
 	NoBlending,
 	OrthographicCamera,
-	PlaneBufferGeometry,
+	PerspectiveCamera,
+	RGBDEncoding,
+	RGBEEncoding,
+	RGBEFormat,
+	RGBM16Encoding,
+	RGBM7Encoding,
+	RawShaderMaterial,
 	Scene,
-	ShaderMaterial,
-	WebGLRenderTargetCube,
+	UnsignedByteType,
+	Vector2,
+	Vector3,
+	WebGLRenderTarget,
 	sRGBEncoding
 } from "../../../build/three.module.js";
 
 var PMREMGenerator = ( function () {
 
-	var shader = getShader();
-	var camera = new OrthographicCamera( - 1, 1, 1, - 1, 0.0, 1000 );
-	var scene = new Scene();
-	var planeMesh = new Mesh( new PlaneBufferGeometry( 2, 2, 0 ), shader );
-	planeMesh.material.side = DoubleSide;
-	scene.add( planeMesh );
-	scene.add( camera );
-
-	var PMREMGenerator = function ( sourceTexture, samplesPerLevel, resolution ) {
-
-		this.sourceTexture = sourceTexture;
-		this.resolution = ( resolution !== undefined ) ? resolution : 256; // NODE: 256 is currently hard coded in the glsl code for performance reasons
-		this.samplesPerLevel = ( samplesPerLevel !== undefined ) ? samplesPerLevel : 32;
-
-		var monotonicEncoding = ( this.sourceTexture.encoding === LinearEncoding ) ||
-			( this.sourceTexture.encoding === GammaEncoding ) || ( this.sourceTexture.encoding === sRGBEncoding );
-
-		this.sourceTexture.minFilter = ( monotonicEncoding ) ? LinearFilter : NearestFilter;
-		this.sourceTexture.magFilter = ( monotonicEncoding ) ? LinearFilter : NearestFilter;
-		this.sourceTexture.generateMipmaps = this.sourceTexture.generateMipmaps && monotonicEncoding;
-
-		this.cubeLods = [];
-
-		var size = this.resolution;
-		var params = {
-			format: this.sourceTexture.format,
-			magFilter: this.sourceTexture.magFilter,
-			minFilter: this.sourceTexture.minFilter,
-			type: this.sourceTexture.type,
-			generateMipmaps: this.sourceTexture.generateMipmaps,
-			anisotropy: this.sourceTexture.anisotropy,
-			encoding: this.sourceTexture.encoding
-		};
+	const LOD_MIN = 4;
+	const LOD_MAX = 8;
+	const SIZE_MAX = Math.pow( 2, LOD_MAX );
+	// The standard deviations (radians) associated with the extra mips. These are
+	// chosen to approximate a Trowbridge-Reitz distribution function times the
+	// geometric shadowing function.
+	const EXTRA_LOD_SIGMA = [ 0.125, 0.215, 0.35, 0.446, 0.526, 0.582 ];
+	const TOTAL_LODS = LOD_MAX - LOD_MIN + 1 + EXTRA_LOD_SIGMA.length;
+	// The maximum length of the blur for loop, chosen to equal the number needed
+	// for GENERATED_SIGMA. Smaller _sigmas will use fewer samples and exit early,
+	// but not recompile the shader.
+	const MAX_SAMPLES = 20;
+	const ENCODINGS = {
+		[ LinearEncoding ]: 0,
+		[ sRGBEncoding ]: 1,
+		[ RGBEEncoding ]: 2,
+		[ RGBM7Encoding ]: 3,
+		[ RGBM16Encoding ]: 4,
+		[ RGBDEncoding ]: 5,
+		[ GammaEncoding ]: 6
+	  };
+
+	var _flatCamera = new OrthographicCamera();
+	var _blurMaterial = _getShader( MAX_SAMPLES );
+
+	var { _lodPlanes, _sizeLods, _sigmas } = _createPlanes();
+	var _pingPongRenderTarget = null;
+	var _renderer = null;
+
+	// Golden Ratio
+	const PHI = ( 1 + Math.sqrt( 5 ) ) / 2;
+	const INV_PHI = 1 / PHI;
+	// Vertices of a dodecahedron (except the opposites, which represent the
+	// same axis), used as axis directions evenly spread on a sphere.
+	var _axisDirections = [
+		new Vector3( 1, 1, 1 ),
+		new Vector3( - 1, 1, 1 ),
+		new Vector3( 1, 1, - 1 ),
+		new Vector3( - 1, 1, - 1 ),
+		new Vector3( 0, PHI, INV_PHI ),
+		new Vector3( 0, PHI, - INV_PHI ),
+		new Vector3( INV_PHI, 0, PHI ),
+		new Vector3( - INV_PHI, 0, PHI ),
+		new Vector3( PHI, INV_PHI, 0 ),
+		new Vector3( - PHI, INV_PHI, 0 ) ];
+
+	var PMREMGenerator = function ( renderer ) {
+
+		_renderer = renderer;
 
-		// how many LODs fit in the given CubeUV Texture.
-		this.numLods = Math.log( size ) / Math.log( 2 ) - 2; // IE11 doesn't support Math.log2
+	};
 
-		for ( var i = 0; i < this.numLods; i ++ ) {
+	PMREMGenerator.prototype = {
 
-			var renderTarget = new WebGLRenderTargetCube( size, size, params );
-			renderTarget.texture.name = "PMREMGenerator.cube" + i;
-			this.cubeLods.push( renderTarget );
-			size = Math.max( 16, size / 2 );
+		constructor: PMREMGenerator,
 
-		}
+		/**
+		 * Generates a PMREM from a supplied Scene, which can be faster than using an
+		 * image if networking bandwidth is low. Optional sigma specifies a blur radius
+		 * in radians to be applied to the scene before PMREM generation. Optional near
+		 * and far planes ensure the scene is rendered in its entirety (the cubeCamera
+		 * is placed at the origin).
+		 */
+		fromScene: function ( scene, sigma = 0, near = 0.1, far = 100 ) {
 
-	};
+			const cubeUVRenderTarget = _allocateTargets();
+			_sceneToCubeUV( scene, near, far, cubeUVRenderTarget );
+			if ( sigma > 0 ) {
 
-	PMREMGenerator.prototype = {
+				_blur( cubeUVRenderTarget, 0, 0, sigma );
 
-		constructor: PMREMGenerator,
+			}
+			_applyPMREM( cubeUVRenderTarget );
+			_cleanUp();
+
+			return cubeUVRenderTarget;
+
+		},
 
-		/*
-		 * Prashant Sharma / spidersharma03: More thought and work is needed here.
-		 * Right now it's a kind of a hack to use the previously convolved map to convolve the current one.
-		 * I tried to use the original map to convolve all the lods, but for many textures(specially the high frequency)
-		 * even a high number of samples(1024) dosen't lead to satisfactory results.
-		 * By using the previous convolved maps, a lower number of samples are generally sufficient(right now 32, which
-		 * gives okay results unless we see the reflection very carefully, or zoom in too much), however the math
-		 * goes wrong as the distribution function tries to sample a larger area than what it should be. So I simply scaled
-		 * the roughness by 0.9(totally empirical) to try to visually match the original result.
-		 * The condition "if(i <5)" is also an attemt to make the result match the original result.
-		 * This method requires the most amount of thinking I guess. Here is a paper which we could try to implement in future::
-		 * https://developer.nvidia.com/gpugems/GPUGems3/gpugems3_ch20.html
+		/**
+		 * Generates a PMREM from an equirectangular texture, which can be either LDR
+		 * (RGBFormat) or HDR (RGBEFormat).
 		 */
-		update: function ( renderer ) {
+		fromEquirectangular: function ( equirectangular ) {
 
-			// Texture should only be flipped for CubeTexture, not for
-			// a Texture created via WebGLRenderTargetCube.
-			var tFlip = ( this.sourceTexture.isCubeTexture ) ? - 1 : 1;
+			equirectangular.magFilter = NearestFilter;
+			equirectangular.minFilter = NearestFilter;
+			equirectangular.generateMipmaps = false;
 
-			shader.defines[ 'SAMPLES_PER_LEVEL' ] = this.samplesPerLevel;
-			shader.uniforms[ 'faceIndex' ].value = 0;
-			shader.uniforms[ 'envMap' ].value = this.sourceTexture;
-			shader.envMap = this.sourceTexture;
-			shader.needsUpdate = true;
+			const cubeUVRenderTarget = _allocateTargets( equirectangular );
+			_equirectangularToCubeUV( equirectangular, cubeUVRenderTarget );
+			_applyPMREM( cubeUVRenderTarget );
+			_cleanUp();
 
-			var gammaInput = renderer.gammaInput;
-			var gammaOutput = renderer.gammaOutput;
-			var toneMapping = renderer.toneMapping;
-			var toneMappingExposure = renderer.toneMappingExposure;
-			var currentRenderTarget = renderer.getRenderTarget();
+			return cubeUVRenderTarget;
 
-			renderer.toneMapping = LinearToneMapping;
-			renderer.toneMappingExposure = 1.0;
-			renderer.gammaInput = false;
-			renderer.gammaOutput = false;
+		},
+
+	};
+
+	function _createPlanes() {
+
+		var _lodPlanes = [];
+		var _sizeLods = [];
+		var _sigmas = [];
+
+		let lod = LOD_MAX;
+		for ( let i = 0; i < TOTAL_LODS; i ++ ) {
 
-			for ( var i = 0; i < this.numLods; i ++ ) {
+			const sizeLod = Math.pow( 2, lod );
+			_sizeLods.push( sizeLod );
+			let sigma = 1.0 / sizeLod;
+			if ( i > LOD_MAX - LOD_MIN ) {
 
-				var r = i / ( this.numLods - 1 );
-				shader.uniforms[ 'roughness' ].value = r * 0.9; // see comment above, pragmatic choice
-				// Only apply the tFlip for the first LOD
-				shader.uniforms[ 'tFlip' ].value = ( i == 0 ) ? tFlip : 1;
-				var size = this.cubeLods[ i ].width;
-				shader.uniforms[ 'mapSize' ].value = size;
-				this.renderToCubeMapTarget( renderer, this.cubeLods[ i ] );
+				sigma = EXTRA_LOD_SIGMA[ i - LOD_MAX + LOD_MIN - 1 ];
 
-				if ( i < 5 ) shader.uniforms[ 'envMap' ].value = this.cubeLods[ i ].texture;
+			} else if ( i == 0 ) {
+
+				sigma = 0;
 
 			}
+			_sigmas.push( sigma );
+
+			const texelSize = 1.0 / ( sizeLod - 1 );
+			const min = - texelSize / 2;
+			const max = 1 + texelSize / 2;
+			const uv1 = [ min, min, max, min, max, max, min, min, max, max, min, max ];
+
+			const cubeFaces = 6;
+			const vertices = 6;
+			const positionSize = 3;
+			const uvSize = 2;
+			const faceIndexSize = 1;
+
+			const position = new Float32Array( positionSize * vertices * cubeFaces );
+			const uv = new Float32Array( uvSize * vertices * cubeFaces );
+			const faceIndex = new Float32Array( faceIndexSize * vertices * cubeFaces );
+
+			for ( let face = 0; face < cubeFaces; face ++ ) {
+
+				const x = ( face % 3 ) * 2 / 3 - 1;
+				const y = face > 2 ? 0 : - 1;
+				const coordinates = [
+					[ x, y, 0 ],
+					[ x + 2 / 3, y, 0 ],
+					[ x + 2 / 3, y + 1, 0 ],
+					[ x, y, 0 ],
+					[ x + 2 / 3, y + 1, 0 ],
+					[ x, y + 1, 0 ]
+				];
+				position.set( [].concat( ...coordinates ),
+					positionSize * vertices * face );
+				uv.set( uv1, uvSize * vertices * face );
+				const fill = [ face, face, face, face, face, face ];
+				faceIndex.set( fill, faceIndexSize * vertices * face );
 
-			renderer.setRenderTarget( currentRenderTarget );
-			renderer.toneMapping = toneMapping;
-			renderer.toneMappingExposure = toneMappingExposure;
-			renderer.gammaInput = gammaInput;
-			renderer.gammaOutput = gammaOutput;
+			}
+			const planes = new BufferGeometry();
+			planes.addAttribute(
+				'position', new BufferAttribute( position, positionSize ) );
+			planes.addAttribute( 'uv', new BufferAttribute( uv, uvSize ) );
+			planes.addAttribute(
+				'faceIndex', new BufferAttribute( faceIndex, faceIndexSize ) );
+			_lodPlanes.push( planes );
 
-		},
+			if ( lod > LOD_MIN ) {
 
-		renderToCubeMapTarget: function ( renderer, renderTarget ) {
+				lod --;
 
-			for ( var i = 0; i < 6; i ++ ) {
+			}
 
-				this.renderToCubeMapTargetFace( renderer, renderTarget, i );
+		}
+		return { _lodPlanes, _sizeLods, _sigmas };
+
+	}
+
+	function _allocateTargets( equirectangular ) {
+
+		const params = {
+		  magFilter: NearestFilter,
+		  minFilter: NearestFilter,
+		  generateMipmaps: false,
+		  type: equirectangular ? equirectangular.type : UnsignedByteType,
+		  format: equirectangular ? equirectangular.format : RGBEFormat,
+		  encoding: equirectangular ? equirectangular.encoding : RGBEEncoding,
+		  depthBuffer: false,
+		  stencilBuffer: false
+		};
+		const cubeUVRenderTarget = _createRenderTarget(
+			{ ...params, depthBuffer: ( equirectangular ? false : true ) } );
+		_pingPongRenderTarget = _createRenderTarget( params );
+		return cubeUVRenderTarget;
+
+	}
+
+	function _cleanUp() {
+
+		_pingPongRenderTarget.dispose();
+		_renderer.setRenderTarget( null );
+		var size = _renderer.getSize();
+		_renderer.setViewport( 0, 0, size.x, size.y );
+
+	}
+
+	function _sceneToCubeUV(
+		scene, near, far,
+		cubeUVRenderTarget ) {
+
+	  const fov = 90;
+	  const aspect = 1;
+	  const cubeCamera = new PerspectiveCamera( fov, aspect, near, far );
+	  const upSign = [ 1, 1, 1, 1, - 1, 1 ];
+	  const forwardSign = [ 1, 1, - 1, - 1, - 1, 1 ];
+
+	  const gammaOutput = _renderer.gammaOutput;
+	  const toneMapping = _renderer.toneMapping;
+	  const toneMappingExposure = _renderer.toneMappingExposure;
+
+	  _renderer.toneMapping = LinearToneMapping;
+	  _renderer.toneMappingExposure = 1.0;
+	  _renderer.gammaOutput = false;
+	  scene.scale.z *= - 1;
+
+	  _renderer.setRenderTarget( cubeUVRenderTarget );
+	  for ( let i = 0; i < 6; i ++ ) {
+
+			const col = i % 3;
+			if ( col == 0 ) {
+
+		  cubeCamera.up.set( 0, upSign[ i ], 0 );
+		  cubeCamera.lookAt( forwardSign[ i ], 0, 0 );
+
+			} else if ( col == 1 ) {
+
+		  cubeCamera.up.set( 0, 0, upSign[ i ] );
+		  cubeCamera.lookAt( 0, forwardSign[ i ], 0 );
+
+			} else {
+
+		  cubeCamera.up.set( 0, upSign[ i ], 0 );
+		  cubeCamera.lookAt( 0, 0, forwardSign[ i ] );
 
 			}
+			_setViewport(
+				col * SIZE_MAX, i > 2 ? SIZE_MAX : 0, SIZE_MAX, SIZE_MAX );
+			_renderer.render( scene, cubeCamera );
 
-		},
+		}
 
-		renderToCubeMapTargetFace: function ( renderer, renderTarget, faceIndex ) {
+	  _renderer.toneMapping = toneMapping;
+	  _renderer.toneMappingExposure = toneMappingExposure;
+	  _renderer.gammaOutput = gammaOutput;
+	  scene.scale.z *= - 1;
 
-			shader.uniforms[ 'faceIndex' ].value = faceIndex;
-			renderer.setRenderTarget( renderTarget, faceIndex );
-			renderer.clear();
-			renderer.render( scene, camera );
+	}
 
-		},
+	function _equirectangularToCubeUV(
+		equirectangular, cubeUVRenderTarget ) {
 
-		dispose: function () {
+	  const scene = new Scene();
+	  scene.add( new Mesh( _lodPlanes[ 0 ], _blurMaterial ) );
+	  const uniforms = _blurMaterial.uniforms;
 
-			for ( var i = 0, l = this.cubeLods.length; i < l; i ++ ) {
+	  uniforms[ 'envMap' ].value = equirectangular;
+	  uniforms[ 'copyEquirectangular' ].value = true;
+	  uniforms[ 'texelSize' ].value.set(
+		  1.0 / equirectangular.image.width, 1.0 / equirectangular.image.height );
+	  uniforms[ 'inputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];
+	  uniforms[ 'outputEncoding' ].value = ENCODINGS[ equirectangular.encoding ];
 
-				this.cubeLods[ i ].dispose();
+	  _renderer.setRenderTarget( cubeUVRenderTarget );
+	  _setViewport( 0, 0, 3 * SIZE_MAX, 2 * SIZE_MAX );
+	  _renderer.render( scene, _flatCamera );
+
+	}
+
+	function _createRenderTarget( params ) {
+
+	  const cubeUVRenderTarget =
+		  new WebGLRenderTarget( 3 * SIZE_MAX, 3 * SIZE_MAX, params );
+	  cubeUVRenderTarget.texture.mapping = CubeUVReflectionMapping;
+	  cubeUVRenderTarget.texture.name = 'PMREM.cubeUv';
+	  return cubeUVRenderTarget;
+
+	}
+
+	function _setViewport( x, y, width, height ) {
+
+		const dpr = _renderer.getPixelRatio();
+		_renderer.setViewport( x / dpr, y / dpr, width / dpr, height / dpr );
+
+	}
+
+	function _applyPMREM( cubeUVRenderTarget ) {
+
+		var autoClear = _renderer.autoClear;
+		_renderer.autoClear = false;
+
+	  	for ( let i = 1; i < TOTAL_LODS; i ++ ) {
+
+			const sigma = Math.sqrt(
+				_sigmas[ i ] * _sigmas[ i ] -
+			_sigmas[ i - 1 ] * _sigmas[ i - 1 ] );
+			const poleAxis =
+			_axisDirections[ ( i - 1 ) % _axisDirections.length ];
+			_blur( cubeUVRenderTarget, i - 1, i, sigma, poleAxis );
+
+		}
+
+		_renderer.autoClear = autoClear;
+
+	}
+
+	/**
+   * This is a two-pass Gaussian blur for a cubemap. Normally this is done
+   * vertically and horizontally, but this breaks down on a cube. Here we apply
+   * the blur latitudinally (around the poles), and then longitudinally (towards
+   * the poles) to approximate the orthogonally-separable blur. It is least
+   * accurate at the poles, but still does a decent job.
+   */
+	function _blur(
+		cubeUVRenderTarget, lodIn, lodOut,
+		sigma, poleAxis ) {
+
+		_halfBlur(
+	  cubeUVRenderTarget,
+	  _pingPongRenderTarget,
+	  lodIn,
+	  lodOut,
+	  sigma,
+	  'latitudinal',
+	  poleAxis );
+
+		_halfBlur(
+	  _pingPongRenderTarget,
+	  cubeUVRenderTarget,
+	  lodOut,
+	  lodOut,
+	  sigma,
+	  'longitudinal',
+	  poleAxis );
+
+	}
+
+	function _halfBlur(
+		targetIn, targetOut, lodIn,
+		lodOut, sigmaRadians, direction,
+		poleAxis ) {
+
+		if ( direction !== 'latitudinal' && direction !== 'longitudinal' ) {
+
+			console.error(
+				'blur direction must be either latitudinal or longitudinal!' );
+
+		}
+
+		// Number of standard deviations at which to cut off the discrete approximation.
+		const STANDARD_DEVIATIONS = 3;
+
+		const blurScene = new Scene();
+		blurScene.add( new Mesh( _lodPlanes[ lodOut ], _blurMaterial ) );
+		const blurUniforms = _blurMaterial.uniforms;
+
+		const pixels = _sizeLods[ lodIn ] - 1;
+		const radiansPerPixel = isFinite( sigmaRadians ) ? Math.PI / ( 2 * pixels ) : 2 * Math.PI / ( 2 * MAX_SAMPLES - 1 );
+		const sigmaPixels = sigmaRadians / radiansPerPixel;
+		const samples = isFinite( sigmaRadians ) ? 1 + Math.floor( STANDARD_DEVIATIONS * sigmaPixels ) : MAX_SAMPLES;
+
+		if ( samples > MAX_SAMPLES ) {
+
+			console.warn( `sigmaRadians, ${
+				sigmaRadians}, is too large and will clip, as it requested ${
+				samples} samples when the maximum is set to ${MAX_SAMPLES}` );
+
+		}
+
+		let weights = [];
+		let sum = 0;
+		for ( let i = 0; i < MAX_SAMPLES; ++ i ) {
+
+			const x = i / sigmaPixels;
+			const weight = Math.exp( - x * x / 2 );
+			weights.push( weight );
+			if ( i == 0 ) {
+
+	  			 sum += weight;
+
+			} else if ( i < samples ) {
+
+	  			sum += 2 * weight;
 
 			}
 
-			shader.dispose();
+		}
+		weights = weights.map( w => w / sum );
+
+		blurUniforms[ 'envMap' ].value = targetIn.texture;
+		blurUniforms[ 'copyEquirectangular' ].value = false;
+		blurUniforms[ 'samples' ].value = samples;
+		blurUniforms[ 'weights' ].value = weights;
+		blurUniforms[ 'latitudinal' ].value = direction === 'latitudinal';
+		if ( poleAxis ) {
 
-		},
+			blurUniforms[ 'poleAxis' ].value = poleAxis;
 
-	};
+		}
+		blurUniforms[ 'dTheta' ].value = radiansPerPixel;
+		blurUniforms[ 'mipInt' ].value = LOD_MAX - lodIn;
+		blurUniforms[ 'inputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];
+		blurUniforms[ 'outputEncoding' ].value = ENCODINGS[ targetIn.texture.encoding ];
 
-	function getShader() {
+		const outputSize = _sizeLods[ lodOut ];
+		const x = 3 * Math.max( 0, SIZE_MAX - 2 * outputSize );
+		const y = ( lodOut === 0 ? 0 : 2 * SIZE_MAX ) +
+	  2 * outputSize *
+		  ( lodOut > LOD_MAX - LOD_MIN ? lodOut - LOD_MAX + LOD_MIN : 0 );
 
-		var shaderMaterial = new ShaderMaterial( {
+		_renderer.setRenderTarget( targetOut );
+		_setViewport( x, y, 3 * outputSize, 2 * outputSize );
+		_renderer.render( blurScene, _flatCamera );
 
-			defines: {
-				"SAMPLES_PER_LEVEL": 20,
-			},
+	}
+
+	function _getShader( maxSamples ) {
+
+		const weights = new Float32Array( maxSamples );
+		const texelSize = new Vector2( 1, 1 );
+		const poleAxis = new Vector3( 0, 1, 0 );
+		var shaderMaterial = new RawShaderMaterial( {
+
+			defines: { 'n': maxSamples },
 
 			uniforms: {
-				"faceIndex": { value: 0 },
-				"roughness": { value: 0.5 },
-				"mapSize": { value: 0.5 },
-				"envMap": { value: null },
-				"tFlip": { value: - 1 },
+				'envMap': { value: null },
+				'copyEquirectangular': { value: false },
+				'texelSize': { value: texelSize },
+				'samples': { value: 1 },
+				'weights': { value: weights },
+				'latitudinal': { value: false },
+				'dTheta': { value: 0 },
+				'mipInt': { value: 0 },
+				'poleAxis': { value: poleAxis },
+				'inputEncoding': { value: ENCODINGS[ LinearEncoding ] },
+				'outputEncoding': { value: ENCODINGS[ LinearEncoding ] }
 			},
 
-			vertexShader:
-				"varying vec2 vUv;\n\
-				void main() {\n\
-					vUv = uv;\n\
-					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\
-				}",
-
-			fragmentShader:
-				"#include <common>\n\
-				varying vec2 vUv;\n\
-				uniform int faceIndex;\n\
-				uniform float roughness;\n\
-				uniform samplerCube envMap;\n\
-				uniform float mapSize;\n\
-				uniform float tFlip;\n\
-				\n\
-				float GGXRoughnessToBlinnExponent( const in float ggxRoughness ) {\n\
-					float a = ggxRoughness + 0.0001;\n\
-					a *= a;\n\
-					return ( 2.0 / a - 2.0 );\n\
-				}\n\
-				vec3 ImportanceSamplePhong(vec2 uv, mat3 vecSpace, float specPow) {\n\
-					float phi = uv.y * 2.0 * PI;\n\
-					float cosTheta = pow(1.0 - uv.x, 1.0 / (specPow + 1.0));\n\
-					float sinTheta = sqrt(1.0 - cosTheta * cosTheta);\n\
-					vec3 sampleDir = vec3(cos(phi) * sinTheta, sin(phi) * sinTheta, cosTheta);\n\
-					return vecSpace * sampleDir;\n\
-				}\n\
-				vec3 ImportanceSampleGGX( vec2 uv, mat3 vecSpace, float Roughness )\n\
-				{\n\
-					float a = Roughness * Roughness;\n\
-					float Phi = 2.0 * PI * uv.x;\n\
-					float CosTheta = sqrt( (1.0 - uv.y) / ( 1.0 + (a*a - 1.0) * uv.y ) );\n\
-					float SinTheta = sqrt( 1.0 - CosTheta * CosTheta );\n\
-					return vecSpace * vec3(SinTheta * cos( Phi ), SinTheta * sin( Phi ), CosTheta);\n\
-				}\n\
-				mat3 matrixFromVector(vec3 n) {\n\
-					float a = 1.0 / (1.0 + n.z);\n\
-					float b = -n.x * n.y * a;\n\
-					vec3 b1 = vec3(1.0 - n.x * n.x * a, b, -n.x);\n\
-					vec3 b2 = vec3(b, 1.0 - n.y * n.y * a, -n.y);\n\
-					return mat3(b1, b2, n);\n\
-				}\n\
-				\n\
-				vec4 testColorMap(float Roughness) {\n\
-					vec4 color;\n\
-					if(faceIndex == 0)\n\
-						color = vec4(1.0,0.0,0.0,1.0);\n\
-					else if(faceIndex == 1)\n\
-						color = vec4(0.0,1.0,0.0,1.0);\n\
-					else if(faceIndex == 2)\n\
-						color = vec4(0.0,0.0,1.0,1.0);\n\
-					else if(faceIndex == 3)\n\
-						color = vec4(1.0,1.0,0.0,1.0);\n\
-					else if(faceIndex == 4)\n\
-						color = vec4(0.0,1.0,1.0,1.0);\n\
-					else\n\
-						color = vec4(1.0,0.0,1.0,1.0);\n\
-					color *= ( 1.0 - Roughness );\n\
-					return color;\n\
-				}\n\
-				void main() {\n\
-					vec3 sampleDirection;\n\
-					vec2 uv = vUv*2.0 - 1.0;\n\
-					float offset = -1.0/mapSize;\n\
-					const float a = -1.0;\n\
-					const float b = 1.0;\n\
-					float c = -1.0 + offset;\n\
-					float d = 1.0 - offset;\n\
-					float bminusa = b - a;\n\
-					uv.x = (uv.x - a)/bminusa * d - (uv.x - b)/bminusa * c;\n\
-					uv.y = (uv.y - a)/bminusa * d - (uv.y - b)/bminusa * c;\n\
-					if (faceIndex==0) {\n\
-						sampleDirection = vec3(1.0, -uv.y, -uv.x);\n\
-					} else if (faceIndex==1) {\n\
-						sampleDirection = vec3(-1.0, -uv.y, uv.x);\n\
-					} else if (faceIndex==2) {\n\
-						sampleDirection = vec3(uv.x, 1.0, uv.y);\n\
-					} else if (faceIndex==3) {\n\
-						sampleDirection = vec3(uv.x, -1.0, -uv.y);\n\
-					} else if (faceIndex==4) {\n\
-						sampleDirection = vec3(uv.x, -uv.y, 1.0);\n\
-					} else {\n\
-						sampleDirection = vec3(-uv.x, -uv.y, -1.0);\n\
-					}\n\
-					vec3 correctedDirection = vec3( tFlip * sampleDirection.x, sampleDirection.yz );\n\
-					mat3 vecSpace = matrixFromVector( normalize( correctedDirection ) );\n\
-					vec3 rgbColor = vec3(0.0);\n\
-					const int NumSamples = SAMPLES_PER_LEVEL;\n\
-					vec3 vect;\n\
-					float weight = 0.0;\n\
-					for( int i = 0; i < NumSamples; i ++ ) {\n\
-						float sini = sin(float(i));\n\
-						float cosi = cos(float(i));\n\
-						float r = rand(vec2(sini, cosi));\n\
-						vect = ImportanceSampleGGX(vec2(float(i) / float(NumSamples), r), vecSpace, roughness);\n\
-						float dotProd = dot(vect, normalize(sampleDirection));\n\
-						weight += dotProd;\n\
-						vec3 color = envMapTexelToLinear(textureCube(envMap, vect)).rgb;\n\
-						rgbColor.rgb += color;\n\
-					}\n\
-					rgbColor /= float(NumSamples);\n\
-					//rgbColor = testColorMap( roughness ).rgb;\n\
-					gl_FragColor = linearToOutputTexel( vec4( rgbColor, 1.0 ) );\n\
-				}",
-
-			blending: NoBlending
+			vertexShader: `
+precision mediump float;
+precision mediump int;
+attribute vec3 position;
+attribute vec2 uv;
+attribute float faceIndex;
+varying vec2 vUv;
+varying float vFaceIndex;
+void main() {
+	vUv = uv;
+	vFaceIndex = faceIndex;
+    gl_Position = vec4( position, 1.0 );
+}
+      		`,
+
+			fragmentShader: `
+precision mediump float;
+precision mediump int;
+varying vec2 vUv;
+varying float vFaceIndex;
+uniform sampler2D envMap;
+uniform bool copyEquirectangular;
+uniform vec2 texelSize;
+uniform int samples;
+uniform float weights[n];
+uniform bool latitudinal;
+uniform float dTheta;
+uniform float mipInt;
+uniform vec3 poleAxis;
+uniform int inputEncoding;
+uniform int outputEncoding;
+
+#include <encodings_pars_fragment>
+
+vec4 inputTexelToLinear(vec4 value){
+    if(inputEncoding == 0){
+        return value;
+    }else if(inputEncoding == 1){
+        return sRGBToLinear(value);
+    }else if(inputEncoding == 2){
+        return RGBEToLinear(value);
+    }else if(inputEncoding == 3){
+        return RGBMToLinear(value, 7.0);
+    }else if(inputEncoding == 4){
+        return RGBMToLinear(value, 16.0);
+    }else if(inputEncoding == 5){
+        return RGBDToLinear(value, 256.0);
+    }else{
+        return GammaToLinear(value, 2.2);
+    }
+}
+
+vec4 linearToOutputTexel(vec4 value){
+    if(outputEncoding == 0){
+        return value;
+    }else if(outputEncoding == 1){
+        return LinearTosRGB(value);
+    }else if(outputEncoding == 2){
+        return LinearToRGBE(value);
+    }else if(outputEncoding == 3){
+        return LinearToRGBM(value, 7.0);
+    }else if(outputEncoding == 4){
+        return LinearToRGBM(value, 16.0);
+    }else if(outputEncoding == 5){
+        return LinearToRGBD(value, 256.0);
+    }else{
+        return LinearToGamma(value, 2.2);
+    }
+}
+
+vec4 envMapTexelToLinear(vec4 color) {
+  return inputTexelToLinear(color);
+}
+
+#define ENVMAP_TYPE_CUBE_UV
+#include <cube_uv_reflection_fragment>
+
+#define RECIPROCAL_PI 0.31830988618
+#define RECIPROCAL_PI2 0.15915494
+
+void main() {
+  gl_FragColor = vec4(0.0);
+  vec3 outputDirection = getDirection(vUv, vFaceIndex);
+  if (copyEquirectangular) {
+    vec3 direction = normalize(outputDirection);
+    vec2 uv;
+    uv.y = asin(clamp(direction.y, -1.0, 1.0)) * RECIPROCAL_PI + 0.5;
+    uv.x = atan(direction.z, direction.x) * RECIPROCAL_PI2 + 0.5;
+    vec2 f = fract(uv / texelSize - 0.5);
+    uv -= f * texelSize;
+    vec3 tl = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
+    uv.x += texelSize.x;
+    vec3 tr = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
+    uv.y += texelSize.y;
+    vec3 br = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
+    uv.x -= texelSize.x;
+    vec3 bl = envMapTexelToLinear(texture2D(envMap, uv)).rgb;
+    vec3 tm = mix(tl, tr, f.x);
+    vec3 bm = mix(bl, br, f.x);
+    gl_FragColor.rgb = mix(tm, bm, f.y);
+  } else {
+    for (int i = 0; i < n; i++) {
+      if (i >= samples)
+        break;
+      for (int dir = -1; dir < 2; dir += 2) {
+        if (i == 0 && dir == 1)
+          continue;
+        vec3 axis = latitudinal ? poleAxis : cross(poleAxis, outputDirection);
+        if (all(equal(axis, vec3(0.0))))
+          axis = cross(vec3(0.0, 1.0, 0.0), outputDirection);
+        axis = normalize(axis);
+        float theta = dTheta * float(dir * i);
+        float cosTheta = cos(theta);
+        // Rodrigues' axis-angle rotation
+        vec3 sampleDirection = outputDirection * cosTheta 
+            + cross(axis, outputDirection) * sin(theta) 
+            + axis * dot(axis, outputDirection) * (1.0 - cosTheta);
+        gl_FragColor.rgb +=
+            weights[i] * bilinearCubeUV(envMap, sampleDirection, mipInt);
+      }
+    }
+  }
+  gl_FragColor = linearToOutputTexel(gl_FragColor);
+}
+     		`,
+
+			blending: NoBlending,
+			depthTest: false,
+	   		depthWrite: false
 
 		} );
 
-		shaderMaterial.type = 'PMREMGenerator';
+		shaderMaterial.type = 'SphericalGaussianBlur';
 
 		return shaderMaterial;
 

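A note on the blur math in _applyPMREM/_halfBlur above: because Gaussian blurs compose by adding variances, each mip is produced from the previous one using only the incremental sigma sqrt(sigma_i^2 - sigma_{i-1}^2) rather than re-blurring the full-resolution source. The discrete kernel is truncated at three standard deviations (or capped at MAX_SAMPLES) and normalized so that the center weight plus twice each mirrored off-center weight sums to one. A standalone sketch of that weight computation, restated outside the diff for illustration (gaussianWeights is a hypothetical helper, not part of this commit):

	// Illustration only: normalized half-kernel weights, matching the loop in _halfBlur.
	function gaussianWeights( sigmaPixels, samples, maxSamples ) {
		var weights = [];
		var sum = 0;
		for ( var i = 0; i < maxSamples; ++ i ) {
			var x = i / sigmaPixels;
			var weight = Math.exp( - x * x / 2 );
			weights.push( weight );
			if ( i == 0 ) sum += weight;               // center sample counted once
			else if ( i < samples ) sum += 2 * weight; // mirrored samples counted twice
		}
		return weights.map( function ( w ) { return w / sum; } );
	}
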
+ 3 - 17
examples/webgl_loader_gltf.html

@@ -25,7 +25,6 @@
 			import { GLTFLoader } from './jsm/loaders/GLTFLoader.js';
 			import { RGBELoader } from './jsm/loaders/RGBELoader.js';
 			import { PMREMGenerator } from './jsm/pmrem/PMREMGenerator.js';
-			import { PMREMCubeUVPacker } from './jsm/pmrem/PMREMCubeUVPacker.js';
 
 			var container, stats, controls;
 			var camera, scene, renderer;
@@ -48,20 +47,10 @@
 					.setPath( 'textures/equirectangular/' )
 					.load( 'pedestrian_overpass_2k.hdr', function ( texture ) {
 
-						var options = {
-							minFilter: texture.minFilter,
-							magFilter: texture.magFilter
-						};
+						var pmremGenerator = new PMREMGenerator( renderer );
+						scene.background = pmremGenerator.fromEquirectangular(texture).texture;
 
-						scene.background = new THREE.WebGLRenderTargetCube( 1024, 1024, options ).fromEquirectangularTexture( renderer, texture );
-
-						var pmremGenerator = new PMREMGenerator( scene.background.texture );
-						pmremGenerator.update( renderer );
-
-						var pmremCubeUVPacker = new PMREMCubeUVPacker( pmremGenerator.cubeLods );
-						pmremCubeUVPacker.update( renderer );
-
-						var envMap = pmremCubeUVPacker.CubeUVRenderTarget.texture;
+						var envMap = scene.background;
 
 						// model
 
@@ -82,9 +71,6 @@
 
 						} );
 
-						pmremGenerator.dispose();
-						pmremCubeUVPacker.dispose();
-
 					} );
 
 				renderer = new THREE.WebGLRenderer( { antialias: true } );
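
Downstream of this hunk (unchanged and therefore not shown in the diff), the example presumably assigns envMap to the loaded model's materials in the usual way. A minimal sketch of that standard pattern, assuming MeshStandardMaterial-based glTF materials (the traverse callback here is an assumption, not part of this commit):

	// Assumed assignment pattern; only envMap comes from the changed lines above.
	gltf.scene.traverse( function ( child ) {
		if ( child.isMesh ) {
			child.material.envMap = envMap;
			child.material.needsUpdate = true;
		}
	} );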

+ 0 - 1
utils/modularize.js

@@ -144,7 +144,6 @@ var files = [
 	{ path: 'objects/Water.js', dependencies: [], ignoreList: [] },
 	{ path: 'objects/Water2.js', dependencies: [ { name: 'Reflector', path: 'objects/Reflector.js' }, { name: 'Refractor', path: 'objects/Refractor.js' } ], ignoreList: [] },
 
-	{ path: 'pmrem/PMREMCubeUVPacker.js', dependencies: [], ignoreList: [] },
 	{ path: 'pmrem/PMREMGenerator.js', dependencies: [], ignoreList: [] },
 
 	{ path: 'postprocessing/AdaptiveToneMappingPass.js', dependencies: [ { name: 'Pass', path: 'postprocessing/Pass.js' }, { name: 'CopyShader', path: 'shaders/CopyShader.js' }, { name: 'LuminosityShader', path: 'shaders/LuminosityShader.js' }, { name: 'ToneMapShader', path: 'shaders/ToneMapShader.js' } ], ignoreList: [] },

Some files were not shown because too many files have changed in this diff