
Merge branch 'dev' into convert_car_script_to_module

Lewy Blue committed 6 years ago
cdcb80c180
58 files changed: 8217 additions and 3718 deletions
  1. .editorconfig (+0, -1)
  2. docs/examples/loaders/BasisTextureLoader.html (+141, -0)
  3. docs/list.js (+1, -0)
  4. docs/manual/en/introduction/Import-via-modules.html (+9, -1)
  5. examples/js/libs/basis/README.md (+3, -3)
  6. examples/js/loaders/BasisTextureLoader.js (+153, -75)
  7. examples/js/loaders/LWOLoader.js (+2236, -1631)
  8. examples/js/misc/ConvexObjectBreaker.js (+0, -0)
  9. examples/js/misc/GPUComputationRenderer.js (+0, -0)
  10. examples/js/misc/Ocean.js (+12, -8)
  11. examples/js/shaders/OceanShaders.js (+10, -9)
  12. examples/js/shaders/VolumeShader.js (+311, -311)
  13. examples/jsm/animation/AnimationClipCreator.d.ts (+16, -0)
  14. examples/jsm/animation/AnimationClipCreator.js (+125, -0)
  15. examples/jsm/animation/TimelinerController.d.ts (+20, -0)
  16. examples/jsm/animation/TimelinerController.js (+283, -0)
  17. examples/jsm/loaders/LWOLoader.d.ts (+23, -14)
  18. examples/jsm/loaders/LWOLoader.js (+2237, -1631)
  19. examples/jsm/loaders/VTKLoader.d.ts (+15, -0)
  20. examples/jsm/loaders/VTKLoader.js (+1186, -0)
  21. examples/jsm/misc/Ocean.d.ts (+68, -0)
  22. examples/jsm/misc/Ocean.js (+411, -0)
  23. examples/jsm/postprocessing/EffectComposer.d.ts (+1, -1)
  24. examples/jsm/postprocessing/RenderPass.d.ts (+1, -1)
  25. examples/jsm/shaders/OceanShaders.d.ts (+75, -0)
  26. examples/jsm/shaders/OceanShaders.js (+377, -0)
  27. examples/jsm/shaders/VolumeShader.d.ts (+16, -0)
  28. examples/jsm/shaders/VolumeShader.js (+331, -0)
  29. examples/models/3mf/truck.3mf (BIN)
  30. examples/models/lwo/Images/Env_map_sphere_1.jpg (BIN)
  31. examples/models/lwo/Images/environments/kiara_8_sunset.jpg (BIN)
  32. examples/models/lwo/Images/environments/kloofendal_48d_partly_cloudy.jpg (BIN)
  33. examples/models/lwo/Images/material-PBR-abstract/Abstract_008_basecolor.jpg (BIN)
  34. examples/models/lwo/Images/material-PBR-abstract/Abstract_008_bump.jpg (BIN)
  35. examples/models/lwo/Images/material-PBR-abstract/Abstract_008_metallic.jpg (BIN)
  36. examples/models/lwo/Images/material-PBR-abstract/Abstract_008_normal.jpg (BIN)
  37. examples/models/lwo/Images/material-PBR-abstract/Abstract_008_roughness.jpg (BIN)
  38. examples/models/lwo/Images/material-Phong-metal/167_BUMP.jpg (BIN)
  39. examples/models/lwo/Images/material-Phong-metal/167_COLOR.JPG (BIN)
  40. examples/models/lwo/Images/material-Phong-metal/167_GLOSS.jpg (BIN)
  41. examples/models/lwo/Images/material-Phong-metal/167_SPEC.jpg (BIN)
  42. examples/models/lwo/Images/normal_2.jpg (BIN)
  43. examples/models/lwo/LICENSE.txt (+3, -0)
  44. examples/models/lwo/Objects/LWO3/Demo.lwo (BIN)
  45. examples/models/lwo/StandardMaterials.lwo (BIN)
  46. examples/textures/compressed/PavingStones.basis (BIN)
  47. examples/textures/compressed/kodim20.basis (BIN)
  48. examples/webgl_gpgpu_birds.html (+1, -1)
  49. examples/webgl_gpgpu_protoplanet.html (+1, -1)
  50. examples/webgl_gpgpu_water.html (+1, -1)
  51. examples/webgl_loader_lwo.html (+40, -25)
  52. examples/webgl_loader_texture_basis.html (+5, -1)
  53. examples/webgl_physics_convex_break.html (+1, -1)
  54. examples/webgl_shaders_ocean2.html (+1, -1)
  55. src/materials/SpriteMaterial.d.ts (+2, -0)
  56. src/renderers/WebGLRenderTargetCube.js (+93, -0)
  57. src/renderers/shaders/ShaderChunk/skinning_pars_vertex.glsl.js (+1, -1)
  58. utils/modularize.js (+7, -0)

+ 0 - 1
.editorconfig

@@ -9,7 +9,6 @@ insert_final_newline = true
 [*.{js,ts,html}]
 charset = utf-8
 indent_style = tab
-indent_size = 2
 
 [*.{js,ts}]
 trim_trailing_whitespace = true

+ 141 - 0
docs/examples/loaders/BasisTextureLoader.html

@@ -0,0 +1,141 @@
+<!DOCTYPE html>
+<html lang="en">
+	<head>
+		<meta charset="utf-8" />
+		<base href="../../" />
+		<script src="list.js"></script>
+		<script src="page.js"></script>
+		<link type="text/css" rel="stylesheet" href="page.css" />
+	</head>
+	<body>
+		[page:Loader] &rarr;
+		<h1>[name]</h1>
+
+		<p class="desc">
+			Loader for Basis Universal GPU Texture Codec.<br><br>
+
+			[link:https://github.com/BinomialLLC/basis_universal/ Basis Universal] is a
+			"supercompressed" GPU texture and texture video compression system that
+			outputs a highly compressed intermediate file format (.basis) that can be
+			quickly transcoded to a wide variety of GPU texture compression formats.
+		</p>
+
+		<p>
+			This loader parallelizes the transcoding process across a configurable number
+			of web workers, before transferring the transcoded compressed texture back
+			to the main thread. The required WASM transcoder and JS wrapper are available from the
+			[link:https://github.com/mrdoob/three.js/tree/dev/examples/js/libs/basis examples/js/libs/basis]
+			directory.
+		</p>
+
+		<h2>Example</h2>
+
+		<code>
+		var basisLoader = new THREE.BasisTextureLoader();
+		basisLoader.setTranscoderPath( 'examples/js/libs/basis/' );
+		basisLoader.detectSupport( renderer );
+		basisLoader.load( 'diffuse.basis', function ( texture ) {
+
+			var material = new THREE.MeshStandardMaterial( { map: texture } );
+
+		}, function () {
+
+			console.log( 'onProgress' );
+
+		}, function ( e ) {
+
+			console.error( e );
+
+		} );
+		</code>
+
+		[example:webgl_loader_texture_basis]
+
+		<h2>Browser compatibility</h2>
+
+		<p>
+			BasisTextureLoader transcodes input textures in '.basis' format to an
+			appropriate compressed texture format for the target device, where
+			possible. This allows the same source texture to be served across
+			desktop, Android, and iOS devices, and transcoded into DXT, ETC1, or
+			PVRTC1. Other output formats may be supported in the future.
+		</p>
+		<p>
+			Transcoding to PVRTC1 (for iOS) requires square power-of-two textures.
+		</p>
+		<p>
+			This loader relies on ES6 Promises and Web Assembly, which are not
+			supported in IE11.
+		</p>
+
+		<br>
+		<hr>
+
+		<h2>Constructor</h2>
+
+		<h3>[name]( [param:LoadingManager manager] )</h3>
+		<p>
+		[page:LoadingManager manager] — The [page:LoadingManager] for the loader to use. Default is [page:LoadingManager THREE.DefaultLoadingManager].
+		</p>
+		<p>
+		Creates a new [name].
+		</p>
+
+		<h2>Methods</h2>
+
+		<h3>[method:null load]( [param:String url], [param:Function onLoad], [param:Function onProgress], [param:Function onError] )</h3>
+		<p>
+		[page:String url] — A string containing the path/URL of the <em>.basis</em> file.<br />
+		[page:Function onLoad] — A function to be called after the loading is successfully completed.<br />
+		[page:Function onProgress] — (optional) A function to be called while the loading is in progress. The argument will be the XMLHttpRequest instance, that contains .[page:Integer total] and .[page:Integer loaded] bytes.<br />
+		[page:Function onError] — (optional) A function to be called if an error occurs during loading. The function receives error as an argument.<br />
+		</p>
+		<p>
+		Load from url and call the <em>onLoad</em> function with the transcoded [page:CompressedTexture].
+		</p>
+
+		<h3>[method:this detectSupport]( [param:WebGLRenderer renderer] )</h3>
+		<p>
+		[page:WebGLRenderer renderer] — A renderer instance.
+		</p>
+		<p>
+		Detects hardware support for available compressed texture formats, to determine
+		the output format for the transcoder. Must be called before loading a texture.
+		</p>
+
+		<h3>[method:this setCrossOrigin]( [param:String crossOrigin] )</h3>
+		<p>
+		[page:String crossOrigin] — Options are '', 'anonymous', or 'use-credentials'. Default is 'anonymous'.
+		</p>
+		<p>
+		Sets options for CORS requests.
+		</p>
+
+		<h3>[method:this setTranscoderPath]( [param:String path] )</h3>
+		<p>
+		[page:String path] — Path to folder containing the WASM transcoder and JS wrapper.
+		</p>
+		<p>
+		The WASM transcoder and JS wrapper are available from the
+		[link:https://github.com/mrdoob/three.js/tree/dev/examples/js/libs/basis examples/js/libs/basis]
+		directory.
+		</p>
+
+		<h3>[method:this setWorkerLimit]( [param:Number limit] )</h3>
+		<p>
+		[page:Number limit] — Maximum number of workers. Default is '4'.
+		</p>
+		<p>
+		Sets the maximum number of web workers to be allocated by this instance.
+		</p>
+
+		<h3>[method:this dispose]()</h3>
+		<p>
+		Disposes the loader object, de-allocating any Web Workers created.
+		</p>
+
+		<h2>Source</h2>
+
+		[link:https://github.com/mrdoob/three.js/blob/master/examples/js/loaders/BasisTextureLoader.js examples/js/loaders/BasisTextureLoader.js]
+	</body>
+</html>

+ 1 - 0
docs/list.js

@@ -365,6 +365,7 @@ var list = {
 
 			"Loaders": {
 				"BabylonLoader": "examples/loaders/BabylonLoader",
+				"BasisTextureLoader": "examples/loaders/BasisTextureLoader",
 				"DRACOLoader": "examples/loaders/DRACOLoader",
 				"GLTFLoader": "examples/loaders/GLTFLoader",
 				"MMDLoader": "examples/loaders/MMDLoader",

+ 9 - 1
docs/manual/en/introduction/Import-via-modules.html

@@ -66,7 +66,7 @@
 		<p>
 			The core of three.js is focused on the most important components of a 3D engine. Many other components like loaders or controls are part of the
 			examples directory. three.js ensures that these files are kept in sync with the core but users have to import them separately if they are required
-			for their project. However, most of these files are not modules which makes their usage in certain cases inconvenient. In order to address this issue,
+			for their project. However, not all files are modules which makes their usage in certain cases inconvenient. In order to address this issue,
 			we are working to provide all the examples as modules in the [link:https://github.com/mrdoob/three.js/tree/master/examples/jsm examples/jsm] directory.
 			If you install three.js via npm, you can import them like so:
 		</p>
@@ -76,6 +76,12 @@
 		<p>
 			The following examples files are already available as modules:
 			<ul>
+				<li>animation
+					<ul>
+						<li>AnimationClipCreator</li>
+						<li>TimelinerController</li>
+					</ul>
+				</li>
 				<li>cameras
 					<ul>
 						<li>CinematicCamera</li>
@@ -183,6 +189,7 @@
 						<li>TDSLoader</li>
 						<li>TGALoader</li>
 						<li>VRMLLoader</li>
+						<li>VTKLoader</li>
 					</ul>
 				</li>
 				<li>math
@@ -311,6 +318,7 @@
 						<li>VerticalBlurShader</li>
 						<li>VerticalTiltShiftShader</li>
 						<li>VignetteShader</li>
+						<li>VolumeShader</li>
 						<li>WaterRefractionShader</li>
 					</ul>
 				</li>
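
As a sketch of the npm-based import path this page describes, the newly modularized `VTKLoader` could be pulled in like the other `examples/jsm` modules; the module specifiers follow the standard `three/examples/jsm/...` layout and the model path and `scene` are assumptions, not part of this commit:

```js
import { Mesh, MeshNormalMaterial } from 'three';
import { VTKLoader } from 'three/examples/jsm/loaders/VTKLoader.js';

const loader = new VTKLoader();
loader.load( 'models/vtk/bunny.vtk', function ( geometry ) {

	// VTKLoader hands back a BufferGeometry that can be wrapped in a Mesh.
	scene.add( new Mesh( geometry, new MeshNormalMaterial() ) );

} );
```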

+ 3 - 3
examples/js/libs/basis/README.md

@@ -22,15 +22,15 @@ basisLoader.setTranscoderPath( 'examples/js/libs/basis/' );
 basisLoader.detectSupport( renderer );
 basisLoader.load( 'diffuse.basis', function ( texture ) {
 
-  var material = new THREE.MeshStandardMaterial( { map: texture } );
+	var material = new THREE.MeshStandardMaterial( { map: texture } );
 
 }, function () {
 
-  console.log( 'onProgress' );
+	console.log( 'onProgress' );
 
 }, function ( e ) {
 
-  console.error( e );
+	console.error( e );
 
 } );
 ```

+ 153 - 75
examples/js/loaders/BasisTextureLoader.js

@@ -16,46 +16,66 @@
  * of web workers, before transferring the transcoded compressed texture back
  * to the main thread.
  */
-// TODO(donmccurdy): Don't use ES6 classes.
-THREE.BasisTextureLoader = class BasisTextureLoader {
+THREE.BasisTextureLoader = function ( manager ) {
 
-	constructor ( manager ) {
+	this.manager = manager || THREE.DefaultLoadingManager;
 
-		// TODO(donmccurdy): Loading manager is unused.
-		this.manager = manager || THREE.DefaultLoadingManager;
+	this.crossOrigin = 'anonymous';
 
-		this.transcoderPath = '';
-		this.transcoderBinary = null;
-		this.transcoderPending = null;
+	this.transcoderPath = '';
+	this.transcoderBinary = null;
+	this.transcoderPending = null;
 
-		this.workerLimit = 4;
-		this.workerPool = [];
-		this.workerNextTaskID = 1;
-		this.workerSourceURL = '';
-		this.workerConfig = {
-			format: null,
-			etcSupported: false,
-			dxtSupported: false,
-			pvrtcSupported: false,
-		};
+	this.workerLimit = 4;
+	this.workerPool = [];
+	this.workerNextTaskID = 1;
+	this.workerSourceURL = '';
+	this.workerConfig = {
+		format: null,
+		etcSupported: false,
+		dxtSupported: false,
+		pvrtcSupported: false,
+	};
 
-	}
+};
+
+THREE.BasisTextureLoader.prototype = {
 
-	setTranscoderPath ( path ) {
+	constructor: THREE.BasisTextureLoader,
+
+	setCrossOrigin: function ( crossOrigin ) {
+
+		this.crossOrigin = crossOrigin;
+
+		return this;
+
+	},
+
+	setTranscoderPath: function ( path ) {
 
 		this.transcoderPath = path;
 
-	}
+		return this;
+
+	},
+
+	setWorkerLimit: function ( workerLimit ) {
+
+		this.workerLimit = workerLimit;
 
-	detectSupport ( renderer ) {
+		return this;
+
+	},
+
+	detectSupport: function ( renderer ) {
 
 		var context = renderer.context;
 		var config = this.workerConfig;
 
-		config.etcSupported = !! context.getExtension('WEBGL_compressed_texture_etc1');
-		config.dxtSupported = !! context.getExtension('WEBGL_compressed_texture_s3tc');
-		config.pvrtcSupported = !! context.getExtension('WEBGL_compressed_texture_pvrtc')
-			|| !! context.getExtension('WEBKIT_WEBGL_compressed_texture_pvrtc');
+		config.etcSupported = !! context.getExtension( 'WEBGL_compressed_texture_etc1' );
+		config.dxtSupported = !! context.getExtension( 'WEBGL_compressed_texture_s3tc' );
+		config.pvrtcSupported = !! context.getExtension( 'WEBGL_compressed_texture_pvrtc' )
+			|| !! context.getExtension( 'WEBKIT_WEBGL_compressed_texture_pvrtc' );
 
 		if ( config.etcSupported ) {
 
@@ -77,36 +97,44 @@ THREE.BasisTextureLoader = class BasisTextureLoader {
 
 		return this;
 
-	}
+	},
 
-	load ( url, onLoad, onProgress, onError ) {
+	load: function ( url, onLoad, onProgress, onError ) {
 
-		// TODO(donmccurdy): Use THREE.FileLoader.
-		fetch( url )
-			.then( ( res ) => res.arrayBuffer() )
-			.then( ( buffer ) => this._createTexture( buffer ) )
-			.then( onLoad )
-			.catch( onError );
+		var loader = new THREE.FileLoader( this.manager );
 
-	}
+		loader.setResponseType( 'arraybuffer' );
+
+		loader.load( url, ( buffer ) => {
+
+			this._createTexture( buffer )
+				.then( onLoad )
+				.catch( onError );
+
+		}, onProgress, onError );
+
+	},
 
 	/**
 	 * @param  {ArrayBuffer} buffer
 	 * @return {Promise<THREE.CompressedTexture>}
 	 */
-	_createTexture ( buffer ) {
+	_createTexture: function ( buffer ) {
+
+		var worker;
+		var taskID;
 
-		return this.getWorker()
-			.then( ( worker ) => {
+		var texturePending = this._getWorker()
+			.then( ( _worker ) => {
 
-				return new Promise( ( resolve ) => {
+				worker = _worker;
+				taskID = this.workerNextTaskID ++;
 
-					var taskID = this.workerNextTaskID++;
+				return new Promise( ( resolve, reject ) => {
 
-					worker._callbacks[ taskID ] = resolve;
+					worker._callbacks[ taskID ] = { resolve, reject };
 					worker._taskCosts[ taskID ] = buffer.byteLength;
 					worker._taskLoad += worker._taskCosts[ taskID ];
-					worker._taskCount++;
 
 					worker.postMessage( { type: 'transcode', id: taskID, buffer }, [ buffer ] );
 
@@ -139,27 +167,54 @@ THREE.BasisTextureLoader = class BasisTextureLoader {
 
 				}
 
-				texture.minFilter = THREE.LinearMipMapLinearFilter;
+				texture.minFilter = mipmaps.length === 1 ? THREE.LinearFilter : THREE.LinearMipMapLinearFilter;
 				texture.magFilter = THREE.LinearFilter;
 				texture.generateMipmaps = false;
 				texture.needsUpdate = true;
 
 				return texture;
 
-			});
+			} );
 
-	}
+		texturePending
+			.finally( () => {
+
+				if ( worker && taskID ) {
+
+					worker._taskLoad -= worker._taskCosts[ taskID ];
+					delete worker._callbacks[ taskID ];
+					delete worker._taskCosts[ taskID ];
+
+				}
 
-	_initTranscoder () {
+			} );
+
+		return texturePending;
+
+	},
+
+	_initTranscoder: function () {
 
 		if ( ! this.transcoderBinary ) {
 
-			// TODO(donmccurdy): Use THREE.FileLoader.
-			var jsContent = fetch( this.transcoderPath + 'basis_transcoder.js' )
-				.then( ( response ) => response.text() );
+			// Load transcoder wrapper.
+			var jsLoader = new THREE.FileLoader( this.manager );
+			jsLoader.setPath( this.transcoderPath );
+			var jsContent = new Promise( ( resolve, reject ) => {
+
+				jsLoader.load( 'basis_transcoder.js', resolve, undefined, reject );
 
-			var binaryContent = fetch( this.transcoderPath + 'basis_transcoder.wasm' )
-				.then( ( response ) => response.arrayBuffer() );
+			} );
+
+			// Load transcoder WASM binary.
+			var binaryLoader = new THREE.FileLoader( this.manager );
+			binaryLoader.setPath( this.transcoderPath );
+			binaryLoader.setResponseType( 'arraybuffer' );
+			var binaryContent = new Promise( ( resolve, reject ) => {
+
+				binaryLoader.load( 'basis_transcoder.wasm', resolve, undefined, reject );
+
+			} );
 
 			this.transcoderPending = Promise.all( [ jsContent, binaryContent ] )
 				.then( ( [ jsContent, binaryContent ] ) => {
@@ -187,9 +242,9 @@ THREE.BasisTextureLoader = class BasisTextureLoader {
 
 		return this.transcoderPending;
 
-	}
+	},
 
-	getWorker () {
+	_getWorker: function () {
 
 		return this._initTranscoder().then( () => {
 
@@ -200,7 +255,6 @@ THREE.BasisTextureLoader = class BasisTextureLoader {
 				worker._callbacks = {};
 				worker._taskCosts = {};
 				worker._taskLoad = 0;
-				worker._taskCount = 0;
 
 				worker.postMessage( {
 					type: 'init',
@@ -215,24 +269,29 @@ THREE.BasisTextureLoader = class BasisTextureLoader {
 					switch ( message.type ) {
 
 						case 'transcode':
-							worker._callbacks[ message.id ]( message );
-							worker._taskLoad -= worker._taskCosts[ message.id ];
-							delete worker._callbacks[ message.id ];
-							delete worker._taskCosts[ message.id ];
+							worker._callbacks[ message.id ].resolve( message );
+							break;
+
+						case 'error':
+							worker._callbacks[ message.id ].reject( message );
 							break;
 
 						default:
-							throw new Error( 'THREE.BasisTextureLoader: Unexpected message, "' + message.type + '"' );
+							console.error( 'THREE.BasisTextureLoader: Unexpected message, "' + message.type + '"' );
 
 					}
 
-				}
+				};
 
 				this.workerPool.push( worker );
 
 			} else {
 
-				this.workerPool.sort( function ( a, b ) { return a._taskLoad > b._taskLoad ? -1 : 1; } );
+				this.workerPool.sort( function ( a, b ) {
+
+					return a._taskLoad > b._taskLoad ? - 1 : 1;
+
+				} );
 
 			}
 
@@ -240,11 +299,11 @@ THREE.BasisTextureLoader = class BasisTextureLoader {
 
 		} );
 
-	}
+	},
 
-	dispose () {
+	dispose: function () {
 
-		for ( var i = 0; i < this.workerPool.length; i++ ) {
+		for ( var i = 0; i < this.workerPool.length; i ++ ) {
 
 			this.workerPool[ i ].terminate();
 
@@ -252,8 +311,10 @@ THREE.BasisTextureLoader = class BasisTextureLoader {
 
 		this.workerPool.length = 0;
 
+		return this;
+
 	}
-}
+};
 
 /* CONSTANTS */
 
@@ -285,6 +346,7 @@ THREE.BasisTextureLoader.DXT_FORMAT_MAP[ THREE.BasisTextureLoader.BASIS_FORMAT.c
 /* WEB WORKER */
 
 THREE.BasisTextureLoader.BasisWorker = function () {
+
 	var config;
 	var transcoderPending;
 	var _BasisFile;
@@ -303,17 +365,27 @@ THREE.BasisTextureLoader.BasisWorker = function () {
 			case 'transcode':
 				transcoderPending.then( () => {
 
-					var { width, height, mipmaps } = transcode( message.buffer );
+					try {
 
-					var buffers = [];
+						var { width, height, mipmaps } = transcode( message.buffer );
 
-					for ( var i = 0; i < mipmaps.length; ++i ) {
+						var buffers = [];
 
-						buffers.push( mipmaps[i].data.buffer );
+						for ( var i = 0; i < mipmaps.length; ++ i ) {
 
-					}
+							buffers.push( mipmaps[ i ].data.buffer );
+
+						}
+
+						self.postMessage( { type: 'transcode', id: message.id, width, height, mipmaps }, buffers );
+
+					} catch ( error ) {
 
-					self.postMessage( { type: 'transcode', id: message.id, width, height, mipmaps }, buffers );
+						console.error( error );
+
+						self.postMessage( { type: 'error', id: message.id, error: error.message } );
+
+					}
 
 				} );
 				break;
@@ -322,7 +394,7 @@ THREE.BasisTextureLoader.BasisWorker = function () {
 
 	};
 
-	function init ( wasmBinary ) {
+	function init( wasmBinary ) {
 
 		transcoderPending = new Promise( ( resolve ) => {
 
@@ -348,7 +420,7 @@ THREE.BasisTextureLoader.BasisWorker = function () {
 
 	}
 
-	function transcode ( buffer ) {
+	function transcode( buffer ) {
 
 		var basisFile = new _BasisFile( new Uint8Array( buffer ) );
 
@@ -356,7 +428,7 @@ THREE.BasisTextureLoader.BasisWorker = function () {
 		var height = basisFile.getImageHeight( 0, 0 );
 		var levels = basisFile.getNumLevels( 0 );
 
-		function cleanup () {
+		function cleanup() {
 
 			basisFile.close();
 			basisFile.delete();
@@ -377,9 +449,15 @@ THREE.BasisTextureLoader.BasisWorker = function () {
 
 		}
 
+		if ( basisFile.getHasAlpha() ) {
+
+			console.warn( 'THREE.BasisTextureLoader: Alpha not yet implemented.' );
+
+		}
+
 		var mipmaps = [];
 
-		for ( var mip = 0; mip < levels; mip++ ) {
+		for ( var mip = 0; mip < levels; mip ++ ) {
 
 			var mipWidth = basisFile.getImageWidth( 0, mip );
 			var mipHeight = basisFile.getImageHeight( 0, mip );
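
The reworked worker handling above stores a `{ resolve, reject }` pair per task ID and releases the task's byte cost in a `finally()` step so failed transcodes no longer leak load-balancing state. A stripped-down sketch of that pattern follows; the field names are illustrative and this is not the loader's actual code:

```js
// Simplified sketch of the per-task bookkeeping introduced above: each request
// gets an ID, its { resolve, reject } pair is stored on the worker, and the
// byte cost is released once the promise settles, whatever the outcome.
function transcodeOnWorker( worker, buffer ) {

	var taskID = worker._nextTaskID ++;

	var pending = new Promise( function ( resolve, reject ) {

		worker._callbacks[ taskID ] = { resolve: resolve, reject: reject };
		worker._taskLoad += buffer.byteLength;

		worker.postMessage( { type: 'transcode', id: taskID, buffer: buffer }, [ buffer ] );

	} );

	// Promise.prototype.finally (ES2018) runs for both resolve and reject.
	return pending.finally( function () {

		worker._taskLoad -= buffer.byteLength;
		delete worker._callbacks[ taskID ];

	} );

}

// The worker's onmessage handler then routes results back by ID:
//   'transcode' -> worker._callbacks[ message.id ].resolve( message )
//   'error'     -> worker._callbacks[ message.id ].reject( message )
```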

+ 2236 - 1631
examples/js/loaders/LWOLoader.js

@@ -1,7 +1,10 @@
 /**
+ * @version 1.1.1
+ *
  * @author Lewy Blue https://github.com/looeee
+ * @author Guilherme Avila https://github/sciecode
  *
- * Load files in LWO3 and LWO2 format
+ * @desc Load files in LWO3 and LWO2 format on Three.js
  *
  * LWO3 format specification:
  * 	http://static.lightwave3d.com/sdk/2018/html/filefmts/lwo3.html
@@ -9,2442 +12,3044 @@
  * LWO2 format specification:
  * 	http://static.lightwave3d.com/sdk/2018/html/filefmts/lwo2.html
  *
- */
+ * Development and test repository:
+ *	https://github.com/threejs/lwoloader
+ *
+ **/
 
-THREE.LWOLoader = ( function () {
+function LWO2Parser( IFFParser ) {
 
-	var lwoTree;
+	this.IFF = IFFParser;
 
-	function LWOLoader( manager ) {
+}
 
-		this.manager = ( manager !== undefined ) ? manager : THREE.DefaultLoadingManager;
+LWO2Parser.prototype = {
 
-	}
+	constructor: LWO2Parser,
 
-	LWOLoader.prototype = {
+	parseBlock: function () {
 
-		constructor: LWOLoader,
+		this.IFF.debugger.offset = this.IFF.reader.offset;
+		this.IFF.debugger.closeForms();
 
-		crossOrigin: 'anonymous',
+		var blockID = this.IFF.reader.getIDTag();
+		var length = this.IFF.reader.getUint32(); // size of data in bytes
+		if ( length > this.IFF.reader.dv.byteLength - this.IFF.reader.offset ) {
 
-		load: function ( url, onLoad, onProgress, onError ) {
+			this.IFF.reader.offset -= 4;
+			length = this.IFF.reader.getUint16();
 
-			var self = this;
+		}
 
-			var path = ( self.path === undefined ) ? THREE.LoaderUtils.extractUrlBase( url ) : self.path;
+		this.IFF.debugger.dataOffset = this.IFF.reader.offset;
+		this.IFF.debugger.length = length;
+
+		// Data types may be found in either LWO2 OR LWO3 spec
+		switch ( blockID ) {
+
+			case 'FORM': // form blocks may consist of sub -chunks or sub-forms
+				this.IFF.parseForm( length );
+				break;
+
+			// SKIPPED CHUNKS
+			// if break; is called directly, the position in the lwoTree is not created
+			// any sub chunks and forms are added to the parent form instead
+			// MISC skipped
+			case 'ICON': // Thumbnail Icon Image
+			case 'VMPA': // Vertex Map Parameter
+			case 'BBOX': // bounding box
+			// case 'VMMD':
+			// case 'VTYP':
+
+			// normal maps can be specified, normally on models imported from other applications. Currently ignored
+			case 'NORM':
+
+			// ENVL FORM skipped
+			case 'PRE ':
+			case 'POST':
+			case 'KEY ':
+			case 'SPAN':
+
+			// CLIP FORM skipped
+			case 'TIME':
+			case 'CLRS':
+			case 'CLRA':
+			case 'FILT':
+			case 'DITH':
+			case 'CONT':
+			case 'BRIT':
+			case 'SATR':
+			case 'HUE ':
+			case 'GAMM':
+			case 'NEGA':
+			case 'IFLT':
+			case 'PFLT':
+
+			// Image Map Layer skipped
+			case 'PROJ':
+			case 'AXIS':
+			case 'AAST':
+			case 'PIXB':
+			case 'AUVO':
+			case 'STCK':
+
+			// Procedural Textures skipped
+			case 'PROC':
+			case 'VALU':
+			case 'FUNC':
+
+			// Gradient Textures skipped
+			case 'PNAM':
+			case 'INAM':
+			case 'GRST':
+			case 'GREN':
+			case 'GRPT':
+			case 'FKEY':
+			case 'IKEY':
+
+			// Texture Mapping Form skipped
+			case 'CSYS':
+
+			// Surface CHUNKs skipped
+			case 'OPAQ': // top level 'opacity' checkbox
+			case 'CMAP': // clip map
+
+			// Surface node CHUNKS skipped
+			// These mainly specify the node editor setup in LW
+			case 'NLOC':
+			case 'NZOM':
+			case 'NVER':
+			case 'NSRV':
+			case 'NVSK': // unknown
+			case 'NCRD':
+			case 'WRPW': // image wrap w ( for cylindrical and spherical projections)
+			case 'WRPH': // image wrap h
+			case 'NMOD':
+			case 'NPRW':
+			case 'NPLA':
+			case 'NODS':
+			case 'VERS':
+			case 'ENUM':
+			case 'TAG ':
+			case 'OPAC':
+
+			// Car Material CHUNKS
+			case 'CGMD':
+			case 'CGTY':
+			case 'CGST':
+			case 'CGEN':
+			case 'CGTS':
+			case 'CGTE':
+			case 'OSMP':
+			case 'OMDE':
+			case 'OUTR':
+			case 'FLAG':
+
+			case 'TRNL':
+			case 'GLOW':
+			case 'GVAL': // glow intensity
+			case 'SHRP':
+			case 'RFOP':
+			case 'RSAN':
+			case 'TROP':
+			case 'RBLR':
+			case 'TBLR':
+			case 'CLRH':
+			case 'CLRF':
+			case 'ADTR':
+			case 'LINE':
+			case 'ALPH':
+			case 'VCOL':
+			case 'ENAB':
+				this.IFF.debugger.skipped = true;
+				this.IFF.reader.skip( length );
+				break;
+
+			case 'SURF':
+				this.IFF.parseSurfaceLwo2( length );
+				break;
+
+			case 'CLIP':
+				this.IFF.parseClipLwo2( length );
+				break;
+
+			// Texture node chunks (not in spec)
+			case 'IPIX': // usePixelBlending
+			case 'IMIP': // useMipMaps
+			case 'IMOD': // imageBlendingMode
+			case 'AMOD': // unknown
+			case 'IINV': // imageInvertAlpha
+			case 'INCR': // imageInvertColor
+			case 'IAXS': // imageAxis ( for non-UV maps)
+			case 'IFOT': // imageFallofType
+			case 'ITIM': // timing for animated textures
+			case 'IWRL':
+			case 'IUTI':
+			case 'IINX':
+			case 'IINY':
+			case 'IINZ':
+			case 'IREF': // possibly a VX for reused texture nodes
+				if ( length === 4 ) this.IFF.currentNode[ blockID ] = this.IFF.reader.getInt32();
+				else this.IFF.reader.skip( length );
+				break;
+
+			case 'OTAG':
+				this.IFF.parseObjectTag();
+				break;
+
+			case 'LAYR':
+				this.IFF.parseLayer( length );
+				break;
+
+			case 'PNTS':
+				this.IFF.parsePoints( length );
+				break;
+
+			case 'VMAP':
+				this.IFF.parseVertexMapping( length );
+				break;
+
+			case 'AUVU':
+			case 'AUVN':
+				this.IFF.reader.skip( length - 1 );
+				this.IFF.reader.getVariableLengthIndex(); // VX
+				break;
+
+			case 'POLS':
+				this.IFF.parsePolygonList( length );
+				break;
+
+			case 'TAGS':
+				this.IFF.parseTagStrings( length );
+				break;
+
+			case 'PTAG':
+				this.IFF.parsePolygonTagMapping( length );
+				break;
+
+			case 'VMAD':
+				this.IFF.parseVertexMapping( length, true );
+				break;
+
+			// Misc CHUNKS
+			case 'DESC': // Description Line
+				this.IFF.currentForm.description = this.IFF.reader.getString();
+				break;
+
+			case 'TEXT':
+			case 'CMNT':
+			case 'NCOM':
+				this.IFF.currentForm.comment = this.IFF.reader.getString();
+				break;
+
+			// Envelope Form
+			case 'NAME':
+				this.IFF.currentForm.channelName = this.IFF.reader.getString();
+				break;
+
+			// Image Map Layer
+			case 'WRAP':
+				this.IFF.currentForm.wrap = { w: this.IFF.reader.getUint16(), h: this.IFF.reader.getUint16() };
+				break;
+
+			case 'IMAG':
+				var index = this.IFF.reader.getVariableLengthIndex();
+				this.IFF.currentForm.imageIndex = index;
+				break;
+
+			// Texture Mapping Form
+			case 'OREF':
+				this.IFF.currentForm.referenceObject = this.IFF.reader.getString();
+				break;
+
+			case 'ROID':
+				this.IFF.currentForm.referenceObjectID = this.IFF.reader.getUint32();
+				break;
+
+			// Surface Blocks
+			case 'SSHN':
+				this.IFF.currentSurface.surfaceShaderName = this.IFF.reader.getString();
+				break;
+
+			case 'AOVN':
+				this.IFF.currentSurface.surfaceCustomAOVName = this.IFF.reader.getString();
+				break;
+
+			// Nodal Blocks
+			case 'NSTA':
+				this.IFF.currentForm.disabled = this.IFF.reader.getUint16();
+				break;
+
+			case 'NRNM':
+				this.IFF.currentForm.realName = this.IFF.reader.getString();
+				break;
+
+			case 'NNME':
+				this.IFF.currentForm.refName = this.IFF.reader.getString();
+				this.IFF.currentSurface.nodes[ this.IFF.currentForm.refName ] = this.IFF.currentForm;
+				break;
+
+			// Nodal Blocks : connections
+			case 'INME':
+				if ( ! this.IFF.currentForm.nodeName ) this.IFF.currentForm.nodeName = [];
+				this.IFF.currentForm.nodeName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IINN':
+				if ( ! this.IFF.currentForm.inputNodeName ) this.IFF.currentForm.inputNodeName = [];
+				this.IFF.currentForm.inputNodeName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IINM':
+				if ( ! this.IFF.currentForm.inputName ) this.IFF.currentForm.inputName = [];
+				this.IFF.currentForm.inputName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IONM':
+				if ( ! this.IFF.currentForm.inputOutputName ) this.IFF.currentForm.inputOutputName = [];
+				this.IFF.currentForm.inputOutputName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'FNAM':
+				this.IFF.currentForm.fileName = this.IFF.reader.getString();
+				break;
+
+			case 'CHAN': // NOTE: ENVL Forms may also have CHAN chunk, however ENVL is currently ignored
+				if ( length === 4 ) this.IFF.currentForm.textureChannel = this.IFF.reader.getIDTag();
+				else this.IFF.reader.skip( length );
+				break;
+
+			// LWO2 Spec chunks: these are needed since the SURF FORMs are often in LWO2 format
+			case 'SMAN':
+				var maxSmoothingAngle = this.IFF.reader.getFloat32();
+				this.IFF.currentSurface.attributes.smooth = ( maxSmoothingAngle < 0 ) ? false : true;
+				break;
+
+			// LWO2: Basic Surface Parameters
+			case 'COLR':
+				this.IFF.currentSurface.attributes.Color = { value: this.IFF.reader.getFloat32Array( 3 ) };
+				this.IFF.reader.skip( 2 ); // VX: envelope
+				break;
+
+			case 'LUMI':
+				this.IFF.currentSurface.attributes.Luminosity = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'SPEC':
+				this.IFF.currentSurface.attributes.Specular = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'DIFF':
+				this.IFF.currentSurface.attributes.Diffuse = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'REFL':
+				this.IFF.currentSurface.attributes.Reflection = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'GLOS':
+				this.IFF.currentSurface.attributes.Glossiness = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TRAN':
+				this.IFF.currentSurface.attributes.opacity = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'BUMP':
+				this.IFF.currentSurface.attributes.bumpStrength = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'SIDE':
+				this.IFF.currentSurface.attributes.side = this.IFF.reader.getUint16();
+				break;
+
+			case 'RIMG':
+				this.IFF.currentSurface.attributes.reflectionMap = this.IFF.reader.getVariableLengthIndex();
+				break;
+
+			case 'RIND':
+				this.IFF.currentSurface.attributes.refractiveIndex = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TIMG':
+				this.IFF.currentSurface.attributes.refractionMap = this.IFF.reader.getVariableLengthIndex();
+				break;
+
+			case 'IMAP':
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TMAP':
+				this.IFF.debugger.skipped = true;
+				this.IFF.reader.skip( length ); // needs implementing
+				break;
+
+			case 'IUVI': // uv channel name
+				this.IFF.currentNode.UVChannel = this.IFF.reader.getString( length );
+				break;
+
+			case 'IUTL': // widthWrappingMode: 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
+				this.IFF.currentNode.widthWrappingMode = this.IFF.reader.getUint32();
+				break;
+			case 'IVTL': // heightWrappingMode
+				this.IFF.currentNode.heightWrappingMode = this.IFF.reader.getUint32();
+				break;
+
+			// LWO2 USE
+			case 'BLOK':
+				// skip
+				break;
+
+			default:
+				this.IFF.parseUnknownCHUNK( blockID, length );
 
-			// give the mesh a default name based on the filename
-			var modelName = url.split( path ).pop().split( '.' )[ 0 ];
+		}
 
-			var loader = new THREE.FileLoader( this.manager );
-			loader.setPath( self.path );
-			loader.setResponseType( 'arraybuffer' );
+		if ( blockID != 'FORM' ) {
 
-			loader.load( url, function ( buffer ) {
+			this.IFF.debugger.node = 1;
+			this.IFF.debugger.nodeID = blockID;
+			this.IFF.debugger.log();
 
-				// console.time( 'Total parsing: ' );
-				onLoad( self.parse( buffer, path, modelName ) );
-				// console.timeEnd( 'Total parsing: ' );
+		}
 
-			}, onProgress, onError );
+		if ( this.IFF.reader.offset >= this.IFF.currentFormEnd ) {
 
-		},
+			this.IFF.currentForm = this.IFF.parentForm;
 
-		setCrossOrigin: function ( value ) {
+		}
 
-			this.crossOrigin = value;
-			return this;
+	}
 
-		},
+};
+
+function LWO3Parser( IFFParser ) {
+
+	this.IFF = IFFParser;
+
+}
+
+LWO3Parser.prototype = {
+
+	constructor: LWO3Parser,
+
+	parseBlock: function () {
+
+		this.IFF.debugger.offset = this.IFF.reader.offset;
+		this.IFF.debugger.closeForms();
+
+		var blockID = this.IFF.reader.getIDTag();
+		var length = this.IFF.reader.getUint32(); // size of data in bytes
+
+		this.IFF.debugger.dataOffset = this.IFF.reader.offset;
+		this.IFF.debugger.length = length;
+
+		// Data types may be found in either LWO2 OR LWO3 spec
+		switch ( blockID ) {
+
+			case 'FORM': // form blocks may consist of sub -chunks or sub-forms
+				this.IFF.parseForm( length );
+				break;
+
+			// SKIPPED CHUNKS
+			// MISC skipped
+			case 'ICON': // Thumbnail Icon Image
+			case 'VMPA': // Vertex Map Parameter
+			case 'BBOX': // bounding box
+			// case 'VMMD':
+			// case 'VTYP':
+
+			// normal maps can be specified, normally on models imported from other applications. Currently ignored
+			case 'NORM':
+
+			// ENVL FORM skipped
+			case 'PRE ':
+			case 'POST':
+			case 'KEY ':
+			case 'SPAN':
+
+			// CLIP FORM skipped
+			case 'TIME':
+			case 'CLRS':
+			case 'CLRA':
+			case 'FILT':
+			case 'DITH':
+			case 'CONT':
+			case 'BRIT':
+			case 'SATR':
+			case 'HUE ':
+			case 'GAMM':
+			case 'NEGA':
+			case 'IFLT':
+			case 'PFLT':
+
+			// Image Map Layer skipped
+			case 'PROJ':
+			case 'AXIS':
+			case 'AAST':
+			case 'PIXB':
+			case 'STCK':
+
+			// Procedural Textures skipped
+			case 'VALU':
+
+			// Gradient Textures skipped
+			case 'PNAM':
+			case 'INAM':
+			case 'GRST':
+			case 'GREN':
+			case 'GRPT':
+			case 'FKEY':
+			case 'IKEY':
+
+			// Texture Mapping Form skipped
+			case 'CSYS':
+
+				// Surface CHUNKs skipped
+			case 'OPAQ': // top level 'opacity' checkbox
+			case 'CMAP': // clip map
+
+			// Surface node CHUNKS skipped
+			// These mainly specify the node editor setup in LW
+			case 'NLOC':
+			case 'NZOM':
+			case 'NVER':
+			case 'NSRV':
+			case 'NCRD':
+			case 'NMOD':
+			case 'NSEL':
+			case 'NPRW':
+			case 'NPLA':
+			case 'VERS':
+			case 'ENUM':
+			case 'TAG ':
+
+			// Car Material CHUNKS
+			case 'CGMD':
+			case 'CGTY':
+			case 'CGST':
+			case 'CGEN':
+			case 'CGTS':
+			case 'CGTE':
+			case 'OSMP':
+			case 'OMDE':
+			case 'OUTR':
+			case 'FLAG':
+
+			case 'TRNL':
+			case 'GLOS':
+			case 'SHRP':
+			case 'RFOP':
+			case 'RSAN':
+			case 'TROP':
+			case 'RBLR':
+			case 'TBLR':
+			case 'CLRH':
+			case 'CLRF':
+			case 'ADTR':
+			case 'GLOW':
+			case 'LINE':
+			case 'ALPH':
+			case 'VCOL':
+			case 'ENAB':
+				this.IFF.debugger.skipped = true;
+				this.IFF.reader.skip( length );
+				break;
+
+			// Texture node chunks (not in spec)
+			case 'IPIX': // usePixelBlending
+			case 'IMIP': // useMipMaps
+			case 'IMOD': // imageBlendingMode
+			case 'AMOD': // unknown
+			case 'IINV': // imageInvertAlpha
+			case 'INCR': // imageInvertColor
+			case 'IAXS': // imageAxis ( for non-UV maps)
+			case 'IFOT': // imageFallofType
+			case 'ITIM': // timing for animated textures
+			case 'IWRL':
+			case 'IUTI':
+			case 'IINX':
+			case 'IINY':
+			case 'IINZ':
+			case 'IREF': // possibly a VX for reused texture nodes
+				if ( length === 4 ) this.IFF.currentNode[ blockID ] = this.IFF.reader.getInt32();
+				else this.IFF.reader.skip( length );
+				break;
+
+			case 'OTAG':
+				this.IFF.parseObjectTag();
+				break;
+
+			case 'LAYR':
+				this.IFF.parseLayer( length );
+				break;
+
+			case 'PNTS':
+				this.IFF.parsePoints( length );
+				break;
+
+			case 'VMAP':
+				this.IFF.parseVertexMapping( length );
+				break;
+
+			case 'POLS':
+				this.IFF.parsePolygonList( length );
+				break;
+
+			case 'TAGS':
+				this.IFF.parseTagStrings( length );
+				break;
+
+			case 'PTAG':
+				this.IFF.parsePolygonTagMapping( length );
+				break;
+
+			case 'VMAD':
+				this.IFF.parseVertexMapping( length, true );
+				break;
+
+			// Misc CHUNKS
+			case 'DESC': // Description Line
+				this.IFF.currentForm.description = this.IFF.reader.getString();
+				break;
+
+			case 'TEXT':
+			case 'CMNT':
+			case 'NCOM':
+				this.IFF.currentForm.comment = this.IFF.reader.getString();
+				break;
+
+			// Envelope Form
+			case 'NAME':
+				this.IFF.currentForm.channelName = this.IFF.reader.getString();
+				break;
+
+			// Image Map Layer
+			case 'WRAP':
+				this.IFF.currentForm.wrap = { w: this.IFF.reader.getUint16(), h: this.IFF.reader.getUint16() };
+				break;
+
+			case 'IMAG':
+				var index = this.IFF.reader.getVariableLengthIndex();
+				this.IFF.currentForm.imageIndex = index;
+				break;
+
+			// Texture Mapping Form
+			case 'OREF':
+				this.IFF.currentForm.referenceObject = this.IFF.reader.getString();
+				break;
+
+			case 'ROID':
+				this.IFF.currentForm.referenceObjectID = this.IFF.reader.getUint32();
+				break;
+
+			// Surface Blocks
+			case 'SSHN':
+				this.IFF.currentSurface.surfaceShaderName = this.IFF.reader.getString();
+				break;
+
+			case 'AOVN':
+				this.IFF.currentSurface.surfaceCustomAOVName = this.IFF.reader.getString();
+				break;
+
+			// Nodal Blocks
+			case 'NSTA':
+				this.IFF.currentForm.disabled = this.IFF.reader.getUint16();
+				break;
+
+			case 'NRNM':
+				this.IFF.currentForm.realName = this.IFF.reader.getString();
+				break;
+
+			case 'NNME':
+				this.IFF.currentForm.refName = this.IFF.reader.getString();
+				this.IFF.currentSurface.nodes[ this.IFF.currentForm.refName ] = this.IFF.currentForm;
+				break;
+
+			// Nodal Blocks : connections
+			case 'INME':
+				if ( ! this.IFF.currentForm.nodeName ) this.IFF.currentForm.nodeName = [];
+				this.IFF.currentForm.nodeName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IINN':
+				if ( ! this.IFF.currentForm.inputNodeName ) this.IFF.currentForm.inputNodeName = [];
+				this.IFF.currentForm.inputNodeName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IINM':
+				if ( ! this.IFF.currentForm.inputName ) this.IFF.currentForm.inputName = [];
+				this.IFF.currentForm.inputName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IONM':
+				if ( ! this.IFF.currentForm.inputOutputName ) this.IFF.currentForm.inputOutputName = [];
+				this.IFF.currentForm.inputOutputName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'FNAM':
+				this.IFF.currentForm.fileName = this.IFF.reader.getString();
+				break;
+
+			case 'CHAN': // NOTE: ENVL Forms may also have CHAN chunk, however ENVL is currently ignored
+				if ( length === 4 ) this.IFF.currentForm.textureChannel = this.IFF.reader.getIDTag();
+				else this.IFF.reader.skip( length );
+				break;
+
+			// LWO2 Spec chunks: these are needed since the SURF FORMs are often in LWO2 format
+			case 'SMAN':
+				var maxSmoothingAngle = this.IFF.reader.getFloat32();
+				this.IFF.currentSurface.attributes.smooth = ( maxSmoothingAngle < 0 ) ? false : true;
+				break;
+
+			// LWO2: Basic Surface Parameters
+			case 'COLR':
+				this.IFF.currentSurface.attributes.Color = { value: this.IFF.reader.getFloat32Array( 3 ) };
+				this.IFF.reader.skip( 2 ); // VX: envelope
+				break;
+
+			case 'LUMI':
+				this.IFF.currentSurface.attributes.Luminosity = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'SPEC':
+				this.IFF.currentSurface.attributes.Specular = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'DIFF':
+				this.IFF.currentSurface.attributes.Diffuse = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'REFL':
+				this.IFF.currentSurface.attributes.Reflection = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'GLOS':
+				this.IFF.currentSurface.attributes.Glossiness = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TRAN':
+				this.IFF.currentSurface.attributes.opacity = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'BUMP':
+				this.IFF.currentSurface.attributes.bumpStrength = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'SIDE':
+				this.IFF.currentSurface.attributes.side = this.IFF.reader.getUint16();
+				break;
+
+			case 'RIMG':
+				this.IFF.currentSurface.attributes.reflectionMap = this.IFF.reader.getVariableLengthIndex();
+				break;
+
+			case 'RIND':
+				this.IFF.currentSurface.attributes.refractiveIndex = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TIMG':
+				this.IFF.currentSurface.attributes.refractionMap = this.IFF.reader.getVariableLengthIndex();
+				break;
+
+			case 'IMAP':
+				this.IFF.currentSurface.attributes.imageMapIndex = this.IFF.reader.getUint32();
+				break;
+
+			case 'IUVI': // uv channel name
+				this.IFF.currentNode.UVChannel = this.IFF.reader.getString( length );
+				break;
+
+			case 'IUTL': // widthWrappingMode: 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
+				this.IFF.currentNode.widthWrappingMode = this.IFF.reader.getUint32();
+				break;
+			case 'IVTL': // heightWrappingMode
+				this.IFF.currentNode.heightWrappingMode = this.IFF.reader.getUint32();
+				break;
+
+			default:
+				this.IFF.parseUnknownCHUNK( blockID, length );
 
-		setPath: function ( value ) {
+		}
 
-			this.path = value;
-			return this;
+		if ( blockID != 'FORM' ) {
 
-		},
+			this.IFF.debugger.node = 1;
+			this.IFF.debugger.nodeID = blockID;
+			this.IFF.debugger.log();
 
-		setResourcePath: function ( value ) {
+		}
 
-			this.resourcePath = value;
-			return this;
+		if ( this.IFF.reader.offset >= this.IFF.currentFormEnd ) {
 
-		},
+			this.IFF.currentForm = this.IFF.parentForm;
 
-		parse: function ( iffBuffer, path, modelName ) {
+		}
 
-			lwoTree = new IFFParser().parse( iffBuffer );
+	}
 
-			// console.log( 'lwoTree', lwoTree );
+};
 
-			var textureLoader = new THREE.TextureLoader( this.manager ).setPath( this.resourcePath || path ).setCrossOrigin( this.crossOrigin );
+/**
+ * === IFFParser ===
+ * - Parses data from the IFF buffer.
+ * - LWO3 files are in IFF format and can contain the following data types, referred to by shorthand codes
+ *
+ * ATOMIC DATA TYPES
+ *  ID Tag - 4x 7 bit uppercase ASCII chars: ID4
+ *  signed integer, 1, 2, or 4 byte length: I1, I2, I4
+ *  unsigned integer, 1, 2, or 4 byte length: U1, U2, U4
+ *  float, 4 byte length: F4
+ *  string, series of ASCII chars followed by null byte (If the length of the string including the null terminating byte is odd, an extra null is added so that the data that follows will begin on an even byte boundary): S0
+ *
+ * COMPOUND DATA TYPES
+ *  Variable-length Index (index into an array or collection): U2 or U4 : VX
+ *  Color (RGB): F4 + F4 + F4: COL12
+ *  Coordinate (x, y, z): F4 + F4 + F4: VEC12
+ *  Percentage F4 data type from 0->1 with 1 = 100%: FP4
+ *  Angle in radian F4: ANG4
+ *  Filename (string) S0: FNAM0
+ *  XValue F4 + index (VX) + optional envelope( ENVL ): XVAL
+ *  XValue vector VEC12 + index (VX) + optional envelope( ENVL ): XVAL3
+ *
+ *  The IFF file is arranged in chunks:
+ *  CHUNK = ID4 + length (U4) + length X bytes of data + optional 0 pad byte
+ *  optional 0 pad byte is there to ensure chunk ends on even boundary, not counted in size
+ *
+ * COMPOUND DATA TYPES
+ * - Chunks are combined in Forms (collections of chunks)
+ * - FORM = string 'FORM' (ID4) + length (U4) + type (ID4) + optional ( CHUNK | FORM )
+ * - CHUNKS and FORMS are collectively referred to as blocks
+ * - The entire file is contained in one top level FORM
+ *
+ **/
 
-			return new LWOTreeParser( textureLoader ).parse( modelName );
+function IFFParser( ) {
 
-		}
+	this.debugger = new Debugger();
+	// this.debugger.enable(); // un-comment to log IFF hierarchy.
 
-	};
+}
 
-	// Parse the lwoTree object
-	function LWOTreeParser( textureLoader ) {
+IFFParser.prototype = {
 
-		this.textureLoader = textureLoader;
+	constructor: IFFParser,
 
-	}
+	parse: function ( buffer ) {
 
-	LWOTreeParser.prototype = {
+		this.reader = new DataViewReader( buffer );
 
-		constructor: LWOTreeParser,
+		this.tree = {
+			materials: {},
+			layers: [],
+			tags: [],
+			textures: [],
+		};
 
-		parse: function ( modelName ) {
+		// start out at the top level to add any data before first layer is encountered
+		this.currentLayer = this.tree;
+		this.currentForm = this.tree;
 
-			this.materials = new MaterialParser( this.textureLoader ).parse();
-			this.defaultLayerName = modelName;
+		this.parseTopForm();
 
-			this.meshes = this.parseLayers();
+		if ( this.tree.format === undefined ) return;
 
-			return {
-				materials: this.materials,
-				meshes: this.meshes,
-			};
+		if ( this.tree.format === 'LWO2' ) {
 
-		},
+			this.parser = new LWO2Parser( this );
+			while ( ! this.reader.endOfFile() ) this.parser.parseBlock();
 
-		parseLayers() {
+		} else if ( this.tree.format === 'LWO3' ) {
 
-			// array of all meshes for building hierarchy
-			var meshes = [];
+			this.parser = new LWO3Parser( this );
+			while ( ! this.reader.endOfFile() ) this.parser.parseBlock();
 
-			// final array containing meshes with scene graph hierarchy set up
-			var finalMeshes = [];
+		}
 
-			var geometryParser = new GeometryParser();
+		this.debugger.offset = this.reader.offset;
+		this.debugger.closeForms();
 
-			var self = this;
-			lwoTree.layers.forEach( function ( layer ) {
+		return this.tree;
 
-				var geometry = geometryParser.parse( layer.geometry, layer );
+	},
 
-				var mesh = self.parseMesh( geometry, layer );
+	parseTopForm() {
 
-				meshes[ layer.number ] = mesh;
+		this.debugger.offset = this.reader.offset;
 
-				if ( layer.parent === - 1 ) finalMeshes.push( mesh );
-				else meshes[ layer.parent ].add( mesh );
+		var topForm = this.reader.getIDTag();
 
+		if ( topForm !== 'FORM' ) {
 
-			} );
+			console.warn( "LWOLoader: Top-level FORM missing." );
+			return;
 
-			this.applyPivots( finalMeshes );
+		}
 
-			return finalMeshes;
+		var length = this.reader.getUint32();
 
-		},
+		this.debugger.dataOffset = this.reader.offset;
+		this.debugger.length = length;
 
-		parseMesh( geometry, layer ) {
+		var type = this.reader.getIDTag();
 
-			var mesh;
+		if ( type === 'LWO2' ) {
 
-			var materials = this.getMaterials( geometry.userData.matNames, layer.geometry.type );
+			this.tree.format = type;
 
-			this.duplicateUVs( geometry, materials );
+		} else if ( type === 'LWO3' ) {
 
-			if ( layer.geometry.type === 'points' ) mesh = new THREE.Points( geometry, materials );
-			else if ( layer.geometry.type === 'lines' ) mesh = new THREE.LineSegments( geometry, materials );
-			else mesh = new THREE.Mesh( geometry, materials );
+			this.tree.format = type;
 
-			if ( layer.name ) mesh.name = layer.name;
-			else mesh.name = this.defaultLayerName + '_layer_' + layer.number;
+		}
 
-			mesh.userData.pivot = layer.pivot;
+		this.debugger.node = 0;
+		this.debugger.nodeID = type;
+		this.debugger.log();
+
+		return;
+
+	},
+
+
+	///
+	// FORM PARSING METHODS
+	///
+
+	// Forms are organisational and can contain any number of sub chunks and sub forms
+	// FORM ::= 'FORM'[ID4], length[U4], type[ID4], ( chunk[CHUNK] | form[FORM] ) * }
+	parseForm( length ) {
+
+		var type = this.reader.getIDTag();
+
+		switch ( type ) {
+
+			// SKIPPED FORMS
+			// if skipForm( length ) is called, the entire form and any sub forms and chunks are skipped
+
+			case 'ISEQ': // Image sequence
+			case 'ANIM': // plug in animation
+			case 'STCC': // Color-cycling Still
+			case 'VPVL':
+			case 'VPRM':
+			case 'NROT':
+			case 'WRPW': // image wrap w ( for cylindrical and spherical projections)
+			case 'WRPH': // image wrap h
+			case 'FUNC':
+			case 'FALL':
+			case 'OPAC':
+			case 'GRAD': // gradient texture
+			case 'ENVS':
+			case 'VMOP':
+			case 'VMBG':
+
+			// Car Material FORMS
+			case 'OMAX':
+			case 'STEX':
+			case 'CKBG':
+			case 'CKEY':
+			case 'VMLA':
+			case 'VMLB':
+				this.debugger.skipped = true;
+				this.skipForm( length ); // not currently supported
+				break;
+
+			// if break; is called directly, the position in the lwoTree is not created
+			// any sub chunks and forms are added to the parent form instead
+			case 'META':
+			case 'NNDS':
+			case 'NODS':
+			case 'NDTA':
+			case 'ADAT':
+			case 'AOVS':
+			case 'BLOK':
+
+			// used by texture nodes
+			case 'IBGC': // imageBackgroundColor
+			case 'IOPC': // imageOpacity
+			case 'IIMG': // hold reference to image path
+			case 'TXTR':
+				// this.setupForm( type, length );
+				this.debugger.length = 4;
+				this.debugger.skipped = true;
+				break;
+
+			case 'IFAL': // imageFallof
+			case 'ISCL': // imageScale
+			case 'IPOS': // imagePosition
+			case 'IROT': // imageRotation
+			case 'IBMP':
+			case 'IUTD':
+			case 'IVTD':
+				this.parseTextureNodeAttribute( type );
+				break;
+
+			case 'ENVL':
+				this.parseEnvelope( length );
+				break;
+
+				// CLIP FORM AND SUB FORMS
+
+			case 'CLIP':
+				if ( this.tree.format === 'LWO2' ) {
 
-			return mesh;
+					this.parseForm( length );
 
-		},
+				} else {
 
-		// TODO: may need to be reversed in z to convert LWO to three.js coordinates
-		applyPivots( meshes ) {
+					this.parseClip( length );
 
-			meshes.forEach( function ( mesh ) {
+				}
+				break;
 
-				mesh.traverse( function ( child ) {
+			case 'STIL':
+				this.parseImage();
+				break;
 
-					var pivot = child.userData.pivot;
+			case 'XREF': // clone of another STIL
+				this.reader.skip( 8 ); // unknown
+				this.currentForm.referenceTexture = {
+					index: this.reader.getUint32(),
+					refName: this.reader.getString() // internal unique ref
+				};
+				break;
 
-					child.position.x += pivot[ 0 ];
-					child.position.y += pivot[ 1 ];
-					child.position.z += pivot[ 2 ];
+				// Not in spec, used by texture nodes
 
-					if ( child.parent ) {
+			case 'IMST':
+				this.parseImageStateForm( length );
+				break;
 
-						var parentPivot = child.parent.userData.pivot;
+				// SURF FORM AND SUB FORMS
 
-						child.position.x -= parentPivot[ 0 ];
-						child.position.y -= parentPivot[ 1 ];
-						child.position.z -= parentPivot[ 2 ];
+			case 'SURF':
+				this.parseSurfaceForm( length );
+				break;
 
-					}
+			case 'VALU': // Not in spec
+				this.parseValueForm( length );
+				break;
 
-				} );
+			case 'NTAG':
+				this.parseSubNode( length );
+				break;
 
-			} );
+			case 'ATTR': // BSDF Node Attributes
+			case 'SATR': // Standard Node Attributes
+				this.setupForm( 'attributes', length );
+				break;
 
-		},
+			case 'NCON':
+				this.parseConnections( length );
+				break;
 
-		getMaterials( namesArray, type ) {
+			case 'SSHA':
+				this.parentForm = this.currentForm;
+				this.currentForm = this.currentSurface;
+				this.setupForm( 'surfaceShader', length );
+				break;
 
-			var materials = [];
+			case 'SSHD':
+				this.setupForm( 'surfaceShaderData', length );
+				break;
 
-			var self = this;
+			case 'ENTR': // Not in spec
+				this.parseEntryForm( length );
+				break;
 
-			namesArray.forEach( function ( name, i ) {
+				// Image Map Layer
 
-				materials[ i ] = self.getMaterialByName( name );
+			case 'IMAP':
+				this.parseImageMap( length );
+				break;
 
-			} );
+			case 'TAMP':
+				this.parseXVAL( 'amplitude', length );
+				break;
 
-			// convert materials to line or point mats if required
-			if ( type === 'points' || type === 'lines' ) {
+				//Texture Mapping Form
 
-				materials.forEach( function ( mat, i ) {
+			case 'TMAP':
+				this.setupForm( 'textureMap', length );
+				break;
 
-					var spec = {
-						color: mat.color,
-					};
+			case 'CNTR':
+				this.parseXVAL3( 'center', length );
+				break;
 
-					if ( type === 'points' ) {
+			case 'SIZE':
+				this.parseXVAL3( 'scale', length );
+				break;
 
-						spec.size = 0.1;
-						spec.map = mat.map;
-						spec.morphTargets = mat.morphTargets;
-						materials[ i ] = new THREE.PointsMaterial( spec );
+			case 'ROTA':
+				this.parseXVAL3( 'rotation', length );
+				break;
 
-					} else if ( type === 'lines' ) {
+			default:
+				this.parseUnknownForm( type, length );
 
-						materials[ i ] = new THREE.LineBasicMaterial( spec );
+		}
 
-					}
+		this.debugger.node = 0;
+		this.debugger.nodeID = type;
+		this.debugger.log();
 
-				} );
+	},
 
-			}
+	setupForm( type, length ) {
 
-			// if there is only one material, return that directly instead of array
-			var filtered = materials.filter( Boolean );
-			if ( filtered.length === 1 ) return filtered[ 0 ];
+		if ( ! this.currentForm ) this.currentForm = this.currentNode;
 
-			return materials;
+		this.currentFormEnd = this.reader.offset + length;
+		this.parentForm = this.currentForm;
 
-		},
+		if ( ! this.currentForm[ type ] ) {
 
-		getMaterialByName( name ) {
+			this.currentForm[ type ] = {};
+			this.currentForm = this.currentForm[ type ];
 
-			return this.materials.filter( function ( m ) {
 
-				return m.name === name;
+		} else {
 
-			} )[ 0 ];
+			// should never see this unless there's a bug in the reader
+			console.warn( 'LWOLoader: form already exists on parent: ', type, this.currentForm );
 
-		},
+			this.currentForm = this.currentForm[ type ];
 
-		// If the material has an aoMap, duplicate UVs
-		duplicateUVs( geometry, materials ) {
+		}
 
-			var duplicateUVs = false;
 
-			if ( ! Array.isArray( materials ) ) {
+	},
 
-				if ( materials.aoMap ) duplicateUVs = true;
+	skipForm( length ) {
 
-			} else {
+		this.reader.skip( length - 4 );
 
-				materials.forEach( function ( material ) {
+	},
 
-					if ( material.aoMap ) duplicateUVs = true;
+	parseUnknownForm( type, length ) {
 
-				} );
+		console.warn( 'LWOLoader: unknown FORM encountered: ' + type, length );
 
-			}
+		printBuffer( this.reader.dv.buffer, this.reader.offset, length - 4 );
+		this.reader.skip( length - 4 );
 
-			if ( ! duplicateUVs ) return;
+	},
 
-			geometry.addAttribute( 'uv2', new THREE.BufferAttribute( geometry.attributes.uv.array, 2 ) );
+	parseSurfaceForm( length ) {
 
-		},
+		this.reader.skip( 8 ); // unknown Uint32 x2
 
-	};
+		var name = this.reader.getString();
 
-	function MaterialParser( textureLoader ) {
+		var surface = {
+			attributes: {}, // LWO2 style non-node attributes will go here
+			connections: {},
+			name: name,
+			inputName: name,
+			nodes: {},
+			source: this.reader.getString(),
+		};
 
-		this.textureLoader = textureLoader;
+		this.tree.materials[ name ] = surface;
+		this.currentSurface = surface;
 
-	}
+		this.parentForm = this.tree.materials;
+		this.currentForm = surface;
+		this.currentFormEnd = this.reader.offset + length;
 
-	MaterialParser.prototype = {
+	},
 
-		constructor: MaterialParser,
+	parseSurfaceLwo2( length ) {
 
-		parse: function () {
+		var name = this.reader.getString();
 
-			var materials = [];
-			this.textures = {};
+		var surface = {
+			attributes: {}, // LWO2 style non-node attributes will go here
+			connections: {},
+			name: name,
+			nodes: {},
+			source: this.reader.getString(),
+		};
 
-			for ( var name in lwoTree.materials ) {
+		this.tree.materials[ name ] = surface;
+		this.currentSurface = surface;
 
-				if ( lwoTree.format === 'LWO3' ) {
+		this.parentForm = this.tree.materials;
+		this.currentForm = surface;
+		this.currentFormEnd = this.reader.offset + length;
 
-					materials.push( this.parseMaterial( lwoTree.materials[ name ], name, lwoTree.textures ) );
+	},
 
-				} else if ( lwoTree.format === 'LWO2' ) {
+	parseSubNode( length ) {
 
-					materials.push( this.parseMaterialLwo2( lwoTree.materials[ name ], name /*, lwoTree.textures */ ) );
+		// parse the NRNM CHUNK of the subnode FORM to get
+		// a meaningful name for the subNode
+		// some subnodes can be renamed, but Input and Surface cannot
 
-				}
+		this.reader.skip( 8 ); // NRNM + length
+		var name = this.reader.getString();
 
-			}
+		var node = {
+			name: name
+		};
+		this.currentForm = node;
+		this.currentNode = node;
 
-			return materials;
+		this.currentFormEnd = this.reader.offset + length;
 
-		},
 
-		parseMaterial( materialData, name, textures ) {
+	},
 
-			var params = {
-				name: name,
-				side: this.getSide( materialData.attributes ),
-				flatShading: this.getSmooth( materialData.attributes ),
-			};
+	// collect attributes from all nodes at the top level of a surface
+	parseConnections( length ) {
 
-			var connections = this.parseConnections( materialData.connections, materialData.nodes );
+		this.currentFormEnd = this.reader.offset + length;
+		this.parentForm = this.currentForm;
 
-			var maps = this.parseTextureNodes( connections.maps );
+		this.currentForm = this.currentSurface.connections;
 
-			this.parseAttributeImageMaps( connections.attributes, textures, maps, materialData.maps );
+	},
 
-			var attributes = this.parseAttributes( connections.attributes, maps );
+	// surface node attribute data, e.g. specular, roughness etc
+	parseEntryForm( length ) {
 
-			this.parseEnvMap( connections, maps, attributes );
+		this.reader.skip( 8 ); // NAME + length
+		var name = this.reader.getString();
+		this.currentForm = this.currentNode.attributes;
 
-			params = Object.assign( maps, params );
-			params = Object.assign( params, attributes );
+		this.setupForm( name, length );
 
-			var materialCtor = connections.attributes.Roughness ? THREE.MeshStandardMaterial : THREE.MeshPhongMaterial;
+	},
 
-			return new materialCtor( params );
+	// parse values from material - doesn't match up to other LWO3 data types
+	// sub form of entry form
+	parseValueForm() {
 
-		},
+		this.reader.skip( 8 ); // unknown + length
 
-		parseMaterialLwo2( materialData, name /*, textures */ ) {
+		var valueType = this.reader.getString();
 
-			var params = {
-				name: name,
-				side: this.getSide( materialData.attributes ),
-				flatShading: this.getSmooth( materialData.attributes ),
-			};
+		if ( valueType === 'double' ) {
 
-			var attributes = this.parseAttributes( materialData.attributes, {} );
-			params = Object.assign( params, attributes );
-			return new THREE.MeshPhongMaterial( params );
+			this.currentForm.value = this.reader.getUint64();
 
-		},
+		} else if ( valueType === 'int' ) {
 
-		// Note: converting from left to right handed coords by switching x -> -x in vertices, and
-		// then switching mat FrontSide -> BackSide
-		// NB: this means that THREE.FrontSide and THREE.BackSide have been switched!
-		getSide( attributes ) {
+			this.currentForm.value = this.reader.getUint32();
 
-			if ( ! attributes.side ) return THREE.BackSide;
+		} else if ( valueType === 'vparam' ) {
 
-			switch ( attributes.side ) {
+			this.reader.skip( 24 );
+			this.currentForm.value = this.reader.getFloat64();
 
-				case 0:
-				case 1:
-					return THREE.BackSide;
-				case 2: return THREE.FrontSide;
-				case 3: return THREE.DoubleSide;
+		} else if ( valueType === 'vparam3' ) {
 
-			}
+			this.reader.skip( 24 );
+			this.currentForm.value = this.reader.getFloat64Array( 3 );
 
-		},
+		}
 
-		getSmooth( attributes ) {
+	},
 
-			if ( ! attributes.smooth ) return true;
-			return ! attributes.smooth;
+	// holds various data about texture node image state
+	// Data other than mipMapLevel unknown
+	parseImageStateForm() {
 
-		},
+		this.reader.skip( 8 ); // unknown
 
-		parseConnections( connections, nodes ) {
+		this.currentForm.mipMapLevel = this.reader.getFloat32();
 
-			var materialConnections = {
-				maps: {}
-			};
+	},
 
-			var inputName = connections.inputName;
-			var inputNodeName = connections.inputNodeName;
-			var nodeName = connections.nodeName;
+	// LWO2 style image data node OR LWO3 textures defined at top level in editor (not as SURF node)
+	parseImageMap( length ) {
 
-			var self = this;
-			inputName.forEach( function ( name, index ) {
+		this.currentFormEnd = this.reader.offset + length;
+		this.parentForm = this.currentForm;
 
-				if ( name === 'Material' ) {
+		if ( ! this.currentForm.maps ) this.currentForm.maps = [];
 
-					var matNode = self.getNodeByRefName( inputNodeName[ index ], nodes );
-					materialConnections.attributes = matNode.attributes;
-					materialConnections.envMap = matNode.fileName;
-					materialConnections.name = inputNodeName[ index ];
+		var map = {};
+		this.currentForm.maps.push( map );
+		this.currentForm = map;
 
-				}
+		this.reader.skip( 10 ); // unknown, could be an issue if it contains a VX
 
-			} );
+	},
 
-			nodeName.forEach( function ( name, index ) {
+	parseTextureNodeAttribute( type ) {
 
-				if ( name === materialConnections.name ) {
+		this.reader.skip( 28 ); // FORM + length + VPRM + unknown + Uint32 x2 + float32
 
-					materialConnections.maps[ inputName[ index ] ] = self.getNodeByRefName( inputNodeName[ index ], nodes );
+		this.reader.skip( 20 ); // FORM + length + VPVL + float32 + Uint32
 
-				}
+		switch ( type ) {
 
-			} );
+			case 'ISCL':
+				this.currentNode.scale = this.reader.getFloat32Array( 3 );
+				break;
+			case 'IPOS':
+				this.currentNode.position = this.reader.getFloat32Array( 3 );
+				break;
+			case 'IROT':
+				this.currentNode.rotation = this.reader.getFloat32Array( 3 );
+				break;
+			case 'IFAL':
+				this.currentNode.falloff = this.reader.getFloat32Array( 3 );
+				break;
 
-			return materialConnections;
+			case 'IBMP':
+				this.currentNode.amplitude = this.reader.getFloat32();
+				break;
+			case 'IUTD':
+				this.currentNode.uTiles = this.reader.getFloat32();
+				break;
+			case 'IVTD':
+				this.currentNode.vTiles = this.reader.getFloat32();
+				break;
 
-		},
+		}
 
-		getNodeByRefName( refName, nodes ) {
+		this.reader.skip( 2 ); // unknown
 
-			for ( var name in nodes ) {
 
-				if ( nodes[ name ].refName === refName ) return nodes[ name ];
+	},
 
-			}
+	// ENVL forms are currently ignored
+	parseEnvelope( length ) {
 
-		},
+		this.reader.skip( length - 4 ); // skipping entirely for now
 
-		parseTextureNodes( textureNodes ) {
+	},
 
-			var maps = {};
+	///
+	// CHUNK PARSING METHODS
+	///
 
-			for ( var name in textureNodes ) {
+	// clips can either be defined inside a surface node, or at the top
+	// level and they have a different format in each case
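+	// Illustrative summary of the two layouts handled below ( values are not from the spec text ):
+	// a clip nested inside a surface node starts with a FORM tag and only carries a file name,
+	// while a top level CLIP carries an index that image map chunks later refer back to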
+	parseClip( length ) {
 
-				var node = textureNodes[ name ];
-				var path = node.fileName;
+		var tag = this.reader.getIDTag();
 
-				if ( ! path ) return;
+		// inside surface node
+		if ( tag === 'FORM' ) {
 
-				var texture = this.loadTexture( path );
+			this.reader.skip( 16 );
 
-				if ( node.widthWrappingMode !== undefined ) texture.wrapS = this.getWrappingType( node.widthWrappingMode );
-				if ( node.heightWrappingMode !== undefined ) texture.wrapT = this.getWrappingType( node.heightWrappingMode );
+			this.currentNode.fileName = this.reader.getString();
 
-				switch ( name ) {
+			return;
 
-					case 'Color':
-						maps.map = texture;
-						break;
-					case 'Roughness':
-						maps.roughnessMap = texture;
-						maps.roughness = 0.5;
-						break;
-					case 'Specular':
-						maps.specularMap = texture;
-						maps.specular = 0xffffff;
-						break;
-					case 'Luminous':
-						maps.emissiveMap = texture;
-						maps.emissive = 0x808080;
-						break;
-					case 'Metallic':
-						maps.metalnessMap = texture;
-						maps.metalness = 0.5;
-						break;
-					case 'Transparency':
-					case 'Alpha':
-						maps.alphaMap = texture;
-						maps.transparent = true;
-						break;
-					case 'Normal':
-						maps.normalMap = texture;
-						if ( node.amplitude !== undefined ) maps.normalScale = new THREE.Vector2( node.amplitude, node.amplitude );
-						break;
-					case 'Bump':
-						maps.bumpMap = texture;
-						break;
+		}
 
-				}
+		// otherwise top level
+		this.reader.setOffset( this.reader.offset - 4 );
+
+		this.currentFormEnd = this.reader.offset + length;
+		this.parentForm = this.currentForm;
+
+		this.reader.skip( 8 ); // unknown
+
+		var texture = {
+			index: this.reader.getUint32()
+		};
+		this.tree.textures.push( texture );
+		this.currentForm = texture;
+
+	},
+
+	parseClipLwo2( length ) {
+
+		var texture = {
+			index: this.reader.getUint32(),
+			fileName: ""
+		};
+
+		// search for the STIL block
+		while ( true ) {
+
+			var tag = this.reader.getIDTag();
+			var n_length = this.reader.getUint16();
+			if ( tag === 'STIL' ) {
+
+				texture.fileName = this.reader.getString();
+				break;
 
 			}
 
-			// LWO BSDF materials can have both spec and rough, but this is not valid in three
-			if ( maps.roughnessMap && maps.specularMap ) delete maps.specularMap;
-
-			return maps;
-
-		},
-
-		// maps can also be defined on individual material attributes, parse those here
-		// This occurs on Standard (Phong) surfaces
-		parseAttributeImageMaps( attributes, textures, maps ) {
-
-			for ( var name in attributes ) {
-
-				var attribute = attributes[ name ];
-
-				if ( attribute.maps ) {
-
-					var mapData = attribute.maps[ 0 ];
-
-					var path = this.getTexturePathByIndex( mapData.imageIndex, textures );
-					if ( ! path ) return;
-
-					var texture = this.loadTexture( path );
-
-					if ( mapData.wrap !== undefined ) texture.wrapS = this.getWrappingType( mapData.wrap.w );
-					if ( mapData.wrap !== undefined ) texture.wrapT = this.getWrappingType( mapData.wrap.h );
-
-					switch ( name ) {
-
-						case 'Color':
-							maps.map = texture;
-							break;
-						case 'Diffuse':
-							maps.aoMap = texture;
-							break;
-						case 'Roughness':
-							maps.roughnessMap = texture;
-							maps.roughness = 1;
-							break;
-						case 'Specular':
-							maps.specularMap = texture;
-							maps.specular = 0xffffff;
-							break;
-						case 'Luminosity':
-							maps.emissiveMap = texture;
-							maps.emissive = 0x808080;
-							break;
-						case 'Metallic':
-							maps.metalnessMap = texture;
-							maps.metalness = 1;
-							break;
-						case 'Transparency':
-						case 'Alpha':
-							maps.alphaMap = texture;
-							maps.transparent = true;
-							break;
-						case 'Normal':
-							maps.normalMap = texture;
-							break;
-						case 'Bump':
-							maps.bumpMap = texture;
-							break;
-
-					}
+			if ( n_length >= length ) {
 
-				}
+				break;
 
 			}
 
-		},
+		}
 
-		parseAttributes( attributes, maps ) {
+		this.tree.textures.push( texture );
+		this.currentForm = texture;
 
-			var params = {};
+	},
 
-			// don't use color data if color map is present
-			if ( attributes.Color && ! maps.map ) {
+	parseImage() {
 
-				params.color = new THREE.Color().fromArray( attributes.Color.value );
+		this.reader.skip( 8 ); // unknown
+		this.currentForm.fileName = this.reader.getString();
 
-			} else params.color = new THREE.Color();
+	},
 
+	parseXVAL( type, length ) {
 
-			if ( attributes.Transparency && attributes.Transparency.value !== 0 ) {
+		var endOffset = this.reader.offset + length - 4;
+		this.reader.skip( 8 );
 
-				params.opacity = 1 - attributes.Transparency.value;
-				params.transparent = true;
+		this.currentForm[ type ] = this.reader.getFloat32();
 
-			}
+		this.reader.setOffset( endOffset ); // set end offset directly to skip optional envelope
 
-			if ( attributes[ 'Bump Height' ] ) params.bumpScale = attributes[ 'Bump Height' ].value * 0.1;
+	},
 
-			if ( attributes[ 'Refraction Index' ] ) params.refractionRatio = 1 / attributes[ 'Refraction Index' ].value;
+	parseXVAL3( type, length ) {
 
-			this.parseStandardAttributes( params, attributes, maps );
-			this.parsePhongAttributes( params, attributes, maps );
+		var endOffset = this.reader.offset + length - 4;
+		this.reader.skip( 8 );
 
-			return params;
+		this.currentForm[ type ] = {
+			x: this.reader.getFloat32(),
+			y: this.reader.getFloat32(),
+			z: this.reader.getFloat32(),
+		};
 
-		},
+		this.reader.setOffset( endOffset );
 
-		parseStandardAttributes( params, attributes, maps ) {
+	},
 
-			if ( attributes.Luminous && attributes.Luminous.value !== 0 && attributes[ 'Luminous Color' ] ) {
+	// Tags associated with an object
+	// OTAG { type[ID4], tag-string[S0] }
+	parseObjectTag() {
 
-				var emissiveColor = attributes[ 'Luminous Color' ].value.map( function ( val ) {
+		if ( ! this.tree.objectTags ) this.tree.objectTags = {};
 
-					return val * attributes.Luminous.value;
+		this.tree.objectTags[ this.reader.getIDTag() ] = {
+			tagString: this.reader.getString()
+		};
 
-				} );
+	},
 
-				params.emissive = new THREE.Color().fromArray( emissiveColor );
+	// Signals the start of a new layer. All the data chunks which follow will be included in this layer until another layer chunk is encountered.
+	// LAYR: number[U2], flags[U2], pivot[VEC12], name[S0], parent[U2]
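+	// e.g. ( illustrative ) a layer named "Body" gives parsedLength = 16 + stringOffset( "Body" ) = 22,
+	// so a parent index is only read when the chunk length is greater than that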
+	parseLayer( length ) {
 
-			}
-			if ( attributes.Roughness && ! maps.roughnessMap ) params.roughness = attributes.Roughness.value;
-			if ( attributes.Metallic && ! maps.metalnessMap ) params.metalness = attributes.Metallic.value;
+		var layer = {
+			number: this.reader.getUint16(),
+			flags: this.reader.getUint16(), // If the least significant bit of flags is set, the layer is hidden.
+			pivot: this.reader.getFloat32Array( 3 ), // Note: this seems to be superfluous, as the geometry is translated when a pivot is present
+			name: this.reader.getString(),
+		};
 
-		},
+		this.tree.layers.push( layer );
+		this.currentLayer = layer;
 
-		parsePhongAttributes( params, attributes, maps ) {
+		var parsedLength = 16 + stringOffset( this.currentLayer.name ); // number ( 2 ) + flags ( 2 ) + pivot ( 12 ) + string length
 
-			if ( attributes.Diffuse ) params.color.multiplyScalar( attributes.Diffuse.value );
+		// if we have not reached the end of the layer block, there must be a parent defined
+		this.currentLayer.parent = ( parsedLength < length ) ? this.reader.getUint16() : - 1; // omitted or -1 for no parent
 
-			if ( attributes.Reflection ) {
+	},
 
-				params.reflectivity = attributes.Reflection.value;
-				params.combine = THREE.AddOperation;
+	// VEC12 * ( F4 + F4 + F4 ) array of x,y,z vectors
+	// Converting from left to right handed coordinate system:
+	// z -> -z and switch material FrontSide -> BackSide
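+	// e.g. an LWO point stored as ( 1, 2, 3 ) is pushed as ( 1, 2, -3 ) below ( illustrative values )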
+	parsePoints( length ) {
 
-			}
+		this.currentPoints = [];
+		for ( var i = 0; i < length / 4; i += 3 ) {
 
-			if ( attributes.Luminosity && ! maps.emissiveMap ) params.emissive = new THREE.Color().setScalar( attributes.Luminosity.value );
+			// z -> -z to match three.js right handed coords
+			this.currentPoints.push( this.reader.getFloat32(), this.reader.getFloat32(), - this.reader.getFloat32() );
 
-			if ( attributes.Glossiness !== undefined ) params.shininess = 5 + Math.pow( attributes.Glossiness.value * 7, 6 );
+		}
 
-			// parse specular if there is no roughness - we will interpret the material as 'Phong' in this case
-			if ( ! attributes.Roughness && attributes.Specular && ! maps.specularMap ) params.specular = new THREE.Color().setScalar( attributes.Specular.value * 1.5 );
+	},
 
-		},
+	// parse VMAP or VMAD
+	// Associates a set of floating-point vectors with a set of points.
+	// VMAP: { type[ID4], dimension[U2], name[S0], ( vert[VX], value[F4] # dimension ) * }
 
-		parseEnvMap( connections, maps, attributes ) {
+	// VMAD Associates a set of floating-point vectors with the vertices of specific polygons.
+	// Similar to VMAP UVs, but associates with polygon vertices rather than points
+	// to solve the problem of UV seams: VMAD chunks are paired with VMAPs of the same name,
+	// if they exist. The vector values in the VMAD will then replace those in the
+	// corresponding VMAP, but only for calculations involving the specified polygons.
+	// VMAD { type[ID4], dimension[U2], name[S0], ( vert[VX], poly[VX], value[F4] # dimension ) * }
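+	// Illustrative layouts based on the definitions above: a TXUV VMAP entry is
+	// ( vert[VX], u[F4], v[F4] ), while the paired TXUV VMAD entry is
+	// ( vert[VX], poly[VX], u[F4], v[F4] ) and only overrides that polygon's UV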
+	parseVertexMapping( length, discontinuous ) {
 
-			if ( connections.envMap ) {
+		var finalOffset = this.reader.offset + length;
 
-				var envMap = this.loadTexture( connections.envMap );
+		var channelName = this.reader.getString();
 
-				if ( attributes.transparent && attributes.opacity < 0.999 ) {
+		if ( this.reader.offset === finalOffset ) {
 
-					envMap.mapping = THREE.EquirectangularRefractionMapping;
+			// then we are in a texture node and the VMAP chunk is just a reference to a UV channel name
+			this.currentForm.UVChannel = channelName;
+			return;
 
-					// Reflectivity and refraction mapping don't work well together in Phong materials
-					if ( attributes.reflectivity !== undefined ) {
+		}
 
-						delete attributes.reflectivity;
-						delete attributes.combine;
+		// otherwise rewind to the start of the chunk data and parse a standard VMAP CHUNK
+		this.reader.setOffset( this.reader.offset - stringOffset( channelName ) );
+
+		var type = this.reader.getIDTag();
+
+		this.reader.getUint16(); // dimension
+		var name = this.reader.getString();
+
+		var remainingLength = length - 6 - stringOffset( name );
+
+		switch ( type ) {
+
+			case 'TXUV':
+				this.parseUVMapping( name, finalOffset, discontinuous );
+				break;
+			case 'MORF':
+			case 'SPOT':
+				this.parseMorphTargets( name, finalOffset, type ); // can't be discontinuous
+				break;
+			// unsupported VMAPs
+			case 'APSL':
+			case 'NORM':
+			case 'WGHT':
+			case 'MNVW':
+			case 'PICK':
+			case 'RGB ':
+			case 'RGBA':
+				this.reader.skip( remainingLength );
+				break;
+			default:
+				console.warn( 'LWOLoader: unknown vertex map type: ' + type );
+				this.reader.skip( remainingLength );
 
-					}
+		}
 
-					if ( attributes.metalness !== undefined ) {
+	},
 
-						delete attributes.metalness;
+	parseUVMapping( name, finalOffset, discontinuous ) {
 
-					}
+		var uvIndices = [];
+		var polyIndices = [];
+		var uvs = [];
 
-				} else envMap.mapping = THREE.EquirectangularReflectionMapping;
+		while ( this.reader.offset < finalOffset ) {
 
-				maps.envMap = envMap;
+			uvIndices.push( this.reader.getVariableLengthIndex() );
 
-			}
+			if ( discontinuous ) polyIndices.push( this.reader.getVariableLengthIndex() );
 
-		},
+			uvs.push( this.reader.getFloat32(), this.reader.getFloat32() );
 
-		// get texture defined at top level by its index
-		getTexturePathByIndex( index ) {
+		}
 
-			var fileName = '';
+		if ( discontinuous ) {
 
-			if ( ! lwoTree.textures ) return fileName;
+			if ( ! this.currentLayer.discontinuousUVs ) this.currentLayer.discontinuousUVs = {};
 
-			lwoTree.textures.forEach( function ( texture ) {
+			this.currentLayer.discontinuousUVs[ name ] = {
+				uvIndices: uvIndices,
+				polyIndices: polyIndices,
+				uvs: uvs,
+			};
 
-				if ( texture.index === index ) fileName = texture.fileName;
+		} else {
 
-			} );
+			if ( ! this.currentLayer.uvs ) this.currentLayer.uvs = {};
 
-			return fileName;
+			this.currentLayer.uvs[ name ] = {
+				uvIndices: uvIndices,
+				uvs: uvs,
+			};
 
-		},
+		}
 
-		loadTexture( path ) {
+	},
 
-			if ( ! path ) return null;
+	parseMorphTargets( name, finalOffset, type ) {
 
-			return this.textureLoader.load( this.cleanPath( path ) );
+		var indices = [];
+		var points = [];
 
-		},
+		type = ( type === 'MORF' ) ? 'relative' : 'absolute';
 
-		// Lightwave expects textures to be in folder called Images relative
-		// to the model
-		// Otherwise, the full absolute path is stored: D://some_directory/textures/bumpMap.png
-		// In this case, we'll strip out everything and load 'bumpMap.png' from the same directory as the model
-		cleanPath( path ) {
+		while ( this.reader.offset < finalOffset ) {
 
-			if ( path.toLowerCase().indexOf( 'images' ) === 0 ) return './' + path;
-			return path.split( '/' ).pop().split( '\\' ).pop();
+			indices.push( this.reader.getVariableLengthIndex() );
+			// z -> -z to match three.js right handed coords
+			points.push( this.reader.getFloat32(), this.reader.getFloat32(), - this.reader.getFloat32() );
 
-		},
+		}
 
-		// 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
-		getWrappingType( num ) {
+		if ( ! this.currentLayer.morphTargets ) this.currentLayer.morphTargets = {};
 
-			switch ( num ) {
+		this.currentLayer.morphTargets[ name ] = {
+			indices: indices,
+			points: points,
+			type: type,
+		};
 
-				case 0:
-					console.warn( 'LWOLoader: "Reset" texture wrapping type is not supported in three.js' );
-					return THREE.ClampToEdgeWrapping;
-				case 1: return THREE.RepeatWrapping;
-				case 2: return THREE.MirroredRepeatWrapping;
-				case 3: return THREE.ClampToEdgeWrapping;
+	},
 
-			}
+	// A list of polygons for the current layer.
+	// POLS { type[ID4], ( numvert+flags[U2], vert[VX] # numvert ) * }
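+	// e.g. ( illustrative ) a single quad is stored as ( 0x0004, v0, v1, v2, v3 );
+	// the 6 high bits of the leading U2 are flags and are masked off below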
+	parsePolygonList( length ) {
 
-		},
+		var finalOffset = this.reader.offset + length;
+		var type = this.reader.getIDTag();
 
-		getType( nodeData ) {
+		var indices = [];
 
-			if ( nodeData.roughness ) return 'Standard';
-			return 'Phong';
+		// hold a list of polygon sizes, to be split up later
+		var polygonDimensions = [];
 
-		},
+		while ( this.reader.offset < finalOffset ) {
 
-	};
+			var numverts = this.reader.getUint16();
 
-	function GeometryParser() {}
+			//var flags = numverts & 64512; // 6 high order bits are flags - ignoring for now
+			numverts = numverts & 1023; // remaining ten low order bits are the vertex count
+			polygonDimensions.push( numverts );
 
-	GeometryParser.prototype = {
+			for ( var j = 0; j < numverts; j ++ ) indices.push( this.reader.getVariableLengthIndex() );
 
-		constructor: GeometryParser,
+		}
 
-		parse( geoData, layer ) {
+		var geometryData = {
+			type: type,
+			vertexIndices: indices,
+			polygonDimensions: polygonDimensions,
+			points: this.currentPoints
+		};
 
-			var geometry = new THREE.BufferGeometry();
+		// Note: assuming that all polygons are points or lines if the first one is
+		if ( polygonDimensions[ 0 ] === 1 ) geometryData.type = 'points';
+		else if ( polygonDimensions[ 0 ] === 2 ) geometryData.type = 'lines';
 
-			geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( geoData.points, 3 ) );
+		this.currentLayer.geometry = geometryData;
 
-			var indices = this.splitIndices( geoData.vertexIndices, geoData.polygonDimensions );
-			geometry.setIndex( indices );
+	},
 
-			this.parseGroups( geometry, geoData );
+	// Lists the tag strings that can be associated with polygons by the PTAG chunk.
+	// TAGS { tag-string[S0] * }
+	parseTagStrings( length ) {
 
-			geometry.computeVertexNormals();
+		this.tree.tags = this.reader.getStringArray( length );
 
-			this.parseUVs( geometry, layer, indices );
-			this.parseMorphTargets( geometry, layer, indices );
+	},
 
-			// TODO: z may need to be reversed to account for coordinate system change
-			geometry.translate( - layer.pivot[ 0 ], - layer.pivot[ 1 ], - layer.pivot[ 2 ] );
+	// Associates tags of a given type with polygons in the most recent POLS chunk.
+	// PTAG { type[ID4], ( poly[VX], tag[U2] ) * }
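+	// e.g. ( illustrative ) a SURF type PTAG is a list of ( polyIndex, tagIndex ) pairs,
+	// where tagIndex points into the strings collected by parseTagStrings above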
+	parsePolygonTagMapping( length ) {
 
-			// var userData = geometry.userData;
-			// geometry = geometry.toNonIndexed()
-			// geometry.userData = userData;
+		var finalOffset = this.reader.offset + length;
+		var type = this.reader.getIDTag();
+		if ( type === 'SURF' ) this.parseMaterialIndices( finalOffset );
+		else { //PART, SMGP, COLR not supported
 
-			return geometry;
+			this.reader.skip( length - 4 );
 
-		},
+		}
 
-		// split quads into tris
-		splitIndices( indices, polygonDimensions ) {
+	},
 
-			var remappedIndices = [];
+	parseMaterialIndices( finalOffset ) {
 
-			var i = 0;
-			polygonDimensions.forEach( function ( dim ) {
+		// array holds polygon index followed by material index
+		this.currentLayer.geometry.materialIndices = [];
 
-				if ( dim < 4 ) {
+		while ( this.reader.offset < finalOffset ) {
 
-					for ( var k = 0; k < dim; k ++ ) remappedIndices.push( indices[ i + k ] );
+			var polygonIndex = this.reader.getVariableLengthIndex();
+			var materialIndex = this.reader.getUint16();
 
-				} else if ( dim === 4 ) {
+			this.currentLayer.geometry.materialIndices.push( polygonIndex, materialIndex );
 
-					remappedIndices.push(
-						indices[ i ],
-						indices[ i + 1 ],
-						indices[ i + 2 ],
+		}
 
-						indices[ i ],
-						indices[ i + 2 ],
-						indices[ i + 3 ]
+	},
 
-					);
+	parseUnknownCHUNK( blockID, length ) {
 
-				} else if ( dim > 4 ) {
+		console.warn( 'LWOLoader: unknown chunk type: ' + blockID + ' length: ' + length );
 
-					for ( var k = 1; k < dim - 1; k ++ ) {
+		// print the chunk plus some bytes padding either side
+		// printBuffer( this.reader.dv.buffer, this.reader.offset - 20, length + 40 );
 
-						remappedIndices.push( indices[ i ], indices[ i + k ], indices[ i + k + 1 ] );
+		var data = this.reader.getString( length );
 
-					}
+		this.currentForm[ blockID ] = data;
 
-					console.warn( 'LWOLoader: polygons with greater than 4 sides are not supported' );
+	}
 
-				}
+};
 
-				i += dim;
+function DataViewReader( buffer ) {
 
-			} );
+	this.dv = new DataView( buffer );
+	this.offset = 0;
 
-			return remappedIndices;
+}
 
-		},
+DataViewReader.prototype = {
 
-		// NOTE: currently ignoring poly indices and assuming that they are intelligently ordered
-		parseGroups( geometry, geoData ) {
+	constructor: DataViewReader,
 
-			var tags = lwoTree.tags;
-			var matNames = [];
+	size: function () {
 
-			var elemSize = 3;
-			if ( geoData.type === 'lines' ) elemSize = 2;
-			if ( geoData.type === 'points' ) elemSize = 1;
+		return this.dv.buffer.byteLength;
 
-			var remappedIndices = this.splitMaterialIndices( geoData.polygonDimensions, geoData.materialIndices );
+	},
 
-			var indexNum = 0; // create new indices in numerical order
-			var indexPairs = {}; // original indices mapped to numerical indices
+	setOffset( offset ) {
 
-			var prevMaterialIndex;
+		if ( offset > 0 && offset < this.dv.buffer.byteLength ) {
 
-			var prevStart = 0;
-			var currentCount = 0;
+			this.offset = offset;
 
-			for ( var i = 0; i < remappedIndices.length; i += 2 ) {
+		} else {
 
-				var materialIndex = remappedIndices[ i + 1 ];
+			console.error( 'LWOLoader: invalid buffer offset' );
 
-				if ( i === 0 ) matNames[ indexNum ] = tags[ materialIndex ];
+		}
 
-				if ( prevMaterialIndex === undefined ) prevMaterialIndex = materialIndex;
+	},
 
-				if ( materialIndex !== prevMaterialIndex ) {
+	endOfFile: function () {
 
-					var currentIndex;
-					if ( indexPairs[ tags[ prevMaterialIndex ] ] ) {
+		if ( this.offset >= this.size() ) return true;
+		return false;
 
-						currentIndex = indexPairs[ tags[ prevMaterialIndex ] ];
+	},
 
-					} else {
+	skip: function ( length ) {
 
-						currentIndex = indexNum;
-						indexPairs[ tags[ prevMaterialIndex ] ] = indexNum;
-						matNames[ indexNum ] = tags[ prevMaterialIndex ];
-						indexNum ++;
+		this.offset += length;
 
-					}
+	},
 
-					geometry.addGroup( prevStart, currentCount, currentIndex );
+	getUint8: function () {
 
-					prevStart += currentCount;
+		var value = this.dv.getUint8( this.offset );
+		this.offset += 1;
+		return value;
 
-					prevMaterialIndex = materialIndex;
-					currentCount = 0;
+	},
 
-				}
+	getUint16: function () {
 
-				currentCount += elemSize;
+		var value = this.dv.getUint16( this.offset );
+		this.offset += 2;
+		return value;
 
-			}
+	},
 
-			// the loop above doesn't add the last group, do that here.
-			if ( geometry.groups.length > 0 ) {
+	getInt32: function () {
 
-				var currentIndex;
-				if ( indexPairs[ tags[ materialIndex ] ] ) {
+		var value = this.dv.getInt32( this.offset, false );
+		this.offset += 4;
+		return value;
 
-					currentIndex = indexPairs[ tags[ materialIndex ] ];
+	},
 
-				} else {
+	getUint32: function () {
 
-					currentIndex = indexNum;
-					indexPairs[ tags[ materialIndex ] ] = indexNum;
-					matNames[ indexNum ] = tags[ materialIndex ];
+		var value = this.dv.getUint32( this.offset, false );
+		this.offset += 4;
+		return value;
 
-				}
+	},
 
-				geometry.addGroup( prevStart, currentCount, currentIndex );
+	getUint64: function () {
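+		// big-endian: the high 32 bits are read first, then the low 32 bits
+		// note: values above Number.MAX_SAFE_INTEGER ( 2^53 - 1 ) cannot be represented exactly in a JS Number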
 
-			}
+		var low, high;
 
-			// Mat names from TAGS chunk, used to build up an array of materials for this geometry
-			geometry.userData.matNames = matNames;
+		high = this.getUint32();
+		low = this.getUint32();
+		return high * 0x100000000 + low;
 
-		},
+	},
 
-		splitMaterialIndices( polygonDimensions, indices ) {
+	getFloat32: function () {
 
-			var remappedIndices = [];
+		var value = this.dv.getFloat32( this.offset, false );
+		this.offset += 4;
+		return value;
 
-			polygonDimensions.forEach( function ( dim, i ) {
+	},
 
-				if ( dim <= 3 ) {
+	getFloat32Array: function ( size ) {
 
-					remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ] );
+		var a = [];
 
-				} else if ( dim === 4 ) {
+		for ( var i = 0; i < size; i ++ ) {
 
-					remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ], indices[ i * 2 ], indices[ i * 2 + 1 ] );
+			a.push( this.getFloat32() );
 
-				} else {
+		}
 
-					 // ignore > 4 for now
-					for ( var k = 0; k < dim - 2; k ++ ) {
+		return a;
 
-						remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ] );
+	},
 
-					}
+	getFloat64: function () {
 
-				}
+		var value = this.dv.getFloat64( this.offset, this.littleEndian );
+		this.offset += 8;
+		return value;
 
-			} );
+	},
 
-			return remappedIndices;
+	getFloat64Array: function ( size ) {
 
-		},
+		var a = [];
 
-		// UV maps:
-		// 1: are defined via index into an array of points, not into a geometry
-		// - the geometry is also defined by an index into this array, but the indexes may not match
-		// 2: there can be any number of UV maps for a single geometry. Here these are combined,
-		// 	with preference given to the first map encountered
-		// 3: UV maps can be partial - that is, defined for only a part of the geometry
-		// 4: UV maps can be VMAP or VMAD (discontinuous, to allow for seams). In practice, most
-		// UV maps are defined as partially VMAP and partially VMAD
-		// VMADs are currently not supported
-		parseUVs( geometry, layer ) {
+		for ( var i = 0; i < size; i ++ ) {
 
-			// start by creating a UV map set to zero for the whole geometry
-			var remappedUVs = Array.from( Array( geometry.attributes.position.count * 2 ), function () {
+			a.push( this.getFloat64() );
 
-				return 0;
+		}
 
-			} );
+		return a;
 
-			for ( var name in layer.uvs ) {
+	},
 
-				var uvs = layer.uvs[ name ].uvs;
-				var uvIndices = layer.uvs[ name ].uvIndices;
+	// get variable-length index data type
+	// VX ::= index[U2] | (index + 0xFF000000)[U4]
+	// If the index value is less than 65,280 (0xFF00), then VX === U2
+	// otherwise VX === U4 with bits 24-31 set
+	// When reading an index, if the first byte encountered is 255 (0xFF), then
+	// the four-byte form is being used and the first byte should be discarded or masked out.
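+	// e.g. the two bytes 0x01 0x02 decode to index 258, and the four bytes
+	// 0xFF 0x00 0x01 0x02 decode to the same index via the long form ( illustrative )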
+	getVariableLengthIndex() {
 
-				uvIndices.forEach( function ( i, j ) {
+		var firstByte = this.getUint8();
 
-					remappedUVs[ i * 2 ] = uvs[ j * 2 ];
-					remappedUVs[ i * 2 + 1 ] = uvs[ j * 2 + 1 ];
+		if ( firstByte === 255 ) {
 
-				} );
+			return this.getUint8() * 65536 + this.getUint8() * 256 + this.getUint8();
 
-			}
+		}
 
-			geometry.addAttribute( 'uv', new THREE.Float32BufferAttribute( remappedUVs, 2 ) );
+		return firstByte * 256 + this.getUint8();
 
-		},
+	},
 
-		parseMorphTargets( geometry, layer ) {
+	// An ID tag is a sequence of 4 bytes containing 7-bit ASCII values
+	getIDTag() {
 
-			var num = 0;
-			for ( var name in layer.morphTargets ) {
+		return this.getString( 4 );
 
-				var remappedPoints = geometry.attributes.position.array.slice();
+	},
 
-				if ( ! geometry.morphAttributes.position ) geometry.morphAttributes.position = [];
+	getString: function ( size ) {
 
-				var morphPoints = layer.morphTargets[ name ].points;
-				var morphIndices = layer.morphTargets[ name ].indices;
-				var type = layer.morphTargets[ name ].type;
+		if ( size === 0 ) return;
 
-				morphIndices.forEach( function ( i, j ) {
+		// note: safari 9 doesn't support Uint8Array.indexOf; create intermediate array instead
+		var a = [];
 
-					if ( type === 'relative' ) {
+		if ( size ) {
 
-						remappedPoints[ i * 3 ] += morphPoints[ j * 3 ];
-						remappedPoints[ i * 3 + 1 ] += morphPoints[ j * 3 + 1 ];
-						remappedPoints[ i * 3 + 2 ] += morphPoints[ j * 3 + 2 ];
+			for ( var i = 0; i < size; i ++ ) {
 
-					} else {
+				a[ i ] = this.getUint8();
 
-						remappedPoints[ i * 3 ] = morphPoints[ j * 3 ];
-						remappedPoints[ i * 3 + 1 ] = morphPoints[ j * 3 + 1 ];
-						remappedPoints[ i * 3 + 2 ] = morphPoints[ j * 3 + 2 ];
+			}
 
-					}
+		} else {
 
-				} );
+			var currentChar;
+			var len = 0;
 
-				geometry.morphAttributes.position[ num ] = new THREE.Float32BufferAttribute( remappedPoints, 3 );
-				geometry.morphAttributes.position[ num ].name = name;
+			while ( currentChar !== 0 ) {
 
-				num ++;
+				currentChar = this.getUint8();
+				if ( currentChar !== 0 ) a.push( currentChar );
+				len ++;
 
 			}
 
-		},
-
-	};
-
-	// parse data from the IFF buffer.
-	// LWO3 files are in IFF format and can contain the following data types, referred to by shorthand codes
-	//
-	// ATOMIC DATA TYPES
-	// ID Tag - 4x 7 bit uppercase ASCII chars: ID4
-	// signed integer, 1, 2, or 4 byte length: I1, I2, I4
-	// unsigned integer, 1, 2, or 4 byte length: U1, U2, U4
-	// float, 4 byte length: F4
-	// string, series of ASCII chars followed by null byte (If the length of the string including the null terminating byte is odd, an extra null is added so that the data that follows will begin on an even byte boundary): S0
-	//
-	//  COMPOUND DATA TYPES
-	// Variable-length Index (index into an array or collection): U2 or U4 : VX
-	// Color (RGB): F4 + F4 + F4: COL12
-	// Coordinate (x, y, z): F4 + F4 + F4: VEC12
-	// Percentage F4 data type from 0->1 with 1 = 100%: FP4
-	// Angle in radian F4: ANG4
-	// Filename (string) S0: FNAM0
-	// XValue F4 + index (VX) + optional envelope( ENVL ): XVAL
-	// XValue vector VEC12 + index (VX) + optional envelope( ENVL ): XVAL3
-	//
-	// The IFF file is arranged in chunks:
-	// CHUNK = ID4 + length (U4) + length X bytes of data + optional 0 pad byte
-	// optional 0 pad byte is there to ensure chunk ends on even boundary, not counted in size
-
-	// Chunks are combined in Forms (collections of chunks)
-	// FORM = string 'FORM' (ID4) + length (U4) + type (ID4) + optional ( CHUNK | FORM )
-
-	// CHUNKS and FORMS are collectively referred to as blocks
-
-	// The entire file is contained in one top level FORM
-	function IFFParser() {}
-
-	IFFParser.prototype = {
-
-		constructor: IFFParser,
-
-		parse: function ( buffer ) {
-
-			// dump the whole buffer as a string for testing
-			// printBuffer( buffer );
-
-			this.reader = new DataViewReader( buffer );
-
-			this.tree = {
-				materials: {},
-				layers: [],
-				tags: [],
-				textures: [],
-			};
+			if ( ! isEven( len + 1 ) ) this.getUint8(); // if the string length including the terminating null byte is odd, an extra pad byte was added - skip it
 
-			// start out at the top level to add any data before first layer is encountered
-			this.currentLayer = this.tree;
-			this.currentForm = this.tree;
+		}
 
-			// parse blocks until end of file is reached
-			while ( ! this.reader.endOfFile() ) this.parseBlock();
+		return THREE.LoaderUtils.decodeText( new Uint8Array( a ) );
 
-			return this.tree;
+	},
 
-		},
+	getStringArray: function ( size ) {
 
-		parseBlock() {
+		var a = this.getString( size );
+		a = a.split( '\0' );
 
-			var blockID = this.reader.getIDTag();
-			var length = this.reader.getUint32(); // size of data in bytes
-			if ( this.tree.format === 'LWO2' && length > this.reader.dv.byteLength - this.reader.offset ) {
+		return a.filter( Boolean ); // return array with any empty strings removed
 
-				this.reader.offset -= 4;
-				length = this.reader.getUint16();
+	}
 
-			}
+};
 
+// ************** DEBUGGER  **************
 
-			// Data types may be found in either LWO2 OR LWO3 spec
-			switch ( blockID ) {
+function Debugger( ) {
 
-				case 'FORM': // form blocks may consist of sub -chunks or sub-forms
-					this.parseForm( length );
-					break;
+	this.active = false;
+	this.depth = 0;
+	this.formList = [];
 
-					// SKIPPED CHUNKS
-
-				// MISC skipped
-				case 'ICON': // Thumbnail Icon Image
-				case 'VMPA': // Vertex Map Parameter
-				case 'BBOX': // bounding box
-				// case 'VMMD':
-				// case 'VTYP':
-
-				// normal maps can be specified, normally on models imported from other applications. Currently ignored
-				case 'NORM':
-
-				// ENVL FORM skipped
-				case 'PRE ':
-				case 'POST':
-				case 'KEY ':
-				case 'SPAN':
-
-				// CLIP FORM skipped
-				case 'TIME':
-				case 'CLRS':
-				case 'CLRA':
-				case 'FILT':
-				case 'DITH':
-				case 'CONT':
-				case 'BRIT':
-				case 'SATR':
-				case 'HUE ':
-				case 'GAMM':
-				case 'NEGA':
-				case 'IFLT':
-				case 'PFLT':
-
-				// Image Map Layer skipped
-				case 'PROJ':
-				case 'AXIS':
-				case 'AAST':
-				case 'PIXB':
-				case 'STCK':
-
-				// Procedural Textures skipped
-				case 'VALU':
-
-				// Gradient Textures skipped
-				case 'PNAM':
-				case 'INAM':
-				case 'GRST':
-				case 'GREN':
-				case 'GRPT':
-				case 'FKEY':
-				case 'IKEY':
-
-				// Texture Mapping Form skipped
-				case 'CSYS':
-
-					// Surface CHUNKs skipped
-				case 'OPAQ': // top level 'opacity' checkbox
-				case 'CMAP': // clip map
-
-				// Surface node CHUNKS skipped
-				// These mainly specify the node editor setup in LW
-				case 'NLOC':
-				case 'NZOM':
-				case 'NVER':
-				case 'NSRV':
-				case 'NCRD':
-				case 'NMOD':
-				case 'NPRW':
-				case 'NPLA':
-				case 'VERS':
-				case 'ENUM':
-				case 'TAG ':
-
-				// Car Material CHUNKS
-				case 'CGMD':
-				case 'CGTY':
-				case 'CGST':
-				case 'CGEN':
-				case 'CGTS':
-				case 'CGTE':
-				case 'OSMP':
-				case 'OMDE':
-				case 'OUTR':
-					this.reader.skip( length );
-					break;
+}
 
-				case 'FLAG':
-					if ( this.tree.format === 'LWO2' ) {
+Debugger.prototype = {
 
-						this.reader.skip( 4 ); // not suported
+	constructor: Debugger,
 
-					} else {
+	enable: function () {
 
-						this.reader.skip( length );
+		this.active = true;
 
-					}
-					break;
-				// Skipped LWO2 chunks
-				case 'DIFF': // diffuse level, may be necessary to modulate COLR with this
-					this.currentSurface.diffusePower = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
-				case 'TRNL':
-				case 'REFL':
-				case 'GLOS':
-				case 'SHRP':
-				case 'RFOP':
-				case 'RSAN':
-				case 'TROP':
-				case 'RBLR':
-				case 'TBLR':
-				case 'CLRH':
-				case 'CLRF':
-				case 'ADTR':
-				case 'GLOW':
-				case 'LINE':
-				case 'ALPH':
-				case 'VCOL':
-				case 'ENAB':
-					this.reader.skip( length );
-					break;
-				case 'SURF':
-					if ( this.tree.format === 'LWO2' ) {
+	},
 
-						this.parseSurfaceLwo2( length );
+	log: function () {
 
-					}
-					break;
-				case 'CLIP':
-					if ( this.tree.format === 'LWO2' ) {
+		if ( ! this.active ) return;
 
-						this.parseClipLwo2( length );
+		var nodeType;
 
-					}
-					break;
-				// Texture node chunks (not in spec)
-				case 'IPIX': // usePixelBlending
-				case 'IMIP': // useMipMaps
-				case 'IMOD': // imageBlendingMode
-				case 'AMOD': // unknown
-				case 'IINV': // imageInvertAlpha
-				case 'INCR': // imageInvertColor
-				case 'IAXS': // imageAxis ( for non-UV maps)
-				case 'IFOT': // imageFallofType
-				case 'ITIM': // timing for animated textures
-				case 'IWRL':
-				case 'IUTI':
-				case 'IINX':
-				case 'IINY':
-				case 'IINZ':
-				case 'IREF': // possibly a VX for reused texture nodes
-					if ( length === 4 ) this.currentNode[ blockID ] = this.reader.getInt32();
-					else this.reader.skip( length );
-					break;
+		switch ( this.node ) {
 
-				case 'OTAG':
-					this.parseObjectTag();
-					break;
+			case 0:
+				nodeType = "FORM";
+				break;
 
-				case 'LAYR':
-					this.parseLayer( length );
-					break;
+			case 1:
+				nodeType = "CHK";
+				break;
 
-				case 'PNTS':
-					this.parsePoints( length );
-					break;
+			case 2:
+				nodeType = "S-CHK";
+				break;
 
-				case 'VMAP':
-					this.parseVertexMapping( length );
-					break;
+		}
 
-				case 'POLS':
-					this.parsePolygonList( length );
-					break;
+		console.log(
+			"| ".repeat( this.depth ) +
+			nodeType,
+			this.nodeID,
+			`( ${this.offset} ) -> ( ${this.dataOffset + this.length} )`,
+			( ( this.node == 0 ) ? " {" : "" ),
+			( ( this.skipped ) ? "SKIPPED" : "" ),
+			( ( this.node == 0 && this.skipped ) ? "}" : "" )
+		);
 
-				case 'TAGS':
-					this.parseTagStrings( length );
-					break;
+		if ( this.node == 0 && ! this.skipped ) {
 
-				case 'PTAG':
-					this.parsePolygonTagMapping( length );
-					break;
+			this.depth += 1;
+			this.formList.push( this.dataOffset + this.length );
 
-				case 'VMAD':
-					this.parseVertexMapping( length, true );
-					break;
+		}
 
-				// Misc CHUNKS
-				case 'DESC': // Description Line
-					this.currentForm.description = this.reader.getString();
-					break;
+		this.skipped = false;
 
-				case 'TEXT':
-				case 'CMNT':
-				case 'NCOM':
-					this.currentForm.comment = this.reader.getString();
-					break;
+	},
 
-					// Envelope Form
-				case 'NAME':
-					this.currentForm.channelName = this.reader.getString();
-					break;
+	closeForms: function () {
 
-					// Image Map Layer
+		if ( ! this.active ) return;
 
-				case 'WRAP':
-					this.currentForm.wrap = { w: this.reader.getUint16(), h: this.reader.getUint16() };
-					break;
+		for ( var i = this.formList.length - 1; i >= 0; i -- ) {
 
-				case 'IMAG':
-					var index = this.reader.getVariableLengthIndex();
-					this.currentForm.imageIndex = index;
-					break;
+			if ( this.offset >= this.formList[ i ] ) {
 
-					// Texture Mapping Form
+				this.depth -= 1;
+				console.log( "| ".repeat( this.depth ) + "}" );
+				this.formList.splice( - 1, 1 );
 
-				case 'OREF':
-					this.currentForm.referenceObject = this.reader.getString();
-					break;
+			}
 
-				case 'ROID':
-					this.currentForm.referenceObjectID = this.reader.getUint32();
-					break;
+		}
 
-					// Surface Blocks
+	}
 
-				case 'SSHN':
-					this.currentSurface.surfaceShaderName = this.reader.getString();
-					break;
+};
 
-				case 'AOVN':
-					this.currentSurface.surfaceCustomAOVName = this.reader.getString();
-					break;
+// ************** UTILITY FUNCTIONS **************
 
-					// Nodal Blocks
+function isEven( num ) {
 
-				case 'NSTA':
-					this.currentForm.disabled = this.reader.getUint16();
-					break;
+	return num % 2;
 
-				case 'NRNM':
-					this.currentForm.realName = this.reader.getString();
-					break;
+}
 
-				case 'NNME':
-					this.currentForm.refName = this.reader.getString();
-					this.currentSurface.nodes[ this.currentForm.refName ] = this.currentForm;
-					break;
+// calculate the length of the string in the buffer
+// this will be string.length + nullbyte + optional padbyte to make the length even
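+// e.g. stringOffset( 'Color' ) === 6 ( 5 chars + null byte, already even ), while
+// stringOffset( 'Diff' ) === 6 ( 4 chars + null byte + 1 pad byte ) - illustrative values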
+function stringOffset( string ) {
 
-				// Nodal Blocks : connections
-				case 'INME':
-					if ( ! this.currentForm.nodeName ) this.currentForm.nodeName = [];
-					this.currentForm.nodeName.push( this.reader.getString() );
-					break;
+	return string.length + 1 + ( isEven( string.length + 1 ) ? 1 : 0 );
 
-				case 'IINN':
-					if ( ! this.currentForm.inputNodeName ) this.currentForm.inputNodeName = [];
-					this.currentForm.inputNodeName.push( this.reader.getString() );
-					break;
+}
 
-				case 'IINM':
-					if ( ! this.currentForm.inputName ) this.currentForm.inputName = [];
-					this.currentForm.inputName.push( this.reader.getString() );
-					break;
+// for testing purposes, dump buffer to console
+// printBuffer( this.reader.dv.buffer, this.reader.offset, length );
+function printBuffer( buffer, from, to ) {
 
-				case 'IONM':
-					if ( ! this.currentForm.inputOutputName ) this.currentForm.inputOutputName = [];
-					this.currentForm.inputOutputName.push( this.reader.getString() );
-					break;
+	console.log( THREE.LoaderUtils.decodeText( new Uint8Array( buffer, from, to ) ) );
 
-				case 'FNAM':
-					this.currentForm.fileName = this.reader.getString();
-					break;
+}
 
-				case 'CHAN': // NOTE: ENVL Forms may also have CHAN chunk, however ENVL is currently ignored
-					if ( length === 4 ) this.currentForm.textureChannel = this.reader.getIDTag();
-					else this.reader.skip( length );
-					break;
+var lwoTree;
 
-					// LWO2 Spec chunks: these are needed since the SURF FORMs are often in LWO2 format
+THREE.LWOLoader = function ( manager, parameters ) {
 
-				case 'SMAN':
-					var maxSmoothingAngle = this.reader.getFloat32();
-					this.currentSurface.attributes.smooth = ( maxSmoothingAngle < 0 ) ? false : true;
-					break;
+	this.manager = ( manager !== undefined ) ? manager : THREE.DefaultLoadingManager;
 
-				// LWO2: Basic Surface Parameters
-				case 'COLR':
-					this.currentSurface.attributes.Color = {};
-					this.currentSurface.attributes.Color.value = this.reader.getFloat32Array( 3 );
-					this.reader.skip( 2 ); // VX: envelope
-					break;
+	parameters = parameters || {};
 
-				case 'LUMI':
-					this.currentSurface.attributes.luminosityLevel = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+	this.resourcePath = ( parameters.resourcePath !== undefined ) ? parameters.resourcePath : undefined;
 
-				case 'SPEC':
-					this.currentSurface.attributes.specularLevel = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+};
 
-				case 'TRAN':
-					this.currentSurface.attributes.opacity = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+THREE.LWOLoader.prototype = {
 
-				case 'BUMP':
-					this.currentSurface.attributes.bumpStrength = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+	constructor: THREE.LWOLoader,
 
-				case 'SIDE':
-					this.currentSurface.attributes.side = this.reader.getUint16();
-					break;
+	crossOrigin: 'anonymous',
 
-				case 'RIMG':
-					this.currentSurface.attributes.reflectionMap = this.reader.getVariableLengthIndex();
-					break;
+	load: function ( url, onLoad, onProgress, onError ) {
 
-				case 'RIND':
-					this.currentSurface.attributes.refractiveIndex = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+		var self = this;
 
-				case 'TIMG':
-					this.currentSurface.attributes.refractionMap = this.reader.getVariableLengthIndex();
-					break;
+		var path = ( self.path === undefined ) ? extractParentUrl( url, 'Objects' ) : self.path;
 
-				case 'IMAP':
-					if ( this.tree.format === 'LWO2' ) {
+		// give the mesh a default name based on the filename
+		var modelName = url.split( path ).pop().split( '.' )[ 0 ];
 
-						this.reader.skip( 2 );
+		var loader = new THREE.FileLoader( this.manager );
+		loader.setPath( self.path );
+		loader.setResponseType( 'arraybuffer' );
 
-					} else {
+		loader.load( url, function ( buffer ) {
 
-						this.currentSurface.attributes.imageMapIndex = this.reader.getUint32();
+			// console.time( 'Total parsing: ' );
+			onLoad( self.parse( buffer, path, modelName ) );
+			// console.timeEnd( 'Total parsing: ' );
 
-					}
-					break;
+		}, onProgress, onError );
 
-				case 'IUVI': // uv channel name
-					this.currentNode.UVChannel = this.reader.getString( length );
-					break;
+	},
 
-				case 'IUTL': // widthWrappingMode: 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
-					this.currentNode.widthWrappingMode = this.reader.getUint32();
-					break;
-				case 'IVTL': // heightWrappingMode
-					this.currentNode.heightWrappingMode = this.reader.getUint32();
-					break;
+	setCrossOrigin: function ( value ) {
 
-				// LWO2 USE
-				case 'BLOK':
-					// skip
-					break;
+		this.crossOrigin = value;
+		return this;
 
-				default:
-					this.parseUnknownCHUNK( blockID, length );
+	},
 
-			}
+	setPath: function ( value ) {
 
-			if ( this.reader.offset >= this.currentFormEnd ) {
+		this.path = value;
+		return this;
 
-				this.currentForm = this.parentForm;
+	},
 
-			}
+	setResourcePath: function ( value ) {
 
-		},
-
-
-		///
-		// FORM PARSING METHODS
-		///
-
-		// Forms are organisational and can contain any number of sub chunks and sub forms
-		// FORM ::= 'FORM'[ID4], length[U4], type[ID4], ( chunk[CHUNK] | form[FORM] ) * }
-		parseForm( length ) {
-
-			var type = this.reader.getIDTag();
-
-			switch ( type ) {
-
-				// SKIPPED FORMS
-				// if skipForm( length ) is called, the entire form and any sub forms and chunks are skipped
-
-				case 'ISEQ': // Image sequence
-				case 'ANIM': // plug in animation
-				case 'STCC': // Color-cycling Still
-				case 'VPVL':
-				case 'VPRM':
-				case 'NROT':
-				case 'WRPW': // image wrap w ( for cylindrical and spherical projections)
-				case 'WRPH': // image wrap h
-				case 'FUNC':
-				case 'FALL':
-				case 'OPAC':
-				case 'GRAD': // gradient texture
-				case 'ENVS':
-				case 'VMOP':
-				case 'VMBG':
-
-				// Car Material FORMS
-				case 'OMAX':
-				case 'STEX':
-				case 'CKBG':
-				case 'CKEY':
-				case 'VMLA':
-				case 'VMLB':
-					this.skipForm( length ); // not currently supported
-					break;
+		this.resourcePath = value;
+		return this;
 
-				// if break; is called directly, the position in the lwoTree is not created
-				// any sub chunks and forms are added to the parent form instead
-				case 'META':
-				case 'NNDS':
-				case 'NODS':
-				case 'NDTA':
-				case 'ADAT':
-				case 'AOVS':
-				case 'BLOK':
-
-				// used by texture nodes
-				case 'IBGC': // imageBackgroundColor
-				case 'IOPC': // imageOpacity
-				case 'IIMG': // hold reference to image path
-				case 'TXTR':
-					// this.setupForm( type, length );
-					break;
+	},
 
-				case 'IFAL': // imageFallof
-				case 'ISCL': // imageScale
-				case 'IPOS': // imagePosition
-				case 'IROT': // imageRotation
-				case 'IBMP':
-				case 'IUTD':
-				case 'IVTD':
-					this.parseTextureNodeAttribute( type );
-					break;
+	parse: function ( iffBuffer, path, modelName ) {
 
-				case 'LWO2':
-					this.tree.format = type;
-					break;
+		lwoTree = new IFFParser().parse( iffBuffer );
 
-				case 'LWO3':
-					this.tree.format = type;
-					break;
+		// console.log( 'lwoTree', lwoTree );
 
-				case 'ENVL':
-					this.parseEnvelope( length );
-					break;
+		var textureLoader = new THREE.TextureLoader( this.manager ).setPath( this.resourcePath || path ).setCrossOrigin( this.crossOrigin );
 
-					// CLIP FORM AND SUB FORMS
+		return new LWOTreeParser( textureLoader ).parse( modelName );
 
-				case 'CLIP':
-					if ( this.tree.format === 'LWO2' ) {
+	}
 
-						this.parseForm( length );
+};
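+// Usage sketch ( the path, variable names and surrounding scene are illustrative, not part of the loader ):
+//
+//	var loader = new THREE.LWOLoader();
+//	loader.load( 'models/lwo/Objects/LWO3/Demo.lwo', function ( lwo ) {
+//
+//		lwo.meshes.forEach( function ( mesh ) { scene.add( mesh ); } );
+//
+//	} );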
 
-					} else {
+// Parse the lwoTree object
+function LWOTreeParser( textureLoader ) {
 
-						this.parseClip( length );
+	this.textureLoader = textureLoader;
 
-					}
-					break;
+}
 
-				case 'STIL':
-					this.parseImage();
-					break;
+LWOTreeParser.prototype = {
 
-				case 'XREF': // clone of another STIL
-					this.reader.skip( 8 ); // unknown
-					this.currentForm.referenceTexture = {
-						index: this.reader.getUint32(),
-						refName: this.reader.getString() // internal unique ref
-					};
-					break;
+	constructor: LWOTreeParser,
 
-					// Not in spec, used by texture nodes
+	parse: function ( modelName ) {
 
-				case 'IMST':
-					this.parseImageStateForm( length );
-					break;
+		this.materials = new MaterialParser( this.textureLoader ).parse();
+		this.defaultLayerName = modelName;
 
-					// SURF FORM AND SUB FORMS
+		this.meshes = this.parseLayers();
 
-				case 'SURF':
-					this.parseSurfaceForm( length );
-					break;
+		return {
+			materials: this.materials,
+			meshes: this.meshes,
+		};
 
-				case 'VALU': // Not in spec
-					this.parseValueForm( length );
-					break;
+	},
 
-				case 'NTAG':
-					this.parseSubNode( length );
-					break;
+	parseLayers() {
 
-				case 'ATTR': // BSDF Node Attributes
-				case 'SATR': // Standard Node Attributes
-					this.setupForm( 'attributes', length );
-					break;
+		// array of all meshes for building hierarchy
+		var meshes = [];
 
-				case 'NCON':
-					this.parseConnections( length );
-					break;
+		// final array containing meshes with scene graph hierarchy set up
+		var finalMeshes = [];
 
-				case 'SSHA':
-					this.parentForm = this.currentForm;
-					this.currentForm = this.currentSurface;
-					this.setupForm( 'surfaceShader', length );
-					break;
+		var geometryParser = new GeometryParser();
 
-				case 'SSHD':
-					this.setupForm( 'surfaceShaderData', length );
-					break;
+		var self = this;
+		lwoTree.layers.forEach( function ( layer ) {
 
-				case 'ENTR': // Not in spec
-					this.parseEntryForm( length );
-					break;
+			var geometry = geometryParser.parse( layer.geometry, layer );
 
-					// Image Map Layer
+			var mesh = self.parseMesh( geometry, layer );
 
-				case 'IMAP':
-					this.parseImageMap( length );
-					break;
+			meshes[ layer.number ] = mesh;
 
-				case 'TAMP':
-					this.parseXVAL( 'amplitude', length );
-					break;
+			if ( layer.parent === - 1 ) finalMeshes.push( mesh );
+			else meshes[ layer.parent ].add( mesh );
 
-					//Texture Mapping Form
 
-				case 'TMAP':
-					this.setupForm( 'textureMap', length );
-					break;
+		} );
 
-				case 'CNTR':
-					this.parseXVAL3( 'center', length );
-					break;
+		this.applyPivots( finalMeshes );
 
-				case 'SIZE':
-					this.parseXVAL3( 'scale', length );
-					break;
+		return finalMeshes;
 
-				case 'ROTA':
-					this.parseXVAL3( 'rotation', length );
-					break;
+	},
 
-				default:
-					this.parseUnknownForm( type, length );
+	parseMesh( geometry, layer ) {
 
-			}
+		var mesh;
 
-		},
+		var materials = this.getMaterials( geometry.userData.matNames, layer.geometry.type );
 
-		setupForm( type, length ) {
+		this.duplicateUVs( geometry, materials );
 
-			if ( ! this.currentForm ) this.currentForm = this.currentNode;
+		if ( layer.geometry.type === 'points' ) mesh = new THREE.Points( geometry, materials );
+		else if ( layer.geometry.type === 'lines' ) mesh = new THREE.LineSegments( geometry, materials );
+		else mesh = new THREE.Mesh( geometry, materials );
 
-			this.currentFormEnd = this.reader.offset + length;
-			this.parentForm = this.currentForm;
+		if ( layer.name ) mesh.name = layer.name;
+		else mesh.name = this.defaultLayerName + '_layer_' + layer.number;
 
-			if ( ! this.currentForm[ type ] ) {
+		mesh.userData.pivot = layer.pivot;
 
-				this.currentForm[ type ] = {};
-				this.currentForm = this.currentForm[ type ];
+		return mesh;
 
+	},
 
-			} else {
+	// TODO: may need to be reversed in z to convert LWO to three.js coordinates
+	applyPivots( meshes ) {
 
-				// should never see this unless there's a bug in the reader
-				console.warn( 'LWOLoader: form already exists on parent: ', type, this.currentForm );
+		meshes.forEach( function ( mesh ) {
 
-				this.currentForm = this.currentForm[ type ];
+			mesh.traverse( function ( child ) {
 
-			}
+				var pivot = child.userData.pivot;
 
+				child.position.x += pivot[ 0 ];
+				child.position.y += pivot[ 1 ];
+				child.position.z += pivot[ 2 ];
 
-		},
+				if ( child.parent ) {
 
-		skipForm( length ) {
+					var parentPivot = child.parent.userData.pivot;
 
-			this.reader.skip( length - 4 );
+					child.position.x -= parentPivot[ 0 ];
+					child.position.y -= parentPivot[ 1 ];
+					child.position.z -= parentPivot[ 2 ];
 
-		},
+				}
 
-		parseUnknownForm( type, length ) {
+			} );
 
-			console.warn( 'LWOLoader: unknown FORM encountered: ' + type, length );
+		} );
 
-			printBuffer( this.reader.dv.buffer, this.reader.offset, length - 4 );
-			this.reader.skip( length - 4 );
+	},
 
-		},
+	getMaterials( namesArray, type ) {
 
-		parseSurfaceForm( length ) {
+		var materials = [];
 
-			this.reader.skip( 8 ); // unknown Uint32 x2
+		var self = this;
 
-			var name = this.reader.getString();
+		namesArray.forEach( function ( name, i ) {
 
-			var surface = {
-				attributes: {}, // LWO2 style non-node attributes will go here
-				connections: {},
-				name: name,
-				inputName: name,
-				nodes: {},
-				source: this.reader.getString(),
-			};
+			materials[ i ] = self.getMaterialByName( name );
 
-			this.tree.materials[ name ] = surface;
-			this.currentSurface = surface;
+		} );
 
-			this.parentForm = this.tree.materials;
-			this.currentForm = surface;
-			this.currentFormEnd = this.reader.offset + length;
+		// convert materials to line or point mats if required
+		if ( type === 'points' || type === 'lines' ) {
 
-		},
+			materials.forEach( function ( mat, i ) {
 
-		parseSurfaceLwo2( length ) {
+				var spec = {
+					color: mat.color,
+				};
 
-			var name = this.reader.getString();
+				if ( type === 'points' ) {
 
-			var surface = {
-				attributes: {}, // LWO2 style non-node attributes will go here
-				connections: {},
-				name: name,
-				nodes: {},
-				source: this.reader.getString(),
-			};
+					spec.size = 0.1;
+					spec.map = mat.map;
+					spec.morphTargets = mat.morphTargets;
+					materials[ i ] = new THREE.PointsMaterial( spec );
 
-			this.tree.materials[ name ] = surface;
-			this.currentSurface = surface;
+				} else if ( type === 'lines' ) {
 
-			this.parentForm = this.tree.materials;
-			this.currentForm = surface;
-			this.currentFormEnd = this.reader.offset + length;
+					materials[ i ] = new THREE.LineBasicMaterial( spec );
 
-		},
+				}
 
-		parseSubNode( length ) {
+			} );
 
-			// parse the NRNM CHUNK of the subnode FORM to get
-			// a meaningful name for the subNode
-			// some subnodes can be renamed, but Input and Surface cannot
+		}
 
-			this.reader.skip( 8 ); // NRNM + length
-			var name = this.reader.getString();
+		// if there is only one material, return that directly instead of array
+		var filtered = materials.filter( Boolean );
+		if ( filtered.length === 1 ) return filtered[ 0 ];
 
-			var node = {
-				name: name
-			};
-			this.currentForm = node;
-			this.currentNode = node;
+		return materials;
+
+	},
+
+	getMaterialByName( name ) {
 
-			this.currentFormEnd = this.reader.offset + length;
+		return this.materials.filter( function ( m ) {
 
+			return m.name === name;
 
-		},
+		} )[ 0 ];
 
-		// collect attributes from all nodes at the top level of a surface
-		parseConnections( length ) {
+	},
 
-			this.currentFormEnd = this.reader.offset + length;
-			this.parentForm = this.currentForm;
+	// If the material has an aoMap, duplicate UVs
+	duplicateUVs( geometry, materials ) {
 
-			this.currentForm = this.currentSurface.connections;
+		var duplicateUVs = false;
 
-		},
+		if ( ! Array.isArray( materials ) ) {
 
-		// surface node attribute data, e.g. specular, roughness etc
-		parseEntryForm( length ) {
+			if ( materials.aoMap ) duplicateUVs = true;
 
-			this.reader.skip( 8 ); // NAME + length
-			var name = this.reader.getString();
-			this.currentForm = this.currentNode.attributes;
+		} else {
 
-			this.setupForm( name, length );
+			materials.forEach( function ( material ) {
 
-		},
+				if ( material.aoMap ) duplicateUVs = true;
 
-		// parse values from material - doesn't match up to other LWO3 data types
-		// sub form of entry form
-		parseValueForm() {
+			} );
+
+		}
 
-			this.reader.skip( 8 ); // unknown + length
+		if ( ! duplicateUVs ) return;
 
-			var valueType = this.reader.getString();
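+		// three.js samples ambient occlusion maps from the second UV channel, so copy the existing UVs into 'uv2'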
+		geometry.addAttribute( 'uv2', new THREE.BufferAttribute( geometry.attributes.uv.array, 2 ) );
 
-			if ( valueType === 'double' ) {
+	},
 
-				this.currentForm.value = this.reader.getUint64();
+};
 
-			} else if ( valueType === 'int' ) {
+function MaterialParser( textureLoader ) {
 
-				this.currentForm.value = this.reader.getUint32();
+	this.textureLoader = textureLoader;
 
-			} else if ( valueType === 'vparam' ) {
+}
 
-				this.reader.skip( 24 );
-				this.currentForm.value = this.reader.getFloat64();
+MaterialParser.prototype = {
 
-			} else if ( valueType === 'vparam3' ) {
+	constructor: MaterialParser,
 
-				this.reader.skip( 24 );
-				this.currentForm.value = this.reader.getFloat64Array( 3 );
+	parse: function () {
 
+		var materials = [];
+		this.textures = {};
+
+		for ( var name in lwoTree.materials ) {
+
+			if ( lwoTree.format === 'LWO3' ) {
+
+				materials.push( this.parseMaterial( lwoTree.materials[ name ], name, lwoTree.textures ) );
+
+			} else if ( lwoTree.format === 'LWO2' ) {
+
+				materials.push( this.parseMaterialLwo2( lwoTree.materials[ name ], name, lwoTree.textures ) );
 
 			}
 
-		},
+		}
 
-		// holds various data about texture node image state
-		// Data other than mipMapLevel unknown
-		parseImageStateForm() {
+		return materials;
 
-			this.reader.skip( 8 ); // unknown
+	},
 
-			this.currentForm.mipMapLevel = this.reader.getFloat32();
+	parseMaterial( materialData, name, textures ) {
 
-		},
+		var params = {
+			name: name,
+			side: this.getSide( materialData.attributes ),
+			flatShading: this.getSmooth( materialData.attributes ),
+		};
 
-		// LWO2 style image data node OR LWO3 textures defined at top level in editor (not as SURF node)
-		parseImageMap( length ) {
+		var connections = this.parseConnections( materialData.connections, materialData.nodes );
 
-			this.currentFormEnd = this.reader.offset + length;
-			this.parentForm = this.currentForm;
+		var maps = this.parseTextureNodes( connections.maps );
 
-			if ( ! this.currentForm.maps ) this.currentForm.maps = [];
+		this.parseAttributeImageMaps( connections.attributes, textures, maps, materialData.maps );
 
-			var map = {};
-			this.currentForm.maps.push( map );
-			this.currentForm = map;
+		var attributes = this.parseAttributes( connections.attributes, maps );
 
-			this.reader.skip( 10 ); // unknown, could be an issue if it contains a VX
+		this.parseEnvMap( connections, maps, attributes );
 
-		},
+		params = Object.assign( maps, params );
+		params = Object.assign( params, attributes );
 
-		parseTextureNodeAttribute( type ) {
+		var materialType = this.getMaterialType( connections.attributes );
 
-			this.reader.skip( 28 ); // FORM + length + VPRM + unknown + Uint32 x2 + float32
+		return new materialType( params );
 
-			this.reader.skip( 20 ); // FORM + length + VPVL + float32 + Uint32
+	},
 
-			switch ( type ) {
+	parseMaterialLwo2( materialData, name/*, textures*/ ) {
 
-				case 'ISCL':
-					this.currentNode.scale = this.reader.getFloat32Array( 3 );
-					break;
-				case 'IPOS':
-					this.currentNode.position = this.reader.getFloat32Array( 3 );
-					break;
-				case 'IROT':
-					this.currentNode.rotation = this.reader.getFloat32Array( 3 );
-					break;
-				case 'IFAL':
-					this.currentNode.falloff = this.reader.getFloat32Array( 3 );
-					break;
+		var params = {
+			name: name,
+			side: this.getSide( materialData.attributes ),
+			flatShading: this.getSmooth( materialData.attributes ),
+		};
 
-				case 'IBMP':
-					this.currentNode.amplitude = this.reader.getFloat32();
-					break;
-				case 'IUTD':
-					this.currentNode.uTiles = this.reader.getFloat32();
-					break;
-				case 'IVTD':
-					this.currentNode.vTiles = this.reader.getFloat32();
-					break;
+		var attributes = this.parseAttributes( materialData.attributes, {} );
+		params = Object.assign( params, attributes );
+		return new THREE.MeshPhongMaterial( params );
 
-			}
+	},
 
-			this.reader.skip( 2 ); // unknown
+	// Note: converting from left to right handed coords by switching x -> -x in vertices, and
+	// then switching mat FrontSide -> BackSide
+	// NB: this means that THREE.FrontSide and THREE.BackSide have been switched!
+	getSide( attributes ) {
 
+		if ( ! attributes.side ) return THREE.BackSide;
 
-		},
+		switch ( attributes.side ) {
 
-		// ENVL forms are currently ignored
-		parseEnvelope( length ) {
+			case 0:
+			case 1:
+				return THREE.BackSide;
+			case 2: return THREE.FrontSide;
+			case 3: return THREE.DoubleSide;
 
-			this.reader.skip( length - 4 ); // skipping  entirely for now
+		}
 
-		},
+	},
 
-		///
-		// CHUNK PARSING METHODS
-		///
+	getSmooth( attributes ) {
 
-		// clips can either be defined inside a surface node, or at the top
-		// level and they have a different format in each case
-		parseClip( length ) {
+		if ( ! attributes.smooth ) return true;
+		return ! attributes.smooth;
 
-			var tag = this.reader.getIDTag();
+	},
 
-			// inside surface node
-			if ( tag === 'FORM' ) {
+	parseConnections( connections, nodes ) {
 
-				this.reader.skip( 16 );
+		var materialConnections = {
+			maps: {}
+		};
 
-				this.currentNode.fileName = this.reader.getString();
+		var inputName = connections.inputName;
+		var inputNodeName = connections.inputNodeName;
+		var nodeName = connections.nodeName;
 
-				return;
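+		// two passes: first find the node wired into the surface's 'Material' input,
+		// then collect every node wired into that material node as a texture map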
+		var self = this;
+		inputName.forEach( function ( name, index ) {
+
+			if ( name === 'Material' ) {
+
+				var matNode = self.getNodeByRefName( inputNodeName[ index ], nodes );
+				materialConnections.attributes = matNode.attributes;
+				materialConnections.envMap = matNode.fileName;
+				materialConnections.name = inputNodeName[ index ];
 
 			}
 
-			// otherwise top level
-			this.reader.setOffset( this.reader.offset - 4 );
+		} );
 
-			this.currentFormEnd = this.reader.offset + length;
-			this.parentForm = this.currentForm;
+		nodeName.forEach( function ( name, index ) {
 
-			this.reader.skip( 8 ); // unknown
+			if ( name === materialConnections.name ) {
 
-			var texture = {
-				index: this.reader.getUint32()
-			};
-			this.tree.textures.push( texture );
-			this.currentForm = texture;
+				materialConnections.maps[ inputName[ index ] ] = self.getNodeByRefName( inputNodeName[ index ], nodes );
 
-		},
+			}
 
-		parseClipLwo2( length ) {
+		} );
 
-			var texture = {
-				index: this.reader.getUint32(),
-				fileName: ""
-			};
+		return materialConnections;
 
-			// search STIL block
-			while ( true ) {
+	},
 
-				var tag = this.reader.getIDTag();
-				var n_length = this.reader.getUint16();
-				if ( tag === 'STIL' ) {
+	getNodeByRefName( refName, nodes ) {
 
-					texture.fileName = this.reader.getString();
-					break;
+		for ( var name in nodes ) {
 
-				}
+			if ( nodes[ name ].refName === refName ) return nodes[ name ];
 
-				if ( n_length >= length ) {
+		}
 
-					break;
+	},
 
-				}
+	parseTextureNodes( textureNodes ) {
+
+		var maps = {};
+
+		for ( var name in textureNodes ) {
+
+			var node = textureNodes[ name ];
+			var path = node.fileName;
+
+			if ( ! path ) return;
+
+			var texture = this.loadTexture( path );
+
+			if ( node.widthWrappingMode !== undefined ) texture.wrapS = this.getWrappingType( node.widthWrappingMode );
+			if ( node.heightWrappingMode !== undefined ) texture.wrapT = this.getWrappingType( node.heightWrappingMode );
+
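+			// map the LWO texture channel name onto the corresponding three.js material property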
+			switch ( name ) {
+
+				case 'Color':
+					maps.map = texture;
+					break;
+				case 'Roughness':
+					maps.roughnessMap = texture;
+					maps.roughness = 0.5;
+					break;
+				case 'Specular':
+					maps.specularMap = texture;
+					maps.specular = 0xffffff;
+					break;
+				case 'Luminous':
+					maps.emissiveMap = texture;
+					maps.emissive = 0x808080;
+					break;
+				case 'Luminous Color':
+					maps.emissive = 0x808080;
+					break;
+				case 'Metallic':
+					maps.metalnessMap = texture;
+					maps.metalness = 0.5;
+					break;
+				case 'Transparency':
+				case 'Alpha':
+					maps.alphaMap = texture;
+					maps.transparent = true;
+					break;
+				case 'Normal':
+					maps.normalMap = texture;
+					if ( node.amplitude !== undefined ) maps.normalScale = new THREE.Vector2( node.amplitude, node.amplitude );
+					break;
+				case 'Bump':
+					maps.bumpMap = texture;
+					break;
 
 			}
 
-			this.tree.textures.push( texture );
-			this.currentForm = texture;
+		}
 
-		},
+		// LWO BSDF materials can have both spec and rough, but this is not valid in three
+		if ( maps.roughnessMap && maps.specularMap ) delete maps.specularMap;
 
-		parseImage() {
+		return maps;
 
-			this.reader.skip( 8 ); // unknown
-			this.currentForm.fileName = this.reader.getString();
+	},
 
-		},
+	// maps can also be defined on individual material attributes, parse those here
+	// This occurs on Standard (Phong) surfaces
+	parseAttributeImageMaps( attributes, textures, maps ) {
 
-		parseXVAL( type, length ) {
+		for ( var name in attributes ) {
 
-			var endOffset = this.reader.offset + length - 4;
-			this.reader.skip( 8 );
+			var attribute = attributes[ name ];
 
-			this.currentForm[ type ] = this.reader.getFloat32();
+			if ( attribute.maps ) {
 
-			this.reader.setOffset( endOffset ); // set end offset directly to skip optional envelope
+				var mapData = attribute.maps[ 0 ];
 
-		},
+				var path = this.getTexturePathByIndex( mapData.imageIndex, textures );
+				if ( ! path ) return;
 
-		parseXVAL3( type, length ) {
+				var texture = this.loadTexture( path );
 
-			var endOffset = this.reader.offset + length - 4;
-			this.reader.skip( 8 );
+				if ( mapData.wrap !== undefined ) texture.wrapS = this.getWrappingType( mapData.wrap.w );
+				if ( mapData.wrap !== undefined ) texture.wrapT = this.getWrappingType( mapData.wrap.h );
 
-			this.currentForm[ type ] = {
-				x: this.reader.getFloat32(),
-				y: this.reader.getFloat32(),
-				z: this.reader.getFloat32(),
-			};
+				switch ( name ) {
 
-			this.reader.setOffset( endOffset );
+					case 'Color':
+						maps.map = texture;
+						break;
+					case 'Diffuse':
+						maps.aoMap = texture;
+						break;
+					case 'Roughness':
+						maps.roughnessMap = texture;
+						maps.roughness = 1;
+						break;
+					case 'Specular':
+						maps.specularMap = texture;
+						maps.specular = 0xffffff;
+						break;
+					case 'Luminosity':
+						maps.emissiveMap = texture;
+						maps.emissive = 0x808080;
+						break;
+					case 'Metallic':
+						maps.metalnessMap = texture;
+						maps.metalness = 1;
+						break;
+					case 'Transparency':
+					case 'Alpha':
+						maps.alphaMap = texture;
+						maps.transparent = true;
+						break;
+					case 'Normal':
+						maps.normalMap = texture;
+						break;
+					case 'Bump':
+						maps.bumpMap = texture;
+						break;
 
-		},
+				}
 
-		// Tags associated with an object
-		// OTAG { type[ID4], tag-string[S0] }
-		parseObjectTag() {
+			}
 
-			if ( ! this.tree.objectTags ) this.tree.objectTags = {};
+		}
 
-			this.tree.objectTags[ this.reader.getIDTag() ] = {
-				tagString: this.reader.getString()
-			};
+	},
 
-		},
+	parseAttributes( attributes, maps ) {
 
-		// Signals the start of a new layer. All the data chunks which follow will be included in this layer until another layer chunk is encountered.
-		// LAYR: number[U2], flags[U2], pivot[VEC12], name[S0], parent[U2]
-		parseLayer( length ) {
+		var params = {};
 
-			var layer = {
-				number: this.reader.getUint16(),
-				flags: this.reader.getUint16(), // If the least significant bit of flags is set, the layer is hidden.
-				pivot: this.reader.getFloat32Array( 3 ), // Note: this seems to be superfluous, as the geometry is translated when pivot is present
-				name: this.reader.getString(),
-			};
+		// don't use color data if color map is present
+		if ( attributes.Color && ! maps.map ) {
 
-			this.tree.layers.push( layer );
-			this.currentLayer = layer;
+			params.color = new THREE.Color().fromArray( attributes.Color.value );
 
-			var parsedLength = 16 + stringOffset( this.currentLayer.name ); // index ( 2 ) + flags( 2 ) + pivot( 12 ) + stringlength
+		} else params.color = new THREE.Color();
 
-			// if we have not reached the end of the layer block, there must be a parent defined
-			this.currentLayer.parent = ( parsedLength < length ) ? this.reader.getUint16() : - 1; // omitted or -1 for no parent
 
-		},
+		if ( attributes.Transparency && attributes.Transparency.value !== 0 ) {
 
-		// VEC12 * ( F4 + F4 + F4 ) array of x,y,z vectors
-		// Converting from left to right handed coordinate system:
-		// x -> -x and switch material FrontSide -> BackSide
-		parsePoints( length ) {
+			params.opacity = 1 - attributes.Transparency.value;
+			params.transparent = true;
 
-			this.currentPoints = [];
-			for ( var i = 0; i < length / 4; i += 3 ) {
+		}
 
-				// z -> -z to match three.js right handed coords
-				this.currentPoints.push( this.reader.getFloat32(), this.reader.getFloat32(), - this.reader.getFloat32() );
+		if ( attributes[ 'Bump Height' ] ) params.bumpScale = attributes[ 'Bump Height' ].value * 0.1;
 
-			}
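+		// three.js expects refractionRatio as the IOR of air ( ~1.0 ) divided by the material IOR, hence the inverse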
+		if ( attributes[ 'Refraction Index' ] ) params.refractionRatio = 1 / attributes[ 'Refraction Index' ].value;
+
+		this.parsePhysicalAttributes( params, attributes, maps );
+		this.parseStandardAttributes( params, attributes, maps );
+		this.parsePhongAttributes( params, attributes, maps );
 
-		},
+		return params;
 
-		// parse VMAP or VMAD
-		// Associates a set of floating-point vectors with a set of points.
-		// VMAP: { type[ID4], dimension[U2], name[S0], ( vert[VX], value[F4] # dimension ) * }
+	},
 
-		// VMAD Associates a set of floating-point vectors with the vertices of specific polygons.
-		// Similar to VMAP UVs, but associates with polygon vertices rather than points
-		// to solve the problem of UV seams: VMAD chunks are paired with VMAPs of the same name,
-		// if they exist. The vector values in the VMAD will then replace those in the
-		// corresponding VMAP, but only for calculations involving the specified polygons.
-		// VMAD { type[ID4], dimension[U2], name[S0], ( vert[VX], poly[VX], value[F4] # dimension ) * }
-		parseVertexMapping( length, discontinuous ) {
+	parsePhysicalAttributes( params, attributes/*, maps*/ ) {
 
-			var finalOffset = this.reader.offset + length;
+		if ( attributes.Clearcoat && attributes.Clearcoat.value > 0 ) {
 
-			var channelName = this.reader.getString();
+			params.clearCoat = attributes.Clearcoat.value;
 
-			if ( this.reader.offset === finalOffset ) {
+			if ( attributes[ 'Clearcoat Gloss' ] ) {
 
-				// then we are in a texture node and the VMAP chunk is just a reference to a UV channel name
-				this.currentForm.UVChannel = channelName;
-				return;
+				params.clearCoatRoughness = 0.5 * ( 1 - attributes[ 'Clearcoat Gloss' ].value );
 
 			}
 
-			// otherwise reset to initial length and parse normal VMAP CHUNK
-			this.reader.setOffset( this.reader.offset - stringOffset( channelName ) );
+		}
 
-			var type = this.reader.getIDTag();
+	},
 
-			this.reader.getUint16(); // dimension
-			var name = this.reader.getString();
+	parseStandardAttributes( params, attributes, maps ) {
 
-			var remainingLength = length - 6 - stringOffset( name );
 
-			switch ( type ) {
+		if ( attributes.Luminous ) {
 
-				case 'TXUV':
-					this.parseUVMapping( name, finalOffset, discontinuous );
-					break;
-				case 'MORF':
-				case 'SPOT':
-					this.parseMorphTargets( name, finalOffset, type ); // can't be discontinuous
-					break;
-				// unsupported VMAPs
-				case 'APSL':
-				case 'NORM':
-				case 'WGHT':
-				case 'MNVW':
-				case 'PICK':
-				case 'RGB ':
-				case 'RGBA':
-					this.reader.skip( remainingLength );
-					break;
-				default:
-					console.warn( 'LWOLoader: unknown vertex map type: ' + type );
-					this.reader.skip( remainingLength );
+			params.emissiveIntensity = attributes.Luminous.value;
+
+			if ( attributes[ 'Luminous Color' ] && ! maps.emissive ) {
+
+				params.emissive = new THREE.Color().fromArray( attributes[ 'Luminous Color' ].value );
+
+			} else {
+
+				params.emissive = new THREE.Color( 0x808080 );
 
 			}
 
-		},
+		}
 
-		parseUVMapping( name, finalOffset, discontinuous ) {
+		if ( attributes.Roughness && ! maps.roughnessMap ) params.roughness = attributes.Roughness.value;
+		if ( attributes.Metallic && ! maps.metalnessMap ) params.metalness = attributes.Metallic.value;
 
-			var uvIndices = [];
-			var polyIndices = [];
-			var uvs = [];
+	},
 
-			while ( this.reader.offset < finalOffset ) {
+	parsePhongAttributes( params, attributes, maps ) {
 
-				uvIndices.push( this.reader.getVariableLengthIndex() );
+		if ( attributes.Diffuse ) params.color.multiplyScalar( attributes.Diffuse.value );
 
-				if ( discontinuous ) polyIndices.push( this.reader.getVariableLengthIndex() );
+		if ( attributes.Reflection ) {
 
-				uvs.push( this.reader.getFloat32(), this.reader.getFloat32() );
+			params.reflectivity = attributes.Reflection.value;
+			params.combine = THREE.AddOperation;
 
-			}
+		}
 
-			if ( discontinuous ) {
+		if ( attributes.Luminosity ) {
 
-				if ( ! this.currentLayer.discontinuousUVs ) this.currentLayer.discontinuousUVs = {};
+			params.emissiveIntensity = attributes.Luminosity.value;
 
-				this.currentLayer.discontinuousUVs[ name ] = {
-					uvIndices: uvIndices,
-					polyIndices: polyIndices,
-					uvs: uvs,
-				};
+			if ( ! maps.emissiveMap && ! maps.map ) {
 
-			} else {
+				params.emissive = params.color;
 
-				if ( ! this.currentLayer.uvs ) this.currentLayer.uvs = {};
+			} else {
 
-				this.currentLayer.uvs[ name ] = {
-					uvIndices: uvIndices,
-					uvs: uvs,
-				};
+				params.emissive = new THREE.Color( 0x808080 );
 
 			}
 
-		},
+		}
 
-		parseMorphTargets( name, finalOffset, type ) {
+		// parse specular if there is no roughness - we will interpret the material as 'Phong' in this case
+		if ( ! attributes.Roughness && attributes.Specular && ! maps.specularMap ) {
 
-			var indices = [];
-			var points = [];
+			if ( attributes[ 'Color Highlight' ] ) {
 
-			type = ( type === 'MORF' ) ? 'relative' : 'absolute';
+				params.specular = new THREE.Color().setScalar( attributes.Specular.value ).lerp( params.color.clone().multiplyScalar( attributes.Specular.value ), attributes[ 'Color Highlight' ].value );
 
-			while ( this.reader.offset < finalOffset ) {
+			} else {
 
-				indices.push( this.reader.getVariableLengthIndex() );
-				// z -> -z to match three.js right handed coords
-				points.push( this.reader.getFloat32(), this.reader.getFloat32(), - this.reader.getFloat32() );
+				params.specular = new THREE.Color().setScalar( attributes.Specular.value );
 
 			}
 
-			if ( ! this.currentLayer.morphTargets ) this.currentLayer.morphTargets = {};
+		}
 
-			this.currentLayer.morphTargets[ name ] = {
-				indices: indices,
-				points: points,
-				type: type,
-			};
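+		// map LWO glossiness ( 0-1 ) onto an exponential Blinn-Phong shininess range ( roughly 11 to 16000 )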
+		if ( params.specular && attributes.Glossiness ) params.shininess = 7 + Math.pow( 2, attributes.Glossiness.value * 12 + 2 );
 
-		},
+	},
 
-		// A list of polygons for the current layer.
-		// POLS { type[ID4], ( numvert+flags[U2], vert[VX] # numvert ) * }
-		parsePolygonList( length ) {
+	parseEnvMap( connections, maps, attributes ) {
 
-			var finalOffset = this.reader.offset + length;
-			var type = this.reader.getIDTag();
+		if ( connections.envMap ) {
 
-			var indices = [];
+			var envMap = this.loadTexture( connections.envMap );
 
-			// hold a list of polygon sizes, to be split up later
-			var polygonDimensions = [];
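+			// transparent surfaces are treated as refractive, opaque surfaces as reflective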
+			if ( attributes.transparent && attributes.opacity < 0.999 ) {
 
-			while ( this.reader.offset < finalOffset ) {
+				envMap.mapping = THREE.EquirectangularRefractionMapping;
 
-				var numverts = this.reader.getUint16();
+				// Reflectivity and refraction mapping don't work well together in Phong materials
+				if ( attributes.reflectivity !== undefined ) {
 
-				//var flags = numverts & 64512; // 6 high order bits are flags - ignoring for now
-				numverts = numverts & 1023; // remaining ten low order bits are vertex num
-				polygonDimensions.push( numverts );
+					delete attributes.reflectivity;
+					delete attributes.combine;
 
-				for ( var j = 0; j < numverts; j ++ ) indices.push( this.reader.getVariableLengthIndex() );
+				}
 
-			}
+				if ( attributes.metalness !== undefined ) {
 
-			var geometryData = {
-				type: type,
-				vertexIndices: indices,
-				polygonDimensions: polygonDimensions,
-				points: this.currentPoints
-			};
+					delete attributes.metalness;
 
-			// Note: assuming that all polys will be lines or points if the first is
-			if ( polygonDimensions[ 0 ] === 1 ) geometryData.type = 'points';
-			else if ( polygonDimensions[ 0 ] === 2 ) geometryData.type = 'lines';
+				}
 
-			this.currentLayer.geometry = geometryData;
+			} else envMap.mapping = THREE.EquirectangularReflectionMapping;
 
-		},
+			maps.envMap = envMap;
 
-		// Lists the tag strings that can be associated with polygons by the PTAG chunk.
-		// TAGS { tag-string[S0] * }
-		parseTagStrings( length ) {
+		}
 
-			this.tree.tags = this.reader.getStringArray( length );
+	},
 
-		},
+	// get texture defined at top level by its index
+	getTexturePathByIndex( index ) {
 
-		// Associates tags of a given type with polygons in the most recent POLS chunk.
-		// PTAG { type[ID4], ( poly[VX], tag[U2] ) * }
-		parsePolygonTagMapping( length ) {
+		var fileName = '';
 
-			var finalOffset = this.reader.offset + length;
-			var type = this.reader.getIDTag();
-			if ( type === 'SURF' ) this.parseMaterialIndices( finalOffset );
-			else { //PART, SMGP, COLR not supported
+		if ( ! lwoTree.textures ) return fileName;
 
-				this.reader.skip( length - 4 );
+		lwoTree.textures.forEach( function ( texture ) {
 
-			}
+			if ( texture.index === index ) fileName = texture.fileName;
 
-		},
+		} );
 
-		parseMaterialIndices( finalOffset ) {
+		return fileName;
 
-			// array holds polygon index followed by material index
-			this.currentLayer.geometry.materialIndices = [];
+	},
 
-			var initialMatIndex;
+	loadTexture( path ) {
 
-			while ( this.reader.offset < finalOffset ) {
+		if ( ! path ) return null;
 
-				var polygonIndex = this.reader.getVariableLengthIndex();
-				var materialIndex = this.reader.getUint16();
+		var texture;
 
-				if ( ! initialMatIndex ) initialMatIndex = materialIndex; // set up first mat index
+		texture = this.textureLoader.load(
+			path,
+			undefined,
+			undefined,
+			function () {
 
-				this.currentLayer.geometry.materialIndices.push( polygonIndex, materialIndex );
+				console.warn( 'LWOLoader: non-standard resource hierarchy. Use \`resourcePath\` parameter to specify root content directory.' );
 
 			}
+		);
 
-		},
-
-		parseUnknownCHUNK( blockID, length ) {
+		return texture;
 
-			console.warn( 'LWOLoader: unknown chunk type: ' + blockID + ' length: ' + length );
+	},
 
-			// print the chunk plus some bytes padding either side
-			// printBuffer( this.reader.dv.buffer, this.reader.offset - 20, length + 40 );
+	// 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
+	getWrappingType( num ) {
 
-			var data = this.reader.getString( length );
+		switch ( num ) {
 
-			this.currentForm[ blockID ] = data;
+			case 0:
+				console.warn( 'LWOLoader: "Reset" texture wrapping type is not supported in three.js' );
+				return THREE.ClampToEdgeWrapping;
+			case 1: return THREE.RepeatWrapping;
+			case 2: return THREE.MirroredRepeatWrapping;
+			case 3: return THREE.ClampToEdgeWrapping;
 
 		}
 
-	};
-
-	function DataViewReader( buffer ) {
+	},
 
-		// For testing: dump whole buffer to console as a string
-		// printBuffer( buffer, 0, buffer.byteLength );
+	getMaterialType( nodeData ) {
 
-		this.dv = new DataView( buffer );
-		this.offset = 0;
+		if ( nodeData.Clearcoat && nodeData.Clearcoat.value > 0 ) return THREE.MeshPhysicalMaterial;
+		if ( nodeData.Roughness ) return THREE.MeshStandardMaterial;
+		return THREE.MeshPhongMaterial;
 
 	}
 
-	DataViewReader.prototype = {
+};
 
-		constructor: DataViewReader,
+function GeometryParser() {}
 
-		size: function () {
+GeometryParser.prototype = {
 
-			return this.dv.buffer.byteLength;
+	constructor: GeometryParser,
 
-		},
+	parse( geoData, layer ) {
 
-		setOffset( offset ) {
+		var geometry = new THREE.BufferGeometry();
 
-			if ( offset > 0 && offset < this.dv.buffer.byteLength ) {
+		geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( geoData.points, 3 ) );
 
-				this.offset = offset;
+		var indices = this.splitIndices( geoData.vertexIndices, geoData.polygonDimensions );
+		geometry.setIndex( indices );
 
-			} else {
+		this.parseGroups( geometry, geoData );
 
-				console.error( 'LWOLoader: invalid buffer offset' );
+		geometry.computeVertexNormals();
 
-			}
+		this.parseUVs( geometry, layer, indices );
+		this.parseMorphTargets( geometry, layer, indices );
 
-		},
+		// TODO: z may need to be reversed to account for coordinate system change
+		geometry.translate( - layer.pivot[ 0 ], - layer.pivot[ 1 ], - layer.pivot[ 2 ] );
 
-		endOfFile: function () {
+		// var userData = geometry.userData;
+		// geometry = geometry.toNonIndexed()
+		// geometry.userData = userData;
 
-			if ( this.offset >= this.size() ) return true;
-			return false;
+		return geometry;
 
-		},
+	},
 
-		skip: function ( length ) {
+	// split quads into tris
+	splitIndices( indices, polygonDimensions ) {
 
-			this.offset += length;
+		var remappedIndices = [];
 
-		},
+		var i = 0;
+		polygonDimensions.forEach( function ( dim ) {
 
-		getUint8: function () {
+			if ( dim < 4 ) {
 
-			var value = this.dv.getUint8( this.offset );
-			this.offset += 1;
-			return value;
+				for ( var k = 0; k < dim; k ++ ) remappedIndices.push( indices[ i + k ] );
 
-		},
+			} else if ( dim === 4 ) {
 
-		getUint16: function () {
+				remappedIndices.push(
+					indices[ i ],
+					indices[ i + 1 ],
+					indices[ i + 2 ],
 
-			var value = this.dv.getUint16( this.offset );
-			this.offset += 2;
-			return value;
+					indices[ i ],
+					indices[ i + 2 ],
+					indices[ i + 3 ]
 
-		},
+				);
 
-		getInt32: function () {
+			} else if ( dim > 4 ) {
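+				// triangulate the n-gon as a fan around its first vertex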
 
-			var value = this.dv.getInt32( this.offset, false );
-			this.offset += 4;
-			return value;
+				for ( var k = 1; k < dim - 1; k ++ ) {
 
-		},
+					remappedIndices.push( indices[ i ], indices[ i + k ], indices[ i + k + 1 ] );
 
-		getUint32: function () {
+				}
 
-			var value = this.dv.getUint32( this.offset, false );
-			this.offset += 4;
-			return value;
+				console.warn( 'LWOLoader: polygons with greater than 4 sides are not supported' );
 
-		},
+			}
 
-		getUint64: function () {
+			i += dim;
 
-			var low, high;
+		} );
 
-			high = this.getUint32();
-			low = this.getUint32();
-			return high * 0x100000000 + low;
+		return remappedIndices;
 
-		},
+	},
 
-		getFloat32: function () {
+	// NOTE: currently ignoring poly indices and assuming that they are intelligently ordered
+	parseGroups( geometry, geoData ) {
 
-			var value = this.dv.getFloat32( this.offset, false );
-			this.offset += 4;
-			return value;
+		var tags = lwoTree.tags;
+		var matNames = [];
 
-		},
+		var elemSize = 3;
+		if ( geoData.type === 'lines' ) elemSize = 2;
+		if ( geoData.type === 'points' ) elemSize = 1;
 
-		getFloat32Array: function ( size ) {
+		var remappedIndices = this.splitMaterialIndices( geoData.polygonDimensions, geoData.materialIndices );
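+		// remappedIndices holds ( polygon index, material index ) pairs, one pair per primitive produced by splitIndices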
 
-			var a = [];
+		var indexNum = 0; // create new indices in numerical order
+		var indexPairs = {}; // original indices mapped to numerical indices
 
-			for ( var i = 0; i < size; i ++ ) {
+		var prevMaterialIndex;
 
-				a.push( this.getFloat32() );
+		var prevStart = 0;
+		var currentCount = 0;
 
-			}
+		for ( var i = 0; i < remappedIndices.length; i += 2 ) {
 
-			return a;
+			var materialIndex = remappedIndices[ i + 1 ];
 
-		},
+			if ( i === 0 ) matNames[ indexNum ] = tags[ materialIndex ];
 
-		getFloat64: function () {
+			if ( prevMaterialIndex === undefined ) prevMaterialIndex = materialIndex;
 
-			var value = this.dv.getFloat64( this.offset, this.littleEndian );
-			this.offset += 8;
-			return value;
+			if ( materialIndex !== prevMaterialIndex ) {
 
-		},
+				var currentIndex;
+				if ( indexPairs[ tags[ prevMaterialIndex ] ] ) {
 
-		getFloat64Array: function ( size ) {
+					currentIndex = indexPairs[ tags[ prevMaterialIndex ] ];
 
-			var a = [];
+				} else {
 
-			for ( var i = 0; i < size; i ++ ) {
+					currentIndex = indexNum;
+					indexPairs[ tags[ prevMaterialIndex ] ] = indexNum;
+					matNames[ indexNum ] = tags[ prevMaterialIndex ];
+					indexNum ++;
 
-				a.push( this.getFloat64() );
+				}
+
+				geometry.addGroup( prevStart, currentCount, currentIndex );
+
+				prevStart += currentCount;
+
+				prevMaterialIndex = materialIndex;
+				currentCount = 0;
 
 			}
 
-			return a;
+			currentCount += elemSize;
+
+		}
 
-		},
+		// the loop above doesn't add the last group, do that here.
+		if ( geometry.groups.length > 0 ) {
 
-		// get variable-length index data type
-		// VX ::= index[U2] | (index + 0xFF000000)[U4]
-		// If the index value is less than 65,280 (0xFF00),then VX === U2
-		// otherwise VX === U4 with bits 24-31 set
-		// When reading an index, if the first byte encountered is 255 (0xFF), then
-		// the four-byte form is being used and the first byte should be discarded or masked out.
-		getVariableLengthIndex() {
+			var currentIndex;
+			if ( indexPairs[ tags[ materialIndex ] ] ) {
 
-			var firstByte = this.getUint8();
+				currentIndex = indexPairs[ tags[ materialIndex ] ];
 
-			if ( firstByte === 255 ) {
+			} else {
 
-				return this.getUint8() * 65536 + this.getUint8() * 256 + this.getUint8();
+				currentIndex = indexNum;
+				indexPairs[ tags[ materialIndex ] ] = indexNum;
+				matNames[ indexNum ] = tags[ materialIndex ];
 
 			}
 
-			return firstByte * 256 + this.getUint8();
+			geometry.addGroup( prevStart, currentCount, currentIndex );
+
+		}
+
+		// Mat names from TAGS chunk, used to build up an array of materials for this geometry
+		geometry.userData.matNames = matNames;
+
+	},
 
-		},
+	splitMaterialIndices( polygonDimensions, indices ) {
 
-		// An ID tag is a sequence of 4 bytes containing 7-bit ASCII values
-		getIDTag() {
+		var remappedIndices = [];
 
-			return this.getString( 4 );
+		polygonDimensions.forEach( function ( dim, i ) {
 
-		},
+			if ( dim <= 3 ) {
 
-		getString: function ( size ) {
+				remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ] );
 
-			if ( size === 0 ) return;
+			} else if ( dim === 4 ) {
 
-			// note: safari 9 doesn't support Uint8Array.indexOf; create intermediate array instead
-			var a = [];
+				remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ], indices[ i * 2 ], indices[ i * 2 + 1 ] );
 
-			if ( size ) {
+			} else {
 
-				for ( var i = 0; i < size; i ++ ) {
+				// for polygons with more than 4 sides, repeat the pair for each generated fan triangle
+				for ( var k = 0; k < dim - 2; k ++ ) {
 
-					a[ i ] = this.getUint8();
+					remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ] );
 
 				}
 
-			} else {
+			}
 
-				var currentChar;
-				var len = 0;
+		} );
 
-				while ( currentChar !== 0 ) {
+		return remappedIndices;
 
-					currentChar = this.getUint8();
-					if ( currentChar !== 0 ) a.push( currentChar );
-					len ++;
+	},
 
-				}
+	// UV maps:
+	// 1: are defined via index into an array of points, not into a geometry
+	// - the geometry is also defined by an index into this array, but the indexes may not match
+	// 2: there can be any number of UV maps for a single geometry. Here these are combined,
+	// 	with preference given to the first map encountered
+	// 3: UV maps can be partial - that is, defined for only a part of the geometry
+	// 4: UV maps can be VMAP or VMAD (discontinuous, to allow for seams). In practice, most
+	// UV maps are defined as partially VMAP and partially VMAD
+	// VMADs are currently not supported
+	parseUVs( geometry, layer ) {
 
-				if ( ! isEven( len + 1 ) ) this.getUint8(); // if string with terminating nullbyte is uneven, extra nullbyte is added
+		// start by creating a UV map set to zero for the whole geometry
+		var remappedUVs = Array.from( Array( geometry.attributes.position.count * 2 ), function () {
 
-			}
+			return 0;
 
-			return THREE.LoaderUtils.decodeText( new Uint8Array( a ) );
+		} );
 
-		},
+		for ( var name in layer.uvs ) {
 
-		getStringArray: function ( size ) {
+			var uvs = layer.uvs[ name ].uvs;
+			var uvIndices = layer.uvs[ name ].uvIndices;
 
-			var a = this.getString( size );
-			a = a.split( '\0' );
+			uvIndices.forEach( function ( i, j ) {
 
-			return a.filter( Boolean ); // return array with any empty strings removed
+				remappedUVs[ i * 2 ] = uvs[ j * 2 ];
+				remappedUVs[ i * 2 + 1 ] = uvs[ j * 2 + 1 ];
+
+			} );
 
 		}
 
-	};
+		geometry.addAttribute( 'uv', new THREE.Float32BufferAttribute( remappedUVs, 2 ) );
 
-	// ************** UTILITY FUNCTIONS **************
+	},
 
-	function isEven( num ) {
+	parseMorphTargets( geometry, layer ) {
 
-		return num % 2;
+		var num = 0;
+		for ( var name in layer.morphTargets ) {
 
-	}
+			var remappedPoints = geometry.attributes.position.array.slice();
 
-	// calculate the length of the string in the buffer
-	// this will be string.length + nullbyte + optional padbyte to make the length even
-	function stringOffset( string ) {
+			if ( ! geometry.morphAttributes.position ) geometry.morphAttributes.position = [];
 
-		return string.length + 1 + ( isEven( string.length + 1 ) ? 1 : 0 );
+			var morphPoints = layer.morphTargets[ name ].points;
+			var morphIndices = layer.morphTargets[ name ].indices;
+			var type = layer.morphTargets[ name ].type;
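+			// 'relative' ( MORF ) targets store offsets from the base positions, 'absolute' ( SPOT ) targets store replacement positions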
 
-	}
+			morphIndices.forEach( function ( i, j ) {
 
-	// for testing purposes, dump buffer to console
-	// printBuffer( this.reader.dv.buffer, this.reader.offset, length );
-	function printBuffer( buffer, from, to ) {
+				if ( type === 'relative' ) {
 
-		console.log( THREE.LoaderUtils.decodeText( new Uint8Array( buffer, from, to ) ) );
+					remappedPoints[ i * 3 ] += morphPoints[ j * 3 ];
+					remappedPoints[ i * 3 + 1 ] += morphPoints[ j * 3 + 1 ];
+					remappedPoints[ i * 3 + 2 ] += morphPoints[ j * 3 + 2 ];
 
-	}
+				} else {
+
+					remappedPoints[ i * 3 ] = morphPoints[ j * 3 ];
+					remappedPoints[ i * 3 + 1 ] = morphPoints[ j * 3 + 1 ];
+					remappedPoints[ i * 3 + 2 ] = morphPoints[ j * 3 + 2 ];
+
+				}
+
+			} );
+
+			geometry.morphAttributes.position[ num ] = new THREE.Float32BufferAttribute( remappedPoints, 3 );
+			geometry.morphAttributes.position[ num ].name = name;
+
+			num ++;
+
+		}
+
+	},
+
+};
+
+
+// ************** UTILITY FUNCTIONS **************
+
+function extractParentUrl( url, dir ) {
+
+	var index = url.indexOf( dir );
+
+	if ( index === - 1 ) return './';
 
-	return LWOLoader;
+	return url.substr( 0, index );
 
-} )();
+}

+ 0 - 0
examples/js/ConvexObjectBreaker.js → examples/js/misc/ConvexObjectBreaker.js


+ 0 - 0
examples/js/GPUComputationRenderer.js → examples/js/misc/GPUComputationRenderer.js


+ 12 - 8
examples/js/Ocean.js → examples/js/misc/Ocean.js

@@ -1,3 +1,7 @@
+/*
+	three.js Ocean
+*/
+
 THREE.Ocean = function ( renderer, camera, scene, options ) {
 
 	// flag used to trigger parameter changes
@@ -87,10 +91,10 @@ THREE.Ocean = function ( renderer, camera, scene, options ) {
 	////////////////////////////////////////
 
 	// 0 - The vertex shader used in all of the simulation steps
-	var fullscreeenVertexShader = THREE.ShaderLib[ "ocean_sim_vertex" ];
+	var fullscreeenVertexShader = THREE.OceanShaders[ "ocean_sim_vertex" ];
 
 	// 1 - Horizontal wave vertices used for FFT
-	var oceanHorizontalShader = THREE.ShaderLib[ "ocean_subtransform" ];
+	var oceanHorizontalShader = THREE.OceanShaders[ "ocean_subtransform" ];
 	var oceanHorizontalUniforms = THREE.UniformsUtils.clone( oceanHorizontalShader.uniforms );
 	this.materialOceanHorizontal = new THREE.ShaderMaterial( {
 		uniforms: oceanHorizontalUniforms,
@@ -103,7 +107,7 @@ THREE.Ocean = function ( renderer, camera, scene, options ) {
 	this.materialOceanHorizontal.depthTest = false;
 
 	// 2 - Vertical wave vertices used for FFT
-	var oceanVerticalShader = THREE.ShaderLib[ "ocean_subtransform" ];
+	var oceanVerticalShader = THREE.OceanShaders[ "ocean_subtransform" ];
 	var oceanVerticalUniforms = THREE.UniformsUtils.clone( oceanVerticalShader.uniforms );
 	this.materialOceanVertical = new THREE.ShaderMaterial( {
 		uniforms: oceanVerticalUniforms,
@@ -116,7 +120,7 @@ THREE.Ocean = function ( renderer, camera, scene, options ) {
 	this.materialOceanVertical.depthTest = false;
 
 	// 3 - Initial spectrum used to generate height map
-	var initialSpectrumShader = THREE.ShaderLib[ "ocean_initial_spectrum" ];
+	var initialSpectrumShader = THREE.OceanShaders[ "ocean_initial_spectrum" ];
 	var initialSpectrumUniforms = THREE.UniformsUtils.clone( initialSpectrumShader.uniforms );
 	this.materialInitialSpectrum = new THREE.ShaderMaterial( {
 		uniforms: initialSpectrumUniforms,
@@ -128,7 +132,7 @@ THREE.Ocean = function ( renderer, camera, scene, options ) {
 	this.materialInitialSpectrum.depthTest = false;
 
 	// 4 - Phases used to animate heightmap
-	var phaseShader = THREE.ShaderLib[ "ocean_phase" ];
+	var phaseShader = THREE.OceanShaders[ "ocean_phase" ];
 	var phaseUniforms = THREE.UniformsUtils.clone( phaseShader.uniforms );
 	this.materialPhase = new THREE.ShaderMaterial( {
 		uniforms: phaseUniforms,
@@ -139,7 +143,7 @@ THREE.Ocean = function ( renderer, camera, scene, options ) {
 	this.materialPhase.depthTest = false;
 
 	// 5 - Shader used to update spectrum
-	var spectrumShader = THREE.ShaderLib[ "ocean_spectrum" ];
+	var spectrumShader = THREE.OceanShaders[ "ocean_spectrum" ];
 	var spectrumUniforms = THREE.UniformsUtils.clone( spectrumShader.uniforms );
 	this.materialSpectrum = new THREE.ShaderMaterial( {
 		uniforms: spectrumUniforms,
@@ -151,7 +155,7 @@ THREE.Ocean = function ( renderer, camera, scene, options ) {
 	this.materialSpectrum.depthTest = false;
 
 	// 6 - Shader used to update spectrum normals
-	var normalShader = THREE.ShaderLib[ "ocean_normals" ];
+	var normalShader = THREE.OceanShaders[ "ocean_normals" ];
 	var normalUniforms = THREE.UniformsUtils.clone( normalShader.uniforms );
 	this.materialNormal = new THREE.ShaderMaterial( {
 		uniforms: normalUniforms,
@@ -163,7 +167,7 @@ THREE.Ocean = function ( renderer, camera, scene, options ) {
 	this.materialNormal.depthTest = false;
 
 	// 7 - Shader used to update normals
-	var oceanShader = THREE.ShaderLib[ "ocean_main" ];
+	var oceanShader = THREE.OceanShaders[ "ocean_main" ];
 	var oceanUniforms = THREE.UniformsUtils.clone( oceanShader.uniforms );
 	this.materialOcean = new THREE.ShaderMaterial( {
 		uniforms: oceanUniforms,

+ 10 - 9
examples/js/shaders/OceanShaders.js

@@ -1,4 +1,4 @@
-// Author: Aleksandr Albert
+/* Author: Aleksandr Albert
 // Website: www.routter.co.tt
 
 // Description: A deep water ocean shader set
@@ -20,9 +20,10 @@
 
 // -- Rendering Shader
 // [7] ocean_main               -> Vertex and Fragment shader used to create the final render
+*/
 
-
-THREE.ShaderLib[ 'ocean_sim_vertex' ] = {
+THREE.OceanShaders = {};
+THREE.OceanShaders[ 'ocean_sim_vertex' ] = {
 	vertexShader: [
 		'varying vec2 vUV;',
 
@@ -32,7 +33,7 @@ THREE.ShaderLib[ 'ocean_sim_vertex' ] = {
 		'}'
 	].join( '\n' )
 };
-THREE.ShaderLib[ 'ocean_subtransform' ] = {
+THREE.OceanShaders[ 'ocean_subtransform' ] = {
 	uniforms: {
 		"u_input": { value: null },
 		"u_transformSize": { value: 512.0 },
@@ -82,7 +83,7 @@ THREE.ShaderLib[ 'ocean_subtransform' ] = {
 		'}'
 	].join( '\n' )
 };
-THREE.ShaderLib[ 'ocean_initial_spectrum' ] = {
+THREE.OceanShaders[ 'ocean_initial_spectrum' ] = {
 	uniforms: {
 		"u_wind": { value: new THREE.Vector2( 10.0, 10.0 ) },
 		"u_resolution": { value: 512.0 },
@@ -163,7 +164,7 @@ THREE.ShaderLib[ 'ocean_initial_spectrum' ] = {
 		'}'
 	].join( '\n' )
 };
-THREE.ShaderLib[ 'ocean_phase' ] = {
+THREE.OceanShaders[ 'ocean_phase' ] = {
 	uniforms: {
 		"u_phases": { value: null },
 		"u_deltaTime": { value: null },
@@ -203,7 +204,7 @@ THREE.ShaderLib[ 'ocean_phase' ] = {
 		'}'
 	].join( '\n' )
 };
-THREE.ShaderLib[ 'ocean_spectrum' ] = {
+THREE.OceanShaders[ 'ocean_spectrum' ] = {
 	uniforms: {
 		"u_size": { value: null },
 		"u_resolution": { value: null },
@@ -267,7 +268,7 @@ THREE.ShaderLib[ 'ocean_spectrum' ] = {
 		'}'
 	].join( '\n' )
 };
-THREE.ShaderLib[ 'ocean_normals' ] = {
+THREE.OceanShaders[ 'ocean_normals' ] = {
 	uniforms: {
 		"u_displacementMap": { value: null },
 		"u_resolution": { value: null },
@@ -301,7 +302,7 @@ THREE.ShaderLib[ 'ocean_normals' ] = {
 		'}'
 	].join( '\n' )
 };
-THREE.ShaderLib[ 'ocean_main' ] = {
+THREE.OceanShaders[ 'ocean_main' ] = {
 	uniforms: {
 		"u_displacementMap": { value: null },
 		"u_normalMap": { value: null },

+ 311 - 311
examples/js/shaders/VolumeShader.js

@@ -8,317 +8,317 @@
 
 THREE.VolumeRenderShader1 = {
 	uniforms: {
-        "u_size": { value: new THREE.Vector3( 1, 1, 1 ) },
-        "u_renderstyle": { value: 0 },
-        "u_renderthreshold": { value: 0.5 },
-        "u_clim": { value: new THREE.Vector2( 1, 1 ) },
-        "u_data": { value: null },
-        "u_cmdata": { value: null }
-    },
-    vertexShader: [
-        'varying vec4 v_nearpos;',
-        'varying vec4 v_farpos;',
-        'varying vec3 v_position;',
-
-        'mat4 inversemat(mat4 m) {',
-            // Taken from https://github.com/stackgl/glsl-inverse/blob/master/index.glsl
-            // This function is licenced by the MIT license to Mikola Lysenko
-            'float',
-            'a00 = m[0][0], a01 = m[0][1], a02 = m[0][2], a03 = m[0][3],',
-            'a10 = m[1][0], a11 = m[1][1], a12 = m[1][2], a13 = m[1][3],',
-            'a20 = m[2][0], a21 = m[2][1], a22 = m[2][2], a23 = m[2][3],',
-            'a30 = m[3][0], a31 = m[3][1], a32 = m[3][2], a33 = m[3][3],',
-
-            'b00 = a00 * a11 - a01 * a10,',
-            'b01 = a00 * a12 - a02 * a10,',
-            'b02 = a00 * a13 - a03 * a10,',
-            'b03 = a01 * a12 - a02 * a11,',
-            'b04 = a01 * a13 - a03 * a11,',
-            'b05 = a02 * a13 - a03 * a12,',
-            'b06 = a20 * a31 - a21 * a30,',
-            'b07 = a20 * a32 - a22 * a30,',
-            'b08 = a20 * a33 - a23 * a30,',
-            'b09 = a21 * a32 - a22 * a31,',
-            'b10 = a21 * a33 - a23 * a31,',
-            'b11 = a22 * a33 - a23 * a32,',
-
-            'det = b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06;',
-
-        'return mat4(',
-            'a11 * b11 - a12 * b10 + a13 * b09,',
-            'a02 * b10 - a01 * b11 - a03 * b09,',
-            'a31 * b05 - a32 * b04 + a33 * b03,',
-            'a22 * b04 - a21 * b05 - a23 * b03,',
-            'a12 * b08 - a10 * b11 - a13 * b07,',
-            'a00 * b11 - a02 * b08 + a03 * b07,',
-            'a32 * b02 - a30 * b05 - a33 * b01,',
-            'a20 * b05 - a22 * b02 + a23 * b01,',
-            'a10 * b10 - a11 * b08 + a13 * b06,',
-            'a01 * b08 - a00 * b10 - a03 * b06,',
-            'a30 * b04 - a31 * b02 + a33 * b00,',
-            'a21 * b02 - a20 * b04 - a23 * b00,',
-            'a11 * b07 - a10 * b09 - a12 * b06,',
-            'a00 * b09 - a01 * b07 + a02 * b06,',
-            'a31 * b01 - a30 * b03 - a32 * b00,',
-            'a20 * b03 - a21 * b01 + a22 * b00) / det;',
-        '}',
-
-
-        'void main() {',
-            // Prepare transforms to map to "camera view". See also:
-            // https://threejs.org/docs/#api/renderers/webgl/WebGLProgram
-            'mat4 viewtransformf = viewMatrix;',
-            'mat4 viewtransformi = inversemat(viewMatrix);',
-
-            // Project local vertex coordinate to camera position. Then do a step
-            // backward (in cam coords) to the near clipping plane, and project back. Do
-            // the same for the far clipping plane. This gives us all the information we
-            // need to calculate the ray and truncate it to the viewing cone.
-            'vec4 position4 = vec4(position, 1.0);',
-            'vec4 pos_in_cam = viewtransformf * position4;',
-
-            // Intersection of ray and near clipping plane (z = -1 in clip coords)
-            'pos_in_cam.z = -pos_in_cam.w;',
-            'v_nearpos = viewtransformi * pos_in_cam;',
-
-            // Intersection of ray and far clipping plane (z = +1 in clip coords)
-            'pos_in_cam.z = pos_in_cam.w;',
-            'v_farpos = viewtransformi * pos_in_cam;',
-
-            // Set varyings and output pos
-            'v_position = position;',
-            'gl_Position = projectionMatrix * viewMatrix * modelMatrix * position4;',
-        '}',
-    ].join( '\n' ),
+				"u_size": { value: new THREE.Vector3( 1, 1, 1 ) },
+				"u_renderstyle": { value: 0 },
+				"u_renderthreshold": { value: 0.5 },
+				"u_clim": { value: new THREE.Vector2( 1, 1 ) },
+				"u_data": { value: null },
+				"u_cmdata": { value: null }
+		},
+		vertexShader: [
+				'varying vec4 v_nearpos;',
+				'varying vec4 v_farpos;',
+				'varying vec3 v_position;',
+
+				'mat4 inversemat(mat4 m) {',
+						// Taken from https://github.com/stackgl/glsl-inverse/blob/master/index.glsl
+						// This function is licenced by the MIT license to Mikola Lysenko
+						'float',
+						'a00 = m[0][0], a01 = m[0][1], a02 = m[0][2], a03 = m[0][3],',
+						'a10 = m[1][0], a11 = m[1][1], a12 = m[1][2], a13 = m[1][3],',
+						'a20 = m[2][0], a21 = m[2][1], a22 = m[2][2], a23 = m[2][3],',
+						'a30 = m[3][0], a31 = m[3][1], a32 = m[3][2], a33 = m[3][3],',
+
+						'b00 = a00 * a11 - a01 * a10,',
+						'b01 = a00 * a12 - a02 * a10,',
+						'b02 = a00 * a13 - a03 * a10,',
+						'b03 = a01 * a12 - a02 * a11,',
+						'b04 = a01 * a13 - a03 * a11,',
+						'b05 = a02 * a13 - a03 * a12,',
+						'b06 = a20 * a31 - a21 * a30,',
+						'b07 = a20 * a32 - a22 * a30,',
+						'b08 = a20 * a33 - a23 * a30,',
+						'b09 = a21 * a32 - a22 * a31,',
+						'b10 = a21 * a33 - a23 * a31,',
+						'b11 = a22 * a33 - a23 * a32,',
+
+						'det = b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06;',
+
+				'return mat4(',
+						'a11 * b11 - a12 * b10 + a13 * b09,',
+						'a02 * b10 - a01 * b11 - a03 * b09,',
+						'a31 * b05 - a32 * b04 + a33 * b03,',
+						'a22 * b04 - a21 * b05 - a23 * b03,',
+						'a12 * b08 - a10 * b11 - a13 * b07,',
+						'a00 * b11 - a02 * b08 + a03 * b07,',
+						'a32 * b02 - a30 * b05 - a33 * b01,',
+						'a20 * b05 - a22 * b02 + a23 * b01,',
+						'a10 * b10 - a11 * b08 + a13 * b06,',
+						'a01 * b08 - a00 * b10 - a03 * b06,',
+						'a30 * b04 - a31 * b02 + a33 * b00,',
+						'a21 * b02 - a20 * b04 - a23 * b00,',
+						'a11 * b07 - a10 * b09 - a12 * b06,',
+						'a00 * b09 - a01 * b07 + a02 * b06,',
+						'a31 * b01 - a30 * b03 - a32 * b00,',
+						'a20 * b03 - a21 * b01 + a22 * b00) / det;',
+				'}',
+
+
+				'void main() {',
+						// Prepare transforms to map to "camera view". See also:
+						// https://threejs.org/docs/#api/renderers/webgl/WebGLProgram
+						'mat4 viewtransformf = viewMatrix;',
+						'mat4 viewtransformi = inversemat(viewMatrix);',
+
+						// Project local vertex coordinate to camera position. Then do a step
+						// backward (in cam coords) to the near clipping plane, and project back. Do
+						// the same for the far clipping plane. This gives us all the information we
+						// need to calculate the ray and truncate it to the viewing cone.
+						'vec4 position4 = vec4(position, 1.0);',
+						'vec4 pos_in_cam = viewtransformf * position4;',
+
+						// Intersection of ray and near clipping plane (z = -1 in clip coords)
+						'pos_in_cam.z = -pos_in_cam.w;',
+						'v_nearpos = viewtransformi * pos_in_cam;',
+
+						// Intersection of ray and far clipping plane (z = +1 in clip coords)
+						'pos_in_cam.z = pos_in_cam.w;',
+						'v_farpos = viewtransformi * pos_in_cam;',
+
+						// Set varyings and output pos
+						'v_position = position;',
+						'gl_Position = projectionMatrix * viewMatrix * modelMatrix * position4;',
+				'}',
+		].join( '\n' ),
 	fragmentShader: [
-        'precision highp float;',
-        'precision mediump sampler3D;',
-
-        'uniform vec3 u_size;',
-        'uniform int u_renderstyle;',
-        'uniform float u_renderthreshold;',
-        'uniform vec2 u_clim;',
-
-        'uniform sampler3D u_data;',
-        'uniform sampler2D u_cmdata;',
-
-        'varying vec3 v_position;',
-        'varying vec4 v_nearpos;',
-        'varying vec4 v_farpos;',
-
-        // The maximum distance through our rendering volume is sqrt(3).
-        'const int MAX_STEPS = 887;  // 887 for 512^3, 1774 for 1024^3',
-        'const int REFINEMENT_STEPS = 4;',
-        'const float relative_step_size = 1.0;',
-        'const vec4 ambient_color = vec4(0.2, 0.4, 0.2, 1.0);',
-        'const vec4 diffuse_color = vec4(0.8, 0.2, 0.2, 1.0);',
-        'const vec4 specular_color = vec4(1.0, 1.0, 1.0, 1.0);',
-        'const float shininess = 40.0;',
-
-        'void cast_mip(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray);',
-        'void cast_iso(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray);',
-
-        'float sample1(vec3 texcoords);',
-        'vec4 apply_colormap(float val);',
-        'vec4 add_lighting(float val, vec3 loc, vec3 step, vec3 view_ray);',
-
-
-        'void main() {',
-            // Normalize clipping plane info
-            'vec3 farpos = v_farpos.xyz / v_farpos.w;',
-            'vec3 nearpos = v_nearpos.xyz / v_nearpos.w;',
-
-            // Calculate unit vector pointing in the view direction through this fragment.
-            'vec3 view_ray = normalize(nearpos.xyz - farpos.xyz);',
-
-            // Compute the (negative) distance to the front surface or near clipping plane.
-            // v_position is the back face of the cuboid, so the initial distance calculated in the dot
-            // product below is the distance from near clip plane to the back of the cuboid
-            'float distance = dot(nearpos - v_position, view_ray);',
-            'distance = max(distance, min((-0.5 - v_position.x) / view_ray.x,',
-                                        '(u_size.x - 0.5 - v_position.x) / view_ray.x));',
-            'distance = max(distance, min((-0.5 - v_position.y) / view_ray.y,',
-                                        '(u_size.y - 0.5 - v_position.y) / view_ray.y));',
-            'distance = max(distance, min((-0.5 - v_position.z) / view_ray.z,',
-                                        '(u_size.z - 0.5 - v_position.z) / view_ray.z));',
-
-                                        // Now we have the starting position on the front surface
-            'vec3 front = v_position + view_ray * distance;',
-
-            // Decide how many steps to take
-            'int nsteps = int(-distance / relative_step_size + 0.5);',
-            'if ( nsteps < 1 )',
-                'discard;',
-
-            // Get starting location and step vector in texture coordinates
-            'vec3 step = ((v_position - front) / u_size) / float(nsteps);',
-            'vec3 start_loc = front / u_size;',
-
-            // For testing: show the number of steps. This helps to establish
-            // whether the rays are correctly oriented
-            //'gl_FragColor = vec4(0.0, float(nsteps) / 1.0 / u_size.x, 1.0, 1.0);',
-            //'return;',
-
-            'if (u_renderstyle == 0)',
-                'cast_mip(start_loc, step, nsteps, view_ray);',
-            'else if (u_renderstyle == 1)',
-                'cast_iso(start_loc, step, nsteps, view_ray);',
-
-            'if (gl_FragColor.a < 0.05)',
-                'discard;',
-        '}',
-
-
-        'float sample1(vec3 texcoords) {',
-            '/* Sample float value from a 3D texture. Assumes intensity data. */',
-            'return texture(u_data, texcoords.xyz).r;',
-        '}',
-
-
-        'vec4 apply_colormap(float val) {',
-            'val = (val - u_clim[0]) / (u_clim[1] - u_clim[0]);',
-            'return texture2D(u_cmdata, vec2(val, 0.5));',
-        '}',
-
-
-        'void cast_mip(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray) {',
-
-            'float max_val = -1e6;',
-            'int max_i = 100;',
-            'vec3 loc = start_loc;',
-
-            // Enter the raycasting loop. In WebGL 1 the loop index cannot be compared with
-            // non-constant expression. So we use a hard-coded max, and an additional condition
-            // inside the loop.
-            'for (int iter=0; iter<MAX_STEPS; iter++) {',
-                'if (iter >= nsteps)',
-                    'break;',
-                // Sample from the 3D texture
-                'float val = sample1(loc);',
-                // Apply MIP operation
-                'if (val > max_val) {',
-                    'max_val = val;',
-                    'max_i = iter;',
-                '}',
-                // Advance location deeper into the volume
-                'loc += step;',
-            '}',
-
-            // Refine location, gives crispier images
-            'vec3 iloc = start_loc + step * (float(max_i) - 0.5);',
-            'vec3 istep = step / float(REFINEMENT_STEPS);',
-            'for (int i=0; i<REFINEMENT_STEPS; i++) {',
-                'max_val = max(max_val, sample1(iloc));',
-                'iloc += istep;',
-            '}',
-
-            // Resolve final color
-            'gl_FragColor = apply_colormap(max_val);',
-        '}',
-
-
-        'void cast_iso(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray) {',
-
-            'gl_FragColor = vec4(0.0);  // init transparent',
-            'vec4 color3 = vec4(0.0);  // final color',
-            'vec3 dstep = 1.5 / u_size;  // step to sample derivative',
-            'vec3 loc = start_loc;',
-
-            'float low_threshold = u_renderthreshold - 0.02 * (u_clim[1] - u_clim[0]);',
-
-            // Enter the raycasting loop. In WebGL 1 the loop index cannot be compared with
-            // non-constant expression. So we use a hard-coded max, and an additional condition
-            // inside the loop.
-            'for (int iter=0; iter<MAX_STEPS; iter++) {',
-                'if (iter >= nsteps)',
-                    'break;',
-
-                    // Sample from the 3D texture
-                'float val = sample1(loc);',
-
-                'if (val > low_threshold) {',
-                // Take the last interval in smaller steps
-                    'vec3 iloc = loc - 0.5 * step;',
-                    'vec3 istep = step / float(REFINEMENT_STEPS);',
-                    'for (int i=0; i<REFINEMENT_STEPS; i++) {',
-                        'val = sample1(iloc);',
-                        'if (val > u_renderthreshold) {',
-                            'gl_FragColor = add_lighting(val, iloc, dstep, view_ray);',
-                            'return;',
-                        '}',
-                        'iloc += istep;',
-                    '}',
-                '}',
-
-                // Advance location deeper into the volume
-                'loc += step;',
-            '}',
-        '}',
-
-
-        'vec4 add_lighting(float val, vec3 loc, vec3 step, vec3 view_ray)',
-        '{',
-            // Calculate color by incorporating lighting
-
-            // View direction
-            'vec3 V = normalize(view_ray);',
-
-            // calculate normal vector from gradient
-            'vec3 N;',
-            'float val1, val2;',
-            'val1 = sample1(loc + vec3(-step[0], 0.0, 0.0));',
-            'val2 = sample1(loc + vec3(+step[0], 0.0, 0.0));',
-            'N[0] = val1 - val2;',
-            'val = max(max(val1, val2), val);',
-            'val1 = sample1(loc + vec3(0.0, -step[1], 0.0));',
-            'val2 = sample1(loc + vec3(0.0, +step[1], 0.0));',
-            'N[1] = val1 - val2;',
-            'val = max(max(val1, val2), val);',
-            'val1 = sample1(loc + vec3(0.0, 0.0, -step[2]));',
-            'val2 = sample1(loc + vec3(0.0, 0.0, +step[2]));',
-            'N[2] = val1 - val2;',
-            'val = max(max(val1, val2), val);',
-
-            'float gm = length(N); // gradient magnitude',
-            'N = normalize(N);',
-
-            // Flip normal so it points towards viewer
-            'float Nselect = float(dot(N, V) > 0.0);',
-            'N = (2.0 * Nselect - 1.0) * N;  // ==  Nselect * N - (1.0-Nselect)*N;',
-
-            // Init colors
-            'vec4 ambient_color = vec4(0.0, 0.0, 0.0, 0.0);',
-            'vec4 diffuse_color = vec4(0.0, 0.0, 0.0, 0.0);',
-            'vec4 specular_color = vec4(0.0, 0.0, 0.0, 0.0);',
-
-            // note: could allow multiple lights
-            'for (int i=0; i<1; i++)',
-            '{',
-                 // Get light direction (make sure to prevent zero devision)
-                'vec3 L = normalize(view_ray);  //lightDirs[i];',
-                'float lightEnabled = float( length(L) > 0.0 );',
-                'L = normalize(L + (1.0 - lightEnabled));',
-
-                // Calculate lighting properties
-                'float lambertTerm = clamp(dot(N, L), 0.0, 1.0);',
-                'vec3 H = normalize(L+V); // Halfway vector',
-                'float specularTerm = pow(max(dot(H, N), 0.0), shininess);',
-
-                // Calculate mask
-                'float mask1 = lightEnabled;',
-
-                // Calculate colors
-                'ambient_color +=  mask1 * ambient_color;  // * gl_LightSource[i].ambient;',
-                'diffuse_color +=  mask1 * lambertTerm;',
-                'specular_color += mask1 * specularTerm * specular_color;',
-            '}',
-
-            // Calculate final color by componing different components
-            'vec4 final_color;',
-            'vec4 color = apply_colormap(val);',
-            'final_color = color * (ambient_color + diffuse_color) + specular_color;',
-            'final_color.a = color.a;',
-            'return final_color;',
-        '}',
+				'precision highp float;',
+				'precision mediump sampler3D;',
+
+				'uniform vec3 u_size;',
+				'uniform int u_renderstyle;',
+				'uniform float u_renderthreshold;',
+				'uniform vec2 u_clim;',
+
+				'uniform sampler3D u_data;',
+				'uniform sampler2D u_cmdata;',
+
+				'varying vec3 v_position;',
+				'varying vec4 v_nearpos;',
+				'varying vec4 v_farpos;',
+
+				// The maximum distance through our rendering volume is sqrt(3).
+				'const int MAX_STEPS = 887;	// 887 for 512^3, 1774 for 1024^3',
+				'const int REFINEMENT_STEPS = 4;',
+				'const float relative_step_size = 1.0;',
+				'const vec4 ambient_color = vec4(0.2, 0.4, 0.2, 1.0);',
+				'const vec4 diffuse_color = vec4(0.8, 0.2, 0.2, 1.0);',
+				'const vec4 specular_color = vec4(1.0, 1.0, 1.0, 1.0);',
+				'const float shininess = 40.0;',
+
+				'void cast_mip(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray);',
+				'void cast_iso(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray);',
+
+				'float sample1(vec3 texcoords);',
+				'vec4 apply_colormap(float val);',
+				'vec4 add_lighting(float val, vec3 loc, vec3 step, vec3 view_ray);',
+
+
+				'void main() {',
+						// Normalize clipping plane info
+						'vec3 farpos = v_farpos.xyz / v_farpos.w;',
+						'vec3 nearpos = v_nearpos.xyz / v_nearpos.w;',
+
+						// Calculate unit vector pointing in the view direction through this fragment.
+						'vec3 view_ray = normalize(nearpos.xyz - farpos.xyz);',
+
+						// Compute the (negative) distance to the front surface or near clipping plane.
+						// v_position is the back face of the cuboid, so the initial distance calculated in the dot
+						// product below is the distance from near clip plane to the back of the cuboid
+						'float distance = dot(nearpos - v_position, view_ray);',
+						'distance = max(distance, min((-0.5 - v_position.x) / view_ray.x,',
+																				'(u_size.x - 0.5 - v_position.x) / view_ray.x));',
+						'distance = max(distance, min((-0.5 - v_position.y) / view_ray.y,',
+																				'(u_size.y - 0.5 - v_position.y) / view_ray.y));',
+						'distance = max(distance, min((-0.5 - v_position.z) / view_ray.z,',
+																				'(u_size.z - 0.5 - v_position.z) / view_ray.z));',
+
+																				// Now we have the starting position on the front surface
+						'vec3 front = v_position + view_ray * distance;',
+
+						// Decide how many steps to take
+						'int nsteps = int(-distance / relative_step_size + 0.5);',
+						'if ( nsteps < 1 )',
+								'discard;',
+
+						// Get starting location and step vector in texture coordinates
+						'vec3 step = ((v_position - front) / u_size) / float(nsteps);',
+						'vec3 start_loc = front / u_size;',
+
+						// For testing: show the number of steps. This helps to establish
+						// whether the rays are correctly oriented
+						//'gl_FragColor = vec4(0.0, float(nsteps) / 1.0 / u_size.x, 1.0, 1.0);',
+						//'return;',
+
+						'if (u_renderstyle == 0)',
+								'cast_mip(start_loc, step, nsteps, view_ray);',
+						'else if (u_renderstyle == 1)',
+								'cast_iso(start_loc, step, nsteps, view_ray);',
+
+						'if (gl_FragColor.a < 0.05)',
+								'discard;',
+				'}',
+
+
+				'float sample1(vec3 texcoords) {',
+						'/* Sample float value from a 3D texture. Assumes intensity data. */',
+						'return texture(u_data, texcoords.xyz).r;',
+				'}',
+
+
+				'vec4 apply_colormap(float val) {',
+						'val = (val - u_clim[0]) / (u_clim[1] - u_clim[0]);',
+						'return texture2D(u_cmdata, vec2(val, 0.5));',
+				'}',
+
+
+				'void cast_mip(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray) {',
+
+						'float max_val = -1e6;',
+						'int max_i = 100;',
+						'vec3 loc = start_loc;',
+
+						// Enter the raycasting loop. In WebGL 1 the loop index cannot be compared with
+						// non-constant expression. So we use a hard-coded max, and an additional condition
+						// inside the loop.
+						'for (int iter=0; iter<MAX_STEPS; iter++) {',
+								'if (iter >= nsteps)',
+										'break;',
+								// Sample from the 3D texture
+								'float val = sample1(loc);',
+								// Apply MIP operation
+								'if (val > max_val) {',
+										'max_val = val;',
+										'max_i = iter;',
+								'}',
+								// Advance location deeper into the volume
+								'loc += step;',
+						'}',
+
+						// Refine location, gives crispier images
+						'vec3 iloc = start_loc + step * (float(max_i) - 0.5);',
+						'vec3 istep = step / float(REFINEMENT_STEPS);',
+						'for (int i=0; i<REFINEMENT_STEPS; i++) {',
+								'max_val = max(max_val, sample1(iloc));',
+								'iloc += istep;',
+						'}',
+
+						// Resolve final color
+						'gl_FragColor = apply_colormap(max_val);',
+				'}',
+
+
+				'void cast_iso(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray) {',
+
+						'gl_FragColor = vec4(0.0);	// init transparent',
+						'vec4 color3 = vec4(0.0);	// final color',
+						'vec3 dstep = 1.5 / u_size;	// step to sample derivative',
+						'vec3 loc = start_loc;',
+
+						'float low_threshold = u_renderthreshold - 0.02 * (u_clim[1] - u_clim[0]);',
+
+						// Enter the raycasting loop. In WebGL 1 the loop index cannot be compared with
+						// non-constant expression. So we use a hard-coded max, and an additional condition
+						// inside the loop.
+						'for (int iter=0; iter<MAX_STEPS; iter++) {',
+								'if (iter >= nsteps)',
+										'break;',
+
+										// Sample from the 3D texture
+								'float val = sample1(loc);',
+
+								'if (val > low_threshold) {',
+								// Take the last interval in smaller steps
+										'vec3 iloc = loc - 0.5 * step;',
+										'vec3 istep = step / float(REFINEMENT_STEPS);',
+										'for (int i=0; i<REFINEMENT_STEPS; i++) {',
+												'val = sample1(iloc);',
+												'if (val > u_renderthreshold) {',
+														'gl_FragColor = add_lighting(val, iloc, dstep, view_ray);',
+														'return;',
+												'}',
+												'iloc += istep;',
+										'}',
+								'}',
+
+								// Advance location deeper into the volume
+								'loc += step;',
+						'}',
+				'}',
+
+
+				'vec4 add_lighting(float val, vec3 loc, vec3 step, vec3 view_ray)',
+				'{',
+						// Calculate color by incorporating lighting
+
+						// View direction
+						'vec3 V = normalize(view_ray);',
+
+						// calculate normal vector from gradient
+						'vec3 N;',
+						'float val1, val2;',
+						'val1 = sample1(loc + vec3(-step[0], 0.0, 0.0));',
+						'val2 = sample1(loc + vec3(+step[0], 0.0, 0.0));',
+						'N[0] = val1 - val2;',
+						'val = max(max(val1, val2), val);',
+						'val1 = sample1(loc + vec3(0.0, -step[1], 0.0));',
+						'val2 = sample1(loc + vec3(0.0, +step[1], 0.0));',
+						'N[1] = val1 - val2;',
+						'val = max(max(val1, val2), val);',
+						'val1 = sample1(loc + vec3(0.0, 0.0, -step[2]));',
+						'val2 = sample1(loc + vec3(0.0, 0.0, +step[2]));',
+						'N[2] = val1 - val2;',
+						'val = max(max(val1, val2), val);',
+
+						'float gm = length(N); // gradient magnitude',
+						'N = normalize(N);',
+
+						// Flip normal so it points towards viewer
+						'float Nselect = float(dot(N, V) > 0.0);',
+						'N = (2.0 * Nselect - 1.0) * N;	// ==	Nselect * N - (1.0-Nselect)*N;',
+
+						// Init colors
+						'vec4 ambient_color = vec4(0.0, 0.0, 0.0, 0.0);',
+						'vec4 diffuse_color = vec4(0.0, 0.0, 0.0, 0.0);',
+						'vec4 specular_color = vec4(0.0, 0.0, 0.0, 0.0);',
+
+						// note: could allow multiple lights
+						'for (int i=0; i<1; i++)',
+						'{',
+								 // Get light direction (make sure to prevent zero division)
+								'vec3 L = normalize(view_ray);	//lightDirs[i];',
+								'float lightEnabled = float( length(L) > 0.0 );',
+								'L = normalize(L + (1.0 - lightEnabled));',
+
+								// Calculate lighting properties
+								'float lambertTerm = clamp(dot(N, L), 0.0, 1.0);',
+								'vec3 H = normalize(L+V); // Halfway vector',
+								'float specularTerm = pow(max(dot(H, N), 0.0), shininess);',
+
+								// Calculate mask
+								'float mask1 = lightEnabled;',
+
+								// Calculate colors
+								'ambient_color +=	mask1 * ambient_color;	// * gl_LightSource[i].ambient;',
+								'diffuse_color +=	mask1 * lambertTerm;',
+								'specular_color += mask1 * specularTerm * specular_color;',
+						'}',
+
+						// Calculate final color by composing the different components
+						'vec4 final_color;',
+						'vec4 color = apply_colormap(val);',
+						'final_color = color * (ambient_color + diffuse_color) + specular_color;',
+						'final_color.a = color.a;',
+						'return final_color;',
+				'}',
 	].join( '\n' )
 };
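
A minimal hookup sketch for a shader object of this shape, for orientation while reviewing. The export name VolumeRenderShader1, the import path, and the presence of a matching uniforms / vertexShader / fragmentShader table are assumptions drawn from the surrounding file layout, not from this hunk; the texture data and colormap are placeholders.

import * as THREE from 'three';
import { VolumeRenderShader1 } from './jsm/shaders/VolumeShader.js'; // assumed export name and path

// Placeholder 3D scalar field; a real viewer would load an actual volume instead.
// Note: sampler3D in the fragment shader above requires a WebGL 2 context.
const size = 128;
const volumeTexture = new THREE.DataTexture3D( new Uint8Array( size * size * size ), size, size, size );
volumeTexture.format = THREE.RedFormat;
volumeTexture.minFilter = volumeTexture.magFilter = THREE.LinearFilter;
volumeTexture.unpackAlignment = 1;

const uniforms = THREE.UniformsUtils.clone( VolumeRenderShader1.uniforms ); // assumed uniforms table
uniforms.u_data.value = volumeTexture;
uniforms.u_size.value.set( size, size, size );   // voxel dimensions, matching the u_size declaration above
uniforms.u_clim.value.set( 0.0, 1.0 );           // colormap limits
uniforms.u_renderstyle.value = 0;                // 0 = cast_mip, 1 = cast_iso, as selected in main() above
uniforms.u_renderthreshold.value = 0.15;         // only used by the isosurface path
uniforms.u_cmdata.value = new THREE.TextureLoader().load( 'colormap.png' ); // placeholder colormap

const material = new THREE.ShaderMaterial( {
	uniforms: uniforms,
	vertexShader: VolumeRenderShader1.vertexShader,
	fragmentShader: VolumeRenderShader1.fragmentShader,
	side: THREE.BackSide // rays start from the back face of the box, matching v_position above
} );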

+ 16 - 0
examples/jsm/animation/AnimationClipCreator.d.ts

@@ -0,0 +1,16 @@
+import {
+  AnimationClip,
+  Vector3
+} from '../../../src/Three';
+
+export class AnimationClipCreator  {
+  constructor();
+
+  static CreateRotationAnimation(period: number, axis: string): AnimationClip;
+  static CreateScaleAxisAnimation(period: number, axis: string): AnimationClip;
+  static CreateShakeAnimation(duration: number, shakeScale: Vector3): AnimationClip;
+  static CreatePulsationAnimation(duration: number, pulseScale: number): AnimationClip;
+  static CreateVisibilityAnimation(duration: number): AnimationClip;
+  static CreateMaterialColorAnimation(duration: number, colors: number[]): AnimationClip;
+
+}

+ 125 - 0
examples/jsm/animation/AnimationClipCreator.js

@@ -0,0 +1,125 @@
+/**
+ *
+ * Creator of typical test AnimationClips / KeyframeTracks
+ *
+ * @author Ben Houston / http://clara.io/
+ * @author David Sarno / http://lighthaus.us/
+ */
+
+import {
+	AnimationClip,
+	BooleanKeyframeTrack,
+	ColorKeyframeTrack,
+	NumberKeyframeTrack,
+	Vector3,
+	VectorKeyframeTrack
+} from "../../../build/three.module.js";
+
+var AnimationClipCreator = function () {};
+
+AnimationClipCreator.CreateRotationAnimation = function ( period, axis ) {
+
+	var times = [ 0, period ], values = [ 0, 360 ];
+
+	axis = axis || 'x';
+	var trackName = '.rotation[' + axis + ']';
+
+	var track = new NumberKeyframeTrack( trackName, times, values );
+
+	return new AnimationClip( null, period, [ track ] );
+
+};
+
+AnimationClipCreator.CreateScaleAxisAnimation = function ( period, axis ) {
+
+	var times = [ 0, period ], values = [ 0, 1 ];
+
+	axis = axis || 'x';
+	var trackName = '.scale[' + axis + ']';
+
+	var track = new NumberKeyframeTrack( trackName, times, values );
+
+	return new AnimationClip( null, period, [ track ] );
+
+};
+
+AnimationClipCreator.CreateShakeAnimation = function ( duration, shakeScale ) {
+
+	var times = [], values = [], tmp = new Vector3();
+
+	for ( var i = 0; i < duration * 10; i ++ ) {
+
+		times.push( i / 10 );
+
+		tmp.set( Math.random() * 2.0 - 1.0, Math.random() * 2.0 - 1.0, Math.random() * 2.0 - 1.0 ).
+			multiply( shakeScale ).
+			toArray( values, values.length );
+
+	}
+
+	var trackName = '.position';
+
+	var track = new VectorKeyframeTrack( trackName, times, values );
+
+	return new AnimationClip( null, duration, [ track ] );
+
+};
+
+
+AnimationClipCreator.CreatePulsationAnimation = function ( duration, pulseScale ) {
+
+	var times = [], values = [], tmp = new Vector3();
+
+	for ( var i = 0; i < duration * 10; i ++ ) {
+
+		times.push( i / 10 );
+
+		var scaleFactor = Math.random() * pulseScale;
+		tmp.set( scaleFactor, scaleFactor, scaleFactor ).
+			toArray( values, values.length );
+
+	}
+
+	var trackName = '.scale';
+
+	var track = new VectorKeyframeTrack( trackName, times, values );
+
+	return new AnimationClip( null, duration, [ track ] );
+
+};
+
+
+AnimationClipCreator.CreateVisibilityAnimation = function ( duration ) {
+
+	var times = [ 0, duration / 2, duration ], values = [ true, false, true ];
+
+	var trackName = '.visible';
+
+	var track = new BooleanKeyframeTrack( trackName, times, values );
+
+	return new AnimationClip( null, duration, [ track ] );
+
+};
+
+
+AnimationClipCreator.CreateMaterialColorAnimation = function ( duration, colors ) {
+
+	var times = [], values = [],
+		timeStep = duration / colors.length;
+
+	for ( var i = 0; i <= colors.length; i ++ ) {
+
+		times.push( i * timeStep );
+		values.push( colors[ i % colors.length ] );
+
+	}
+
+	var trackName = '.material[0].color';
+
+	var track = new ColorKeyframeTrack( trackName, times, values );
+
+	return new AnimationClip( null, duration, [ track ] );
+
+};
+
+export { AnimationClipCreator };
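
A minimal playback sketch for one of the clips above, assuming a standard AnimationMixer setup; the cube mesh, renderer wiring, and import path are placeholders rather than part of this commit.

import * as THREE from 'three';
import { AnimationClipCreator } from './jsm/animation/AnimationClipCreator.js'; // assumed path

const mesh = new THREE.Mesh( new THREE.BoxBufferGeometry(), new THREE.MeshBasicMaterial() );

// Keys .rotation[y] from 0 to 360 over 4 seconds, as defined in CreateRotationAnimation above
// (three.js interprets rotation values as radians).
const clip = AnimationClipCreator.CreateRotationAnimation( 4, 'y' );

const mixer = new THREE.AnimationMixer( mesh );
mixer.clipAction( clip ).play();

// Advance the mixer by the frame delta in seconds from the render loop.
const clock = new THREE.Clock();
function animate() {

	requestAnimationFrame( animate );
	mixer.update( clock.getDelta() );
	// renderer.render( scene, camera ); // renderer, scene, camera assumed to exist elsewhere

}
animate();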

+ 20 - 0
examples/jsm/animation/TimelinerController.d.ts

@@ -0,0 +1,20 @@
+import {
+  AnimationClip,
+  Scene,
+  Vector3
+} from '../../../src/Three';
+
+export class TimelinerController  {
+  constructor(scene: Scene, trackInfo: object[], onUpdate: () => void);
+
+  delKeyframe(channelName: string, time: number): void;
+  deserialize(structs: object): void;
+  getChannelKeyTimes(): number[];
+  getChannelNames(): string[];
+  init(): void;
+  moveKeyframe(channelName: string, time: number, delta: number, moveRemaining: boolean): void;
+  serialize(): object;
+  setDisplayTime(time: number): void;
+  setDuration(duration: number): void;
+  setKeyframe(channelName: string, time: number): void;
+}

+ 283 - 0
examples/jsm/animation/TimelinerController.js

@@ -0,0 +1,283 @@
+/**
+ * Controller class for the Timeliner GUI.
+ *
+ * Timeliner GUI library (required to use this class):
+ *
+ * 		../libs/timeliner_gui.min.js
+ *
+ * Source code:
+ *
+ * 		https://github.com/tschw/timeliner_gui
+ * 		https://github.com/zz85/timeliner (fork's origin)
+ *
+ * @author tschw
+ *
+ */
+
+import {
+	AnimationClip,
+	AnimationMixer,
+	AnimationUtils,
+	PropertyBinding
+} from "../../../build/three.module.js";
+
+var TimelinerController = function TimelinerController( scene, trackInfo, onUpdate ) {
+
+	this._scene = scene;
+	this._trackInfo = trackInfo;
+
+	this._onUpdate = onUpdate;
+
+	this._mixer = new AnimationMixer( scene );
+	this._clip = null;
+	this._action = null;
+
+	this._tracks = {};
+	this._propRefs = {};
+	this._channelNames = [];
+
+};
+
+TimelinerController.prototype = {
+
+	constructor: TimelinerController,
+
+	init: function () {
+
+		var tracks = [],
+			trackInfo = this._trackInfo;
+
+		for ( var i = 0, n = trackInfo.length; i !== n; ++ i ) {
+
+			var spec = trackInfo[ i ];
+
+			tracks.push( this._addTrack( spec.type, spec.propertyPath, spec.initialValue, spec.interpolation ) );
+
+		}
+
+		this._clip = new AnimationClip( 'editclip', 0, tracks );
+		this._action = this._mixer.clipAction( this._clip ).play();
+
+	},
+
+	setDisplayTime: function ( time ) {
+
+		this._action.time = time;
+		this._mixer.update( 0 );
+
+		this._onUpdate();
+
+	},
+
+	setDuration: function ( duration ) {
+
+		this._clip.duration = duration;
+
+	},
+
+	getChannelNames: function () {
+
+		return this._channelNames;
+
+	},
+
+	getChannelKeyTimes: function ( channelName ) {
+
+		return this._tracks[ channelName ].times;
+
+	},
+
+	setKeyframe: function ( channelName, time ) {
+
+		var track = this._tracks[ channelName ],
+			times = track.times,
+			index = Timeliner.binarySearch( times, time ),
+			values = track.values,
+			stride = track.getValueSize(),
+			offset = index * stride;
+
+		if ( index < 0 ) {
+
+			// insert new keyframe
+
+			index = ~ index;
+			offset = index * stride;
+
+			var nTimes = times.length + 1,
+				nValues = values.length + stride;
+
+			for ( var i = nTimes - 1; i !== index; -- i ) {
+
+				times[ i ] = times[ i - 1 ];
+
+			}
+
+			for ( var i = nValues - 1, e = offset + stride - 1; i !== e; -- i ) {
+
+				values[ i ] = values[ i - stride ];
+
+			}
+
+		}
+
+		times[ index ] = time;
+		this._propRefs[ channelName ].getValue( values, offset );
+
+	},
+
+	delKeyframe: function ( channelName, time ) {
+
+		var track = this._tracks[ channelName ],
+			times = track.times,
+			index = Timeliner.binarySearch( times, time );
+
+		// we disallow removing the keyframe when it is the last one we have,
+		// since the animation system is designed to always produce a defined
+		// state
+
+		if ( times.length > 1 && index >= 0 ) {
+
+			var nTimes = times.length - 1,
+				values = track.values,
+				stride = track.getValueSize(),
+				nValues = values.length - stride;
+
+			// note: no track.getValueSize when array sizes are out of sync
+
+			for ( var i = index; i !== nTimes; ++ i ) {
+
+				times[ i ] = times[ i + 1 ];
+
+			}
+
+			times.pop();
+
+			for ( var offset = index * stride; offset !== nValues; ++ offset ) {
+
+				values[ offset ] = values[ offset + stride ];
+
+			}
+
+			values.length = nValues;
+
+		}
+
+	},
+
+	moveKeyframe: function ( channelName, time, delta, moveRemaining ) {
+
+		var track = this._tracks[ channelName ],
+			times = track.times,
+			index = Timeliner.binarySearch( times, time );
+
+		if ( index >= 0 ) {
+
+			var endAt = moveRemaining ? times.length : index + 1,
+				needsSort = times[ index - 1 ] <= time ||
+					! moveRemaining && time >= times[ index + 1 ];
+
+			while ( index !== endAt ) times[ index ++ ] += delta;
+
+			if ( needsSort ) this._sort( track );
+
+		}
+
+	},
+
+	serialize: function () {
+
+		var result = {
+				duration: this._clip.duration,
+				channels: {}
+			},
+
+			names = this._channelNames,
+			tracks = this._tracks,
+
+			channels = result.channels;
+
+		for ( var i = 0, n = names.length; i !== n; ++ i ) {
+
+			var name = names[ i ],
+				track = tracks[ name ];
+
+			channels[ name ] = {
+
+				times: track.times,
+				values: track.values
+
+			};
+
+		}
+
+		return result;
+
+	},
+
+	deserialize: function ( structs ) {
+
+		var names = this._channelNames,
+			tracks = this._tracks,
+
+			channels = structs.channels;
+
+		this.setDuration( structs.duration );
+
+		for ( var i = 0, n = names.length; i !== n; ++ i ) {
+
+			var name = names[ i ],
+				track = tracks[ name ],
+				data = channels[ name ];
+
+			this._setArray( track.times, data.times );
+			this._setArray( track.values, data.values );
+
+		}
+
+		// update display
+		this.setDisplayTime( this._mixer.time );
+
+	},
+
+	_sort: function ( track ) {
+
+		var times = track.times, order = AnimationUtils.getKeyframeOrder( times );
+
+		this._setArray( times, AnimationUtils.sortedArray( times, 1, order ) );
+
+		var values = track.values,
+			stride = track.getValueSize();
+
+		this._setArray( values, AnimationUtils.sortedArray( values, stride, order ) );
+
+	},
+
+	_setArray: function ( dst, src ) {
+
+		dst.length = 0;
+		dst.push.apply( dst, src );
+
+	},
+
+	_addTrack: function ( type, prop, initialValue, interpolation ) {
+
+		var track = new type( prop, [ 0 ], initialValue, interpolation );
+
+		// data must be in JS arrays so it can be resized
+		track.times = Array.prototype.slice.call( track.times );
+		track.values = Array.prototype.slice.call( track.values );
+
+		this._channelNames.push( prop );
+		this._tracks[ prop ] = track;
+
+		// for recording the state:
+		this._propRefs[ prop ] =
+				new PropertyBinding( this._scene, prop );
+
+		return track;
+
+	}
+
+};
+
+export { TimelinerController };
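
A construction sketch for the controller, assuming the Timeliner GUI script is loaded globally (the keyframe methods above call Timeliner.binarySearch); the scene contents, import path, and track values are placeholders. The spec fields mirror what _addTrack reads: type, propertyPath, initialValue, interpolation.

import * as THREE from 'three';
import { TimelinerController } from './jsm/animation/TimelinerController.js'; // assumed path

const scene = new THREE.Scene();
const cube = new THREE.Mesh( new THREE.BoxBufferGeometry(), new THREE.MeshBasicMaterial() );
cube.name = 'cube'; // PropertyBinding resolves 'cube.position' against this name
scene.add( cube );

// One editable channel per entry; _addTrack calls `new type( propertyPath, [ 0 ], initialValue, interpolation )`.
const trackInfo = [ {
	type: THREE.VectorKeyframeTrack,
	propertyPath: 'cube.position',
	initialValue: [ 0, 0, 0 ],
	interpolation: THREE.InterpolateLinear
} ];

const controller = new TimelinerController( scene, trackInfo, function onUpdate() {

	// re-render here

} );

controller.init();
controller.setDuration( 10 );
controller.setDisplayTime( 0 );

// setKeyframe / delKeyframe / moveKeyframe additionally require the global Timeliner object
// from ../libs/timeliner_gui.min.js, which normally drives this controller from its GUI.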

+ 23 - 14
examples/jsm/loaders/LWOLoader.d.ts

@@ -1,23 +1,32 @@
 import {
-  LoadingManager,
-  Material,
-  Object3D
+	LoadingManager,
+	Material,
+	Object3D
 } from '../../../src/Three';
 
 export interface LWO {
-  materials: Material[];
-  meshes: Object3D[];
+	materials: Material[];
+	meshes: Object3D[];
+}
+
+export interface LWOLoaderParameters {
+
+	/**
+	* Base content delivery folder path; use when it differs from the default Lightwave structure
+	*/
+	resourcePath?: string;
+
 }
 
 export class LWOLoader {
-  constructor(manager?: LoadingManager);
-  crossOrigin: string;
-  path: string;
-  resourcePath: string;
+	constructor(manager?: LoadingManager, parameters?: LWOLoaderParameters);
+	crossOrigin: string;
+	path: string;
+	resourcePath: string;
 
-  load(url: string, onLoad: (lwo: LWO) => void, onProgress?: (event: ProgressEvent) => void, onError?: (event: ErrorEvent) => void) : void;
-  setPath(path: string): this;
-  setResourcePath(path: string): this;
-  setCrossOrigin(value: string): this;
-  parse(data: ArrayBuffer, path: string, modelName: string): LWO;
+	load(url: string, onLoad: (lwo: LWO) => void, onProgress?: (event: ProgressEvent) => void, onError?: (event: ErrorEvent) => void) : void;
+	setPath(path: string): this;
+	setResourcePath(path: string): this;
+	setCrossOrigin(value: string): this;
+	parse(data: ArrayBuffer, path: string, modelName: string): LWO;
 }
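
A usage sketch for the widened constructor signature, assuming the implementation reads resourcePath from the new parameters object as the typing above suggests; all paths and the scene are placeholders.

import * as THREE from 'three';
import { LWOLoader } from './jsm/loaders/LWOLoader.js'; // assumed path

const scene = new THREE.Scene();

// Passing undefined for the manager falls back to the default LoadingManager;
// resourcePath redirects texture lookups away from the default Lightwave folder structure.
const loader = new LWOLoader( undefined, { resourcePath: 'path/to/textures/' } ); // placeholder path

loader.load( 'path/to/model.lwo', function ( lwo ) {

	// onLoad receives the LWO interface declared above: { materials, meshes }
	lwo.meshes.forEach( function ( mesh ) {

		scene.add( mesh );

	} );

}, undefined, function ( error ) {

	console.error( error );

} );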

+ 2237 - 1631
examples/jsm/loaders/LWOLoader.js

@@ -1,7 +1,10 @@
 /**
+ * @version 1.1.1
+ *
  * @author Lewy Blue https://github.com/looeee
+ * @author Guilherme Avila / https://github.com/sciecode
  *
- * Load files in LWO3 and LWO2 format
+ * @desc Load files in LWO3 and LWO2 format on Three.js
  *
  * LWO3 format specification:
  * 	http://static.lightwave3d.com/sdk/2018/html/filefmts/lwo3.html
@@ -9,7 +12,10 @@
  * LWO2 format specification:
  * 	http://static.lightwave3d.com/sdk/2018/html/filefmts/lwo2.html
  *
- */
+ * Development and test repository:
+ *	https://github.com/threejs/lwoloader
+ *
+ **/
 
 import {
 	AddOperation,
@@ -30,6 +36,7 @@ import {
 	LoaderUtils,
 	Mesh,
 	MeshPhongMaterial,
+	MeshPhysicalMaterial,
 	MeshStandardMaterial,
 	MirroredRepeatWrapping,
 	Points,
@@ -39,2442 +46,3041 @@ import {
 	Vector2
 } from "../../../build/three.module.js";
 
-var LWOLoader = ( function () {
+function LWO2Parser( IFFParser ) {
 
-	var lwoTree;
+	this.IFF = IFFParser;
 
-	function LWOLoader( manager ) {
+}
 
-		this.manager = ( manager !== undefined ) ? manager : DefaultLoadingManager;
+LWO2Parser.prototype = {
 
-	}
+	constructor: LWO2Parser,
 
-	LWOLoader.prototype = {
+	parseBlock: function () {
 
-		constructor: LWOLoader,
+		this.IFF.debugger.offset = this.IFF.reader.offset;
+		this.IFF.debugger.closeForms();
 
-		crossOrigin: 'anonymous',
+		var blockID = this.IFF.reader.getIDTag();
+		var length = this.IFF.reader.getUint32(); // size of data in bytes
+		if ( length > this.IFF.reader.dv.byteLength - this.IFF.reader.offset ) {
 
-		load: function ( url, onLoad, onProgress, onError ) {
+			this.IFF.reader.offset -= 4;
+			length = this.IFF.reader.getUint16();
 
-			var self = this;
+		}
 
-			var path = ( self.path === undefined ) ? LoaderUtils.extractUrlBase( url ) : self.path;
+		this.IFF.debugger.dataOffset = this.IFF.reader.offset;
+		this.IFF.debugger.length = length;
+
+		// Data types may be found in either LWO2 OR LWO3 spec
+		switch ( blockID ) {
+
+			case 'FORM': // form blocks may consist of sub-chunks or sub-forms
+				this.IFF.parseForm( length );
+				break;
+
+			// SKIPPED CHUNKS
+			// if break; is called directly, the position in the lwoTree is not created
+			// any sub chunks and forms are added to the parent form instead
+			// MISC skipped
+			case 'ICON': // Thumbnail Icon Image
+			case 'VMPA': // Vertex Map Parameter
+			case 'BBOX': // bounding box
+			// case 'VMMD':
+			// case 'VTYP':
+
+			// normal maps can be specified, normally on models imported from other applications. Currently ignored
+			case 'NORM':
+
+			// ENVL FORM skipped
+			case 'PRE ':
+			case 'POST':
+			case 'KEY ':
+			case 'SPAN':
+
+			// CLIP FORM skipped
+			case 'TIME':
+			case 'CLRS':
+			case 'CLRA':
+			case 'FILT':
+			case 'DITH':
+			case 'CONT':
+			case 'BRIT':
+			case 'SATR':
+			case 'HUE ':
+			case 'GAMM':
+			case 'NEGA':
+			case 'IFLT':
+			case 'PFLT':
+
+			// Image Map Layer skipped
+			case 'PROJ':
+			case 'AXIS':
+			case 'AAST':
+			case 'PIXB':
+			case 'AUVO':
+			case 'STCK':
+
+			// Procedural Textures skipped
+			case 'PROC':
+			case 'VALU':
+			case 'FUNC':
+
+			// Gradient Textures skipped
+			case 'PNAM':
+			case 'INAM':
+			case 'GRST':
+			case 'GREN':
+			case 'GRPT':
+			case 'FKEY':
+			case 'IKEY':
+
+			// Texture Mapping Form skipped
+			case 'CSYS':
+
+			// Surface CHUNKs skipped
+			case 'OPAQ': // top level 'opacity' checkbox
+			case 'CMAP': // clip map
+
+			// Surface node CHUNKS skipped
+			// These mainly specify the node editor setup in LW
+			case 'NLOC':
+			case 'NZOM':
+			case 'NVER':
+			case 'NSRV':
+			case 'NVSK': // unknown
+			case 'NCRD':
+			case 'WRPW': // image wrap w ( for cylindrical and spherical projections)
+			case 'WRPH': // image wrap h
+			case 'NMOD':
+			case 'NPRW':
+			case 'NPLA':
+			case 'NODS':
+			case 'VERS':
+			case 'ENUM':
+			case 'TAG ':
+			case 'OPAC':
+
+			// Car Material CHUNKS
+			case 'CGMD':
+			case 'CGTY':
+			case 'CGST':
+			case 'CGEN':
+			case 'CGTS':
+			case 'CGTE':
+			case 'OSMP':
+			case 'OMDE':
+			case 'OUTR':
+			case 'FLAG':
+
+			case 'TRNL':
+			case 'GLOW':
+			case 'GVAL': // glow intensity
+			case 'SHRP':
+			case 'RFOP':
+			case 'RSAN':
+			case 'TROP':
+			case 'RBLR':
+			case 'TBLR':
+			case 'CLRH':
+			case 'CLRF':
+			case 'ADTR':
+			case 'LINE':
+			case 'ALPH':
+			case 'VCOL':
+			case 'ENAB':
+				this.IFF.debugger.skipped = true;
+				this.IFF.reader.skip( length );
+				break;
+
+			case 'SURF':
+				this.IFF.parseSurfaceLwo2( length );
+				break;
+
+			case 'CLIP':
+				this.IFF.parseClipLwo2( length );
+				break;
+
+			// Texture node chunks (not in spec)
+			case 'IPIX': // usePixelBlending
+			case 'IMIP': // useMipMaps
+			case 'IMOD': // imageBlendingMode
+			case 'AMOD': // unknown
+			case 'IINV': // imageInvertAlpha
+			case 'INCR': // imageInvertColor
+			case 'IAXS': // imageAxis ( for non-UV maps)
+			case 'IFOT': // imageFallofType
+			case 'ITIM': // timing for animated textures
+			case 'IWRL':
+			case 'IUTI':
+			case 'IINX':
+			case 'IINY':
+			case 'IINZ':
+			case 'IREF': // possibly a VX for reused texture nodes
+				if ( length === 4 ) this.IFF.currentNode[ blockID ] = this.IFF.reader.getInt32();
+				else this.IFF.reader.skip( length );
+				break;
+
+			case 'OTAG':
+				this.IFF.parseObjectTag();
+				break;
+
+			case 'LAYR':
+				this.IFF.parseLayer( length );
+				break;
+
+			case 'PNTS':
+				this.IFF.parsePoints( length );
+				break;
+
+			case 'VMAP':
+				this.IFF.parseVertexMapping( length );
+				break;
+
+			case 'AUVU':
+			case 'AUVN':
+				this.IFF.reader.skip( length - 1 );
+				this.IFF.reader.getVariableLengthIndex(); // VX
+				break;
+
+			case 'POLS':
+				this.IFF.parsePolygonList( length );
+				break;
+
+			case 'TAGS':
+				this.IFF.parseTagStrings( length );
+				break;
+
+			case 'PTAG':
+				this.IFF.parsePolygonTagMapping( length );
+				break;
+
+			case 'VMAD':
+				this.IFF.parseVertexMapping( length, true );
+				break;
+
+			// Misc CHUNKS
+			case 'DESC': // Description Line
+				this.IFF.currentForm.description = this.IFF.reader.getString();
+				break;
+
+			case 'TEXT':
+			case 'CMNT':
+			case 'NCOM':
+				this.IFF.currentForm.comment = this.IFF.reader.getString();
+				break;
+
+			// Envelope Form
+			case 'NAME':
+				this.IFF.currentForm.channelName = this.IFF.reader.getString();
+				break;
+
+			// Image Map Layer
+			case 'WRAP':
+				this.IFF.currentForm.wrap = { w: this.IFF.reader.getUint16(), h: this.IFF.reader.getUint16() };
+				break;
+
+			case 'IMAG':
+				var index = this.IFF.reader.getVariableLengthIndex();
+				this.IFF.currentForm.imageIndex = index;
+				break;
+
+			// Texture Mapping Form
+			case 'OREF':
+				this.IFF.currentForm.referenceObject = this.IFF.reader.getString();
+				break;
+
+			case 'ROID':
+				this.IFF.currentForm.referenceObjectID = this.IFF.reader.getUint32();
+				break;
+
+			// Surface Blocks
+			case 'SSHN':
+				this.IFF.currentSurface.surfaceShaderName = this.IFF.reader.getString();
+				break;
+
+			case 'AOVN':
+				this.IFF.currentSurface.surfaceCustomAOVName = this.IFF.reader.getString();
+				break;
+
+			// Nodal Blocks
+			case 'NSTA':
+				this.IFF.currentForm.disabled = this.IFF.reader.getUint16();
+				break;
+
+			case 'NRNM':
+				this.IFF.currentForm.realName = this.IFF.reader.getString();
+				break;
+
+			case 'NNME':
+				this.IFF.currentForm.refName = this.IFF.reader.getString();
+				this.IFF.currentSurface.nodes[ this.IFF.currentForm.refName ] = this.IFF.currentForm;
+				break;
+
+			// Nodal Blocks : connections
+			case 'INME':
+				if ( ! this.IFF.currentForm.nodeName ) this.IFF.currentForm.nodeName = [];
+				this.IFF.currentForm.nodeName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IINN':
+				if ( ! this.IFF.currentForm.inputNodeName ) this.IFF.currentForm.inputNodeName = [];
+				this.IFF.currentForm.inputNodeName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IINM':
+				if ( ! this.IFF.currentForm.inputName ) this.IFF.currentForm.inputName = [];
+				this.IFF.currentForm.inputName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IONM':
+				if ( ! this.IFF.currentForm.inputOutputName ) this.IFF.currentForm.inputOutputName = [];
+				this.IFF.currentForm.inputOutputName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'FNAM':
+				this.IFF.currentForm.fileName = this.IFF.reader.getString();
+				break;
+
+			case 'CHAN': // NOTE: ENVL Forms may also have CHAN chunk, however ENVL is currently ignored
+				if ( length === 4 ) this.IFF.currentForm.textureChannel = this.IFF.reader.getIDTag();
+				else this.IFF.reader.skip( length );
+				break;
+
+			// LWO2 Spec chunks: these are needed since the SURF FORMs are often in LWO2 format
+			case 'SMAN':
+				var maxSmoothingAngle = this.IFF.reader.getFloat32();
+				this.IFF.currentSurface.attributes.smooth = ( maxSmoothingAngle < 0 ) ? false : true;
+				break;
+
+			// LWO2: Basic Surface Parameters
+			case 'COLR':
+				this.IFF.currentSurface.attributes.Color = { value: this.IFF.reader.getFloat32Array( 3 ) };
+				this.IFF.reader.skip( 2 ); // VX: envelope
+				break;
+
+			case 'LUMI':
+				this.IFF.currentSurface.attributes.Luminosity = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'SPEC':
+				this.IFF.currentSurface.attributes.Specular = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'DIFF':
+				this.IFF.currentSurface.attributes.Diffuse = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'REFL':
+				this.IFF.currentSurface.attributes.Reflection = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'GLOS':
+				this.IFF.currentSurface.attributes.Glossiness = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TRAN':
+				this.IFF.currentSurface.attributes.opacity = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'BUMP':
+				this.IFF.currentSurface.attributes.bumpStrength = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'SIDE':
+				this.IFF.currentSurface.attributes.side = this.IFF.reader.getUint16();
+				break;
+
+			case 'RIMG':
+				this.IFF.currentSurface.attributes.reflectionMap = this.IFF.reader.getVariableLengthIndex();
+				break;
+
+			case 'RIND':
+				this.IFF.currentSurface.attributes.refractiveIndex = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TIMG':
+				this.IFF.currentSurface.attributes.refractionMap = this.IFF.reader.getVariableLengthIndex();
+				break;
+
+			case 'IMAP':
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TMAP':
+				this.IFF.debugger.skipped = true;
+				this.IFF.reader.skip( length ); // needs implementing
+				break;
+
+			case 'IUVI': // uv channel name
+				this.IFF.currentNode.UVChannel = this.IFF.reader.getString( length );
+				break;
+
+			case 'IUTL': // widthWrappingMode: 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
+				this.IFF.currentNode.widthWrappingMode = this.IFF.reader.getUint32();
+				break;
+			case 'IVTL': // heightWrappingMode
+				this.IFF.currentNode.heightWrappingMode = this.IFF.reader.getUint32();
+				break;
+
+			// LWO2 USE
+			case 'BLOK':
+				// skip
+				break;
+
+			default:
+				this.IFF.parseUnknownCHUNK( blockID, length );
 
-			// give the mesh a default name based on the filename
-			var modelName = url.split( path ).pop().split( '.' )[ 0 ];
+		}
 
-			var loader = new FileLoader( this.manager );
-			loader.setPath( self.path );
-			loader.setResponseType( 'arraybuffer' );
+		if ( blockID != 'FORM' ) {
 
-			loader.load( url, function ( buffer ) {
+			this.IFF.debugger.node = 1;
+			this.IFF.debugger.nodeID = blockID;
+			this.IFF.debugger.log();
 
-				// console.time( 'Total parsing: ' );
-				onLoad( self.parse( buffer, path, modelName ) );
-				// console.timeEnd( 'Total parsing: ' );
+		}
 
-			}, onProgress, onError );
+		if ( this.IFF.reader.offset >= this.IFF.currentFormEnd ) {
 
-		},
+			this.IFF.currentForm = this.IFF.parentForm;
 
-		setCrossOrigin: function ( value ) {
+		}
 
-			this.crossOrigin = value;
-			return this;
+	}
 
-		},
+};
+
+function LWO3Parser( IFFParser ) {
+
+	this.IFF = IFFParser;
+
+}
+
+LWO3Parser.prototype = {
+
+	constructor: LWO3Parser,
+
+	parseBlock: function () {
+
+		this.IFF.debugger.offset = this.IFF.reader.offset;
+		this.IFF.debugger.closeForms();
+
+		var blockID = this.IFF.reader.getIDTag();
+		var length = this.IFF.reader.getUint32(); // size of data in bytes
+
+		this.IFF.debugger.dataOffset = this.IFF.reader.offset;
+		this.IFF.debugger.length = length;
+
+		// Data types may be found in either LWO2 OR LWO3 spec
+		switch ( blockID ) {
+
+			case 'FORM': // form blocks may consist of sub-chunks or sub-forms
+				this.IFF.parseForm( length );
+				break;
+
+			// SKIPPED CHUNKS
+			// MISC skipped
+			case 'ICON': // Thumbnail Icon Image
+			case 'VMPA': // Vertex Map Parameter
+			case 'BBOX': // bounding box
+			// case 'VMMD':
+			// case 'VTYP':
+
+			// normal maps can be specified, normally on models imported from other applications. Currently ignored
+			case 'NORM':
+
+			// ENVL FORM skipped
+			case 'PRE ':
+			case 'POST':
+			case 'KEY ':
+			case 'SPAN':
+
+			// CLIP FORM skipped
+			case 'TIME':
+			case 'CLRS':
+			case 'CLRA':
+			case 'FILT':
+			case 'DITH':
+			case 'CONT':
+			case 'BRIT':
+			case 'SATR':
+			case 'HUE ':
+			case 'GAMM':
+			case 'NEGA':
+			case 'IFLT':
+			case 'PFLT':
+
+			// Image Map Layer skipped
+			case 'PROJ':
+			case 'AXIS':
+			case 'AAST':
+			case 'PIXB':
+			case 'STCK':
+
+			// Procedural Textures skipped
+			case 'VALU':
+
+			// Gradient Textures skipped
+			case 'PNAM':
+			case 'INAM':
+			case 'GRST':
+			case 'GREN':
+			case 'GRPT':
+			case 'FKEY':
+			case 'IKEY':
+
+			// Texture Mapping Form skipped
+			case 'CSYS':
+
+				// Surface CHUNKs skipped
+			case 'OPAQ': // top level 'opacity' checkbox
+			case 'CMAP': // clip map
+
+			// Surface node CHUNKS skipped
+			// These mainly specify the node editor setup in LW
+			case 'NLOC':
+			case 'NZOM':
+			case 'NVER':
+			case 'NSRV':
+			case 'NCRD':
+			case 'NMOD':
+			case 'NSEL':
+			case 'NPRW':
+			case 'NPLA':
+			case 'VERS':
+			case 'ENUM':
+			case 'TAG ':
+
+			// Car Material CHUNKS
+			case 'CGMD':
+			case 'CGTY':
+			case 'CGST':
+			case 'CGEN':
+			case 'CGTS':
+			case 'CGTE':
+			case 'OSMP':
+			case 'OMDE':
+			case 'OUTR':
+			case 'FLAG':
+
+			case 'TRNL':
+			case 'GLOS':
+			case 'SHRP':
+			case 'RFOP':
+			case 'RSAN':
+			case 'TROP':
+			case 'RBLR':
+			case 'TBLR':
+			case 'CLRH':
+			case 'CLRF':
+			case 'ADTR':
+			case 'GLOW':
+			case 'LINE':
+			case 'ALPH':
+			case 'VCOL':
+			case 'ENAB':
+				this.IFF.debugger.skipped = true;
+				this.IFF.reader.skip( length );
+				break;
+
+			// Texture node chunks (not in spec)
+			case 'IPIX': // usePixelBlending
+			case 'IMIP': // useMipMaps
+			case 'IMOD': // imageBlendingMode
+			case 'AMOD': // unknown
+			case 'IINV': // imageInvertAlpha
+			case 'INCR': // imageInvertColor
+			case 'IAXS': // imageAxis ( for non-UV maps)
+			case 'IFOT': // imageFallofType
+			case 'ITIM': // timing for animated textures
+			case 'IWRL':
+			case 'IUTI':
+			case 'IINX':
+			case 'IINY':
+			case 'IINZ':
+			case 'IREF': // possibly a VX for reused texture nodes
+				if ( length === 4 ) this.IFF.currentNode[ blockID ] = this.IFF.reader.getInt32();
+				else this.IFF.reader.skip( length );
+				break;
+
+			case 'OTAG':
+				this.IFF.parseObjectTag();
+				break;
+
+			case 'LAYR':
+				this.IFF.parseLayer( length );
+				break;
+
+			case 'PNTS':
+				this.IFF.parsePoints( length );
+				break;
+
+			case 'VMAP':
+				this.IFF.parseVertexMapping( length );
+				break;
+
+			case 'POLS':
+				this.IFF.parsePolygonList( length );
+				break;
+
+			case 'TAGS':
+				this.IFF.parseTagStrings( length );
+				break;
+
+			case 'PTAG':
+				this.IFF.parsePolygonTagMapping( length );
+				break;
+
+			case 'VMAD':
+				this.IFF.parseVertexMapping( length, true );
+				break;
+
+			// Misc CHUNKS
+			case 'DESC': // Description Line
+				this.IFF.currentForm.description = this.IFF.reader.getString();
+				break;
+
+			case 'TEXT':
+			case 'CMNT':
+			case 'NCOM':
+				this.IFF.currentForm.comment = this.IFF.reader.getString();
+				break;
+
+			// Envelope Form
+			case 'NAME':
+				this.IFF.currentForm.channelName = this.IFF.reader.getString();
+				break;
+
+			// Image Map Layer
+			case 'WRAP':
+				this.IFF.currentForm.wrap = { w: this.IFF.reader.getUint16(), h: this.IFF.reader.getUint16() };
+				break;
+
+			case 'IMAG':
+				var index = this.IFF.reader.getVariableLengthIndex();
+				this.IFF.currentForm.imageIndex = index;
+				break;
+
+			// Texture Mapping Form
+			case 'OREF':
+				this.IFF.currentForm.referenceObject = this.IFF.reader.getString();
+				break;
+
+			case 'ROID':
+				this.IFF.currentForm.referenceObjectID = this.IFF.reader.getUint32();
+				break;
+
+			// Surface Blocks
+			case 'SSHN':
+				this.IFF.currentSurface.surfaceShaderName = this.IFF.reader.getString();
+				break;
+
+			case 'AOVN':
+				this.IFF.currentSurface.surfaceCustomAOVName = this.IFF.reader.getString();
+				break;
+
+			// Nodal Blocks
+			case 'NSTA':
+				this.IFF.currentForm.disabled = this.IFF.reader.getUint16();
+				break;
+
+			case 'NRNM':
+				this.IFF.currentForm.realName = this.IFF.reader.getString();
+				break;
+
+			case 'NNME':
+				this.IFF.currentForm.refName = this.IFF.reader.getString();
+				this.IFF.currentSurface.nodes[ this.IFF.currentForm.refName ] = this.IFF.currentForm;
+				break;
+
+			// Nodal Blocks : connections
+			case 'INME':
+				if ( ! this.IFF.currentForm.nodeName ) this.IFF.currentForm.nodeName = [];
+				this.IFF.currentForm.nodeName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IINN':
+				if ( ! this.IFF.currentForm.inputNodeName ) this.IFF.currentForm.inputNodeName = [];
+				this.IFF.currentForm.inputNodeName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IINM':
+				if ( ! this.IFF.currentForm.inputName ) this.IFF.currentForm.inputName = [];
+				this.IFF.currentForm.inputName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'IONM':
+				if ( ! this.IFF.currentForm.inputOutputName ) this.IFF.currentForm.inputOutputName = [];
+				this.IFF.currentForm.inputOutputName.push( this.IFF.reader.getString() );
+				break;
+
+			case 'FNAM':
+				this.IFF.currentForm.fileName = this.IFF.reader.getString();
+				break;
+
+			case 'CHAN': // NOTE: ENVL Forms may also have CHAN chunk, however ENVL is currently ignored
+				if ( length === 4 ) this.IFF.currentForm.textureChannel = this.IFF.reader.getIDTag();
+				else this.IFF.reader.skip( length );
+				break;
+
+			// LWO2 Spec chunks: these are needed since the SURF FORMs are often in LWO2 format
+			case 'SMAN':
+				var maxSmoothingAngle = this.IFF.reader.getFloat32();
+				this.IFF.currentSurface.attributes.smooth = ( maxSmoothingAngle < 0 ) ? false : true;
+				break;
+
+			// LWO2: Basic Surface Parameters
+			case 'COLR':
+				this.IFF.currentSurface.attributes.Color = { value: this.IFF.reader.getFloat32Array( 3 ) };
+				this.IFF.reader.skip( 2 ); // VX: envelope
+				break;
+
+			case 'LUMI':
+				this.IFF.currentSurface.attributes.Luminosity = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'SPEC':
+				this.IFF.currentSurface.attributes.Specular = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'DIFF':
+				this.IFF.currentSurface.attributes.Diffuse = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'REFL':
+				this.IFF.currentSurface.attributes.Reflection = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'GLOS':
+				this.IFF.currentSurface.attributes.Glossiness = { value: this.IFF.reader.getFloat32() };
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TRAN':
+				this.IFF.currentSurface.attributes.opacity = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'BUMP':
+				this.IFF.currentSurface.attributes.bumpStrength = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'SIDE':
+				this.IFF.currentSurface.attributes.side = this.IFF.reader.getUint16();
+				break;
+
+			case 'RIMG':
+				this.IFF.currentSurface.attributes.reflectionMap = this.IFF.reader.getVariableLengthIndex();
+				break;
+
+			case 'RIND':
+				this.IFF.currentSurface.attributes.refractiveIndex = this.IFF.reader.getFloat32();
+				this.IFF.reader.skip( 2 );
+				break;
+
+			case 'TIMG':
+				this.IFF.currentSurface.attributes.refractionMap = this.IFF.reader.getVariableLengthIndex();
+				break;
+
+			case 'IMAP':
+				this.IFF.currentSurface.attributes.imageMapIndex = this.IFF.reader.getUint32();
+				break;
+
+			case 'IUVI': // uv channel name
+				this.IFF.currentNode.UVChannel = this.IFF.reader.getString( length );
+				break;
+
+			case 'IUTL': // widthWrappingMode: 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
+				this.IFF.currentNode.widthWrappingMode = this.IFF.reader.getUint32();
+				break;
+			case 'IVTL': // heightWrappingMode
+				this.IFF.currentNode.heightWrappingMode = this.IFF.reader.getUint32();
+				break;
+
+			default:
+				this.IFF.parseUnknownCHUNK( blockID, length );
 
-		setPath: function ( value ) {
+		}
 
-			this.path = value;
-			return this;
+		if ( blockID != 'FORM' ) {
 
-		},
+			this.IFF.debugger.node = 1;
+			this.IFF.debugger.nodeID = blockID;
+			this.IFF.debugger.log();
 
-		setResourcePath: function ( value ) {
+		}
 
-			this.resourcePath = value;
-			return this;
+		if ( this.IFF.reader.offset >= this.IFF.currentFormEnd ) {
 
-		},
+			this.IFF.currentForm = this.IFF.parentForm;
 
-		parse: function ( iffBuffer, path, modelName ) {
+		}
 
-			lwoTree = new IFFParser().parse( iffBuffer );
+	}
 
-			// console.log( 'lwoTree', lwoTree );
+};
 
-			var textureLoader = new TextureLoader( this.manager ).setPath( this.resourcePath || path ).setCrossOrigin( this.crossOrigin );
+/**
+ * === IFFParser ===
+ * - Parses data from the IFF buffer.
+ * - LWO3 files are in IFF format and can contain the following data types, referred to by shorthand codes
+ *
+ * ATOMIC DATA TYPES
+ *  ID Tag - 4x 7 bit uppercase ASCII chars: ID4
+ *  signed integer, 1, 2, or 4 byte length: I1, I2, I4
+ *  unsigned integer, 1, 2, or 4 byte length: U1, U2, U4
+ *  float, 4 byte length: F4
+ *  string, series of ASCII chars followed by null byte (If the length of the string including the null terminating byte is odd, an extra null is added so that the data that follows will begin on an even byte boundary): S0
+ *
+ * COMPOUND DATA TYPES
+ *  Variable-length Index (index into an array or collection): U2 or U4 : VX
+ *  Color (RGB): F4 + F4 + F4: COL12
+ *  Coordinate (x, y, z): F4 + F4 + F4: VEC12
+ *  Percentage F4 data type from 0->1 with 1 = 100%: FP4
+ *  Angle in radian F4: ANG4
+ *  Filename (string) S0: FNAM0
+ *  XValue F4 + index (VX) + optional envelope( ENVL ): XVAL
+ *  XValue vector VEC12 + index (VX) + optional envelope( ENVL ): XVAL3
+ *
+ *  The IFF file is arranged in chunks:
+ *  CHUNK = ID4 + length (U4) + length X bytes of data + optional 0 pad byte
+ *  optional 0 pad byte is there to ensure chunk ends on even boundary, not counted in size
+ *
+ * COMPOUND DATA TYPES
+ * - Chunks are combined in Forms (collections of chunks)
+ * - FORM = string 'FORM' (ID4) + length (U4) + type (ID4) + optional ( CHUNK | FORM )
+ * - CHUNKS and FORMS are collectively referred to as blocks
+ * - The entire file is contained in one top level FORM
+ *
+ **/
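
To make the CHUNK layout described above concrete, a standalone sketch of decoding one chunk header with a DataView; it is illustrative only and independent of the DataViewReader the loader actually uses.

// CHUNK = ID4 (4 ASCII chars) + length (U4, big-endian) + `length` bytes of data + optional pad byte.
function readChunkHeader( dataView, offset ) {

	const id = String.fromCharCode(
		dataView.getUint8( offset ),
		dataView.getUint8( offset + 1 ),
		dataView.getUint8( offset + 2 ),
		dataView.getUint8( offset + 3 )
	);

	const length = dataView.getUint32( offset + 4 ); // big-endian is DataView's default

	// Data begins after the 8-byte header; the pad byte keeps the next chunk on an even
	// boundary and is not counted in `length`.
	const dataOffset = offset + 8;
	const nextChunkOffset = dataOffset + length + ( length % 2 );

	return { id: id, length: length, dataOffset: dataOffset, nextChunkOffset: nextChunkOffset };

}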
 
-			return new LWOTreeParser( textureLoader ).parse( modelName );
+function IFFParser( ) {
 
-		}
+	this.debugger = new Debugger();
+	// this.debugger.enable(); // un-comment to log IFF hierarchy.
 
-	};
+}
 
-	// Parse the lwoTree object
-	function LWOTreeParser( textureLoader ) {
+IFFParser.prototype = {
 
-		this.textureLoader = textureLoader;
+	constructor: IFFParser,
 
-	}
+	parse: function ( buffer ) {
 
-	LWOTreeParser.prototype = {
+		this.reader = new DataViewReader( buffer );
 
-		constructor: LWOTreeParser,
+		this.tree = {
+			materials: {},
+			layers: [],
+			tags: [],
+			textures: [],
+		};
 
-		parse: function ( modelName ) {
+		// start out at the top level to add any data before first layer is encountered
+		this.currentLayer = this.tree;
+		this.currentForm = this.tree;
 
-			this.materials = new MaterialParser( this.textureLoader ).parse();
-			this.defaultLayerName = modelName;
+		this.parseTopForm();
 
-			this.meshes = this.parseLayers();
+		if ( this.tree.format === undefined ) return;
 
-			return {
-				materials: this.materials,
-				meshes: this.meshes,
-			};
+		if ( this.tree.format === 'LWO2' ) {
 
-		},
+			this.parser = new LWO2Parser( this );
+			while ( ! this.reader.endOfFile() ) this.parser.parseBlock();
 
-		parseLayers() {
+		} else if ( this.tree.format === 'LWO3' ) {
 
-			// array of all meshes for building hierarchy
-			var meshes = [];
+			this.parser = new LWO3Parser( this );
+			while ( ! this.reader.endOfFile() ) this.parser.parseBlock();
 
-			// final array containing meshes with scene graph hierarchy set up
-			var finalMeshes = [];
+		}
 
-			var geometryParser = new GeometryParser();
+		this.debugger.offset = this.reader.offset;
+		this.debugger.closeForms();
 
-			var self = this;
-			lwoTree.layers.forEach( function ( layer ) {
+		return this.tree;
 
-				var geometry = geometryParser.parse( layer.geometry, layer );
+	},
 
-				var mesh = self.parseMesh( geometry, layer );
+	parseTopForm() {
 
-				meshes[ layer.number ] = mesh;
+		this.debugger.offset = this.reader.offset;
 
-				if ( layer.parent === - 1 ) finalMeshes.push( mesh );
-				else meshes[ layer.parent ].add( mesh );
+		var topForm = this.reader.getIDTag();
 
+		if ( topForm !== 'FORM' ) {
 
-			} );
+			console.warn( "LWOLoader: Top-level FORM missing." );
+			return;
 
-			this.applyPivots( finalMeshes );
+		}
 
-			return finalMeshes;
+		var length = this.reader.getUint32();
 
-		},
+		this.debugger.dataOffset = this.reader.offset;
+		this.debugger.length = length;
 
-		parseMesh( geometry, layer ) {
+		var type = this.reader.getIDTag();
 
-			var mesh;
+		if ( type === 'LWO2' ) {
 
-			var materials = this.getMaterials( geometry.userData.matNames, layer.geometry.type );
+			this.tree.format = type;
 
-			this.duplicateUVs( geometry, materials );
+		} else if ( type === 'LWO3' ) {
 
-			if ( layer.geometry.type === 'points' ) mesh = new Points( geometry, materials );
-			else if ( layer.geometry.type === 'lines' ) mesh = new LineSegments( geometry, materials );
-			else mesh = new Mesh( geometry, materials );
+			this.tree.format = type;
 
-			if ( layer.name ) mesh.name = layer.name;
-			else mesh.name = this.defaultLayerName + '_layer_' + layer.number;
+		}
 
-			mesh.userData.pivot = layer.pivot;
+		this.debugger.node = 0;
+		this.debugger.nodeID = type;
+		this.debugger.log();
+
+		return;
+
+	},
+
+
+	///
+	// FORM PARSING METHODS
+	///
+
+	// Forms are organisational and can contain any number of sub chunks and sub forms
+	// FORM ::= 'FORM'[ID4], length[U4], type[ID4], ( chunk[CHUNK] | form[FORM] ) * }
+	parseForm( length ) {
+
+		var type = this.reader.getIDTag();
+
+		switch ( type ) {
+
+			// SKIPPED FORMS
+			// if skipForm( length ) is called, the entire form and any sub forms and chunks are skipped
+
+			case 'ISEQ': // Image sequence
+			case 'ANIM': // plug in animation
+			case 'STCC': // Color-cycling Still
+			case 'VPVL':
+			case 'VPRM':
+			case 'NROT':
+			case 'WRPW': // image wrap w ( for cylindrical and spherical projections)
+			case 'WRPH': // image wrap h
+			case 'FUNC':
+			case 'FALL':
+			case 'OPAC':
+			case 'GRAD': // gradient texture
+			case 'ENVS':
+			case 'VMOP':
+			case 'VMBG':
+
+			// Car Material FORMS
+			case 'OMAX':
+			case 'STEX':
+			case 'CKBG':
+			case 'CKEY':
+			case 'VMLA':
+			case 'VMLB':
+				this.debugger.skipped = true;
+				this.skipForm( length ); // not currently supported
+				break;
+
+			// if break; is called directly, the position in the lwoTree is not created
+			// any sub chunks and forms are added to the parent form instead
+			case 'META':
+			case 'NNDS':
+			case 'NODS':
+			case 'NDTA':
+			case 'ADAT':
+			case 'AOVS':
+			case 'BLOK':
+
+			// used by texture nodes
+			case 'IBGC': // imageBackgroundColor
+			case 'IOPC': // imageOpacity
+			case 'IIMG': // hold reference to image path
+			case 'TXTR':
+				// this.setupForm( type, length );
+				this.debugger.length = 4;
+				this.debugger.skipped = true;
+				break;
+
+			case 'IFAL': // imageFallof
+			case 'ISCL': // imageScale
+			case 'IPOS': // imagePosition
+			case 'IROT': // imageRotation
+			case 'IBMP':
+			case 'IUTD':
+			case 'IVTD':
+				this.parseTextureNodeAttribute( type );
+				break;
+
+			case 'ENVL':
+				this.parseEnvelope( length );
+				break;
+
+				// CLIP FORM AND SUB FORMS
+
+			case 'CLIP':
+				if ( this.tree.format === 'LWO2' ) {
 
-			return mesh;
+					this.parseForm( length );
 
-		},
+				} else {
 
-		// TODO: may need to be reversed in z to convert LWO to three.js coordinates
-		applyPivots( meshes ) {
+					this.parseClip( length );
 
-			meshes.forEach( function ( mesh ) {
+				}
+				break;
 
-				mesh.traverse( function ( child ) {
+			case 'STIL':
+				this.parseImage();
+				break;
 
-					var pivot = child.userData.pivot;
+			case 'XREF': // clone of another STIL
+				this.reader.skip( 8 ); // unknown
+				this.currentForm.referenceTexture = {
+					index: this.reader.getUint32(),
+					refName: this.reader.getString() // internal unique ref
+				};
+				break;
 
-					child.position.x += pivot[ 0 ];
-					child.position.y += pivot[ 1 ];
-					child.position.z += pivot[ 2 ];
+				// Not in spec, used by texture nodes
 
-					if ( child.parent ) {
+			case 'IMST':
+				this.parseImageStateForm( length );
+				break;
 
-						var parentPivot = child.parent.userData.pivot;
+				// SURF FORM AND SUB FORMS
 
-						child.position.x -= parentPivot[ 0 ];
-						child.position.y -= parentPivot[ 1 ];
-						child.position.z -= parentPivot[ 2 ];
+			case 'SURF':
+				this.parseSurfaceForm( length );
+				break;
 
-					}
+			case 'VALU': // Not in spec
+				this.parseValueForm( length );
+				break;
 
-				} );
+			case 'NTAG':
+				this.parseSubNode( length );
+				break;
 
-			} );
+			case 'ATTR': // BSDF Node Attributes
+			case 'SATR': // Standard Node Attributes
+				this.setupForm( 'attributes', length );
+				break;
 
-		},
+			case 'NCON':
+				this.parseConnections( length );
+				break;
 
-		getMaterials( namesArray, type ) {
+			case 'SSHA':
+				this.parentForm = this.currentForm;
+				this.currentForm = this.currentSurface;
+				this.setupForm( 'surfaceShader', length );
+				break;
 
-			var materials = [];
+			case 'SSHD':
+				this.setupForm( 'surfaceShaderData', length );
+				break;
 
-			var self = this;
+			case 'ENTR': // Not in spec
+				this.parseEntryForm( length );
+				break;
 
-			namesArray.forEach( function ( name, i ) {
+				// Image Map Layer
 
-				materials[ i ] = self.getMaterialByName( name );
+			case 'IMAP':
+				this.parseImageMap( length );
+				break;
 
-			} );
+			case 'TAMP':
+				this.parseXVAL( 'amplitude', length );
+				break;
 
-			// convert materials to line or point mats if required
-			if ( type === 'points' || type === 'lines' ) {
+				//Texture Mapping Form
 
-				materials.forEach( function ( mat, i ) {
+			case 'TMAP':
+				this.setupForm( 'textureMap', length );
+				break;
 
-					var spec = {
-						color: mat.color,
-					};
+			case 'CNTR':
+				this.parseXVAL3( 'center', length );
+				break;
 
-					if ( type === 'points' ) {
+			case 'SIZE':
+				this.parseXVAL3( 'scale', length );
+				break;
 
-						spec.size = 0.1;
-						spec.map = mat.map;
-						spec.morphTargets = mat.morphTargets;
-						materials[ i ] = new PointsMaterial( spec );
+			case 'ROTA':
+				this.parseXVAL3( 'rotation', length );
+				break;
 
-					} else if ( type === 'lines' ) {
+			default:
+				this.parseUnknownForm( type, length );
 
-						materials[ i ] = new LineBasicMaterial( spec );
+		}
 
-					}
+		this.debugger.node = 0;
+		this.debugger.nodeID = type;
+		this.debugger.log();
 
-				} );
+	},
 
-			}
+	setupForm( type, length ) {
 
-			// if there is only one material, return that directly instead of array
-			var filtered = materials.filter( Boolean );
-			if ( filtered.length === 1 ) return filtered[ 0 ];
+		if ( ! this.currentForm ) this.currentForm = this.currentNode;
 
-			return materials;
+		this.currentFormEnd = this.reader.offset + length;
+		this.parentForm = this.currentForm;
 
-		},
+		if ( ! this.currentForm[ type ] ) {
 
-		getMaterialByName( name ) {
+			this.currentForm[ type ] = {};
+			this.currentForm = this.currentForm[ type ];
 
-			return this.materials.filter( function ( m ) {
 
-				return m.name === name;
+		} else {
 
-			} )[ 0 ];
+			// should never see this unless there's a bug in the reader
+			console.warn( 'LWOLoader: form already exists on parent: ', type, this.currentForm );
 
-		},
+			this.currentForm = this.currentForm[ type ];
 
-		// If the material has an aoMap, duplicate UVs
-		duplicateUVs( geometry, materials ) {
+		}
 
-			var duplicateUVs = false;
 
-			if ( ! Array.isArray( materials ) ) {
+	},
 
-				if ( materials.aoMap ) duplicateUVs = true;
+	skipForm( length ) {
 
-			} else {
+		this.reader.skip( length - 4 );
 
-				materials.forEach( function ( material ) {
+	},
 
-					if ( material.aoMap ) duplicateUVs = true;
+	parseUnknownForm( type, length ) {
 
-				} );
+		console.warn( 'LWOLoader: unknown FORM encountered: ' + type, length );
 
-			}
+		printBuffer( this.reader.dv.buffer, this.reader.offset, length - 4 );
+		this.reader.skip( length - 4 );
 
-			if ( ! duplicateUVs ) return;
+	},
 
-			geometry.addAttribute( 'uv2', new BufferAttribute( geometry.attributes.uv.array, 2 ) );
+	parseSurfaceForm( length ) {
 
-		},
+		this.reader.skip( 8 ); // unknown Uint32 x2
 
-	};
+		var name = this.reader.getString();
 
-	function MaterialParser( textureLoader ) {
+		var surface = {
+			attributes: {}, // LWO2 style non-node attributes will go here
+			connections: {},
+			name: name,
+			inputName: name,
+			nodes: {},
+			source: this.reader.getString(),
+		};
 
-		this.textureLoader = textureLoader;
+		this.tree.materials[ name ] = surface;
+		this.currentSurface = surface;
 
-	}
+		this.parentForm = this.tree.materials;
+		this.currentForm = surface;
+		this.currentFormEnd = this.reader.offset + length;
 
-	MaterialParser.prototype = {
+	},
 
-		constructor: MaterialParser,
+	parseSurfaceLwo2( length ) {
 
-		parse: function () {
+		var name = this.reader.getString();
 
-			var materials = [];
-			this.textures = {};
+		var surface = {
+			attributes: {}, // LWO2 style non-node attributes will go here
+			connections: {},
+			name: name,
+			nodes: {},
+			source: this.reader.getString(),
+		};
 
-			for ( var name in lwoTree.materials ) {
+		this.tree.materials[ name ] = surface;
+		this.currentSurface = surface;
 
-				if ( lwoTree.format === 'LWO3' ) {
+		this.parentForm = this.tree.materials;
+		this.currentForm = surface;
+		this.currentFormEnd = this.reader.offset + length;
 
-					materials.push( this.parseMaterial( lwoTree.materials[ name ], name, lwoTree.textures ) );
+	},
 
-				} else if ( lwoTree.format === 'LWO2' ) {
+	parseSubNode( length ) {
 
-					materials.push( this.parseMaterialLwo2( lwoTree.materials[ name ], name /*, lwoTree.textures */ ) );
+		// parse the NRNM CHUNK of the subnode FORM to get
+		// a meaningful name for the subNode
+		// some subnodes can be renamed, but Input and Surface cannot
 
-				}
+		this.reader.skip( 8 ); // NRNM + length
+		var name = this.reader.getString();
 
-			}
+		var node = {
+			name: name
+		};
+		this.currentForm = node;
+		this.currentNode = node;
 
-			return materials;
+		this.currentFormEnd = this.reader.offset + length;
 
-		},
 
-		parseMaterial( materialData, name, textures ) {
+	},
 
-			var params = {
-				name: name,
-				side: this.getSide( materialData.attributes ),
-				flatShading: this.getSmooth( materialData.attributes ),
-			};
+	// collect attributes from all nodes at the top level of a surface
+	parseConnections( length ) {
 
-			var connections = this.parseConnections( materialData.connections, materialData.nodes );
+		this.currentFormEnd = this.reader.offset + length;
+		this.parentForm = this.currentForm;
 
-			var maps = this.parseTextureNodes( connections.maps );
+		this.currentForm = this.currentSurface.connections;
 
-			this.parseAttributeImageMaps( connections.attributes, textures, maps, materialData.maps );
+	},
 
-			var attributes = this.parseAttributes( connections.attributes, maps );
+	// surface node attribute data, e.g. specular, roughness etc
+	parseEntryForm( length ) {
 
-			this.parseEnvMap( connections, maps, attributes );
+		this.reader.skip( 8 ); // NAME + length
+		var name = this.reader.getString();
+		this.currentForm = this.currentNode.attributes;
 
-			params = Object.assign( maps, params );
-			params = Object.assign( params, attributes );
+		this.setupForm( name, length );
 
-			var materialCtor = connections.attributes.Roughness ? MeshStandardMaterial : MeshPhongMaterial;
+	},
 
-			return new materialCtor( params );
+	// parse values from material - doesn't match up to other LWO3 data types
+	// sub form of entry form
+	parseValueForm() {
 
-		},
+		this.reader.skip( 8 ); // unknown + length
 
-		parseMaterialLwo2( materialData, name /*, textures */ ) {
+		var valueType = this.reader.getString();
 
-			var params = {
-				name: name,
-				side: this.getSide( materialData.attributes ),
-				flatShading: this.getSmooth( materialData.attributes ),
-			};
+		if ( valueType === 'double' ) {
 
-			var attributes = this.parseAttributes( materialData.attributes, {} );
-			params = Object.assign( params, attributes );
-			return new MeshPhongMaterial( params );
+			this.currentForm.value = this.reader.getUint64();
 
-		},
+		} else if ( valueType === 'int' ) {
 
-		// Note: converting from left to right handed coords by switching x -> -x in vertices, and
-		// then switching mat FrontSide -> BackSide
-		// NB: this means that FrontSide and BackSide have been switched!
-		getSide( attributes ) {
+			this.currentForm.value = this.reader.getUint32();
 
-			if ( ! attributes.side ) return BackSide;
+		} else if ( valueType === 'vparam' ) {
 
-			switch ( attributes.side ) {
+			this.reader.skip( 24 );
+			this.currentForm.value = this.reader.getFloat64();
 
-				case 0:
-				case 1:
-					return BackSide;
-				case 2: return FrontSide;
-				case 3: return DoubleSide;
+		} else if ( valueType === 'vparam3' ) {
 
-			}
+			this.reader.skip( 24 );
+			this.currentForm.value = this.reader.getFloat64Array( 3 );
 
-		},
+		}
 
-		getSmooth( attributes ) {
+	},
 
-			if ( ! attributes.smooth ) return true;
-			return ! attributes.smooth;
+	// holds various data about texture node image state
+	// Data other than mipMapLevel unknown
+	parseImageStateForm() {
 
-		},
+		this.reader.skip( 8 ); // unknown
 
-		parseConnections( connections, nodes ) {
+		this.currentForm.mipMapLevel = this.reader.getFloat32();
 
-			var materialConnections = {
-				maps: {}
-			};
+	},
 
-			var inputName = connections.inputName;
-			var inputNodeName = connections.inputNodeName;
-			var nodeName = connections.nodeName;
+	// LWO2 style image data node OR LWO3 textures defined at top level in editor (not as SURF node)
+	parseImageMap( length ) {
 
-			var self = this;
-			inputName.forEach( function ( name, index ) {
+		this.currentFormEnd = this.reader.offset + length;
+		this.parentForm = this.currentForm;
 
-				if ( name === 'Material' ) {
+		if ( ! this.currentForm.maps ) this.currentForm.maps = [];
 
-					var matNode = self.getNodeByRefName( inputNodeName[ index ], nodes );
-					materialConnections.attributes = matNode.attributes;
-					materialConnections.envMap = matNode.fileName;
-					materialConnections.name = inputNodeName[ index ];
+		var map = {};
+		this.currentForm.maps.push( map );
+		this.currentForm = map;
 
-				}
+		this.reader.skip( 10 ); // unknown, could be an issue if it contains a VX
 
-			} );
+	},
 
-			nodeName.forEach( function ( name, index ) {
+	parseTextureNodeAttribute( type ) {
 
-				if ( name === materialConnections.name ) {
+		this.reader.skip( 28 ); // FORM + length + VPRM + unknown + Uint32 x2 + float32
 
-					materialConnections.maps[ inputName[ index ] ] = self.getNodeByRefName( inputNodeName[ index ], nodes );
+		this.reader.skip( 20 ); // FORM + length + VPVL + float32 + Uint32
 
-				}
+		switch ( type ) {
 
-			} );
+			case 'ISCL':
+				this.currentNode.scale = this.reader.getFloat32Array( 3 );
+				break;
+			case 'IPOS':
+				this.currentNode.position = this.reader.getFloat32Array( 3 );
+				break;
+			case 'IROT':
+				this.currentNode.rotation = this.reader.getFloat32Array( 3 );
+				break;
+			case 'IFAL':
+				this.currentNode.falloff = this.reader.getFloat32Array( 3 );
+				break;
 
-			return materialConnections;
+			case 'IBMP':
+				this.currentNode.amplitude = this.reader.getFloat32();
+				break;
+			case 'IUTD':
+				this.currentNode.uTiles = this.reader.getFloat32();
+				break;
+			case 'IVTD':
+				this.currentNode.vTiles = this.reader.getFloat32();
+				break;
 
-		},
+		}
 
-		getNodeByRefName( refName, nodes ) {
+		this.reader.skip( 2 ); // unknown
 
-			for ( var name in nodes ) {
 
-				if ( nodes[ name ].refName === refName ) return nodes[ name ];
+	},
 
-			}
+	// ENVL forms are currently ignored
+	parseEnvelope( length ) {
 
-		},
+		this.reader.skip( length - 4 ); // skipping entirely for now
 
-		parseTextureNodes( textureNodes ) {
+	},
 
-			var maps = {};
+	///
+	// CHUNK PARSING METHODS
+	///
 
-			for ( var name in textureNodes ) {
+	// clips can either be defined inside a surface node, or at the top
+	// level and they have a different format in each case
+	parseClip( length ) {
 
-				var node = textureNodes[ name ];
-				var path = node.fileName;
+		var tag = this.reader.getIDTag();
 
-				if ( ! path ) return;
+		// inside surface node
+		if ( tag === 'FORM' ) {
 
-				var texture = this.loadTexture( path );
+			this.reader.skip( 16 );
 
-				if ( node.widthWrappingMode !== undefined ) texture.wrapS = this.getWrappingType( node.widthWrappingMode );
-				if ( node.heightWrappingMode !== undefined ) texture.wrapT = this.getWrappingType( node.heightWrappingMode );
+			this.currentNode.fileName = this.reader.getString();
 
-				switch ( name ) {
+			return;
 
-					case 'Color':
-						maps.map = texture;
-						break;
-					case 'Roughness':
-						maps.roughnessMap = texture;
-						maps.roughness = 0.5;
-						break;
-					case 'Specular':
-						maps.specularMap = texture;
-						maps.specular = 0xffffff;
-						break;
-					case 'Luminous':
-						maps.emissiveMap = texture;
-						maps.emissive = 0x808080;
-						break;
-					case 'Metallic':
-						maps.metalnessMap = texture;
-						maps.metalness = 0.5;
-						break;
-					case 'Transparency':
-					case 'Alpha':
-						maps.alphaMap = texture;
-						maps.transparent = true;
-						break;
-					case 'Normal':
-						maps.normalMap = texture;
-						if ( node.amplitude !== undefined ) maps.normalScale = new Vector2( node.amplitude, node.amplitude );
-						break;
-					case 'Bump':
-						maps.bumpMap = texture;
-						break;
+		}
 
-				}
+		// otherwise top level
+		this.reader.setOffset( this.reader.offset - 4 );
+
+		this.currentFormEnd = this.reader.offset + length;
+		this.parentForm = this.currentForm;
+
+		this.reader.skip( 8 ); // unknown
+
+		var texture = {
+			index: this.reader.getUint32()
+		};
+		this.tree.textures.push( texture );
+		this.currentForm = texture;
+
+	},
+
+	parseClipLwo2( length ) {
+
+		var texture = {
+			index: this.reader.getUint32(),
+			fileName: ""
+		};
+
+		// search for the STIL block
+		while ( true ) {
+
+			var tag = this.reader.getIDTag();
+			var n_length = this.reader.getUint16();
+			if ( tag === 'STIL' ) {
+
+				texture.fileName = this.reader.getString();
+				break;
 
 			}
 
-			// LWO BSDF materials can have both spec and rough, but this is not valid in three
-			if ( maps.roughnessMap && maps.specularMap ) delete maps.specularMap;
-
-			return maps;
-
-		},
-
-		// maps can also be defined on individual material attributes, parse those here
-		// This occurs on Standard (Phong) surfaces
-		parseAttributeImageMaps( attributes, textures, maps ) {
-
-			for ( var name in attributes ) {
-
-				var attribute = attributes[ name ];
-
-				if ( attribute.maps ) {
-
-					var mapData = attribute.maps[ 0 ];
-
-					var path = this.getTexturePathByIndex( mapData.imageIndex, textures );
-					if ( ! path ) return;
-
-					var texture = this.loadTexture( path );
-
-					if ( mapData.wrap !== undefined ) texture.wrapS = this.getWrappingType( mapData.wrap.w );
-					if ( mapData.wrap !== undefined ) texture.wrapT = this.getWrappingType( mapData.wrap.h );
-
-					switch ( name ) {
-
-						case 'Color':
-							maps.map = texture;
-							break;
-						case 'Diffuse':
-							maps.aoMap = texture;
-							break;
-						case 'Roughness':
-							maps.roughnessMap = texture;
-							maps.roughness = 1;
-							break;
-						case 'Specular':
-							maps.specularMap = texture;
-							maps.specular = 0xffffff;
-							break;
-						case 'Luminosity':
-							maps.emissiveMap = texture;
-							maps.emissive = 0x808080;
-							break;
-						case 'Metallic':
-							maps.metalnessMap = texture;
-							maps.metalness = 1;
-							break;
-						case 'Transparency':
-						case 'Alpha':
-							maps.alphaMap = texture;
-							maps.transparent = true;
-							break;
-						case 'Normal':
-							maps.normalMap = texture;
-							break;
-						case 'Bump':
-							maps.bumpMap = texture;
-							break;
-
-					}
+			if ( n_length >= length ) {
 
-				}
+				break;
 
 			}
 
-		},
+		}
 
-		parseAttributes( attributes, maps ) {
+		this.tree.textures.push( texture );
+		this.currentForm = texture;
 
-			var params = {};
+	},
 
-			// don't use color data if color map is present
-			if ( attributes.Color && ! maps.map ) {
+	parseImage() {
 
-				params.color = new Color().fromArray( attributes.Color.value );
+		this.reader.skip( 8 ); // unknown
+		this.currentForm.fileName = this.reader.getString();
 
-			} else params.color = new Color();
+	},
 
+	parseXVAL( type, length ) {
 
-			if ( attributes.Transparency && attributes.Transparency.value !== 0 ) {
+		var endOffset = this.reader.offset + length - 4;
+		this.reader.skip( 8 );
 
-				params.opacity = 1 - attributes.Transparency.value;
-				params.transparent = true;
+		this.currentForm[ type ] = this.reader.getFloat32();
 
-			}
+		this.reader.setOffset( endOffset ); // set end offset directly to skip optional envelope
 
-			if ( attributes[ 'Bump Height' ] ) params.bumpScale = attributes[ 'Bump Height' ].value * 0.1;
+	},
 
-			if ( attributes[ 'Refraction Index' ] ) params.refractionRatio = 1 / attributes[ 'Refraction Index' ].value;
+	parseXVAL3( type, length ) {
 
-			this.parseStandardAttributes( params, attributes, maps );
-			this.parsePhongAttributes( params, attributes, maps );
+		var endOffset = this.reader.offset + length - 4;
+		this.reader.skip( 8 );
 
-			return params;
+		this.currentForm[ type ] = {
+			x: this.reader.getFloat32(),
+			y: this.reader.getFloat32(),
+			z: this.reader.getFloat32(),
+		};
 
-		},
+		this.reader.setOffset( endOffset );
 
-		parseStandardAttributes( params, attributes, maps ) {
+	},
 
-			if ( attributes.Luminous && attributes.Luminous.value !== 0 && attributes[ 'Luminous Color' ] ) {
+	// Tags associated with an object
+	// OTAG { type[ID4], tag-string[S0] }
+	parseObjectTag() {
 
-				var emissiveColor = attributes[ 'Luminous Color' ].value.map( function ( val ) {
+		if ( ! this.tree.objectTags ) this.tree.objectTags = {};
 
-					return val * attributes.Luminous.value;
+		this.tree.objectTags[ this.reader.getIDTag() ] = {
+			tagString: this.reader.getString()
+		};
 
-				} );
+	},
 
-				params.emissive = new Color().fromArray( emissiveColor );
+	// Signals the start of a new layer. All the data chunks which follow will be included in this layer until another layer chunk is encountered.
+	// LAYR: number[U2], flags[U2], pivot[VEC12], name[S0], parent[U2]
+	parseLayer( length ) {
 
-			}
-			if ( attributes.Roughness && ! maps.roughnessMap ) params.roughness = attributes.Roughness.value;
-			if ( attributes.Metallic && ! maps.metalnessMap ) params.metalness = attributes.Metallic.value;
+		var layer = {
+			number: this.reader.getUint16(),
+			flags: this.reader.getUint16(), // If the least significant bit of flags is set, the layer is hidden.
+			pivot: this.reader.getFloat32Array( 3 ), // Note: this seems to be superfluous, as the geometry is translated when pivot is present
+			name: this.reader.getString(),
+		};
 
-		},
+		this.tree.layers.push( layer );
+		this.currentLayer = layer;
 
-		parsePhongAttributes( params, attributes, maps ) {
+		var parsedLength = 16 + stringOffset( this.currentLayer.name ); // index ( 2 ) + flags( 2 ) + pivot( 12 ) + stringlength
 
-			if ( attributes.Diffuse ) params.color.multiplyScalar( attributes.Diffuse.value );
+		// if we have not reached the end of the layer block, there must be a parent defined
+		this.currentLayer.parent = ( parsedLength < length ) ? this.reader.getUint16() : - 1; // omitted or -1 for no parent
 
-			if ( attributes.Reflection ) {
+	},
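+
+	// Worked example of the parent check above ( the layer name is hypothetical ):
+	// for a layer named 'Body', stringOffset( 'Body' ) = 4 chars + nullbyte + padbyte = 6,
+	// so parsedLength = 16 + 6 = 22. If the LAYR chunk length is 22 there is no parent
+	// ( parent = -1 ); if it is 24, a trailing U2 parent index is read.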
 
-				params.reflectivity = attributes.Reflection.value;
-				params.combine = AddOperation;
+	// VEC12 * ( F4 + F4 + F4 ) array of x,y,z vectors
+	// Converting from left to right handed coordinate system:
+	// x -> -x and switch material FrontSide -> BackSide
+	parsePoints( length ) {
 
-			}
+		this.currentPoints = [];
+		for ( var i = 0; i < length / 4; i += 3 ) {
 
-			if ( attributes.Luminosity && ! maps.emissiveMap ) params.emissive = new Color().setScalar( attributes.Luminosity.value );
+			// z -> -z to match three.js right handed coords
+			this.currentPoints.push( this.reader.getFloat32(), this.reader.getFloat32(), - this.reader.getFloat32() );
 
-			if ( attributes.Glossiness !== undefined ) params.shininess = 5 + Math.pow( attributes.Glossiness.value * 7, 6 );
+		}
 
-			// parse specular if there is no roughness - we will interpret the material as 'Phong' in this case
-			if ( ! attributes.Roughness && attributes.Specular && ! maps.specularMap ) params.specular = new Color().setScalar( attributes.Specular.value * 1.5 );
+	},
 
-		},
+	// parse VMAP or VMAD
+	// Associates a set of floating-point vectors with a set of points.
+	// VMAP: { type[ID4], dimension[U2], name[S0], ( vert[VX], value[F4] # dimension ) * }
 
-		parseEnvMap( connections, maps, attributes ) {
+	// VMAD Associates a set of floating-point vectors with the vertices of specific polygons.
+	// Similar to VMAP UVs, but associates with polygon vertices rather than points
+	// to solve the problem of UV seams: VMAD chunks are paired with VMAPs of the same name,
+	// if they exist. The vector values in the VMAD will then replace those in the
+	// corresponding VMAP, but only for calculations involving the specified polygons.
+	// VMAD { type[ID4], dimension[U2], name[S0], ( vert[VX], poly[VX], value[F4] # dimension ) * }
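+	//
+	// For example ( values are hypothetical ), a 2-dimensional TXUV record reads as
+	//   vert[VX] = 12, value[F4] = 0.25, value[F4] = 0.75
+	// in a VMAP, while the VMAD form inserts the owning polygon index:
+	//   vert[VX] = 12, poly[VX] = 4, value[F4] = 0.25, value[F4] = 0.75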
+	parseVertexMapping( length, discontinuous ) {
 
-			if ( connections.envMap ) {
+		var finalOffset = this.reader.offset + length;
 
-				var envMap = this.loadTexture( connections.envMap );
+		var channelName = this.reader.getString();
 
-				if ( attributes.transparent && attributes.opacity < 0.999 ) {
+		if ( this.reader.offset === finalOffset ) {
 
-					envMap.mapping = EquirectangularRefractionMapping;
+			// then we are in a texture node and the VMAP chunk is just a reference to a UV channel name
+			this.currentForm.UVChannel = channelName;
+			return;
 
-					// Reflectivity and refraction mapping don't work well together in Phong materials
-					if ( attributes.reflectivity !== undefined ) {
+		}
 
-						delete attributes.reflectivity;
-						delete attributes.combine;
+		// otherwise reset to initial length and parse normal VMAP CHUNK
+		this.reader.setOffset( this.reader.offset - stringOffset( channelName ) );
+
+		var type = this.reader.getIDTag();
+
+		this.reader.getUint16(); // dimension
+		var name = this.reader.getString();
+
+		var remainingLength = length - 6 - stringOffset( name );
+
+		switch ( type ) {
+
+			case 'TXUV':
+				this.parseUVMapping( name, finalOffset, discontinuous );
+				break;
+			case 'MORF':
+			case 'SPOT':
+				this.parseMorphTargets( name, finalOffset, type ); // can't be discontinuous
+				break;
+			// unsupported VMAPs
+			case 'APSL':
+			case 'NORM':
+			case 'WGHT':
+			case 'MNVW':
+			case 'PICK':
+			case 'RGB ':
+			case 'RGBA':
+				this.reader.skip( remainingLength );
+				break;
+			default:
+				console.warn( 'LWOLoader: unknown vertex map type: ' + type );
+				this.reader.skip( remainingLength );
 
-					}
+		}
 
-					if ( attributes.metalness !== undefined ) {
+	},
 
-						delete attributes.metalness;
+	parseUVMapping( name, finalOffset, discontinuous ) {
 
-					}
+		var uvIndices = [];
+		var polyIndices = [];
+		var uvs = [];
 
-				} else envMap.mapping = EquirectangularReflectionMapping;
+		while ( this.reader.offset < finalOffset ) {
 
-				maps.envMap = envMap;
+			uvIndices.push( this.reader.getVariableLengthIndex() );
 
-			}
+			if ( discontinuous ) polyIndices.push( this.reader.getVariableLengthIndex() );
 
-		},
+			uvs.push( this.reader.getFloat32(), this.reader.getFloat32() );
 
-		// get texture defined at top level by its index
-		getTexturePathByIndex( index ) {
+		}
 
-			var fileName = '';
+		if ( discontinuous ) {
 
-			if ( ! lwoTree.textures ) return fileName;
+			if ( ! this.currentLayer.discontinuousUVs ) this.currentLayer.discontinuousUVs = {};
 
-			lwoTree.textures.forEach( function ( texture ) {
+			this.currentLayer.discontinuousUVs[ name ] = {
+				uvIndices: uvIndices,
+				polyIndices: polyIndices,
+				uvs: uvs,
+			};
 
-				if ( texture.index === index ) fileName = texture.fileName;
+		} else {
 
-			} );
+			if ( ! this.currentLayer.uvs ) this.currentLayer.uvs = {};
 
-			return fileName;
+			this.currentLayer.uvs[ name ] = {
+				uvIndices: uvIndices,
+				uvs: uvs,
+			};
 
-		},
+		}
 
-		loadTexture( path ) {
+	},
 
-			if ( ! path ) return null;
+	parseMorphTargets( name, finalOffset, type ) {
 
-			return this.textureLoader.load( this.cleanPath( path ) );
+		var indices = [];
+		var points = [];
 
-		},
+		type = ( type === 'MORF' ) ? 'relative' : 'absolute';
 
-		// Lightwave expects textures to be in folder called Images relative
-		// to the model
-		// Otherwise, the full absolute path is stored: D://some_directory/textures/bumpMap.png
-		// In this case, we'll strip out everything and load 'bumpMap.png' from the same directory as the model
-		cleanPath( path ) {
+		while ( this.reader.offset < finalOffset ) {
 
-			if ( path.toLowerCase().indexOf( 'images' ) === 0 ) return './' + path;
-			return path.split( '/' ).pop().split( '\\' ).pop();
+			indices.push( this.reader.getVariableLengthIndex() );
+			// z -> -z to match three.js right handed coords
+			points.push( this.reader.getFloat32(), this.reader.getFloat32(), - this.reader.getFloat32() );
 
-		},
+		}
 
-		// 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
-		getWrappingType( num ) {
+		if ( ! this.currentLayer.morphTargets ) this.currentLayer.morphTargets = {};
 
-			switch ( num ) {
+		this.currentLayer.morphTargets[ name ] = {
+			indices: indices,
+			points: points,
+			type: type,
+		};
 
-				case 0:
-					console.warn( 'LWOLoader: "Reset" texture wrapping type is not supported in three.js' );
-					return ClampToEdgeWrapping;
-				case 1: return RepeatWrapping;
-				case 2: return MirroredRepeatWrapping;
-				case 3: return ClampToEdgeWrapping;
+	},
 
-			}
+	// A list of polygons for the current layer.
+	// POLS { type[ID4], ( numvert+flags[U2], vert[VX] # numvert ) * }
+	parsePolygonList( length ) {
 
-		},
+		var finalOffset = this.reader.offset + length;
+		var type = this.reader.getIDTag();
 
-		getType( nodeData ) {
+		var indices = [];
 
-			if ( nodeData.roughness ) return 'Standard';
-			return 'Phong';
+		// hold a list of polygon sizes, to be split up later
+		var polygonDimensions = [];
 
-		},
+		while ( this.reader.offset < finalOffset ) {
 
-	};
+			var numverts = this.reader.getUint16();
 
-	function GeometryParser() {}
+			//var flags = numverts & 64512; // 6 high order bits are flags - ignoring for now
+			numverts = numverts & 1023; // remaining ten low order bits are vertex num
+			polygonDimensions.push( numverts );
 
-	GeometryParser.prototype = {
+			for ( var j = 0; j < numverts; j ++ ) indices.push( this.reader.getVariableLengthIndex() );
 
-		constructor: GeometryParser,
+		}
 
-		parse( geoData, layer ) {
+		var geometryData = {
+			type: type,
+			vertexIndices: indices,
+			polygonDimensions: polygonDimensions,
+			points: this.currentPoints
+		};
 
-			var geometry = new BufferGeometry();
+		// Note: assuming that all polygons are lines or points if the first one is
+		if ( polygonDimensions[ 0 ] === 1 ) geometryData.type = 'points';
+		else if ( polygonDimensions[ 0 ] === 2 ) geometryData.type = 'lines';
 
-			geometry.addAttribute( 'position', new Float32BufferAttribute( geoData.points, 3 ) );
+		this.currentLayer.geometry = geometryData;
 
-			var indices = this.splitIndices( geoData.vertexIndices, geoData.polygonDimensions );
-			geometry.setIndex( indices );
+	},
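+
+	// Illustrative example of the POLS decoding above ( indices are hypothetical ):
+	// a triangle followed by a quad would produce
+	//   polygonDimensions = [ 3, 4 ]
+	//   vertexIndices = [ 0, 1, 2,  2, 1, 3, 4 ]
+	// note that the 6 high-order flag bits are masked off with & 1023, so raw numvert
+	// words of 0x0004 and 0xFC04 both decode to a 4 vertex polygon.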
 
-			this.parseGroups( geometry, geoData );
+	// Lists the tag strings that can be associated with polygons by the PTAG chunk.
+	// TAGS { tag-string[S0] * }
+	parseTagStrings( length ) {
 
-			geometry.computeVertexNormals();
+		this.tree.tags = this.reader.getStringArray( length );
 
-			this.parseUVs( geometry, layer, indices );
-			this.parseMorphTargets( geometry, layer, indices );
+	},
 
-			// TODO: z may need to be reversed to account for coordinate system change
-			geometry.translate( - layer.pivot[ 0 ], - layer.pivot[ 1 ], - layer.pivot[ 2 ] );
+	// Associates tags of a given type with polygons in the most recent POLS chunk.
+	// PTAG { type[ID4], ( poly[VX], tag[U2] ) * }
+	parsePolygonTagMapping( length ) {
 
-			// var userData = geometry.userData;
-			// geometry = geometry.toNonIndexed()
-			// geometry.userData = userData;
+		var finalOffset = this.reader.offset + length;
+		var type = this.reader.getIDTag();
+		if ( type === 'SURF' ) this.parseMaterialIndices( finalOffset );
+		else { //PART, SMGP, COLR not supported
 
-			return geometry;
+			this.reader.skip( length - 4 );
 
-		},
+		}
 
-		// split quads into tris
-		splitIndices( indices, polygonDimensions ) {
+	},
 
-			var remappedIndices = [];
+	parseMaterialIndices( finalOffset ) {
 
-			var i = 0;
-			polygonDimensions.forEach( function ( dim ) {
+		// array holds polygon index followed by material index
+		this.currentLayer.geometry.materialIndices = [];
 
-				if ( dim < 4 ) {
+		while ( this.reader.offset < finalOffset ) {
 
-					for ( var k = 0; k < dim; k ++ ) remappedIndices.push( indices[ i + k ] );
+			var polygonIndex = this.reader.getVariableLengthIndex();
+			var materialIndex = this.reader.getUint16();
 
-				} else if ( dim === 4 ) {
+			this.currentLayer.geometry.materialIndices.push( polygonIndex, materialIndex );
 
-					remappedIndices.push(
-						indices[ i ],
-						indices[ i + 1 ],
-						indices[ i + 2 ],
+		}
 
-						indices[ i ],
-						indices[ i + 2 ],
-						indices[ i + 3 ]
+	},
 
-					);
+	parseUnknownCHUNK( blockID, length ) {
 
-				} else if ( dim > 4 ) {
+		console.warn( 'LWOLoader: unknown chunk type: ' + blockID + ' length: ' + length );
 
-					for ( var k = 1; k < dim - 1; k ++ ) {
+		// print the chunk plus some bytes padding either side
+		// printBuffer( this.reader.dv.buffer, this.reader.offset - 20, length + 40 );
 
-						remappedIndices.push( indices[ i ], indices[ i + k ], indices[ i + k + 1 ] );
+		var data = this.reader.getString( length );
 
-					}
+		this.currentForm[ blockID ] = data;
 
-					console.warn( 'LWOLoader: polygons with greater than 4 sides are not supported' );
+	}
 
-				}
+};
 
-				i += dim;
+function DataViewReader( buffer ) {
 
-			} );
+	this.dv = new DataView( buffer );
+	this.offset = 0;
 
-			return remappedIndices;
+}
 
-		},
+DataViewReader.prototype = {
 
-		// NOTE: currently ignoring poly indices and assuming that they are intelligently ordered
-		parseGroups( geometry, geoData ) {
+	constructor: DataViewReader,
 
-			var tags = lwoTree.tags;
-			var matNames = [];
+	size: function () {
 
-			var elemSize = 3;
-			if ( geoData.type === 'lines' ) elemSize = 2;
-			if ( geoData.type === 'points' ) elemSize = 1;
+		return this.dv.buffer.byteLength;
 
-			var remappedIndices = this.splitMaterialIndices( geoData.polygonDimensions, geoData.materialIndices );
+	},
 
-			var indexNum = 0; // create new indices in numerical order
-			var indexPairs = {}; // original indices mapped to numerical indices
+	setOffset( offset ) {
 
-			var prevMaterialIndex;
+		if ( offset > 0 && offset < this.dv.buffer.byteLength ) {
 
-			var prevStart = 0;
-			var currentCount = 0;
+			this.offset = offset;
 
-			for ( var i = 0; i < remappedIndices.length; i += 2 ) {
+		} else {
 
-				var materialIndex = remappedIndices[ i + 1 ];
+			console.error( 'LWOLoader: invalid buffer offset' );
 
-				if ( i === 0 ) matNames[ indexNum ] = tags[ materialIndex ];
+		}
 
-				if ( prevMaterialIndex === undefined ) prevMaterialIndex = materialIndex;
+	},
 
-				if ( materialIndex !== prevMaterialIndex ) {
+	endOfFile: function () {
 
-					var currentIndex;
-					if ( indexPairs[ tags[ prevMaterialIndex ] ] ) {
+		if ( this.offset >= this.size() ) return true;
+		return false;
 
-						currentIndex = indexPairs[ tags[ prevMaterialIndex ] ];
+	},
 
-					} else {
+	skip: function ( length ) {
 
-						currentIndex = indexNum;
-						indexPairs[ tags[ prevMaterialIndex ] ] = indexNum;
-						matNames[ indexNum ] = tags[ prevMaterialIndex ];
-						indexNum ++;
+		this.offset += length;
 
-					}
+	},
 
-					geometry.addGroup( prevStart, currentCount, currentIndex );
+	getUint8: function () {
 
-					prevStart += currentCount;
+		var value = this.dv.getUint8( this.offset );
+		this.offset += 1;
+		return value;
 
-					prevMaterialIndex = materialIndex;
-					currentCount = 0;
+	},
 
-				}
+	getUint16: function () {
 
-				currentCount += elemSize;
+		var value = this.dv.getUint16( this.offset );
+		this.offset += 2;
+		return value;
 
-			}
+	},
 
-			// the loop above doesn't add the last group, do that here.
-			if ( geometry.groups.length > 0 ) {
+	getInt32: function () {
 
-				var currentIndex;
-				if ( indexPairs[ tags[ materialIndex ] ] ) {
+		var value = this.dv.getInt32( this.offset, false );
+		this.offset += 4;
+		return value;
 
-					currentIndex = indexPairs[ tags[ materialIndex ] ];
+	},
 
-				} else {
+	getUint32: function () {
 
-					currentIndex = indexNum;
-					indexPairs[ tags[ materialIndex ] ] = indexNum;
-					matNames[ indexNum ] = tags[ materialIndex ];
+		var value = this.dv.getUint32( this.offset, false );
+		this.offset += 4;
+		return value;
 
-				}
+	},
 
-				geometry.addGroup( prevStart, currentCount, currentIndex );
+	getUint64: function () {
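+		// note: the result is assembled as a standard JavaScript number, so values
+		// above Number.MAX_SAFE_INTEGER ( 2^53 - 1 ) will lose precision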
 
-			}
+		var low, high;
 
-			// Mat names from TAGS chunk, used to build up an array of materials for this geometry
-			geometry.userData.matNames = matNames;
+		high = this.getUint32();
+		low = this.getUint32();
+		return high * 0x100000000 + low;
 
-		},
+	},
 
-		splitMaterialIndices( polygonDimensions, indices ) {
+	getFloat32: function () {
 
-			var remappedIndices = [];
+		var value = this.dv.getFloat32( this.offset, false );
+		this.offset += 4;
+		return value;
 
-			polygonDimensions.forEach( function ( dim, i ) {
+	},
 
-				if ( dim <= 3 ) {
+	getFloat32Array: function ( size ) {
 
-					remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ] );
+		var a = [];
 
-				} else if ( dim === 4 ) {
+		for ( var i = 0; i < size; i ++ ) {
 
-					remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ], indices[ i * 2 ], indices[ i * 2 + 1 ] );
+			a.push( this.getFloat32() );
 
-				} else {
+		}
 
-					 // ignore > 4 for now
-					for ( var k = 0; k < dim - 2; k ++ ) {
+		return a;
 
-						remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ] );
+	},
 
-					}
+	getFloat64: function () {
 
-				}
+		var value = this.dv.getFloat64( this.offset, this.littleEndian );
+		this.offset += 8;
+		return value;
 
-			} );
+	},
 
-			return remappedIndices;
+	getFloat64Array: function ( size ) {
 
-		},
+		var a = [];
 
-		// UV maps:
-		// 1: are defined via index into an array of points, not into a geometry
-		// - the geometry is also defined by an index into this array, but the indexes may not match
-		// 2: there can be any number of UV maps for a single geometry. Here these are combined,
-		// 	with preference given to the first map encountered
-		// 3: UV maps can be partial - that is, defined for only a part of the geometry
-		// 4: UV maps can be VMAP or VMAD (discontinuous, to allow for seams). In practice, most
-		// UV maps are defined as partially VMAP and partially VMAD
-		// VMADs are currently not supported
-		parseUVs( geometry, layer ) {
+		for ( var i = 0; i < size; i ++ ) {
 
-			// start by creating a UV map set to zero for the whole geometry
-			var remappedUVs = Array.from( Array( geometry.attributes.position.count * 2 ), function () {
+			a.push( this.getFloat64() );
 
-				return 0;
+		}
 
-			} );
+		return a;
 
-			for ( var name in layer.uvs ) {
+	},
 
-				var uvs = layer.uvs[ name ].uvs;
-				var uvIndices = layer.uvs[ name ].uvIndices;
+	// get variable-length index data type
+	// VX ::= index[U2] | (index + 0xFF000000)[U4]
+	// If the index value is less than 65,280 (0xFF00), then VX === U2
+	// otherwise VX === U4 with bits 24-31 set
+	// When reading an index, if the first byte encountered is 255 (0xFF), then
+	// the four-byte form is being used and the first byte should be discarded or masked out.
+	getVariableLengthIndex() {
 
-				uvIndices.forEach( function ( i, j ) {
+		var firstByte = this.getUint8();
 
-					remappedUVs[ i * 2 ] = uvs[ j * 2 ];
-					remappedUVs[ i * 2 + 1 ] = uvs[ j * 2 + 1 ];
+		if ( firstByte === 255 ) {
 
-				} );
+			return this.getUint8() * 65536 + this.getUint8() * 256 + this.getUint8();
 
-			}
+		}
 
-			geometry.addAttribute( 'uv', new Float32BufferAttribute( remappedUVs, 2 ) );
+		return firstByte * 256 + this.getUint8();
 
-		},
+	},
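+
+	// Worked example ( byte values are hypothetical ):
+	//   0x01 0x2C            -> 1 * 256 + 44 = 300             ( two-byte form )
+	//   0xFF 0x00 0x01 0x2C  -> 0 * 65536 + 1 * 256 + 44 = 300 ( four-byte form, leading 0xFF discarded )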
 
-		parseMorphTargets( geometry, layer ) {
+	// An ID tag is a sequence of 4 bytes containing 7-bit ASCII values
+	getIDTag() {
 
-			var num = 0;
-			for ( var name in layer.morphTargets ) {
+		return this.getString( 4 );
 
-				var remappedPoints = geometry.attributes.position.array.slice();
+	},
 
-				if ( ! geometry.morphAttributes.position ) geometry.morphAttributes.position = [];
+	getString: function ( size ) {
 
-				var morphPoints = layer.morphTargets[ name ].points;
-				var morphIndices = layer.morphTargets[ name ].indices;
-				var type = layer.morphTargets[ name ].type;
+		if ( size === 0 ) return;
 
-				morphIndices.forEach( function ( i, j ) {
+		// note: safari 9 doesn't support Uint8Array.indexOf; create intermediate array instead
+		var a = [];
 
-					if ( type === 'relative' ) {
+		if ( size ) {
 
-						remappedPoints[ i * 3 ] += morphPoints[ j * 3 ];
-						remappedPoints[ i * 3 + 1 ] += morphPoints[ j * 3 + 1 ];
-						remappedPoints[ i * 3 + 2 ] += morphPoints[ j * 3 + 2 ];
+			for ( var i = 0; i < size; i ++ ) {
 
-					} else {
+				a[ i ] = this.getUint8();
 
-						remappedPoints[ i * 3 ] = morphPoints[ j * 3 ];
-						remappedPoints[ i * 3 + 1 ] = morphPoints[ j * 3 + 1 ];
-						remappedPoints[ i * 3 + 2 ] = morphPoints[ j * 3 + 2 ];
+			}
 
-					}
+		} else {
 
-				} );
+			var currentChar;
+			var len = 0;
 
-				geometry.morphAttributes.position[ num ] = new Float32BufferAttribute( remappedPoints, 3 );
-				geometry.morphAttributes.position[ num ].name = name;
+			while ( currentChar !== 0 ) {
 
-				num ++;
+				currentChar = this.getUint8();
+				if ( currentChar !== 0 ) a.push( currentChar );
+				len ++;
 
 			}
 
-		},
-
-	};
-
-	// parse data from the IFF buffer.
-	// LWO3 files are in IFF format and can contain the following data types, referred to by shorthand codes
-	//
-	// ATOMIC DATA TYPES
-	// ID Tag - 4x 7 bit uppercase ASCII chars: ID4
-	// signed integer, 1, 2, or 4 byte length: I1, I2, I4
-	// unsigned integer, 1, 2, or 4 byte length: U1, U2, U4
-	// float, 4 byte length: F4
-	// string, series of ASCII chars followed by null byte (If the length of the string including the null terminating byte is odd, an extra null is added so that the data that follows will begin on an even byte boundary): S0
-	//
-	//  COMPOUND DATA TYPES
-	// Variable-length Index (index into an array or collection): U2 or U4 : VX
-	// Color (RGB): F4 + F4 + F4: COL12
-	// Coordinate (x, y, z): F4 + F4 + F4: VEC12
-	// Percentage F4 data type from 0->1 with 1 = 100%: FP4
-	// Angle in radian F4: ANG4
-	// Filename (string) S0: FNAM0
-	// XValue F4 + index (VX) + optional envelope( ENVL ): XVAL
-	// XValue vector VEC12 + index (VX) + optional envelope( ENVL ): XVAL3
-	//
-	// The IFF file is arranged in chunks:
-	// CHUNK = ID4 + length (U4) + length X bytes of data + optional 0 pad byte
-	// optional 0 pad byte is there to ensure chunk ends on even boundary, not counted in size
-
-	// Chunks are combined in Forms (collections of chunks)
-	// FORM = string 'FORM' (ID4) + length (U4) + type (ID4) + optional ( CHUNK | FORM )
-
-	// CHUNKS and FORMS are collectively referred to as blocks
-
-	// The entire file is contained in one top level FORM
-	function IFFParser() {}
-
-	IFFParser.prototype = {
-
-		constructor: IFFParser,
-
-		parse: function ( buffer ) {
-
-			// dump the whole buffer as a string for testing
-			// printBuffer( buffer );
-
-			this.reader = new DataViewReader( buffer );
-
-			this.tree = {
-				materials: {},
-				layers: [],
-				tags: [],
-				textures: [],
-			};
+			if ( ! isEven( len + 1 ) ) this.getUint8(); // if the string including its terminating nullbyte has an odd length, an extra padbyte follows - skip it
 
-			// start out at the top level to add any data before first layer is encountered
-			this.currentLayer = this.tree;
-			this.currentForm = this.tree;
+		}
 
-			// parse blocks until end of file is reached
-			while ( ! this.reader.endOfFile() ) this.parseBlock();
+		return LoaderUtils.decodeText( new Uint8Array( a ) );
 
-			return this.tree;
+	},
 
-		},
+	getStringArray: function ( size ) {
 
-		parseBlock() {
+		var a = this.getString( size );
+		a = a.split( '\0' );
 
-			var blockID = this.reader.getIDTag();
-			var length = this.reader.getUint32(); // size of data in bytes
-			if ( this.tree.format === 'LWO2' && length > this.reader.dv.byteLength - this.reader.offset ) {
+		return a.filter( Boolean ); // return array with any empty strings removed
 
-				this.reader.offset -= 4;
-				length = this.reader.getUint16();
+	}
 
-			}
+};
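+
+// Minimal usage sketch ( hypothetical buffer, for illustration only ): an IFF
+// block starts with a 4 byte ID tag followed by a big-endian U4 length, so
+// skipping over an unwanted block looks like
+//
+// var reader = new DataViewReader( arrayBuffer );
+// var tag = reader.getIDTag();
+// var length = reader.getUint32(); // size of the block's data in bytes
+// reader.skip( length );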
 
+// ************** DEBUGGER  **************
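+//
+// Usage sketch ( assumes, as the parseForm code above suggests, that IFFParser
+// exposes its Debugger instance as this.debugger; it is disabled by default ):
+//
+// var parser = new IFFParser();
+// parser.debugger.enable();
+// parser.parse( arrayBuffer ); // logs the FORM / CHUNK hierarchy to the console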
 
-			// Data types may be found in either LWO2 OR LWO3 spec
-			switch ( blockID ) {
+function Debugger( ) {
 
-				case 'FORM': // form blocks may consist of sub -chunks or sub-forms
-					this.parseForm( length );
-					break;
+	this.active = false;
+	this.depth = 0;
+	this.formList = [];
 
-					// SKIPPED CHUNKS
-
-				// MISC skipped
-				case 'ICON': // Thumbnail Icon Image
-				case 'VMPA': // Vertex Map Parameter
-				case 'BBOX': // bounding box
-				// case 'VMMD':
-				// case 'VTYP':
-
-				// normal maps can be specified, normally on models imported from other applications. Currently ignored
-				case 'NORM':
-
-				// ENVL FORM skipped
-				case 'PRE ':
-				case 'POST':
-				case 'KEY ':
-				case 'SPAN':
-
-				// CLIP FORM skipped
-				case 'TIME':
-				case 'CLRS':
-				case 'CLRA':
-				case 'FILT':
-				case 'DITH':
-				case 'CONT':
-				case 'BRIT':
-				case 'SATR':
-				case 'HUE ':
-				case 'GAMM':
-				case 'NEGA':
-				case 'IFLT':
-				case 'PFLT':
-
-				// Image Map Layer skipped
-				case 'PROJ':
-				case 'AXIS':
-				case 'AAST':
-				case 'PIXB':
-				case 'STCK':
-
-				// Procedural Textures skipped
-				case 'VALU':
-
-				// Gradient Textures skipped
-				case 'PNAM':
-				case 'INAM':
-				case 'GRST':
-				case 'GREN':
-				case 'GRPT':
-				case 'FKEY':
-				case 'IKEY':
-
-				// Texture Mapping Form skipped
-				case 'CSYS':
-
-					// Surface CHUNKs skipped
-				case 'OPAQ': // top level 'opacity' checkbox
-				case 'CMAP': // clip map
-
-				// Surface node CHUNKS skipped
-				// These mainly specify the node editor setup in LW
-				case 'NLOC':
-				case 'NZOM':
-				case 'NVER':
-				case 'NSRV':
-				case 'NCRD':
-				case 'NMOD':
-				case 'NPRW':
-				case 'NPLA':
-				case 'VERS':
-				case 'ENUM':
-				case 'TAG ':
-
-				// Car Material CHUNKS
-				case 'CGMD':
-				case 'CGTY':
-				case 'CGST':
-				case 'CGEN':
-				case 'CGTS':
-				case 'CGTE':
-				case 'OSMP':
-				case 'OMDE':
-				case 'OUTR':
-					this.reader.skip( length );
-					break;
+}
 
-				case 'FLAG':
-					if ( this.tree.format === 'LWO2' ) {
+Debugger.prototype = {
 
-						this.reader.skip( 4 ); // not suported
+	constructor: Debugger,
 
-					} else {
+	enable: function () {
 
-						this.reader.skip( length );
+		this.active = true;
 
-					}
-					break;
-				// Skipped LWO2 chunks
-				case 'DIFF': // diffuse level, may be necessary to modulate COLR with this
-					this.currentSurface.diffusePower = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
-				case 'TRNL':
-				case 'REFL':
-				case 'GLOS':
-				case 'SHRP':
-				case 'RFOP':
-				case 'RSAN':
-				case 'TROP':
-				case 'RBLR':
-				case 'TBLR':
-				case 'CLRH':
-				case 'CLRF':
-				case 'ADTR':
-				case 'GLOW':
-				case 'LINE':
-				case 'ALPH':
-				case 'VCOL':
-				case 'ENAB':
-					this.reader.skip( length );
-					break;
-				case 'SURF':
-					if ( this.tree.format === 'LWO2' ) {
+	},
 
-						this.parseSurfaceLwo2( length );
+	log: function () {
 
-					}
-					break;
-				case 'CLIP':
-					if ( this.tree.format === 'LWO2' ) {
+		if ( ! this.active ) return;
 
-						this.parseClipLwo2( length );
+		var nodeType;
 
-					}
-					break;
-				// Texture node chunks (not in spec)
-				case 'IPIX': // usePixelBlending
-				case 'IMIP': // useMipMaps
-				case 'IMOD': // imageBlendingMode
-				case 'AMOD': // unknown
-				case 'IINV': // imageInvertAlpha
-				case 'INCR': // imageInvertColor
-				case 'IAXS': // imageAxis ( for non-UV maps)
-				case 'IFOT': // imageFallofType
-				case 'ITIM': // timing for animated textures
-				case 'IWRL':
-				case 'IUTI':
-				case 'IINX':
-				case 'IINY':
-				case 'IINZ':
-				case 'IREF': // possibly a VX for reused texture nodes
-					if ( length === 4 ) this.currentNode[ blockID ] = this.reader.getInt32();
-					else this.reader.skip( length );
-					break;
+		switch ( this.node ) {
 
-				case 'OTAG':
-					this.parseObjectTag();
-					break;
+			case 0:
+				nodeType = "FORM";
+				break;
 
-				case 'LAYR':
-					this.parseLayer( length );
-					break;
+			case 1:
+				nodeType = "CHK";
+				break;
 
-				case 'PNTS':
-					this.parsePoints( length );
-					break;
+			case 2:
+				nodeType = "S-CHK";
+				break;
 
-				case 'VMAP':
-					this.parseVertexMapping( length );
-					break;
+		}
 
-				case 'POLS':
-					this.parsePolygonList( length );
-					break;
+		console.log(
+			"| ".repeat( this.depth ) +
+			nodeType,
+			this.nodeID,
+			`( ${this.offset} ) -> ( ${this.dataOffset + this.length} )`,
+			( ( this.node == 0 ) ? " {" : "" ),
+			( ( this.skipped ) ? "SKIPPED" : "" ),
+			( ( this.node == 0 && this.skipped ) ? "}" : "" )
+		);
 
-				case 'TAGS':
-					this.parseTagStrings( length );
-					break;
+		if ( this.node == 0 && ! this.skipped ) {
 
-				case 'PTAG':
-					this.parsePolygonTagMapping( length );
-					break;
+			this.depth += 1;
+			this.formList.push( this.dataOffset + this.length );
 
-				case 'VMAD':
-					this.parseVertexMapping( length, true );
-					break;
+		}
 
-				// Misc CHUNKS
-				case 'DESC': // Description Line
-					this.currentForm.description = this.reader.getString();
-					break;
+		this.skipped = false;
 
-				case 'TEXT':
-				case 'CMNT':
-				case 'NCOM':
-					this.currentForm.comment = this.reader.getString();
-					break;
+	},
 
-					// Envelope Form
-				case 'NAME':
-					this.currentForm.channelName = this.reader.getString();
-					break;
+	closeForms: function () {
 
-					// Image Map Layer
+		if ( ! this.active ) return;
 
-				case 'WRAP':
-					this.currentForm.wrap = { w: this.reader.getUint16(), h: this.reader.getUint16() };
-					break;
+		for ( var i = this.formList.length - 1; i >= 0; i -- ) {
 
-				case 'IMAG':
-					var index = this.reader.getVariableLengthIndex();
-					this.currentForm.imageIndex = index;
-					break;
+			if ( this.offset >= this.formList[ i ] ) {
 
-					// Texture Mapping Form
+				this.depth -= 1;
+				console.log( "| ".repeat( this.depth ) + "}" );
+				this.formList.splice( - 1, 1 );
 
-				case 'OREF':
-					this.currentForm.referenceObject = this.reader.getString();
-					break;
+			}
 
-				case 'ROID':
-					this.currentForm.referenceObjectID = this.reader.getUint32();
-					break;
+		}
 
-					// Surface Blocks
+	}
 
-				case 'SSHN':
-					this.currentSurface.surfaceShaderName = this.reader.getString();
-					break;
+};
 
-				case 'AOVN':
-					this.currentSurface.surfaceCustomAOVName = this.reader.getString();
-					break;
+// ************** UTILITY FUNCTIONS **************
 
-					// Nodal Blocks
+function isEven( num ) {
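+	// note: returns num % 2, i.e. 0 for even numbers and 1 for odd numbers, so the
+	// result is truthy for odd values - the padding checks in getString() and
+	// stringOffset() rely on this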
 
-				case 'NSTA':
-					this.currentForm.disabled = this.reader.getUint16();
-					break;
+	return num % 2;
 
-				case 'NRNM':
-					this.currentForm.realName = this.reader.getString();
-					break;
+}
 
-				case 'NNME':
-					this.currentForm.refName = this.reader.getString();
-					this.currentSurface.nodes[ this.currentForm.refName ] = this.currentForm;
-					break;
+// calculate the length of the string in the buffer
+// this will be string.length + nullbyte + optional padbyte to make the length even
+function stringOffset( string ) {
 
-				// Nodal Blocks : connections
-				case 'INME':
-					if ( ! this.currentForm.nodeName ) this.currentForm.nodeName = [];
-					this.currentForm.nodeName.push( this.reader.getString() );
-					break;
+	return string.length + 1 + ( isEven( string.length + 1 ) ? 1 : 0 );
 
-				case 'IINN':
-					if ( ! this.currentForm.inputNodeName ) this.currentForm.inputNodeName = [];
-					this.currentForm.inputNodeName.push( this.reader.getString() );
-					break;
+}
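+
+// e.g. stringOffset( 'Color' ) = 5 + 1 + 0 = 6 ( 'Color' plus its nullbyte is already even )
+//      stringOffset( 'Spec' )  = 4 + 1 + 1 = 6 ( 'Spec' plus its nullbyte is odd, so a padbyte follows )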
 
-				case 'IINM':
-					if ( ! this.currentForm.inputName ) this.currentForm.inputName = [];
-					this.currentForm.inputName.push( this.reader.getString() );
-					break;
+// for testing purposes, dump buffer to console
+// printBuffer( this.reader.dv.buffer, this.reader.offset, length );
+function printBuffer( buffer, from, to ) {
 
-				case 'IONM':
-					if ( ! this.currentForm.inputOutputName ) this.currentForm.inputOutputName = [];
-					this.currentForm.inputOutputName.push( this.reader.getString() );
-					break;
+	console.log( LoaderUtils.decodeText( new Uint8Array( buffer, from, to ) ) );
 
-				case 'FNAM':
-					this.currentForm.fileName = this.reader.getString();
-					break;
+}
 
-				case 'CHAN': // NOTE: ENVL Forms may also have CHAN chunk, however ENVL is currently ignored
-					if ( length === 4 ) this.currentForm.textureChannel = this.reader.getIDTag();
-					else this.reader.skip( length );
-					break;
+var lwoTree;
 
-					// LWO2 Spec chunks: these are needed since the SURF FORMs are often in LWO2 format
+var LWOLoader = function ( manager, parameters ) {
 
-				case 'SMAN':
-					var maxSmoothingAngle = this.reader.getFloat32();
-					this.currentSurface.attributes.smooth = ( maxSmoothingAngle < 0 ) ? false : true;
-					break;
+	this.manager = ( manager !== undefined ) ? manager : DefaultLoadingManager;
 
-				// LWO2: Basic Surface Parameters
-				case 'COLR':
-					this.currentSurface.attributes.Color = {};
-					this.currentSurface.attributes.Color.value = this.reader.getFloat32Array( 3 );
-					this.reader.skip( 2 ); // VX: envelope
-					break;
+	parameters = parameters || {};
 
-				case 'LUMI':
-					this.currentSurface.attributes.luminosityLevel = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+	this.resourcePath = ( parameters.resourcePath !== undefined ) ? parameters.resourcePath : undefined;
 
-				case 'SPEC':
-					this.currentSurface.attributes.specularLevel = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+};
 
-				case 'TRAN':
-					this.currentSurface.attributes.opacity = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+LWOLoader.prototype = {
 
-				case 'BUMP':
-					this.currentSurface.attributes.bumpStrength = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+	constructor: LWOLoader,
 
-				case 'SIDE':
-					this.currentSurface.attributes.side = this.reader.getUint16();
-					break;
+	crossOrigin: 'anonymous',
 
-				case 'RIMG':
-					this.currentSurface.attributes.reflectionMap = this.reader.getVariableLengthIndex();
-					break;
+	load: function ( url, onLoad, onProgress, onError ) {
 
-				case 'RIND':
-					this.currentSurface.attributes.refractiveIndex = this.reader.getFloat32();
-					this.reader.skip( 2 );
-					break;
+		var self = this;
 
-				case 'TIMG':
-					this.currentSurface.attributes.refractionMap = this.reader.getVariableLengthIndex();
-					break;
+		var path = ( self.path === undefined ) ? extractParentUrl( url, 'Objects' ) : self.path;
 
-				case 'IMAP':
-					if ( this.tree.format === 'LWO2' ) {
+		// give the mesh a default name based on the filename
+		var modelName = url.split( path ).pop().split( '.' )[ 0 ];
 
-						this.reader.skip( 2 );
+		var loader = new FileLoader( this.manager );
+		loader.setPath( self.path );
+		loader.setResponseType( 'arraybuffer' );
 
-					} else {
+		loader.load( url, function ( buffer ) {
 
-						this.currentSurface.attributes.imageMapIndex = this.reader.getUint32();
+			// console.time( 'Total parsing: ' );
+			onLoad( self.parse( buffer, path, modelName ) );
+			// console.timeEnd( 'Total parsing: ' );
 
-					}
-					break;
+		}, onProgress, onError );
 
-				case 'IUVI': // uv channel name
-					this.currentNode.UVChannel = this.reader.getString( length );
-					break;
+	},
 
-				case 'IUTL': // widthWrappingMode: 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
-					this.currentNode.widthWrappingMode = this.reader.getUint32();
-					break;
-				case 'IVTL': // heightWrappingMode
-					this.currentNode.heightWrappingMode = this.reader.getUint32();
-					break;
+	setCrossOrigin: function ( value ) {
 
-				// LWO2 USE
-				case 'BLOK':
-					// skip
-					break;
+		this.crossOrigin = value;
+		return this;
 
-				default:
-					this.parseUnknownCHUNK( blockID, length );
+	},
 
-			}
+	setPath: function ( value ) {
 
-			if ( this.reader.offset >= this.currentFormEnd ) {
+		this.path = value;
+		return this;
 
-				this.currentForm = this.parentForm;
+	},
 
-			}
+	setResourcePath: function ( value ) {
 
-		},
-
-
-		///
-		// FORM PARSING METHODS
-		///
-
-		// Forms are organisational and can contain any number of sub chunks and sub forms
-		// FORM ::= 'FORM'[ID4], length[U4], type[ID4], ( chunk[CHUNK] | form[FORM] ) * }
-		parseForm( length ) {
-
-			var type = this.reader.getIDTag();
-
-			switch ( type ) {
-
-				// SKIPPED FORMS
-				// if skipForm( length ) is called, the entire form and any sub forms and chunks are skipped
-
-				case 'ISEQ': // Image sequence
-				case 'ANIM': // plug in animation
-				case 'STCC': // Color-cycling Still
-				case 'VPVL':
-				case 'VPRM':
-				case 'NROT':
-				case 'WRPW': // image wrap w ( for cylindrical and spherical projections)
-				case 'WRPH': // image wrap h
-				case 'FUNC':
-				case 'FALL':
-				case 'OPAC':
-				case 'GRAD': // gradient texture
-				case 'ENVS':
-				case 'VMOP':
-				case 'VMBG':
-
-				// Car Material FORMS
-				case 'OMAX':
-				case 'STEX':
-				case 'CKBG':
-				case 'CKEY':
-				case 'VMLA':
-				case 'VMLB':
-					this.skipForm( length ); // not currently supported
-					break;
+		this.resourcePath = value;
+		return this;
 
-				// if break; is called directly, the position in the lwoTree is not created
-				// any sub chunks and forms are added to the parent form instead
-				case 'META':
-				case 'NNDS':
-				case 'NODS':
-				case 'NDTA':
-				case 'ADAT':
-				case 'AOVS':
-				case 'BLOK':
-
-				// used by texture nodes
-				case 'IBGC': // imageBackgroundColor
-				case 'IOPC': // imageOpacity
-				case 'IIMG': // hold reference to image path
-				case 'TXTR':
-					// this.setupForm( type, length );
-					break;
+	},
 
-				case 'IFAL': // imageFallof
-				case 'ISCL': // imageScale
-				case 'IPOS': // imagePosition
-				case 'IROT': // imageRotation
-				case 'IBMP':
-				case 'IUTD':
-				case 'IVTD':
-					this.parseTextureNodeAttribute( type );
-					break;
+	parse: function ( iffBuffer, path, modelName ) {
 
-				case 'LWO2':
-					this.tree.format = type;
-					break;
+		lwoTree = new IFFParser().parse( iffBuffer );
 
-				case 'LWO3':
-					this.tree.format = type;
-					break;
+		// console.log( 'lwoTree', lwoTree );
 
-				case 'ENVL':
-					this.parseEnvelope( length );
-					break;
+		var textureLoader = new TextureLoader( this.manager ).setPath( this.resourcePath || path ).setCrossOrigin( this.crossOrigin );
 
-					// CLIP FORM AND SUB FORMS
+		return new LWOTreeParser( textureLoader ).parse( modelName );
 
-				case 'CLIP':
-					if ( this.tree.format === 'LWO2' ) {
+	}
 
-						this.parseForm( length );
+};
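+
+// Basic usage sketch ( the model path and the surrounding scene variable are
+// illustrative, not part of this file ):
+//
+// var loader = new LWOLoader();
+// loader.load( 'Objects/LWO3/Demo.lwo', function ( lwo ) {
+//
+// 	// parse() resolves to { materials, meshes }
+// 	lwo.meshes.forEach( function ( mesh ) { scene.add( mesh ); } );
+//
+// } );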
 
-					} else {
+// Parse the lwoTree object
+function LWOTreeParser( textureLoader ) {
 
-						this.parseClip( length );
+	this.textureLoader = textureLoader;
 
-					}
-					break;
+}
 
-				case 'STIL':
-					this.parseImage();
-					break;
+LWOTreeParser.prototype = {
 
-				case 'XREF': // clone of another STIL
-					this.reader.skip( 8 ); // unknown
-					this.currentForm.referenceTexture = {
-						index: this.reader.getUint32(),
-						refName: this.reader.getString() // internal unique ref
-					};
-					break;
+	constructor: LWOTreeParser,
 
-					// Not in spec, used by texture nodes
+	parse: function ( modelName ) {
 
-				case 'IMST':
-					this.parseImageStateForm( length );
-					break;
+		this.materials = new MaterialParser( this.textureLoader ).parse();
+		this.defaultLayerName = modelName;
 
-					// SURF FORM AND SUB FORMS
+		this.meshes = this.parseLayers();
 
-				case 'SURF':
-					this.parseSurfaceForm( length );
-					break;
+		return {
+			materials: this.materials,
+			meshes: this.meshes,
+		};
 
-				case 'VALU': // Not in spec
-					this.parseValueForm( length );
-					break;
+	},
 
-				case 'NTAG':
-					this.parseSubNode( length );
-					break;
+	parseLayers() {
 
-				case 'ATTR': // BSDF Node Attributes
-				case 'SATR': // Standard Node Attributes
-					this.setupForm( 'attributes', length );
-					break;
+		// array of all meshes for building hierarchy
+		var meshes = [];
 
-				case 'NCON':
-					this.parseConnections( length );
-					break;
+		// final array containing meshes with scene graph hierarchy set up
+		var finalMeshes = [];
 
-				case 'SSHA':
-					this.parentForm = this.currentForm;
-					this.currentForm = this.currentSurface;
-					this.setupForm( 'surfaceShader', length );
-					break;
+		var geometryParser = new GeometryParser();
 
-				case 'SSHD':
-					this.setupForm( 'surfaceShaderData', length );
-					break;
+		var self = this;
+		lwoTree.layers.forEach( function ( layer ) {
 
-				case 'ENTR': // Not in spec
-					this.parseEntryForm( length );
-					break;
+			var geometry = geometryParser.parse( layer.geometry, layer );
 
-					// Image Map Layer
+			var mesh = self.parseMesh( geometry, layer );
 
-				case 'IMAP':
-					this.parseImageMap( length );
-					break;
+			meshes[ layer.number ] = mesh;
 
-				case 'TAMP':
-					this.parseXVAL( 'amplitude', length );
-					break;
+			if ( layer.parent === - 1 ) finalMeshes.push( mesh );
+			else meshes[ layer.parent ].add( mesh );
 
-					//Texture Mapping Form
 
-				case 'TMAP':
-					this.setupForm( 'textureMap', length );
-					break;
+		} );
 
-				case 'CNTR':
-					this.parseXVAL3( 'center', length );
-					break;
+		this.applyPivots( finalMeshes );
 
-				case 'SIZE':
-					this.parseXVAL3( 'scale', length );
-					break;
+		return finalMeshes;
 
-				case 'ROTA':
-					this.parseXVAL3( 'rotation', length );
-					break;
+	},
 
-				default:
-					this.parseUnknownForm( type, length );
+	parseMesh( geometry, layer ) {
 
-			}
+		var mesh;
 
-		},
+		var materials = this.getMaterials( geometry.userData.matNames, layer.geometry.type );
 
-		setupForm( type, length ) {
+		this.duplicateUVs( geometry, materials );
 
-			if ( ! this.currentForm ) this.currentForm = this.currentNode;
+		if ( layer.geometry.type === 'points' ) mesh = new Points( geometry, materials );
+		else if ( layer.geometry.type === 'lines' ) mesh = new LineSegments( geometry, materials );
+		else mesh = new Mesh( geometry, materials );
 
-			this.currentFormEnd = this.reader.offset + length;
-			this.parentForm = this.currentForm;
+		if ( layer.name ) mesh.name = layer.name;
+		else mesh.name = this.defaultLayerName + '_layer_' + layer.number;
 
-			if ( ! this.currentForm[ type ] ) {
+		mesh.userData.pivot = layer.pivot;
 
-				this.currentForm[ type ] = {};
-				this.currentForm = this.currentForm[ type ];
+		return mesh;
 
+	},
 
-			} else {
+	// TODO: may need to be reversed in z to convert LWO to three.js coordinates
+	applyPivots( meshes ) {
 
-				// should never see this unless there's a bug in the reader
-				console.warn( 'LWOLoader: form already exists on parent: ', type, this.currentForm );
+		meshes.forEach( function ( mesh ) {
 
-				this.currentForm = this.currentForm[ type ];
+			mesh.traverse( function ( child ) {
 
-			}
+				var pivot = child.userData.pivot;
 
+				child.position.x += pivot[ 0 ];
+				child.position.y += pivot[ 1 ];
+				child.position.z += pivot[ 2 ];
 
-		},
+				if ( child.parent ) {
 
-		skipForm( length ) {
+					var parentPivot = child.parent.userData.pivot;
 
-			this.reader.skip( length - 4 );
+					child.position.x -= parentPivot[ 0 ];
+					child.position.y -= parentPivot[ 1 ];
+					child.position.z -= parentPivot[ 2 ];
 
-		},
+				}
 
-		parseUnknownForm( type, length ) {
+			} );
 
-			console.warn( 'LWOLoader: unknown FORM encountered: ' + type, length );
+		} );
 
-			printBuffer( this.reader.dv.buffer, this.reader.offset, length - 4 );
-			this.reader.skip( length - 4 );
+	},
 
-		},
+	getMaterials( namesArray, type ) {
 
-		parseSurfaceForm( length ) {
+		var materials = [];
 
-			this.reader.skip( 8 ); // unknown Uint32 x2
+		var self = this;
 
-			var name = this.reader.getString();
+		namesArray.forEach( function ( name, i ) {
 
-			var surface = {
-				attributes: {}, // LWO2 style non-node attributes will go here
-				connections: {},
-				name: name,
-				inputName: name,
-				nodes: {},
-				source: this.reader.getString(),
-			};
+			materials[ i ] = self.getMaterialByName( name );
 
-			this.tree.materials[ name ] = surface;
-			this.currentSurface = surface;
+		} );
 
-			this.parentForm = this.tree.materials;
-			this.currentForm = surface;
-			this.currentFormEnd = this.reader.offset + length;
+		// convert materials to line or point mats if required
+		if ( type === 'points' || type === 'lines' ) {
 
-		},
+			materials.forEach( function ( mat, i ) {
 
-		parseSurfaceLwo2( length ) {
+				var spec = {
+					color: mat.color,
+				};
 
-			var name = this.reader.getString();
+				if ( type === 'points' ) {
 
-			var surface = {
-				attributes: {}, // LWO2 style non-node attributes will go here
-				connections: {},
-				name: name,
-				nodes: {},
-				source: this.reader.getString(),
-			};
+					spec.size = 0.1;
+					spec.map = mat.map;
+					spec.morphTargets = mat.morphTargets;
+					materials[ i ] = new PointsMaterial( spec );
 
-			this.tree.materials[ name ] = surface;
-			this.currentSurface = surface;
+				} else if ( type === 'lines' ) {
 
-			this.parentForm = this.tree.materials;
-			this.currentForm = surface;
-			this.currentFormEnd = this.reader.offset + length;
+					materials[ i ] = new LineBasicMaterial( spec );
 
-		},
+				}
 
-		parseSubNode( length ) {
+			} );
 
-			// parse the NRNM CHUNK of the subnode FORM to get
-			// a meaningful name for the subNode
-			// some subnodes can be renamed, but Input and Surface cannot
+		}
 
-			this.reader.skip( 8 ); // NRNM + length
-			var name = this.reader.getString();
+		// if there is only one material, return that directly instead of array
+		var filtered = materials.filter( Boolean );
+		if ( filtered.length === 1 ) return filtered[ 0 ];
 
-			var node = {
-				name: name
-			};
-			this.currentForm = node;
-			this.currentNode = node;
+		return materials;
+
+	},
+
+	getMaterialByName( name ) {
 
-			this.currentFormEnd = this.reader.offset + length;
+		return this.materials.filter( function ( m ) {
 
+			return m.name === name;
 
-		},
+		} )[ 0 ];
 
-		// collect attributes from all nodes at the top level of a surface
-		parseConnections( length ) {
+	},
 
-			this.currentFormEnd = this.reader.offset + length;
-			this.parentForm = this.currentForm;
+	// If the material has an aoMap, duplicate UVs
+	duplicateUVs( geometry, materials ) {
 
-			this.currentForm = this.currentSurface.connections;
+		var duplicateUVs = false;
 
-		},
+		if ( ! Array.isArray( materials ) ) {
 
-		// surface node attribute data, e.g. specular, roughness etc
-		parseEntryForm( length ) {
+			if ( materials.aoMap ) duplicateUVs = true;
 
-			this.reader.skip( 8 ); // NAME + length
-			var name = this.reader.getString();
-			this.currentForm = this.currentNode.attributes;
+		} else {
 
-			this.setupForm( name, length );
+			materials.forEach( function ( material ) {
 
-		},
+				if ( material.aoMap ) duplicateUVs = true;
 
-		// parse values from material - doesn't match up to other LWO3 data types
-		// sub form of entry form
-		parseValueForm() {
+			} );
+
+		}
 
-			this.reader.skip( 8 ); // unknown + length
+		if ( ! duplicateUVs ) return;
 
-			var valueType = this.reader.getString();
+		geometry.addAttribute( 'uv2', new BufferAttribute( geometry.attributes.uv.array, 2 ) );
 
-			if ( valueType === 'double' ) {
+	},
 
-				this.currentForm.value = this.reader.getUint64();
+};
 
-			} else if ( valueType === 'int' ) {
+function MaterialParser( textureLoader ) {
 
-				this.currentForm.value = this.reader.getUint32();
+	this.textureLoader = textureLoader;
 
-			} else if ( valueType === 'vparam' ) {
+}
 
-				this.reader.skip( 24 );
-				this.currentForm.value = this.reader.getFloat64();
+MaterialParser.prototype = {
 
-			} else if ( valueType === 'vparam3' ) {
+	constructor: MaterialParser,
 
-				this.reader.skip( 24 );
-				this.currentForm.value = this.reader.getFloat64Array( 3 );
+	parse: function () {
 
+		var materials = [];
+		this.textures = {};
+
+		for ( var name in lwoTree.materials ) {
+
+			if ( lwoTree.format === 'LWO3' ) {
+
+				materials.push( this.parseMaterial( lwoTree.materials[ name ], name, lwoTree.textures ) );
+
+			} else if ( lwoTree.format === 'LWO2' ) {
+
+				materials.push( this.parseMaterialLwo2( lwoTree.materials[ name ], name, lwoTree.textures ) );
 
 			}
 
-		},
+		}
 
-		// holds various data about texture node image state
-		// Data other than mipMapLevel unknown
-		parseImageStateForm() {
+		return materials;
 
-			this.reader.skip( 8 ); // unknown
+	},
 
-			this.currentForm.mipMapLevel = this.reader.getFloat32();
+	parseMaterial( materialData, name, textures ) {
 
-		},
+		var params = {
+			name: name,
+			side: this.getSide( materialData.attributes ),
+			flatShading: this.getSmooth( materialData.attributes ),
+		};
 
-		// LWO2 style image data node OR LWO3 textures defined at top level in editor (not as SURF node)
-		parseImageMap( length ) {
+		var connections = this.parseConnections( materialData.connections, materialData.nodes );
 
-			this.currentFormEnd = this.reader.offset + length;
-			this.parentForm = this.currentForm;
+		var maps = this.parseTextureNodes( connections.maps );
 
-			if ( ! this.currentForm.maps ) this.currentForm.maps = [];
+		this.parseAttributeImageMaps( connections.attributes, textures, maps, materialData.maps );
 
-			var map = {};
-			this.currentForm.maps.push( map );
-			this.currentForm = map;
+		var attributes = this.parseAttributes( connections.attributes, maps );
 
-			this.reader.skip( 10 ); // unknown, could be an issue if it contains a VX
+		this.parseEnvMap( connections, maps, attributes );
 
-		},
+		params = Object.assign( maps, params );
+		params = Object.assign( params, attributes );
 
-		parseTextureNodeAttribute( type ) {
+		var materialType = this.getMaterialType( connections.attributes );
 
-			this.reader.skip( 28 ); // FORM + length + VPRM + unknown + Uint32 x2 + float32
+		return new materialType( params );
 
-			this.reader.skip( 20 ); // FORM + length + VPVL + float32 + Uint32
+	},
 
-			switch ( type ) {
+	parseMaterialLwo2( materialData, name/*, textures*/ ) {
 
-				case 'ISCL':
-					this.currentNode.scale = this.reader.getFloat32Array( 3 );
-					break;
-				case 'IPOS':
-					this.currentNode.position = this.reader.getFloat32Array( 3 );
-					break;
-				case 'IROT':
-					this.currentNode.rotation = this.reader.getFloat32Array( 3 );
-					break;
-				case 'IFAL':
-					this.currentNode.falloff = this.reader.getFloat32Array( 3 );
-					break;
+		var params = {
+			name: name,
+			side: this.getSide( materialData.attributes ),
+			flatShading: this.getSmooth( materialData.attributes ),
+		};
 
-				case 'IBMP':
-					this.currentNode.amplitude = this.reader.getFloat32();
-					break;
-				case 'IUTD':
-					this.currentNode.uTiles = this.reader.getFloat32();
-					break;
-				case 'IVTD':
-					this.currentNode.vTiles = this.reader.getFloat32();
-					break;
+		var attributes = this.parseAttributes( materialData.attributes, {} );
+		params = Object.assign( params, attributes );
+		return new MeshPhongMaterial( params );
 
-			}
+	},
 
-			this.reader.skip( 2 ); // unknown
+	// Note: converting from left to right handed coords by switching x -> -x in vertices, and
+	// then switching mat FrontSide -> BackSide
+	// NB: this means that FrontSide and BackSide have been switched!
+	getSide( attributes ) {
 
+		if ( ! attributes.side ) return BackSide;
 
-		},
+		switch ( attributes.side ) {
 
-		// ENVL forms are currently ignored
-		parseEnvelope( length ) {
+			case 0:
+			case 1:
+				return BackSide;
+			case 2: return FrontSide;
+			case 3: return DoubleSide;
 
-			this.reader.skip( length - 4 ); // skipping  entirely for now
+		}
 
-		},
+	},
 
-		///
-		// CHUNK PARSING METHODS
-		///
+	getSmooth( attributes ) {
 
-		// clips can either be defined inside a surface node, or at the top
-		// level and they have a different format in each case
-		parseClip( length ) {
+		if ( ! attributes.smooth ) return true;
+		return ! attributes.smooth;
 
-			var tag = this.reader.getIDTag();
+	},
 
-			// inside surface node
-			if ( tag === 'FORM' ) {
+	parseConnections( connections, nodes ) {
 
-				this.reader.skip( 16 );
+		var materialConnections = {
+			maps: {}
+		};
 
-				this.currentNode.fileName = this.reader.getString();
+		var inputName = connections.inputName;
+		var inputNodeName = connections.inputNodeName;
+		var nodeName = connections.nodeName;
 
-				return;
+		var self = this;
+		inputName.forEach( function ( name, index ) {
+
+			if ( name === 'Material' ) {
+
+				var matNode = self.getNodeByRefName( inputNodeName[ index ], nodes );
+				materialConnections.attributes = matNode.attributes;
+				materialConnections.envMap = matNode.fileName;
+				materialConnections.name = inputNodeName[ index ];
 
 			}
 
-			// otherwise top level
-			this.reader.setOffset( this.reader.offset - 4 );
+		} );
 
-			this.currentFormEnd = this.reader.offset + length;
-			this.parentForm = this.currentForm;
+		nodeName.forEach( function ( name, index ) {
 
-			this.reader.skip( 8 ); // unknown
+			if ( name === materialConnections.name ) {
 
-			var texture = {
-				index: this.reader.getUint32()
-			};
-			this.tree.textures.push( texture );
-			this.currentForm = texture;
+				materialConnections.maps[ inputName[ index ] ] = self.getNodeByRefName( inputNodeName[ index ], nodes );
 
-		},
+			}
 
-		parseClipLwo2( length ) {
+		} );
 
-			var texture = {
-				index: this.reader.getUint32(),
-				fileName: ""
-			};
+		return materialConnections;
 
-			// search STIL block
-			while ( true ) {
+	},
 
-				var tag = this.reader.getIDTag();
-				var n_length = this.reader.getUint16();
-				if ( tag === 'STIL' ) {
+	getNodeByRefName( refName, nodes ) {
 
-					texture.fileName = this.reader.getString();
-					break;
+		for ( var name in nodes ) {
 
-				}
+			if ( nodes[ name ].refName === refName ) return nodes[ name ];
 
-				if ( n_length >= length ) {
+		}
 
-					break;
+	},
 
-				}
+	parseTextureNodes( textureNodes ) {
+
+		var maps = {};
+
+		for ( var name in textureNodes ) {
+
+			var node = textureNodes[ name ];
+			var path = node.fileName;
+
+			if ( ! path ) return;
+
+			var texture = this.loadTexture( path );
+
+			if ( node.widthWrappingMode !== undefined ) texture.wrapS = this.getWrappingType( node.widthWrappingMode );
+			if ( node.heightWrappingMode !== undefined ) texture.wrapT = this.getWrappingType( node.heightWrappingMode );
+
+			switch ( name ) {
+
+				case 'Color':
+					maps.map = texture;
+					break;
+				case 'Roughness':
+					maps.roughnessMap = texture;
+					maps.roughness = 0.5;
+					break;
+				case 'Specular':
+					maps.specularMap = texture;
+					maps.specular = 0xffffff;
+					break;
+				case 'Luminous':
+					maps.emissiveMap = texture;
+					maps.emissive = 0x808080;
+					break;
+				case 'Luminous Color':
+					maps.emissive = 0x808080;
+					break;
+				case 'Metallic':
+					maps.metalnessMap = texture;
+					maps.metalness = 0.5;
+					break;
+				case 'Transparency':
+				case 'Alpha':
+					maps.alphaMap = texture;
+					maps.transparent = true;
+					break;
+				case 'Normal':
+					maps.normalMap = texture;
+					if ( node.amplitude !== undefined ) maps.normalScale = new Vector2( node.amplitude, node.amplitude );
+					break;
+				case 'Bump':
+					maps.bumpMap = texture;
+					break;
 
 			}
 
-			this.tree.textures.push( texture );
-			this.currentForm = texture;
+		}
 
-		},
+		// LWO BSDF materials can have both spec and rough, but this is not valid in three.js
+		if ( maps.roughnessMap && maps.specularMap ) delete maps.specularMap;
 
-		parseImage() {
+		return maps;
 
-			this.reader.skip( 8 ); // unknown
-			this.currentForm.fileName = this.reader.getString();
+	},
 
-		},
+	// maps can also be defined on individual material attributes, parse those here
+	// This occurs on Standard (Phong) surfaces
+	parseAttributeImageMaps( attributes, textures, maps ) {
 
-		parseXVAL( type, length ) {
+		for ( var name in attributes ) {
 
-			var endOffset = this.reader.offset + length - 4;
-			this.reader.skip( 8 );
+			var attribute = attributes[ name ];
 
-			this.currentForm[ type ] = this.reader.getFloat32();
+			if ( attribute.maps ) {
 
-			this.reader.setOffset( endOffset ); // set end offset directly to skip optional envelope
+				var mapData = attribute.maps[ 0 ];
 
-		},
+				var path = this.getTexturePathByIndex( mapData.imageIndex, textures );
+				if ( ! path ) return;
 
-		parseXVAL3( type, length ) {
+				var texture = this.loadTexture( path );
 
-			var endOffset = this.reader.offset + length - 4;
-			this.reader.skip( 8 );
+				if ( mapData.wrap !== undefined ) texture.wrapS = this.getWrappingType( mapData.wrap.w );
+				if ( mapData.wrap !== undefined ) texture.wrapT = this.getWrappingType( mapData.wrap.h );
 
-			this.currentForm[ type ] = {
-				x: this.reader.getFloat32(),
-				y: this.reader.getFloat32(),
-				z: this.reader.getFloat32(),
-			};
+				switch ( name ) {
 
-			this.reader.setOffset( endOffset );
+					case 'Color':
+						maps.map = texture;
+						break;
+					case 'Diffuse':
+						maps.aoMap = texture;
+						break;
+					case 'Roughness':
+						maps.roughnessMap = texture;
+						maps.roughness = 1;
+						break;
+					case 'Specular':
+						maps.specularMap = texture;
+						maps.specular = 0xffffff;
+						break;
+					case 'Luminosity':
+						maps.emissiveMap = texture;
+						maps.emissive = 0x808080;
+						break;
+					case 'Metallic':
+						maps.metalnessMap = texture;
+						maps.metalness = 1;
+						break;
+					case 'Transparency':
+					case 'Alpha':
+						maps.alphaMap = texture;
+						maps.transparent = true;
+						break;
+					case 'Normal':
+						maps.normalMap = texture;
+						break;
+					case 'Bump':
+						maps.bumpMap = texture;
+						break;
 
-		},
+				}
 
-		// Tags associated with an object
-		// OTAG { type[ID4], tag-string[S0] }
-		parseObjectTag() {
+			}
 
-			if ( ! this.tree.objectTags ) this.tree.objectTags = {};
+		}
 
-			this.tree.objectTags[ this.reader.getIDTag() ] = {
-				tagString: this.reader.getString()
-			};
+	},
 
-		},
+	parseAttributes( attributes, maps ) {
 
-		// Signals the start of a new layer. All the data chunks which follow will be included in this layer until another layer chunk is encountered.
-		// LAYR: number[U2], flags[U2], pivot[VEC12], name[S0], parent[U2]
-		parseLayer( length ) {
+		var params = {};
 
-			var layer = {
-				number: this.reader.getUint16(),
-				flags: this.reader.getUint16(), // If the least significant bit of flags is set, the layer is hidden.
-				pivot: this.reader.getFloat32Array( 3 ), // Note: this seems to be superflous, as the geometry is translated when pivot is present
-				name: this.reader.getString(),
-			};
+		// don't use color data if color map is present
+		if ( attributes.Color && ! maps.map ) {
 
-			this.tree.layers.push( layer );
-			this.currentLayer = layer;
+			params.color = new Color().fromArray( attributes.Color.value );
 
-			var parsedLength = 16 + stringOffset( this.currentLayer.name ); // index ( 2 ) + flags( 2 ) + pivot( 12 ) + stringlength
+		} else params.color = new Color();
 
-			// if we have not reached the end of the layer block, there must be a parent defined
-			this.currentLayer.parent = ( parsedLength < length ) ? this.reader.getUint16() : - 1; // omitted or -1 for no parent
 
-		},
+		if ( attributes.Transparency && attributes.Transparency.value !== 0 ) {
 
-		// VEC12 * ( F4 + F4 + F4 ) array of x,y,z vectors
-		// Converting from left to right handed coordinate system:
-		// x -> -x and switch material FrontSide -> BackSide
-		parsePoints( length ) {
+			params.opacity = 1 - attributes.Transparency.value;
+			params.transparent = true;
 
-			this.currentPoints = [];
-			for ( var i = 0; i < length / 4; i += 3 ) {
+		}
 
-				// z -> -z to match three.js right handed coords
-				this.currentPoints.push( this.reader.getFloat32(), this.reader.getFloat32(), - this.reader.getFloat32() );
+		if ( attributes[ 'Bump Height' ] ) params.bumpScale = attributes[ 'Bump Height' ].value * 0.1;
 
-			}
+		if ( attributes[ 'Refraction Index' ] ) params.refractionRatio = 1 / attributes[ 'Refraction Index' ].value;
+
+		this.parsePhysicalAttributes( params, attributes, maps );
+		this.parseStandardAttributes( params, attributes, maps );
+		this.parsePhongAttributes( params, attributes, maps );
 
-		},
+		return params;
 
-		// parse VMAP or VMAD
-		// Associates a set of floating-point vectors with a set of points.
-		// VMAP: { type[ID4], dimension[U2], name[S0], ( vert[VX], value[F4] # dimension ) * }
+	},
 
-		// VMAD Associates a set of floating-point vectors with the vertices of specific polygons.
-		// Similar to VMAP UVs, but associates with polygon vertices rather than points
-		// to solve the problem of UV seams: VMAD chunks are paired with VMAPs of the same name,
-		// if they exist. The vector values in the VMAD will then replace those in the
-		// corresponding VMAP, but only for calculations involving the specified polygons.
-		// VMAD { type[ID4], dimension[U2], name[S0], ( vert[VX], poly[VX], value[F4] # dimension ) * }
-		parseVertexMapping( length, discontinuous ) {
+	parsePhysicalAttributes( params, attributes/*, maps*/ ) {
 
-			var finalOffset = this.reader.offset + length;
+		if ( attributes.Clearcoat && attributes.Clearcoat.value > 0 ) {
 
-			var channelName = this.reader.getString();
+			params.clearCoat = attributes.Clearcoat.value;
 
-			if ( this.reader.offset === finalOffset ) {
+			if ( attributes[ 'Clearcoat Gloss' ] ) {
 
-				// then we are in a texture node and the VMAP chunk is just a reference to a UV channel name
-				this.currentForm.UVChannel = channelName;
-				return;
+				params.clearCoatRoughness = 0.5 * ( 1 - attributes[ 'Clearcoat Gloss' ].value );
 
 			}
 
-			// otherwise reset to initial length and parse normal VMAP CHUNK
-			this.reader.setOffset( this.reader.offset - stringOffset( channelName ) );
+		}
 
-			var type = this.reader.getIDTag();
+	},
 
-			this.reader.getUint16(); // dimension
-			var name = this.reader.getString();
+	parseStandardAttributes( params, attributes, maps ) {
 
-			var remainingLength = length - 6 - stringOffset( name );
 
-			switch ( type ) {
+		if ( attributes.Luminous ) {
 
-				case 'TXUV':
-					this.parseUVMapping( name, finalOffset, discontinuous );
-					break;
-				case 'MORF':
-				case 'SPOT':
-					this.parseMorphTargets( name, finalOffset, type ); // can't be discontinuous
-					break;
-				// unsupported VMAPs
-				case 'APSL':
-				case 'NORM':
-				case 'WGHT':
-				case 'MNVW':
-				case 'PICK':
-				case 'RGB ':
-				case 'RGBA':
-					this.reader.skip( remainingLength );
-					break;
-				default:
-					console.warn( 'LWOLoader: unknown vertex map type: ' + type );
-					this.reader.skip( remainingLength );
+			params.emissiveIntensity = attributes.Luminous.value;
+
+			if ( attributes[ 'Luminous Color' ] && ! maps.emissive ) {
+
+				params.emissive = new Color().fromArray( attributes[ 'Luminous Color' ].value );
+
+			} else {
+
+				params.emissive = new Color( 0x808080 );
 
 			}
 
-		},
+		}
 
-		parseUVMapping( name, finalOffset, discontinuous ) {
+		if ( attributes.Roughness && ! maps.roughnessMap ) params.roughness = attributes.Roughness.value;
+		if ( attributes.Metallic && ! maps.metalnessMap ) params.metalness = attributes.Metallic.value;
 
-			var uvIndices = [];
-			var polyIndices = [];
-			var uvs = [];
+	},
 
-			while ( this.reader.offset < finalOffset ) {
+	parsePhongAttributes( params, attributes, maps ) {
 
-				uvIndices.push( this.reader.getVariableLengthIndex() );
+		if ( attributes.Diffuse ) params.color.multiplyScalar( attributes.Diffuse.value );
 
-				if ( discontinuous ) polyIndices.push( this.reader.getVariableLengthIndex() );
+		if ( attributes.Reflection ) {
 
-				uvs.push( this.reader.getFloat32(), this.reader.getFloat32() );
+			params.reflectivity = attributes.Reflection.value;
+			params.combine = AddOperation;
 
-			}
+		}
 
-			if ( discontinuous ) {
+		if ( attributes.Luminosity ) {
 
-				if ( ! this.currentLayer.discontinuousUVs ) this.currentLayer.discontinuousUVs = {};
+			params.emissiveIntensity = attributes.Luminosity.value;
 
-				this.currentLayer.discontinuousUVs[ name ] = {
-					uvIndices: uvIndices,
-					polyIndices: polyIndices,
-					uvs: uvs,
-				};
+			if ( ! maps.emissiveMap && ! maps.map ) {
 
-			} else {
+				params.emissive = params.color;
 
-				if ( ! this.currentLayer.uvs ) this.currentLayer.uvs = {};
+			} else {
 
-				this.currentLayer.uvs[ name ] = {
-					uvIndices: uvIndices,
-					uvs: uvs,
-				};
+				params.emissive = new Color( 0x808080 );
 
 			}
 
-		},
+		}
 
-		parseMorphTargets( name, finalOffset, type ) {
+		// parse specular if there is no roughness - we will interpret the material as 'Phong' in this case
+		if ( ! attributes.Roughness && attributes.Specular && ! maps.specularMap ) {
 
-			var indices = [];
-			var points = [];
+			if ( attributes[ 'Color Highlight' ] ) {
 
-			type = ( type === 'MORF' ) ? 'relative' : 'absolute';
+				params.specular = new Color().setScalar( attributes.Specular.value ).lerp( params.color.clone().multiplyScalar( attributes.Specular.value ), attributes[ 'Color Highlight' ].value );
 
-			while ( this.reader.offset < finalOffset ) {
+			} else {
 
-				indices.push( this.reader.getVariableLengthIndex() );
-				// z -> -z to match three.js right handed coords
-				points.push( this.reader.getFloat32(), this.reader.getFloat32(), - this.reader.getFloat32() );
+				params.specular = new Color().setScalar( attributes.Specular.value );
 
 			}
 
-			if ( ! this.currentLayer.morphTargets ) this.currentLayer.morphTargets = {};
+		}
 
-			this.currentLayer.morphTargets[ name ] = {
-				indices: indices,
-				points: points,
-				type: type,
-			};
+		if ( params.specular && attributes.Glossiness ) params.shininess = 7 + Math.pow( 2, attributes.Glossiness.value * 12 + 2 );
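+		// e.g. Glossiness 0.4 gives shininess = 7 + 2^6.8 ≈ 118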
 
-		},
+	},
 
-		// A list of polygons for the current layer.
-		// POLS { type[ID4], ( numvert+flags[U2], vert[VX] # numvert ) * }
-		parsePolygonList( length ) {
+	parseEnvMap( connections, maps, attributes ) {
 
-			var finalOffset = this.reader.offset + length;
-			var type = this.reader.getIDTag();
+		if ( connections.envMap ) {
 
-			var indices = [];
+			var envMap = this.loadTexture( connections.envMap );
 
-			// hold a list of polygon sizes, to be split up later
-			var polygonDimensions = [];
+			if ( attributes.transparent && attributes.opacity < 0.999 ) {
 
-			while ( this.reader.offset < finalOffset ) {
+				envMap.mapping = EquirectangularRefractionMapping;
 
-				var numverts = this.reader.getUint16();
+				// Reflectivity and refraction mapping don't work well together in Phong materials
+				if ( attributes.reflectivity !== undefined ) {
 
-				//var flags = numverts & 64512; // 6 high order bits are flags - ignoring for now
-				numverts = numverts & 1023; // remaining ten low order bits are vertex num
-				polygonDimensions.push( numverts );
+					delete attributes.reflectivity;
+					delete attributes.combine;
 
-				for ( var j = 0; j < numverts; j ++ ) indices.push( this.reader.getVariableLengthIndex() );
+				}
 
-			}
+				if ( attributes.metalness !== undefined ) {
 
-			var geometryData = {
-				type: type,
-				vertexIndices: indices,
-				polygonDimensions: polygonDimensions,
-				points: this.currentPoints
-			};
+					delete attributes.metalness;
 
-			// Note: assuming that all polys will be lines or points if the first is
-			if ( polygonDimensions[ 0 ] === 1 ) geometryData.type = 'points';
-			else if ( polygonDimensions[ 0 ] === 2 ) geometryData.type = 'lines';
+				}
 
-			this.currentLayer.geometry = geometryData;
+			} else envMap.mapping = EquirectangularReflectionMapping;
 
-		},
+			maps.envMap = envMap;
 
-		// Lists the tag strings that can be associated with polygons by the PTAG chunk.
-		// TAGS { tag-string[S0] * }
-		parseTagStrings( length ) {
+		}
 
-			this.tree.tags = this.reader.getStringArray( length );
+	},
 
-		},
+	// get texture defined at top level by its index
+	getTexturePathByIndex( index ) {
 
-		// Associates tags of a given type with polygons in the most recent POLS chunk.
-		// PTAG { type[ID4], ( poly[VX], tag[U2] ) * }
-		parsePolygonTagMapping( length ) {
+		var fileName = '';
 
-			var finalOffset = this.reader.offset + length;
-			var type = this.reader.getIDTag();
-			if ( type === 'SURF' ) this.parseMaterialIndices( finalOffset );
-			else { //PART, SMGP, COLR not supported
+		if ( ! lwoTree.textures ) return fileName;
 
-				this.reader.skip( length - 4 );
+		lwoTree.textures.forEach( function ( texture ) {
 
-			}
+			if ( texture.index === index ) fileName = texture.fileName;
 
-		},
+		} );
 
-		parseMaterialIndices( finalOffset ) {
+		return fileName;
 
-			// array holds polygon index followed by material index
-			this.currentLayer.geometry.materialIndices = [];
+	},
 
-			var initialMatIndex;
+	loadTexture( path ) {
 
-			while ( this.reader.offset < finalOffset ) {
+		if ( ! path ) return null;
 
-				var polygonIndex = this.reader.getVariableLengthIndex();
-				var materialIndex = this.reader.getUint16();
+		var texture;
 
-				if ( ! initialMatIndex ) initialMatIndex = materialIndex; // set up first mat index
+		texture = this.textureLoader.load(
+			path,
+			undefined,
+			undefined,
+			function () {
 
-				this.currentLayer.geometry.materialIndices.push( polygonIndex, materialIndex );
+				console.warn( 'LWOLoader: non-standard resource hierarchy. Use \`resourcePath\` parameter to specify root content directory.' );
 
 			}
+		);
 
-		},
-
-		parseUnknownCHUNK( blockID, length ) {
+		return texture;
 
-			console.warn( 'LWOLoader: unknown chunk type: ' + blockID + ' length: ' + length );
+	},
 
-			// print the chunk plus some bytes padding either side
-			// printBuffer( this.reader.dv.buffer, this.reader.offset - 20, length + 40 );
+	// 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
+	getWrappingType( num ) {
 
-			var data = this.reader.getString( length );
+		switch ( num ) {
 
-			this.currentForm[ blockID ] = data;
+			case 0:
+				console.warn( 'LWOLoader: "Reset" texture wrapping type is not supported in three.js' );
+				return ClampToEdgeWrapping;
+			case 1: return RepeatWrapping;
+			case 2: return MirroredRepeatWrapping;
+			case 3: return ClampToEdgeWrapping;
 
 		}
 
-	};
-
-	function DataViewReader( buffer ) {
+	},
 
-		// For testing: dump whole buffer to console as a string
-		// printBuffer( buffer, 0, buffer.byteLength );
+	getMaterialType( nodeData ) {
 
-		this.dv = new DataView( buffer );
-		this.offset = 0;
+		if ( nodeData.Clearcoat && nodeData.Clearcoat.value > 0 ) return MeshPhysicalMaterial;
+		if ( nodeData.Roughness ) return MeshStandardMaterial;
+		return MeshPhongMaterial;
 
 	}
 
-	DataViewReader.prototype = {
+};
 
-		constructor: DataViewReader,
+function GeometryParser() {}
 
-		size: function () {
+GeometryParser.prototype = {
 
-			return this.dv.buffer.byteLength;
+	constructor: GeometryParser,
 
-		},
+	parse( geoData, layer ) {
 
-		setOffset( offset ) {
+		var geometry = new BufferGeometry();
 
-			if ( offset > 0 && offset < this.dv.buffer.byteLength ) {
+		geometry.addAttribute( 'position', new Float32BufferAttribute( geoData.points, 3 ) );
 
-				this.offset = offset;
+		var indices = this.splitIndices( geoData.vertexIndices, geoData.polygonDimensions );
+		geometry.setIndex( indices );
 
-			} else {
+		this.parseGroups( geometry, geoData );
 
-				console.error( 'LWOLoader: invalid buffer offset' );
+		geometry.computeVertexNormals();
 
-			}
+		this.parseUVs( geometry, layer, indices );
+		this.parseMorphTargets( geometry, layer, indices );
 
-		},
+		// TODO: z may need to be reversed to account for coordinate system change
+		geometry.translate( - layer.pivot[ 0 ], - layer.pivot[ 1 ], - layer.pivot[ 2 ] );
 
-		endOfFile: function () {
+		// var userData = geometry.userData;
+		// geometry = geometry.toNonIndexed()
+		// geometry.userData = userData;
 
-			if ( this.offset >= this.size() ) return true;
-			return false;
+		return geometry;
 
-		},
+	},
 
-		skip: function ( length ) {
+	// split quads into tris; n-gons are fan-triangulated ( with a warning )
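+	// e.g. a pentagon ( a, b, c, d, e ) becomes the fan ( a, b, c ), ( a, c, d ), ( a, d, e )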
+	splitIndices( indices, polygonDimensions ) {
 
-			this.offset += length;
+		var remappedIndices = [];
 
-		},
+		var i = 0;
+		polygonDimensions.forEach( function ( dim ) {
 
-		getUint8: function () {
+			if ( dim < 4 ) {
 
-			var value = this.dv.getUint8( this.offset );
-			this.offset += 1;
-			return value;
+				for ( var k = 0; k < dim; k ++ ) remappedIndices.push( indices[ i + k ] );
 
-		},
+			} else if ( dim === 4 ) {
 
-		getUint16: function () {
+				remappedIndices.push(
+					indices[ i ],
+					indices[ i + 1 ],
+					indices[ i + 2 ],
 
-			var value = this.dv.getUint16( this.offset );
-			this.offset += 2;
-			return value;
+					indices[ i ],
+					indices[ i + 2 ],
+					indices[ i + 3 ]
 
-		},
+				);
 
-		getInt32: function () {
+			} else if ( dim > 4 ) {
 
-			var value = this.dv.getInt32( this.offset, false );
-			this.offset += 4;
-			return value;
+				for ( var k = 1; k < dim - 1; k ++ ) {
 
-		},
+					remappedIndices.push( indices[ i ], indices[ i + k ], indices[ i + k + 1 ] );
 
-		getUint32: function () {
+				}
 
-			var value = this.dv.getUint32( this.offset, false );
-			this.offset += 4;
-			return value;
+				console.warn( 'LWOLoader: polygons with greater than 4 sides are not supported' );
 
-		},
+			}
 
-		getUint64: function () {
+			i += dim;
 
-			var low, high;
+		} );
 
-			high = this.getUint32();
-			low = this.getUint32();
-			return high * 0x100000000 + low;
+		return remappedIndices;
 
-		},
+	},
 
-		getFloat32: function () {
+	// NOTE: currently ignoring poly indices and assuming that they are intelligently ordered
+	parseGroups( geometry, geoData ) {
 
-			var value = this.dv.getFloat32( this.offset, false );
-			this.offset += 4;
-			return value;
+		var tags = lwoTree.tags;
+		var matNames = [];
 
-		},
+		var elemSize = 3;
+		if ( geoData.type === 'lines' ) elemSize = 2;
+		if ( geoData.type === 'points' ) elemSize = 1;
 
-		getFloat32Array: function ( size ) {
+		var remappedIndices = this.splitMaterialIndices( geoData.polygonDimensions, geoData.materialIndices );
 
-			var a = [];
+		var indexNum = 0; // create new indices in numerical order
+		var indexPairs = {}; // original indices mapped to numerical indices
 
-			for ( var i = 0; i < size; i ++ ) {
+		var prevMaterialIndex;
 
-				a.push( this.getFloat32() );
+		var prevStart = 0;
+		var currentCount = 0;
 
-			}
+		for ( var i = 0; i < remappedIndices.length; i += 2 ) {
 
-			return a;
+			var materialIndex = remappedIndices[ i + 1 ];
 
-		},
+			if ( i === 0 ) matNames[ indexNum ] = tags[ materialIndex ];
 
-		getFloat64: function () {
+			if ( prevMaterialIndex === undefined ) prevMaterialIndex = materialIndex;
 
-			var value = this.dv.getFloat64( this.offset, this.littleEndian );
-			this.offset += 8;
-			return value;
+			if ( materialIndex !== prevMaterialIndex ) {
 
-		},
+				var currentIndex;
+				if ( indexPairs[ tags[ prevMaterialIndex ] ] ) {
 
-		getFloat64Array: function ( size ) {
+					currentIndex = indexPairs[ tags[ prevMaterialIndex ] ];
 
-			var a = [];
+				} else {
 
-			for ( var i = 0; i < size; i ++ ) {
+					currentIndex = indexNum;
+					indexPairs[ tags[ prevMaterialIndex ] ] = indexNum;
+					matNames[ indexNum ] = tags[ prevMaterialIndex ];
+					indexNum ++;
 
-				a.push( this.getFloat64() );
+				}
+
+				geometry.addGroup( prevStart, currentCount, currentIndex );
+
+				prevStart += currentCount;
+
+				prevMaterialIndex = materialIndex;
+				currentCount = 0;
 
 			}
 
-			return a;
+			currentCount += elemSize;
+
+		}
 
-		},
+		// the loop above doesn't add the last group, do that here.
+		if ( geometry.groups.length > 0 ) {
 
-		// get variable-length index data type
-		// VX ::= index[U2] | (index + 0xFF000000)[U4]
-		// If the index value is less than 65,280 (0xFF00),then VX === U2
-		// otherwise VX === U4 with bits 24-31 set
-		// When reading an index, if the first byte encountered is 255 (0xFF), then
-		// the four-byte form is being used and the first byte should be discarded or masked out.
-		getVariableLengthIndex() {
+			var currentIndex;
+			if ( indexPairs[ tags[ materialIndex ] ] ) {
 
-			var firstByte = this.getUint8();
+				currentIndex = indexPairs[ tags[ materialIndex ] ];
 
-			if ( firstByte === 255 ) {
+			} else {
 
-				return this.getUint8() * 65536 + this.getUint8() * 256 + this.getUint8();
+				currentIndex = indexNum;
+				indexPairs[ tags[ materialIndex ] ] = indexNum;
+				matNames[ indexNum ] = tags[ materialIndex ];
 
 			}
 
-			return firstByte * 256 + this.getUint8();
+			geometry.addGroup( prevStart, currentCount, currentIndex );
+
+		}
+
+		// Mat names from TAGS chunk, used to build up an array of materials for this geometry
+		geometry.userData.matNames = matNames;
+
+	},
 
-		},
+	splitMaterialIndices( polygonDimensions, indices ) {
 
-		// An ID tag is a sequence of 4 bytes containing 7-bit ASCII values
-		getIDTag() {
+		var remappedIndices = [];
 
-			return this.getString( 4 );
+		polygonDimensions.forEach( function ( dim, i ) {
 
-		},
+			if ( dim <= 3 ) {
 
-		getString: function ( size ) {
+				remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ] );
 
-			if ( size === 0 ) return;
+			} else if ( dim === 4 ) {
 
-			// note: safari 9 doesn't support Uint8Array.indexOf; create intermediate array instead
-			var a = [];
+				remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ], indices[ i * 2 ], indices[ i * 2 + 1 ] );
 
-			if ( size ) {
+			} else {
 
-				for ( var i = 0; i < size; i ++ ) {
+				// polygons with > 4 sides are fan-triangulated in splitIndices, so push dim - 2 pairs
+				for ( var k = 0; k < dim - 2; k ++ ) {
 
-					a[ i ] = this.getUint8();
+					remappedIndices.push( indices[ i * 2 ], indices[ i * 2 + 1 ] );
 
 				}
 
-			} else {
+			}
 
-				var currentChar;
-				var len = 0;
+		} );
 
-				while ( currentChar !== 0 ) {
+		return remappedIndices;
 
-					currentChar = this.getUint8();
-					if ( currentChar !== 0 ) a.push( currentChar );
-					len ++;
+	},
 
-				}
+	// UV maps:
+	// 1: are defined via index into an array of points, not into a geometry
+	// - the geometry is also defined by an index into this array, but the indexes may not match
+	// 2: there can be any number of UV maps for a single geometry. Here these are combined,
+	// 	with preference given to the first map encountered
+	// 3: UV maps can be partial - that is, defined for only a part of the geometry
+	// 4: UV maps can be VMAP or VMAD (discontinuous, to allow for seams). In practice, most
+	// UV maps are defined as partially VMAP and partially VMAD
+	// VMADs are currently not supported
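+	// e.g. uvIndices [ 5 ] with uvs [ 0.25, 0.75 ] writes ( 0.25, 0.75 ) into slots 10 and 11
+	// of the flat uv array below; vertices with no entry in any map keep ( 0, 0 )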
+	parseUVs( geometry, layer ) {
 
-				if ( ! isEven( len + 1 ) ) this.getUint8(); // if string with terminating nullbyte is uneven, extra nullbyte is added
+		// start by creating a UV map set to zero for the whole geometry
+		var remappedUVs = Array.from( Array( geometry.attributes.position.count * 2 ), function () {
 
-			}
+			return 0;
 
-			return LoaderUtils.decodeText( new Uint8Array( a ) );
+		} );
 
-		},
+		for ( var name in layer.uvs ) {
 
-		getStringArray: function ( size ) {
+			var uvs = layer.uvs[ name ].uvs;
+			var uvIndices = layer.uvs[ name ].uvIndices;
 
-			var a = this.getString( size );
-			a = a.split( '\0' );
+			uvIndices.forEach( function ( i, j ) {
 
-			return a.filter( Boolean ); // return array with any empty strings removed
+				remappedUVs[ i * 2 ] = uvs[ j * 2 ];
+				remappedUVs[ i * 2 + 1 ] = uvs[ j * 2 + 1 ];
+
+			} );
 
 		}
 
-	};
+		geometry.addAttribute( 'uv', new Float32BufferAttribute( remappedUVs, 2 ) );
 
-	// ************** UTILITY FUNCTIONS **************
+	},
 
-	function isEven( num ) {
+	parseMorphTargets( geometry, layer ) {
 
-		return num % 2;
+		var num = 0;
+		for ( var name in layer.morphTargets ) {
 
-	}
+			var remappedPoints = geometry.attributes.position.array.slice();
 
-	// calculate the length of the string in the buffer
-	// this will be string.length + nullbyte + optional padbyte to make the length even
-	function stringOffset( string ) {
+			if ( ! geometry.morphAttributes.position ) geometry.morphAttributes.position = [];
 
-		return string.length + 1 + ( isEven( string.length + 1 ) ? 1 : 0 );
+			var morphPoints = layer.morphTargets[ name ].points;
+			var morphIndices = layer.morphTargets[ name ].indices;
+			var type = layer.morphTargets[ name ].type;
 
-	}
+			morphIndices.forEach( function ( i, j ) {
 
-	// for testing purposes, dump buffer to console
-	// printBuffer( this.reader.dv.buffer, this.reader.offset, length );
-	function printBuffer( buffer, from, to ) {
+				if ( type === 'relative' ) {
 
-		console.log( LoaderUtils.decodeText( new Uint8Array( buffer, from, to ) ) );
+					remappedPoints[ i * 3 ] += morphPoints[ j * 3 ];
+					remappedPoints[ i * 3 + 1 ] += morphPoints[ j * 3 + 1 ];
+					remappedPoints[ i * 3 + 2 ] += morphPoints[ j * 3 + 2 ];
 
-	}
+				} else {
+
+					remappedPoints[ i * 3 ] = morphPoints[ j * 3 ];
+					remappedPoints[ i * 3 + 1 ] = morphPoints[ j * 3 + 1 ];
+					remappedPoints[ i * 3 + 2 ] = morphPoints[ j * 3 + 2 ];
+
+				}
+
+			} );
+
+			geometry.morphAttributes.position[ num ] = new Float32BufferAttribute( remappedPoints, 3 );
+			geometry.morphAttributes.position[ num ].name = name;
+
+			num ++;
+
+		}
+
+	},
+
+};
+
+
+// ************** UTILITY FUNCTIONS **************
+
+function extractParentUrl( url, dir ) {
+
+	var index = url.indexOf( dir );
+
+	if ( index === - 1 ) return './';
 
-	return LWOLoader;
+	return url.substr( 0, index );
 
-} )();
+}
 
 export { LWOLoader };
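
A minimal usage sketch of the module above, for reference. The import path and the scene variable are assumptions, the load()/onLoad signature follows the usual three.js loader pattern, the model and image paths are the example assets added in this commit (relative to examples/), and the callback object shape matches the { materials, meshes } value returned by LWOTreeParser.parse:

	import { LWOLoader } from './jsm/loaders/LWOLoader.js';

	var loader = new LWOLoader();
	loader.resourcePath = 'models/lwo/Images/'; // root content directory read when resolving texture files

	loader.load( 'models/lwo/Objects/LWO3/Demo.lwo', function ( lwo ) {

		// lwo.meshes holds one Mesh, Points or LineSegments object per layer
		lwo.meshes.forEach( function ( mesh ) {

			scene.add( mesh );

		} );

	} );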

+ 15 - 0
examples/jsm/loaders/VTKLoader.d.ts

@@ -0,0 +1,15 @@
+import {
+  BufferGeometry,
+  LoadingManager
+} from '../../../src/Three';
+
+export class VTKLoader {
+  constructor(manager?: LoadingManager);
+  manager: LoadingManager;
+  path: string;
+
+  load(url: string, onLoad: (geometry: BufferGeometry) => void, onProgress?: (event: ProgressEvent) => void, onError?: (event: ErrorEvent) => void): void;
+  setPath(path: string): this;
+
+  parse(data: ArrayBuffer | string, path: string): BufferGeometry;
+}
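
A matching usage sketch for the loader this declaration types; the import paths, model file and scene variable are placeholders, and geometry.center() / computeVertexNormals() are just optional post-processing on the BufferGeometry handed to the callback:

	import { VTKLoader } from './jsm/loaders/VTKLoader.js';
	import { Mesh, MeshPhongMaterial } from './build/three.module.js';

	new VTKLoader().load( 'models/vtk/example.vtk', function ( geometry ) {

		geometry.center();
		geometry.computeVertexNormals();

		scene.add( new Mesh( geometry, new MeshPhongMaterial( { color: 0xaaaaaa } ) ) );

	} );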

+ 1186 - 0
examples/jsm/loaders/VTKLoader.js

@@ -0,0 +1,1186 @@
+/**
+ * @author mrdoob / http://mrdoob.com/
+ * @author Alex Pletzer
+ *
+ * Updated on 22.03.2017
+ * VTK header is now parsed and used to extract all the compressed data
+ * @author Andrii Iudin https://github.com/andreyyudin
+ * @author Paul Kibet Korir https://github.com/polarise
+ * @author Sriram Somasundharam https://github.com/raamssundar
+ */
+
+import {
+	BufferAttribute,
+	BufferGeometry,
+	DefaultLoadingManager,
+	EventDispatcher,
+	FileLoader,
+	Float32BufferAttribute,
+	LoaderUtils
+} from "../../../build/three.module.js";
+
+var VTKLoader = function ( manager ) {
+
+	this.manager = ( manager !== undefined ) ? manager : DefaultLoadingManager;
+
+};
+
+Object.assign( VTKLoader.prototype, EventDispatcher.prototype, {
+
+	load: function ( url, onLoad, onProgress, onError ) {
+
+		var scope = this;
+
+		var loader = new FileLoader( scope.manager );
+		loader.setPath( scope.path );
+		loader.setResponseType( 'arraybuffer' );
+		loader.load( url, function ( text ) {
+
+			onLoad( scope.parse( text ) );
+
+		}, onProgress, onError );
+
+	},
+
+	setPath: function ( value ) {
+
+		this.path = value;
+		return this;
+
+	},
+
+	parse: function ( data ) {
+
+		function parseASCII( data ) {
+
+			// connectivity of the triangles
+			var indices = [];
+
+			// triangles vertices
+			var positions = [];
+
+			// red, green, blue colors in the range 0 to 1
+			var colors = [];
+
+			// normal vector, one per vertex
+			var normals = [];
+
+			var result;
+
+			// pattern for reading vertices, 3 floats or integers
+			var pat3Floats = /(\-?\d+\.?[\d\-\+e]*)\s+(\-?\d+\.?[\d\-\+e]*)\s+(\-?\d+\.?[\d\-\+e]*)/g;
+
+			// pattern for connectivity, an integer followed by any number of ints
+			// the first integer is the number of polygon nodes
+			var patConnectivity = /^(\d+)\s+([\s\d]*)/;
+
+			// indicates start of vertex data section
+			var patPOINTS = /^POINTS /;
+
+			// indicates start of polygon connectivity section
+			var patPOLYGONS = /^POLYGONS /;
+
+			// indicates start of triangle strips section
+			var patTRIANGLE_STRIPS = /^TRIANGLE_STRIPS /;
+
+			// POINT_DATA number_of_values
+			var patPOINT_DATA = /^POINT_DATA[ ]+(\d+)/;
+
+			// CELL_DATA number_of_polys
+			var patCELL_DATA = /^CELL_DATA[ ]+(\d+)/;
+
+			// Start of color section
+			var patCOLOR_SCALARS = /^COLOR_SCALARS[ ]+(\w+)[ ]+3/;
+
+			// NORMALS Normals float
+			var patNORMALS = /^NORMALS[ ]+(\w+)[ ]+(\w+)/;
+
+			var inPointsSection = false;
+			var inPolygonsSection = false;
+			var inTriangleStripSection = false;
+			var inPointDataSection = false;
+			var inCellDataSection = false;
+			var inColorSection = false;
+			var inNormalsSection = false;
+
+			var lines = data.split( '\n' );
+
+			for ( var i in lines ) {
+
+				var line = lines[ i ];
+
+				if ( line.indexOf( 'DATASET' ) === 0 ) {
+
+					var dataset = line.split( ' ' )[ 1 ];
+
+					if ( dataset !== 'POLYDATA' ) throw new Error( 'Unsupported DATASET type: ' + dataset );
+
+				} else if ( inPointsSection ) {
+
+					// get the vertices
+					while ( ( result = pat3Floats.exec( line ) ) !== null ) {
+
+						var x = parseFloat( result[ 1 ] );
+						var y = parseFloat( result[ 2 ] );
+						var z = parseFloat( result[ 3 ] );
+						positions.push( x, y, z );
+
+					}
+
+				} else if ( inPolygonsSection ) {
+
+					if ( ( result = patConnectivity.exec( line ) ) !== null ) {
+
+						// numVertices i0 i1 i2 ...
+						var numVertices = parseInt( result[ 1 ] );
+						var inds = result[ 2 ].split( /\s+/ );
+
+						if ( numVertices >= 3 ) {
+
+							var i0 = parseInt( inds[ 0 ] );
+							var i1, i2;
+							var k = 1;
+							// split the polygon into numVertices - 2 triangles
+							for ( var j = 0; j < numVertices - 2; ++ j ) {
+
+								i1 = parseInt( inds[ k ] );
+								i2 = parseInt( inds[ k + 1 ] );
+								indices.push( i0, i1, i2 );
+								k ++;
+
+							}
+
+						}
+
+					}
+
+				} else if ( inTriangleStripSection ) {
+
+					if ( ( result = patConnectivity.exec( line ) ) !== null ) {
+
+						// numVertices i0 i1 i2 ...
+						var numVertices = parseInt( result[ 1 ] );
+						var inds = result[ 2 ].split( /\s+/ );
+
+						if ( numVertices >= 3 ) {
+
+							var i0, i1, i2;
+							// split the polygon into numVertices - 2 triangles
+							for ( var j = 0; j < numVertices - 2; j ++ ) {
+
+								if ( j % 2 === 1 ) {
+
+									i0 = parseInt( inds[ j ] );
+									i1 = parseInt( inds[ j + 2 ] );
+									i2 = parseInt( inds[ j + 1 ] );
+									indices.push( i0, i1, i2 );
+
+								} else {
+
+									i0 = parseInt( inds[ j ] );
+									i1 = parseInt( inds[ j + 1 ] );
+									i2 = parseInt( inds[ j + 2 ] );
+									indices.push( i0, i1, i2 );
+
+								}
+
+							}
+
+						}
+
+					}
+
+				} else if ( inPointDataSection || inCellDataSection ) {
+
+					if ( inColorSection ) {
+
+						// Get the colors
+
+						while ( ( result = pat3Floats.exec( line ) ) !== null ) {
+
+							var r = parseFloat( result[ 1 ] );
+							var g = parseFloat( result[ 2 ] );
+							var b = parseFloat( result[ 3 ] );
+							colors.push( r, g, b );
+
+						}
+
+					} else if ( inNormalsSection ) {
+
+						// Get the normal vectors
+
+						while ( ( result = pat3Floats.exec( line ) ) !== null ) {
+
+							var nx = parseFloat( result[ 1 ] );
+							var ny = parseFloat( result[ 2 ] );
+							var nz = parseFloat( result[ 3 ] );
+							normals.push( nx, ny, nz );
+
+						}
+
+					}
+
+				}
+
+				if ( patPOLYGONS.exec( line ) !== null ) {
+
+					inPolygonsSection = true;
+					inPointsSection = false;
+					inTriangleStripSection = false;
+
+				} else if ( patPOINTS.exec( line ) !== null ) {
+
+					inPolygonsSection = false;
+					inPointsSection = true;
+					inTriangleStripSection = false;
+
+				} else if ( patTRIANGLE_STRIPS.exec( line ) !== null ) {
+
+					inPolygonsSection = false;
+					inPointsSection = false;
+					inTriangleStripSection = true;
+
+				} else if ( patPOINT_DATA.exec( line ) !== null ) {
+
+					inPointDataSection = true;
+					inPointsSection = false;
+					inPolygonsSection = false;
+					inTriangleStripSection = false;
+
+				} else if ( patCELL_DATA.exec( line ) !== null ) {
+
+					inCellDataSection = true;
+					inPointsSection = false;
+					inPolygonsSection = false;
+					inTriangleStripSection = false;
+
+				} else if ( patCOLOR_SCALARS.exec( line ) !== null ) {
+
+					inColorSection = true;
+					inNormalsSection = false;
+					inPointsSection = false;
+					inPolygonsSection = false;
+					inTriangleStripSection = false;
+
+				} else if ( patNORMALS.exec( line ) !== null ) {
+
+					inNormalsSection = true;
+					inColorSection = false;
+					inPointsSection = false;
+					inPolygonsSection = false;
+					inTriangleStripSection = false;
+
+				}
+
+			}
+
+			var geometry = new BufferGeometry();
+			geometry.setIndex( indices );
+			geometry.addAttribute( 'position', new Float32BufferAttribute( positions, 3 ) );
+
+			if ( normals.length === positions.length ) {
+
+				geometry.addAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) );
+
+			}
+
+			if ( colors.length !== indices.length ) {
+
+				// stagger
+
+				if ( colors.length === positions.length ) {
+
+					geometry.addAttribute( 'color', new Float32BufferAttribute( colors, 3 ) );
+
+				}
+
+			} else {
+
+				// cell
+
+				geometry = geometry.toNonIndexed();
+				var numTriangles = geometry.attributes.position.count / 3;
+
+				if ( colors.length === ( numTriangles * 3 ) ) {
+
+					var newColors = [];
+
+					for ( var i = 0; i < numTriangles; i ++ ) {
+
+						var r = colors[ 3 * i + 0 ];
+						var g = colors[ 3 * i + 1 ];
+						var b = colors[ 3 * i + 2 ];
+
+						newColors.push( r, g, b );
+						newColors.push( r, g, b );
+						newColors.push( r, g, b );
+
+					}
+
+					geometry.addAttribute( 'color', new Float32BufferAttribute( newColors, 3 ) );
+
+				}
+
+			}
+
+			return geometry;
+
+		}
+
+		function parseBinary( data ) {
+
+			var count, pointIndex, i, numberOfPoints, s;
+			var buffer = new Uint8Array( data );
+			var dataView = new DataView( data );
+
+			// Points and normals, by default, are empty
+			var points = [];
+			var normals = [];
+			var indices = [];
+
+			// Going to make a big array of strings
+			var vtk = [];
+			var index = 0;
+
+			function findString( buffer, start ) {
+
+				var index = start;
+				var c = buffer[ index ];
+				var s = [];
+				while ( c !== 10 ) {
+
+					s.push( String.fromCharCode( c ) );
+					index ++;
+					c = buffer[ index ];
+
+				}
+
+				return { start: start,
+					end: index,
+					next: index + 1,
+					parsedString: s.join( '' ) };
+
+			}
+
+			var state, line;
+
+			while ( true ) {
+
+				// Get a string
+				state = findString( buffer, index );
+				line = state.parsedString;
+
+				if ( line.indexOf( 'DATASET' ) === 0 ) {
+
+					var dataset = line.split( ' ' )[ 1 ];
+
+					if ( dataset !== 'POLYDATA' ) throw new Error( 'Unsupported DATASET type: ' + dataset );
+
+				} else if ( line.indexOf( 'POINTS' ) === 0 ) {
+
+					vtk.push( line );
+					// Add the points
+					numberOfPoints = parseInt( line.split( ' ' )[ 1 ], 10 );
+
+					// Each point is 3 4-byte floats
+					count = numberOfPoints * 4 * 3;
+
+					points = new Float32Array( numberOfPoints * 3 );
+
+					pointIndex = state.next;
+					for ( i = 0; i < numberOfPoints; i ++ ) {
+
+						points[ 3 * i ] = dataView.getFloat32( pointIndex, false );
+						points[ 3 * i + 1 ] = dataView.getFloat32( pointIndex + 4, false );
+						points[ 3 * i + 2 ] = dataView.getFloat32( pointIndex + 8, false );
+						pointIndex = pointIndex + 12;
+
+					}
+					// increment our next pointer
+					state.next = state.next + count + 1;
+
+				} else if ( line.indexOf( 'TRIANGLE_STRIPS' ) === 0 ) {
+
+					var numberOfStrips = parseInt( line.split( ' ' )[ 1 ], 10 );
+					var size = parseInt( line.split( ' ' )[ 2 ], 10 );
+					// 4 byte integers
+					count = size * 4;
+
+					indices = new Uint32Array( 3 * size - 9 * numberOfStrips );
+					var indicesIndex = 0;
+
+					pointIndex = state.next;
+					for ( i = 0; i < numberOfStrips; i ++ ) {
+
+						// For each strip, read the first value, then record that many more points
+						var indexCount = dataView.getInt32( pointIndex, false );
+						var strip = [];
+						pointIndex += 4;
+						for ( s = 0; s < indexCount; s ++ ) {
+
+							strip.push( dataView.getInt32( pointIndex, false ) );
+							pointIndex += 4;
+
+						}
+
+						// retrieves the n-2 triangles from the triangle strip
+						for ( var j = 0; j < indexCount - 2; j ++ ) {
+
+							if ( j % 2 ) {
+
+								indices[ indicesIndex ++ ] = strip[ j ];
+								indices[ indicesIndex ++ ] = strip[ j + 2 ];
+								indices[ indicesIndex ++ ] = strip[ j + 1 ];
+
+							} else {
+
+
+								indices[ indicesIndex ++ ] = strip[ j ];
+								indices[ indicesIndex ++ ] = strip[ j + 1 ];
+								indices[ indicesIndex ++ ] = strip[ j + 2 ];
+
+							}
+
+						}
+
+					}
+					// increment our next pointer
+					state.next = state.next + count + 1;
+
+				} else if ( line.indexOf( 'POLYGONS' ) === 0 ) {
+
+					var numberOfStrips = parseInt( line.split( ' ' )[ 1 ], 10 );
+					var size = parseInt( line.split( ' ' )[ 2 ], 10 );
+					// 4 byte integers
+					count = size * 4;
+
+					indices = new Uint32Array( 3 * size - 9 * numberOfStrips );
+					var indicesIndex = 0;
+
+					pointIndex = state.next;
+					for ( i = 0; i < numberOfStrips; i ++ ) {
+
+						// For each strip, read the first value, then record that many more points
+						var indexCount = dataView.getInt32( pointIndex, false );
+						var strip = [];
+						pointIndex += 4;
+						for ( s = 0; s < indexCount; s ++ ) {
+
+							strip.push( dataView.getInt32( pointIndex, false ) );
+							pointIndex += 4;
+
+						}
+
+						// divide the polygon into n - 2 triangles
+						for ( var j = 1; j < indexCount - 1; j ++ ) {
+
+							indices[ indicesIndex ++ ] = strip[ 0 ];
+							indices[ indicesIndex ++ ] = strip[ j ];
+							indices[ indicesIndex ++ ] = strip[ j + 1 ];
+
+						}
+
+					}
+					// increment our next pointer
+					state.next = state.next + count + 1;
+
+				} else if ( line.indexOf( 'POINT_DATA' ) === 0 ) {
+
+					numberOfPoints = parseInt( line.split( ' ' )[ 1 ], 10 );
+
+					// Grab the next line
+					state = findString( buffer, state.next );
+
+					// Now grab the binary data
+					count = numberOfPoints * 4 * 3;
+
+					normals = new Float32Array( numberOfPoints * 3 );
+					pointIndex = state.next;
+					for ( i = 0; i < numberOfPoints; i ++ ) {
+
+						normals[ 3 * i ] = dataView.getFloat32( pointIndex, false );
+						normals[ 3 * i + 1 ] = dataView.getFloat32( pointIndex + 4, false );
+						normals[ 3 * i + 2 ] = dataView.getFloat32( pointIndex + 8, false );
+						pointIndex += 12;
+
+					}
+
+					// Increment past our data
+					state.next = state.next + count;
+
+				}
+
+				// Increment index
+				index = state.next;
+
+				if ( index >= buffer.byteLength ) {
+
+					break;
+
+				}
+
+			}
+
+			var geometry = new BufferGeometry();
+			geometry.setIndex( new BufferAttribute( indices, 1 ) );
+			geometry.addAttribute( 'position', new BufferAttribute( points, 3 ) );
+
+			if ( normals.length === points.length ) {
+
+				geometry.addAttribute( 'normal', new BufferAttribute( normals, 3 ) );
+
+			}
+
+			return geometry;
+
+		}
+
+		function Float32Concat( first, second ) {
+
+		    var firstLength = first.length, result = new Float32Array( firstLength + second.length );
+
+		    result.set( first );
+		    result.set( second, firstLength );
+
+		    return result;
+
+		}
+
+		function Int32Concat( first, second ) {
+
+		    var firstLength = first.length, result = new Int32Array( firstLength + second.length );
+
+		    result.set( first );
+		    result.set( second, firstLength );
+
+		    return result;
+
+		}
+
+		function parseXML( stringFile ) {
+
+			// Changes XML to JSON, based on https://davidwalsh.name/convert-xml-json
+
+			function xmlToJson( xml ) {
+
+				// Create the return object
+				var obj = {};
+
+				if ( xml.nodeType === 1 ) { // element
+
+					// do attributes
+
+					if ( xml.attributes ) {
+
+						if ( xml.attributes.length > 0 ) {
+
+							obj[ 'attributes' ] = {};
+
+							for ( var j = 0; j < xml.attributes.length; j ++ ) {
+
+								var attribute = xml.attributes.item( j );
+								obj[ 'attributes' ][ attribute.nodeName ] = attribute.nodeValue.trim();
+
+							}
+
+						}
+
+					}
+
+				} else if ( xml.nodeType === 3 ) { // text
+
+					obj = xml.nodeValue.trim();
+
+				}
+
+				// do children
+				if ( xml.hasChildNodes() ) {
+
+					for ( var i = 0; i < xml.childNodes.length; i ++ ) {
+
+						var item = xml.childNodes.item( i );
+						var nodeName = item.nodeName;
+
+						if ( typeof obj[ nodeName ] === 'undefined' ) {
+
+							var tmp = xmlToJson( item );
+
+							if ( tmp !== '' ) obj[ nodeName ] = tmp;
+
+						} else {
+
+							if ( typeof obj[ nodeName ].push === 'undefined' ) {
+
+								var old = obj[ nodeName ];
+								obj[ nodeName ] = [ old ];
+
+							}
+
+							var tmp = xmlToJson( item );
+
+							if ( tmp !== '' ) obj[ nodeName ].push( tmp );
+
+						}
+
+					}
+
+				}
+
+				return obj;
+
+			}
+
+			// Taken from Base64-js
+			function Base64toByteArray( b64 ) {
+
+				var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array;
+				var i;
+				var lookup = [];
+				var revLookup = [];
+				var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
+				var len = code.length;
+
+				for ( i = 0; i < len; i ++ ) {
+
+					lookup[ i ] = code[ i ];
+
+				}
+
+				for ( i = 0; i < len; ++ i ) {
+
+					revLookup[ code.charCodeAt( i ) ] = i;
+
+				}
+
+				revLookup[ '-'.charCodeAt( 0 ) ] = 62;
+				revLookup[ '_'.charCodeAt( 0 ) ] = 63;
+
+				var j, l, tmp, placeHolders, arr;
+				var len = b64.length;
+
+				if ( len % 4 > 0 ) {
+
+					throw new Error( 'Invalid string. Length must be a multiple of 4' );
+
+				}
+
+				placeHolders = b64[ len - 2 ] === '=' ? 2 : b64[ len - 1 ] === '=' ? 1 : 0;
+				arr = new Arr( len * 3 / 4 - placeHolders );
+				l = placeHolders > 0 ? len - 4 : len;
+
+				var L = 0;
+
+				for ( i = 0, j = 0; i < l; i += 4, j += 3 ) {
+
+					tmp = ( revLookup[ b64.charCodeAt( i ) ] << 18 ) | ( revLookup[ b64.charCodeAt( i + 1 ) ] << 12 ) | ( revLookup[ b64.charCodeAt( i + 2 ) ] << 6 ) | revLookup[ b64.charCodeAt( i + 3 ) ];
+					arr[ L ++ ] = ( tmp & 0xFF0000 ) >> 16;
+					arr[ L ++ ] = ( tmp & 0xFF00 ) >> 8;
+					arr[ L ++ ] = tmp & 0xFF;
+
+				}
+
+				if ( placeHolders === 2 ) {
+
+					tmp = ( revLookup[ b64.charCodeAt( i ) ] << 2 ) | ( revLookup[ b64.charCodeAt( i + 1 ) ] >> 4 );
+					arr[ L ++ ] = tmp & 0xFF;
+
+				} else if ( placeHolders === 1 ) {
+
+					tmp = ( revLookup[ b64.charCodeAt( i ) ] << 10 ) | ( revLookup[ b64.charCodeAt( i + 1 ) ] << 4 ) | ( revLookup[ b64.charCodeAt( i + 2 ) ] >> 2 );
+					arr[ L ++ ] = ( tmp >> 8 ) & 0xFF;
+					arr[ L ++ ] = tmp & 0xFF;
+
+				}
+
+				return arr;
+
+			}
+
+			function parseDataArray( ele, compressed ) {
+
+				var numBytes = 0;
+
+				if ( json.attributes.header_type === 'UInt64' ) {
+
+					numBytes = 8;
+
+				}	else if ( json.attributes.header_type === 'UInt32' ) {
+
+					numBytes = 4;
+
+				}
+
+
+				// Check the format
+				if ( ele.attributes.format === 'binary' && compressed ) {
+
+					var rawData, content, byteData, blocks, cSizeStart, headerSize, padding, dataOffsets, currentOffset;
+
+					if ( ele.attributes.type === 'Float32' ) {
+
+						var txt = new Float32Array( );
+
+					} else if ( ele.attributes.type === 'Int64' ) {
+
+						var txt = new Int32Array( );
+
+					}
+
+					// VTP data with the header has the following structure:
+					// [#blocks][#u-size][#p-size][#c-size-1][#c-size-2]...[#c-size-#blocks][DATA]
+					//
+					// Each token is an integer value whose type is specified by "header_type" at the top of the file (UInt32 if no type specified). The token meanings are:
+					// [#blocks] = Number of blocks
+					// [#u-size] = Block size before compression
+					// [#p-size] = Size of last partial block (zero if it is not needed)
+					// [#c-size-i] = Size in bytes of block i after compression
+					//
+					// The [DATA] portion stores contiguously every block appended together. The offset from the beginning of the data section to the beginning of a block is
+					// computed by summing the compressed block sizes from preceding blocks according to the header.
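+					//
+					// Illustrative example ( UInt32 header, i.e. numBytes = 4 ): a header of
+					// [ 2 ][ 32768 ][ 0 ][ 300 ][ 80 ] describes two compressed blocks of 300 and 80 bytes.
+					// headerSize = ( 2 + 3 ) * 4 = 20, padded up to 21, so the block offsets into the
+					// decoded byte array become 21, 321 and 401.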
+
+					rawData = ele[ '#text' ];
+
+					byteData = Base64toByteArray( rawData );
+
+					blocks = byteData[ 0 ];
+					for ( var i = 1; i < numBytes - 1; i ++ ) {
+
+						blocks = blocks | ( byteData[ i ] << ( i * numBytes ) );
+
+					}
+
+					headerSize = ( blocks + 3 ) * numBytes;
+					padding = ( ( headerSize % 3 ) > 0 ) ? 3 - ( headerSize % 3 ) : 0;
+					headerSize = headerSize + padding;
+
+					dataOffsets = [];
+					currentOffset = headerSize;
+					dataOffsets.push( currentOffset );
+
+					// Get the block sizes after compression.
+					// The header holds three values before the first c-size entry, so we skip 3 * numBytes
+					cSizeStart = 3 * numBytes;
+
+					for ( var i = 0; i < blocks; i ++ ) {
+
+						var currentBlockSize = byteData[ i * numBytes + cSizeStart ];
+
+						for ( var j = 1; j < numBytes - 1; j ++ ) {
+
+							// Assemble the little-endian value byte by byte ( 8 bits per byte, regardless of the header type )
+							currentBlockSize = currentBlockSize | ( byteData[ i * numBytes + cSizeStart + j ] << ( j * 8 ) );
+
+						}
+
+						currentOffset = currentOffset + currentBlockSize;
+						dataOffsets.push( currentOffset );
+
+					}
+
+					for ( var i = 0; i < dataOffsets.length - 1; i ++ ) {
+
+						var inflate = new Zlib.Inflate( byteData.slice( dataOffsets[ i ], dataOffsets[ i + 1 ] ), { resize: true, verify: true } ); // eslint-disable-line no-undef
+						content = inflate.decompress();
+						content = content.buffer;
+
+						if ( ele.attributes.type === 'Float32' ) {
+
+							content = new Float32Array( content );
+							txt = Float32Concat( txt, content );
+
+						} else if ( ele.attributes.type === 'Int64' ) {
+
+							content = new Int32Array( content );
+							txt = Int32Concat( txt, content );
+
+						}
+
+					}
+
+					delete ele[ '#text' ];
+
+					if ( ele.attributes.type === 'Int64' ) {
+
+						if ( ele.attributes.format === 'binary' ) {
+
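+							// Int64 values arrive as pairs of 32-bit words ( little-endian, low word first );
+							// keeping only the even indices drops the high words, assuming the values fit in 32 bits.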
+							txt = txt.filter( function ( el, idx ) {
+
+								if ( idx % 2 !== 1 ) return true;
+
+							} );
+
+						}
+
+					}
+
+				} else {
+
+					if ( ele.attributes.format === 'binary' && ! compressed ) {
+
+						var content = Base64toByteArray( ele[ '#text' ] );
+
+						//  VTP data for the uncompressed case has the following structure:
+						// [#bytes][DATA]
+						// where "[#bytes]" is an integer value specifying the number of bytes in the block of data following it.
+						content = content.slice( numBytes ).buffer;
+
+					} else {
+
+						if ( ele[ '#text' ] ) {
+
+							var content = ele[ '#text' ].split( /\s+/ ).filter( function ( el ) {
+
+								if ( el !== '' ) return el;
+
+							} );
+
+						} else {
+
+							var content = new Int32Array( 0 ).buffer;
+
+						}
+
+					}
+
+					delete ele[ '#text' ];
+
+					// Get the content and optimize it
+					if ( ele.attributes.type === 'Float32' ) {
+
+						var txt = new Float32Array( content );
+
+					} else if ( ele.attributes.type === 'Int32' ) {
+
+						var txt = new Int32Array( content );
+
+					} else if ( ele.attributes.type === 'Int64' ) {
+
+						var txt = new Int32Array( content );
+
+						if ( ele.attributes.format === 'binary' ) {
+
+							txt = txt.filter( function ( el, idx ) {
+
+								if ( idx % 2 !== 1 ) return true;
+
+							} );
+
+						}
+
+					}
+
+				} // endif ( ele.attributes.format === 'binary' && compressed )
+
+				return txt;
+
+			}
+
+			// Main part
+			// Get Dom
+			var dom = null;
+
+			if ( window.DOMParser ) {
+
+				try {
+
+					dom = ( new DOMParser() ).parseFromString( stringFile, 'text/xml' );
+
+				} catch ( e ) {
+
+					dom = null;
+
+				}
+
+			} else if ( window.ActiveXObject ) {
+
+				try {
+
+					dom = new ActiveXObject( 'Microsoft.XMLDOM' ); // eslint-disable-line no-undef
+					dom.async = false;
+
+					if ( ! dom.loadXML( /* xml */ ) ) {
+
+						throw new Error( dom.parseError.reason + dom.parseError.srcText );
+
+					}
+
+				} catch ( e ) {
+
+					dom = null;
+
+				}
+
+			} else {
+
+				throw new Error( 'Cannot parse xml string!' );
+
+			}
+
+			// Get the doc
+			var doc = dom.documentElement;
+			// Convert to json
+			var json = xmlToJson( doc );
+			var points = [];
+			var normals = [];
+			var indices = [];
+
+			if ( json.PolyData ) {
+
+				var piece = json.PolyData.Piece;
+				var compressed = json.attributes.hasOwnProperty( 'compressor' );
+
+				// Can be optimized
+				// Loop through the sections
+				var sections = [ 'PointData', 'Points', 'Strips', 'Polys' ];// +['CellData', 'Verts', 'Lines'];
+				var sectionIndex = 0, numberOfSections = sections.length;
+
+				while ( sectionIndex < numberOfSections ) {
+
+					var section = piece[ sections[ sectionIndex ] ];
+
+					// If it has a DataArray in it
+
+					if ( section && section.DataArray ) {
+
+						// Depending on the number of DataArrays
+
+						if ( Object.prototype.toString.call( section.DataArray ) === '[object Array]' ) {
+
+							var arr = section.DataArray;
+
+						} else {
+
+							var arr = [ section.DataArray ];
+
+						}
+
+						var dataArrayIndex = 0, numberOfDataArrays = arr.length;
+
+						while ( dataArrayIndex < numberOfDataArrays ) {
+
+							// Parse the DataArray
+							if ( ( '#text' in arr[ dataArrayIndex ] ) && ( arr[ dataArrayIndex ][ '#text' ].length > 0 ) ) {
+
+								arr[ dataArrayIndex ].text = parseDataArray( arr[ dataArrayIndex ], compressed );
+
+							}
+
+							dataArrayIndex ++;
+
+						}
+
+						switch ( sections[ sectionIndex ] ) {
+
+							// if it is point data
+							case 'PointData':
+
+								var numberOfPoints = parseInt( piece.attributes.NumberOfPoints );
+								var normalsName = section.attributes.Normals;
+
+								if ( numberOfPoints > 0 ) {
+
+									for ( var i = 0, len = arr.length; i < len; i ++ ) {
+
+										if ( normalsName === arr[ i ].attributes.Name ) {
+
+											var components = arr[ i ].attributes.NumberOfComponents;
+											normals = new Float32Array( numberOfPoints * components );
+											normals.set( arr[ i ].text, 0 );
+
+										}
+
+									}
+
+								}
+
+								break;
+
+							// if it is points
+							case 'Points':
+
+								var numberOfPoints = parseInt( piece.attributes.NumberOfPoints );
+
+								if ( numberOfPoints > 0 ) {
+
+									var components = section.DataArray.attributes.NumberOfComponents;
+									points = new Float32Array( numberOfPoints * components );
+									points.set( section.DataArray.text, 0 );
+
+								}
+
+								break;
+
+							// if it is strips
+							case 'Strips':
+
+								var numberOfStrips = parseInt( piece.attributes.NumberOfStrips );
+
+								if ( numberOfStrips > 0 ) {
+
+									var connectivity = new Int32Array( section.DataArray[ 0 ].text.length );
+									var offset = new Int32Array( section.DataArray[ 1 ].text.length );
+									connectivity.set( section.DataArray[ 0 ].text, 0 );
+									offset.set( section.DataArray[ 1 ].text, 0 );
+
+									var size = numberOfStrips + connectivity.length;
+									indices = new Uint32Array( 3 * size - 9 * numberOfStrips );
+
+									var indicesIndex = 0;
+
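+									// Each strip is triangulated into ( strip.length - 2 ) triangles, flipping the
+									// winding on odd j. Illustrative example: a strip [ 0, 1, 2, 3, 4 ] yields the
+									// triangles ( 0, 1, 2 ), ( 1, 3, 2 ) and ( 2, 3, 4 ).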
+									for ( var i = 0, len = numberOfStrips; i < len; i ++ ) {
+
+										var strip = [];
+
+										for ( var s = 0, len1 = offset[ i ], len0 = 0; s < len1 - len0; s ++ ) {
+
+											strip.push( connectivity[ s ] );
+
+											if ( i > 0 ) len0 = offset[ i - 1 ];
+
+										}
+
+										for ( var j = 0, len1 = offset[ i ], len0 = 0; j < len1 - len0 - 2; j ++ ) {
+
+											if ( j % 2 ) {
+
+												indices[ indicesIndex ++ ] = strip[ j ];
+												indices[ indicesIndex ++ ] = strip[ j + 2 ];
+												indices[ indicesIndex ++ ] = strip[ j + 1 ];
+
+											} else {
+
+												indices[ indicesIndex ++ ] = strip[ j ];
+												indices[ indicesIndex ++ ] = strip[ j + 1 ];
+												indices[ indicesIndex ++ ] = strip[ j + 2 ];
+
+											}
+
+											if ( i > 0 ) len0 = offset[ i - 1 ];
+
+										}
+
+									}
+
+								}
+
+								break;
+
+							// if it is polys
+							case 'Polys':
+
+								var numberOfPolys = parseInt( piece.attributes.NumberOfPolys );
+
+								if ( numberOfPolys > 0 ) {
+
+									var connectivity = new Int32Array( section.DataArray[ 0 ].text.length );
+									var offset = new Int32Array( section.DataArray[ 1 ].text.length );
+									connectivity.set( section.DataArray[ 0 ].text, 0 );
+									offset.set( section.DataArray[ 1 ].text, 0 );
+
+									var size = numberOfPolys + connectivity.length;
+									indices = new Uint32Array( 3 * size - 9 * numberOfPolys );
+									var indicesIndex = 0, connectivityIndex = 0;
+									var i = 0, len = numberOfPolys, len0 = 0;
+
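+									// Each polygon is triangulated as a fan around its first vertex. Illustrative
+									// example: a quad [ 4, 5, 6, 7 ] yields the triangles ( 4, 5, 6 ) and ( 4, 6, 7 ).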
+									while ( i < len ) {
+
+										var poly = [];
+										var s = 0, len1 = offset[ i ];
+
+										while ( s < len1 - len0 ) {
+
+											poly.push( connectivity[ connectivityIndex ++ ] );
+											s ++;
+
+										}
+
+										var j = 1;
+
+										while ( j < len1 - len0 - 1 ) {
+
+											indices[ indicesIndex ++ ] = poly[ 0 ];
+											indices[ indicesIndex ++ ] = poly[ j ];
+											indices[ indicesIndex ++ ] = poly[ j + 1 ];
+											j ++;
+
+										}
+
+										i ++;
+										len0 = offset[ i - 1 ];
+
+									}
+
+								}
+
+								break;
+
+							default:
+								break;
+
+						}
+
+					}
+
+					sectionIndex ++;
+
+				}
+
+				var geometry = new BufferGeometry();
+				geometry.setIndex( new BufferAttribute( indices, 1 ) );
+				geometry.addAttribute( 'position', new BufferAttribute( points, 3 ) );
+
+				if ( normals.length === points.length ) {
+
+					geometry.addAttribute( 'normal', new BufferAttribute( normals, 3 ) );
+
+				}
+
+				return geometry;
+
+			} else {
+
+				throw new Error( 'Unsupported DATASET type' );
+
+			}
+
+		}
+
+		function getStringFile( data ) {
+
+			var stringFile = '';
+			var charArray = new Uint8Array( data );
+			var i = 0;
+			var len = charArray.length;
+
+			while ( len -- ) {
+
+				stringFile += String.fromCharCode( charArray[ i ++ ] );
+
+			}
+
+			return stringFile;
+
+		}
+
+		// decode the start of the file to work out whether it is XML, ASCII or binary
+		var meta = LoaderUtils.decodeText( new Uint8Array( data, 0, 250 ) ).split( '\n' );
+
+		if ( meta[ 0 ].indexOf( 'xml' ) !== - 1 ) {
+
+			return parseXML( getStringFile( data ) );
+
+		} else if ( meta[ 2 ].includes( 'ASCII' ) ) {
+
+			return parseASCII( getStringFile( data ) );
+
+		} else {
+
+			return parseBinary( data );
+
+		}
+
+	}
+
+} );
+
+export { VTKLoader };

+ 68 - 0
examples/jsm/misc/Ocean.d.ts

@@ -0,0 +1,68 @@
+import {
+  OrthographicCamera,
+  Mesh,
+  Scene,
+  ShaderMaterial,
+  Vector3,
+  WebGLRenderer,
+  WebGLRenderTarget,
+} from '../../../src/Three';
+
+export class Ocean {
+	constructor( renderer: WebGLRenderer, camera?: OrthographicCamera, scene?: Scene, options?: object );
+
+	changed: boolean;
+	initial: boolean;
+
+	oceanCamera: OrthographicCamera;
+	renderer: WebGLRenderer;
+	scene: Scene;
+
+	clearColor: number[];
+	geometryOrigin: number[];
+	sunDirectionX: number;
+	sunDirectionY: number;
+	sunDirectionZ: number;
+	oceanColor: Vector3;
+	skyColor: Vector3;
+	exposure: number;
+	geometryResolution: number;
+	geometrySize: number;
+	resolution: number;
+	floatSize: number;
+	windX: number;
+	windY: number;
+	size: number;
+	choppiness: number;
+
+	initialSpectrumFramebuffer: WebGLRenderTarget;
+	spectrumFramebuffer: WebGLRenderTarget;
+	pingPhaseFramebuffer: WebGLRenderTarget;
+	pongPhaseFramebuffer: WebGLRenderTarget;
+	pingTransformFramebuffer: WebGLRenderTarget;
+	pongTransformFramebuffer: WebGLRenderTarget;
+	displacementMapFramebuffer: WebGLRenderTarget;
+	normalMapFramebuffer: WebGLRenderTarget;
+
+	matrixNeedsUpdate: boolean;
+
+	materialOceanHorizontal: ShaderMaterial;
+	materialOceanVertical: ShaderMaterial;
+	materialInitialSpectrum: ShaderMaterial;
+	materialPhase: ShaderMaterial;
+	materialSpectrum: ShaderMaterial;
+	materialNormal: ShaderMaterial;
+	materialOcean: ShaderMaterial;
+
+	screenQuad: Mesh;
+
+	generateSeedPhaseTexture(): void;
+	generateMesh(): void;
+	render(): void;
+	renderInitialSpectrum(): void;
+	renderWavePhase(): void;
+	renderSpectrum(): void;
+	renderSpectrumFFT(): void;
+	renderNormalMap(): void;
+
+}

+ 411 - 0
examples/jsm/misc/Ocean.js

@@ -0,0 +1,411 @@
+/*
+	three.js Ocean
+*/
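+
+/*
+	Minimal usage sketch ( illustrative only; assumes an existing WebGLRenderer, camera, Scene
+	and Clock — note that SUN_DIRECTION and INITIAL_WIND must be supplied as arrays ):
+
+		var ocean = new Ocean( renderer, camera, scene, {
+			INITIAL_SIZE: 256.0,
+			INITIAL_WIND: [ 10.0, 10.0 ],
+			INITIAL_CHOPPINESS: 1.5,
+			SUN_DIRECTION: [ - 1.0, 1.0, 1.0 ],
+			RESOLUTION: 512
+		} );
+		scene.add( ocean.oceanMesh );
+
+		// each frame:
+		ocean.deltaTime = clock.getDelta();
+		ocean.render();
+		ocean.materialOcean.uniforms.u_size.value = ocean.size;
+		ocean.materialOcean.uniforms.u_projectionMatrix.value = camera.projectionMatrix;
+		ocean.materialOcean.uniforms.u_viewMatrix.value = camera.matrixWorldInverse;
+		ocean.materialOcean.uniforms.u_cameraPosition.value = camera.position;
+*/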
+
+import {
+	ClampToEdgeWrapping,
+	DataTexture,
+	FloatType,
+	HalfFloatType,
+	LinearFilter,
+	Mesh,
+	NearestFilter,
+	OrthographicCamera,
+	PlaneBufferGeometry,
+	RGBAFormat,
+	RepeatWrapping,
+	Scene,
+	ShaderMaterial,
+	UniformsUtils,
+	Vector2,
+	Vector3,
+	WebGLRenderTarget
+} from "../../../build/three.module.js";
+import { OceanShaders } from "../shaders/OceanShaders.js";
+
+var Ocean = function ( renderer, camera, scene, options ) {
+
+	// flag used to trigger parameter changes
+	this.changed = true;
+	this.initial = true;
+
+	// Assign required parameters as object properties
+	this.oceanCamera = new OrthographicCamera(); //camera.clone();
+	this.oceanCamera.position.z = 1;
+	this.renderer = renderer;
+	this.renderer.clearColor( 0xffffff );
+
+	this.scene = new Scene();
+
+	// Assign optional parameters as variables and object properties
+	function optionalParameter( value, defaultValue ) {
+
+		return value !== undefined ? value : defaultValue;
+
+	}
+	options = options || {};
+	this.clearColor = optionalParameter( options.CLEAR_COLOR, [ 1.0, 1.0, 1.0, 0.0 ] );
+	this.geometryOrigin = optionalParameter( options.GEOMETRY_ORIGIN, [ - 1000.0, - 1000.0 ] );
+	this.sunDirectionX = optionalParameter( options.SUN_DIRECTION[ 0 ], - 1.0 );
+	this.sunDirectionY = optionalParameter( options.SUN_DIRECTION[ 1 ], 1.0 );
+	this.sunDirectionZ = optionalParameter( options.SUN_DIRECTION[ 2 ], 1.0 );
+	this.oceanColor = optionalParameter( options.OCEAN_COLOR, new Vector3( 0.004, 0.016, 0.047 ) );
+	this.skyColor = optionalParameter( options.SKY_COLOR, new Vector3( 3.2, 9.6, 12.8 ) );
+	this.exposure = optionalParameter( options.EXPOSURE, 0.35 );
+	this.geometryResolution = optionalParameter( options.GEOMETRY_RESOLUTION, 32 );
+	this.geometrySize = optionalParameter( options.GEOMETRY_SIZE, 2000 );
+	this.resolution = optionalParameter( options.RESOLUTION, 64 );
+	this.floatSize = optionalParameter( options.SIZE_OF_FLOAT, 4 );
+	this.windX = optionalParameter( options.INITIAL_WIND[ 0 ], 10.0 );
+	this.windY = optionalParameter( options.INITIAL_WIND[ 1 ], 10.0 );
+	this.size = optionalParameter( options.INITIAL_SIZE, 250.0 );
+	this.choppiness = optionalParameter( options.INITIAL_CHOPPINESS, 1.5 );
+
+	//
+	this.matrixNeedsUpdate = false;
+
+	// Setup framebuffer pipeline
+	var renderTargetType = optionalParameter( options.USE_HALF_FLOAT, false ) ? HalfFloatType : FloatType;
+	var LinearClampParams = {
+		minFilter: LinearFilter,
+		magFilter: LinearFilter,
+		wrapS: ClampToEdgeWrapping,
+		wrapT: ClampToEdgeWrapping,
+		format: RGBAFormat,
+		stencilBuffer: false,
+		depthBuffer: false,
+		premultiplyAlpha: false,
+		type: renderTargetType
+	};
+	var NearestClampParams = {
+		minFilter: NearestFilter,
+		magFilter: NearestFilter,
+		wrapS: ClampToEdgeWrapping,
+		wrapT: ClampToEdgeWrapping,
+		format: RGBAFormat,
+		stencilBuffer: false,
+		depthBuffer: false,
+		premultiplyAlpha: false,
+		type: renderTargetType
+	};
+	var NearestRepeatParams = {
+		minFilter: NearestFilter,
+		magFilter: NearestFilter,
+		wrapS: RepeatWrapping,
+		wrapT: RepeatWrapping,
+		format: RGBAFormat,
+		stencilBuffer: false,
+		depthBuffer: false,
+		premultiplyAlpha: false,
+		type: renderTargetType
+	};
+	this.initialSpectrumFramebuffer = new WebGLRenderTarget( this.resolution, this.resolution, NearestRepeatParams );
+	this.spectrumFramebuffer = new WebGLRenderTarget( this.resolution, this.resolution, NearestClampParams );
+	this.pingPhaseFramebuffer = new WebGLRenderTarget( this.resolution, this.resolution, NearestClampParams );
+	this.pongPhaseFramebuffer = new WebGLRenderTarget( this.resolution, this.resolution, NearestClampParams );
+	this.pingTransformFramebuffer = new WebGLRenderTarget( this.resolution, this.resolution, NearestClampParams );
+	this.pongTransformFramebuffer = new WebGLRenderTarget( this.resolution, this.resolution, NearestClampParams );
+	this.displacementMapFramebuffer = new WebGLRenderTarget( this.resolution, this.resolution, LinearClampParams );
+	this.normalMapFramebuffer = new WebGLRenderTarget( this.resolution, this.resolution, LinearClampParams );
+
+	// Define shaders and constant uniforms
+	////////////////////////////////////////
+
+	// 0 - The vertex shader used in all of the simulation steps
+	var fullscreenVertexShader = OceanShaders[ "ocean_sim_vertex" ];
+
+	// 1 - Horizontal wave vertices used for FFT
+	var oceanHorizontalShader = OceanShaders[ "ocean_subtransform" ];
+	var oceanHorizontalUniforms = UniformsUtils.clone( oceanHorizontalShader.uniforms );
+	this.materialOceanHorizontal = new ShaderMaterial( {
+		uniforms: oceanHorizontalUniforms,
+		vertexShader: fullscreenVertexShader.vertexShader,
+		fragmentShader: "#define HORIZONTAL \n" + oceanHorizontalShader.fragmentShader
+	} );
+	this.materialOceanHorizontal.uniforms.u_transformSize = { value: this.resolution };
+	this.materialOceanHorizontal.uniforms.u_subtransformSize = { value: null };
+	this.materialOceanHorizontal.uniforms.u_input = { value: null };
+	this.materialOceanHorizontal.depthTest = false;
+
+	// 2 - Vertical wave vertices used for FFT
+	var oceanVerticalShader = OceanShaders[ "ocean_subtransform" ];
+	var oceanVerticalUniforms = UniformsUtils.clone( oceanVerticalShader.uniforms );
+	this.materialOceanVertical = new ShaderMaterial( {
+		uniforms: oceanVerticalUniforms,
+		vertexShader: fullscreenVertexShader.vertexShader,
+		fragmentShader: oceanVerticalShader.fragmentShader
+	} );
+	this.materialOceanVertical.uniforms.u_transformSize = { value: this.resolution };
+	this.materialOceanVertical.uniforms.u_subtransformSize = { value: null };
+	this.materialOceanVertical.uniforms.u_input = { value: null };
+	this.materialOceanVertical.depthTest = false;
+
+	// 3 - Initial spectrum used to generate height map
+	var initialSpectrumShader = OceanShaders[ "ocean_initial_spectrum" ];
+	var initialSpectrumUniforms = UniformsUtils.clone( initialSpectrumShader.uniforms );
+	this.materialInitialSpectrum = new ShaderMaterial( {
+		uniforms: initialSpectrumUniforms,
+		vertexShader: initialSpectrumShader.vertexShader,
+		fragmentShader: initialSpectrumShader.fragmentShader
+	} );
+	this.materialInitialSpectrum.uniforms.u_wind = { value: new Vector2() };
+	this.materialInitialSpectrum.uniforms.u_resolution = { value: this.resolution };
+	this.materialInitialSpectrum.depthTest = false;
+
+	// 4 - Phases used to animate heightmap
+	var phaseShader = OceanShaders[ "ocean_phase" ];
+	var phaseUniforms = UniformsUtils.clone( phaseShader.uniforms );
+	this.materialPhase = new ShaderMaterial( {
+		uniforms: phaseUniforms,
+		vertexShader: fullscreenVertexShader.vertexShader,
+		fragmentShader: phaseShader.fragmentShader
+	} );
+	this.materialPhase.uniforms.u_resolution = { value: this.resolution };
+	this.materialPhase.depthTest = false;
+
+	// 5 - Shader used to update spectrum
+	var spectrumShader = OceanShaders[ "ocean_spectrum" ];
+	var spectrumUniforms = UniformsUtils.clone( spectrumShader.uniforms );
+	this.materialSpectrum = new ShaderMaterial( {
+		uniforms: spectrumUniforms,
+		vertexShader: fullscreenVertexShader.vertexShader,
+		fragmentShader: spectrumShader.fragmentShader
+	} );
+	this.materialSpectrum.uniforms.u_initialSpectrum = { value: null };
+	this.materialSpectrum.uniforms.u_resolution = { value: this.resolution };
+	this.materialSpectrum.depthTest = false;
+
+	// 6 - Shader used to update spectrum normals
+	var normalShader = OceanShaders[ "ocean_normals" ];
+	var normalUniforms = UniformsUtils.clone( normalShader.uniforms );
+	this.materialNormal = new ShaderMaterial( {
+		uniforms: normalUniforms,
+		vertexShader: fullscreenVertexShader.vertexShader,
+		fragmentShader: normalShader.fragmentShader
+	} );
+	this.materialNormal.uniforms.u_displacementMap = { value: null };
+	this.materialNormal.uniforms.u_resolution = { value: this.resolution };
+	this.materialNormal.depthTest = false;
+
+	// 7 - Shader used for the final ocean render
+	var oceanShader = OceanShaders[ "ocean_main" ];
+	var oceanUniforms = UniformsUtils.clone( oceanShader.uniforms );
+	this.materialOcean = new ShaderMaterial( {
+		uniforms: oceanUniforms,
+		vertexShader: oceanShader.vertexShader,
+		fragmentShader: oceanShader.fragmentShader
+	} );
+	// this.materialOcean.wireframe = true;
+	this.materialOcean.uniforms.u_geometrySize = { value: this.resolution };
+	this.materialOcean.uniforms.u_displacementMap = { value: this.displacementMapFramebuffer.texture };
+	this.materialOcean.uniforms.u_normalMap = { value: this.normalMapFramebuffer.texture };
+	this.materialOcean.uniforms.u_oceanColor = { value: this.oceanColor };
+	this.materialOcean.uniforms.u_skyColor = { value: this.skyColor };
+	this.materialOcean.uniforms.u_sunDirection = { value: new Vector3( this.sunDirectionX, this.sunDirectionY, this.sunDirectionZ ) };
+	this.materialOcean.uniforms.u_exposure = { value: this.exposure };
+
+	// Disable blending to prevent default premultiplied alpha values
+	this.materialOceanHorizontal.blending = 0;
+	this.materialOceanVertical.blending = 0;
+	this.materialInitialSpectrum.blending = 0;
+	this.materialPhase.blending = 0;
+	this.materialSpectrum.blending = 0;
+	this.materialNormal.blending = 0;
+	this.materialOcean.blending = 0;
+
+	// Create the simulation plane
+	this.screenQuad = new Mesh( new PlaneBufferGeometry( 2, 2 ) );
+	this.scene.add( this.screenQuad );
+
+	// Initialise spectrum data
+	this.generateSeedPhaseTexture();
+
+	// Generate the ocean mesh
+	this.generateMesh();
+
+};
+
+Ocean.prototype.generateMesh = function () {
+
+	var geometry = new PlaneBufferGeometry( this.geometrySize, this.geometrySize, this.geometryResolution, this.geometryResolution );
+
+	geometry.rotateX( - Math.PI / 2 );
+
+	this.oceanMesh = new Mesh( geometry, this.materialOcean );
+
+};
+
+Ocean.prototype.render = function () {
+
+	var currentRenderTarget = this.renderer.getRenderTarget();
+
+	this.scene.overrideMaterial = null;
+
+	if ( this.changed )
+		this.renderInitialSpectrum();
+
+	this.renderWavePhase();
+	this.renderSpectrum();
+	this.renderSpectrumFFT();
+	this.renderNormalMap();
+	this.scene.overrideMaterial = null;
+
+	this.renderer.setRenderTarget( currentRenderTarget );
+
+};
+
+Ocean.prototype.generateSeedPhaseTexture = function () {
+
+	// Setup the seed texture
+	this.pingPhase = true;
+	var phaseArray = new window.Float32Array( this.resolution * this.resolution * 4 );
+	for ( var i = 0; i < this.resolution; i ++ ) {
+
+		for ( var j = 0; j < this.resolution; j ++ ) {
+
+			phaseArray[ i * this.resolution * 4 + j * 4 ] = Math.random() * 2.0 * Math.PI;
+			phaseArray[ i * this.resolution * 4 + j * 4 + 1 ] = 0.0;
+			phaseArray[ i * this.resolution * 4 + j * 4 + 2 ] = 0.0;
+			phaseArray[ i * this.resolution * 4 + j * 4 + 3 ] = 0.0;
+
+		}
+
+	}
+
+	this.pingPhaseTexture = new DataTexture( phaseArray, this.resolution, this.resolution, RGBAFormat );
+	this.pingPhaseTexture.wrapS = ClampToEdgeWrapping;
+	this.pingPhaseTexture.wrapT = ClampToEdgeWrapping;
+	this.pingPhaseTexture.type = FloatType;
+	this.pingPhaseTexture.needsUpdate = true;
+
+};
+
+Ocean.prototype.renderInitialSpectrum = function () {
+
+	this.scene.overrideMaterial = this.materialInitialSpectrum;
+	this.materialInitialSpectrum.uniforms.u_wind.value.set( this.windX, this.windY );
+	this.materialInitialSpectrum.uniforms.u_size.value = this.size;
+
+	this.renderer.setRenderTarget( this.initialSpectrumFramebuffer );
+	this.renderer.clear();
+	this.renderer.render( this.scene, this.oceanCamera );
+
+};
+
+Ocean.prototype.renderWavePhase = function () {
+
+	this.scene.overrideMaterial = this.materialPhase;
+	this.screenQuad.material = this.materialPhase;
+	if ( this.initial ) {
+
+		this.materialPhase.uniforms.u_phases.value = this.pingPhaseTexture;
+		this.initial = false;
+
+	} else {
+
+		this.materialPhase.uniforms.u_phases.value = this.pingPhase ? this.pingPhaseFramebuffer.texture : this.pongPhaseFramebuffer.texture;
+
+	}
+	this.materialPhase.uniforms.u_deltaTime.value = this.deltaTime;
+	this.materialPhase.uniforms.u_size.value = this.size;
+	this.renderer.setRenderTarget( this.pingPhase ? this.pongPhaseFramebuffer : this.pingPhaseFramebuffer );
+	this.renderer.render( this.scene, this.oceanCamera );
+	this.pingPhase = ! this.pingPhase;
+
+};
+
+Ocean.prototype.renderSpectrum = function () {
+
+	this.scene.overrideMaterial = this.materialSpectrum;
+	this.materialSpectrum.uniforms.u_initialSpectrum.value = this.initialSpectrumFramebuffer.texture;
+	this.materialSpectrum.uniforms.u_phases.value = this.pingPhase ? this.pingPhaseFramebuffer.texture : this.pongPhaseFramebuffer.texture;
+	this.materialSpectrum.uniforms.u_choppiness.value = this.choppiness;
+	this.materialSpectrum.uniforms.u_size.value = this.size;
+
+	this.renderer.setRenderTarget( this.spectrumFramebuffer );
+	this.renderer.render( this.scene, this.oceanCamera );
+
+};
+
+Ocean.prototype.renderSpectrumFFT = function () {
+
+	// GPU FFT using Stockham formulation
+	var iterations = Math.log( this.resolution ) / Math.log( 2 ); // log2
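+	// Illustrative example: with the default resolution of 64 this makes 6 horizontal passes
+	// followed by 6 vertical passes, ping-ponging between the two transform framebuffers;
+	// the final vertical pass writes its result into displacementMapFramebuffer.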
+
+	this.scene.overrideMaterial = this.materialOceanHorizontal;
+
+	for ( var i = 0; i < iterations; i ++ ) {
+
+		if ( i === 0 ) {
+
+			this.materialOceanHorizontal.uniforms.u_input.value = this.spectrumFramebuffer.texture;
+			this.materialOceanHorizontal.uniforms.u_subtransformSize.value = Math.pow( 2, ( i % ( iterations ) ) + 1 );
+
+			this.renderer.setRenderTarget( this.pingTransformFramebuffer );
+			this.renderer.render( this.scene, this.oceanCamera );
+
+		} else if ( i % 2 === 1 ) {
+
+			this.materialOceanHorizontal.uniforms.u_input.value = this.pingTransformFramebuffer.texture;
+			this.materialOceanHorizontal.uniforms.u_subtransformSize.value = Math.pow( 2, ( i % ( iterations ) ) + 1 );
+
+			this.renderer.setRenderTarget( this.pongTransformFramebuffer );
+			this.renderer.render( this.scene, this.oceanCamera );
+
+		} else {
+
+			this.materialOceanHorizontal.uniforms.u_input.value = this.pongTransformFramebuffer.texture;
+			this.materialOceanHorizontal.uniforms.u_subtransformSize.value = Math.pow( 2, ( i % ( iterations ) ) + 1 );
+
+			this.renderer.setRenderTarget( this.pingTransformFramebuffer );
+			this.renderer.render( this.scene, this.oceanCamera );
+
+		}
+
+	}
+	this.scene.overrideMaterial = this.materialOceanVertical;
+	for ( var i = iterations; i < iterations * 2; i ++ ) {
+
+		if ( i === iterations * 2 - 1 ) {
+
+			this.materialOceanVertical.uniforms.u_input.value = ( iterations % 2 === 0 ) ? this.pingTransformFramebuffer.texture : this.pongTransformFramebuffer.texture;
+			this.materialOceanVertical.uniforms.u_subtransformSize.value = Math.pow( 2, ( i % ( iterations ) ) + 1 );
+
+			this.renderer.setRenderTarget( this.displacementMapFramebuffer );
+			this.renderer.render( this.scene, this.oceanCamera );
+
+		} else if ( i % 2 === 1 ) {
+
+			this.materialOceanVertical.uniforms.u_input.value = this.pingTransformFramebuffer.texture;
+			this.materialOceanVertical.uniforms.u_subtransformSize.value = Math.pow( 2, ( i % ( iterations ) ) + 1 );
+
+			this.renderer.setRenderTarget( this.pongTransformFramebuffer );
+			this.renderer.render( this.scene, this.oceanCamera );
+
+		} else {
+
+			this.materialOceanVertical.uniforms.u_input.value = this.pongTransformFramebuffer.texture;
+			this.materialOceanVertical.uniforms.u_subtransformSize.value = Math.pow( 2, ( i % ( iterations ) ) + 1 );
+
+			this.renderer.setRenderTarget( this.pingTransformFramebuffer );
+			this.renderer.render( this.scene, this.oceanCamera );
+
+		}
+
+	}
+
+};
+
+Ocean.prototype.renderNormalMap = function () {
+
+	this.scene.overrideMaterial = this.materialNormal;
+	if ( this.changed ) this.materialNormal.uniforms.u_size.value = this.size;
+	this.materialNormal.uniforms.u_displacementMap.value = this.displacementMapFramebuffer.texture;
+
+	this.renderer.setRenderTarget( this.normalMapFramebuffer );
+	this.renderer.clear();
+	this.renderer.render( this.scene, this.oceanCamera );
+
+};
+
+export { Ocean };

+ 1 - 1
examples/jsm/postprocessing/EffectComposer.d.ts

@@ -22,7 +22,7 @@ export class EffectComposer {
   addPass(pass: Pass): void;
   insertPass(pass: Pass, index: number): void;
   isLastEnabledPass(): boolean;
-  render(deltaTime: number): void;
+  render(deltaTime?: number): void;
   reset(renderTarget?: WebGLRenderTarget): void;
   setSize(width: number, height: number): void;
   setPixelRatio(pixelRatio: number): void;

+ 1 - 1
examples/jsm/postprocessing/RenderPass.d.ts

@@ -8,7 +8,7 @@ import {
 import { Pass } from './Pass';
 
 export class RenderPass extends Pass {
-  constructor(scene: Scene, camera: Camera, overrideMaterial: Material, clearColor: Color, clearAlpha: number);
+  constructor(scene: Scene, camera: Camera, overrideMaterial?: Material, clearColor?: Color, clearAlpha?: number);
   scene: Scene;
   camera: Camera;
   overrideMaterial: Material;

+ 75 - 0
examples/jsm/shaders/OceanShaders.d.ts

@@ -0,0 +1,75 @@
+import {
+  Texture,
+  Uniform,
+  Vector2,
+} from '../../../src/Three';
+
+export interface OceanShaders {
+
+  ocean_sim_vertex: {
+    vertexShader: string;
+  }
+  ocean_subtransform: {
+    uniforms: {
+      u_input: Uniform,
+      u_transformSize: Uniform,
+      u_subtransformSize: Uniform,
+    },
+    fragmentShader: string;
+  }
+  ocean_initial_spectrum: {
+    uniforms: {
+      u_wind: Uniform,
+      u_resolution: Uniform,
+      u_size: Uniform,
+    },
+    vertexShader: string;
+    fragmentShader: string;
+  }
+  ocean_phase: {
+    uniforms: {
+      u_phases: Uniform,
+      u_deltaTime: Uniform,
+      u_resolution: Uniform,
+      u_size: Uniform,
+    },
+    vertexShader: string;
+    fragmentShader: string;
+  }
+  ocean_spectrum: {
+    uniforms: {
+      u_size: Uniform,
+      u_resolution: Uniform,
+      u_choppiness: Uniform,
+      u_phases: Uniform,
+      u_initialSpectrum: Uniform,
+    },
+    fragmentShader: string;
+  }
+  ocean_normals: {
+    uniforms: {
+      u_displacementMap: Uniform,
+      u_resolution: Uniform,
+      u_size: Uniform,
+    },
+    fragmentShader: string;
+  }
+  ocean_main: {
+    uniforms: {
+      u_displacementMap: Uniform,
+      u_normalMap: Uniform,
+      u_geometrySize: Uniform,
+      u_size: Uniform,
+      u_projectionMatrix: Uniform,
+      u_viewMatrix: Uniform,
+      u_cameraPosition: Uniform,
+      u_skyColor: Uniform,
+      u_oceanColor: Uniform,
+      u_sunDirection: Uniform,
+      u_exposure: Uniform,
+    },
+    vertexShader: string;
+    fragmentShader: string;
+  }
+
+}

+ 377 - 0
examples/jsm/shaders/OceanShaders.js

@@ -0,0 +1,377 @@
+/* Author: Aleksandr Albert
+// Website: www.routter.co.tt
+
+// Description: A deep water ocean shader set
+// based on an implementation of a Tessendorf Waves
+// originally presented by David Li ( www.david.li/waves )
+
+// The general method is to apply shaders to simulation Framebuffers
+// and then sample these framebuffers when rendering the ocean mesh
+
+// The set uses 7 shaders:
+
+// -- Simulation shaders
+// [1] ocean_sim_vertex         -> Vertex shader used to set up a 2x2 simulation plane centered at (0,0)
+// [2] ocean_subtransform       -> Fragment shader used to subtransform the mesh (generates the displacement map)
+// [3] ocean_initial_spectrum   -> Fragment shader used to set initial wave frequency at a texel coordinate
+// [4] ocean_phase              -> Fragment shader used to set wave phase at a texel coordinate
+// [5] ocean_spectrum           -> Fragment shader used to set current wave frequency at a texel coordinate
+// [6] ocean_normals            -> Fragment shader used to set face normals at a texel coordinate
+
+// -- Rendering Shader
+// [7] ocean_main               -> Vertex and Fragment shader used to create the final render
+*/
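+
+/*
+	Each entry is intended to be cloned into a ShaderMaterial, as Ocean.js does. A minimal
+	sketch ( illustrative; assumes ShaderMaterial and UniformsUtils are imported from three ):
+
+		var shader = OceanShaders[ 'ocean_initial_spectrum' ];
+		var material = new ShaderMaterial( {
+			uniforms: UniformsUtils.clone( shader.uniforms ),
+			vertexShader: shader.vertexShader,
+			fragmentShader: shader.fragmentShader
+		} );
+
+	Entries without a vertexShader of their own use OceanShaders[ 'ocean_sim_vertex' ].vertexShader.
+*/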
+
+import {
+	Vector2
+} from "../../../build/three.module.js";
+
+var OceanShaders = {};
+OceanShaders[ 'ocean_sim_vertex' ] = {
+	vertexShader: [
+		'varying vec2 vUV;',
+
+		'void main (void) {',
+			'vUV = position.xy * 0.5 + 0.5;',
+			'gl_Position = vec4(position, 1.0 );',
+		'}'
+	].join( '\n' )
+};
+OceanShaders[ 'ocean_subtransform' ] = {
+	uniforms: {
+		"u_input": { value: null },
+		"u_transformSize": { value: 512.0 },
+		"u_subtransformSize": { value: 250.0 }
+	},
+	fragmentShader: [
+		//GPU FFT using a Stockham formulation
+
+		'precision highp float;',
+		'#include <common>',
+
+		'uniform sampler2D u_input;',
+		'uniform float u_transformSize;',
+		'uniform float u_subtransformSize;',
+
+		'varying vec2 vUV;',
+
+		'vec2 multiplyComplex (vec2 a, vec2 b) {',
+			'return vec2(a[0] * b[0] - a[1] * b[1], a[1] * b[0] + a[0] * b[1]);',
+		'}',
+
+		'void main (void) {',
+			'#ifdef HORIZONTAL',
+			'float index = vUV.x * u_transformSize - 0.5;',
+			'#else',
+			'float index = vUV.y * u_transformSize - 0.5;',
+			'#endif',
+
+			'float evenIndex = floor(index / u_subtransformSize) * (u_subtransformSize * 0.5) + mod(index, u_subtransformSize * 0.5);',
+
+			//transform two complex sequences simultaneously
+			'#ifdef HORIZONTAL',
+			'vec4 even = texture2D(u_input, vec2(evenIndex + 0.5, gl_FragCoord.y) / u_transformSize).rgba;',
+			'vec4 odd = texture2D(u_input, vec2(evenIndex + u_transformSize * 0.5 + 0.5, gl_FragCoord.y) / u_transformSize).rgba;',
+			'#else',
+			'vec4 even = texture2D(u_input, vec2(gl_FragCoord.x, evenIndex + 0.5) / u_transformSize).rgba;',
+			'vec4 odd = texture2D(u_input, vec2(gl_FragCoord.x, evenIndex + u_transformSize * 0.5 + 0.5) / u_transformSize).rgba;',
+			'#endif',
+
+			'float twiddleArgument = -2.0 * PI * (index / u_subtransformSize);',
+			'vec2 twiddle = vec2(cos(twiddleArgument), sin(twiddleArgument));',
+
+			'vec2 outputA = even.xy + multiplyComplex(twiddle, odd.xy);',
+			'vec2 outputB = even.zw + multiplyComplex(twiddle, odd.zw);',
+
+			'gl_FragColor = vec4(outputA, outputB);',
+		'}'
+	].join( '\n' )
+};
+OceanShaders[ 'ocean_initial_spectrum' ] = {
+	uniforms: {
+		"u_wind": { value: new Vector2( 10.0, 10.0 ) },
+		"u_resolution": { value: 512.0 },
+		"u_size": { value: 250.0 }
+	},
+	vertexShader: [
+		'void main (void) {',
+			'gl_Position = vec4(position, 1.0);',
+		'}'
+	].join( '\n' ),
+	fragmentShader: [
+		'precision highp float;',
+		'#include <common>',
+
+		'const float G = 9.81;',
+		'const float KM = 370.0;',
+		'const float CM = 0.23;',
+
+		'uniform vec2 u_wind;',
+		'uniform float u_resolution;',
+		'uniform float u_size;',
+
+		'float omega (float k) {',
+			'return sqrt(G * k * (1.0 + pow2(k / KM)));',
+		'}',
+
+		'float tanh (float x) {',
+			'return (1.0 - exp(-2.0 * x)) / (1.0 + exp(-2.0 * x));',
+		'}',
+
+		'void main (void) {',
+			'vec2 coordinates = gl_FragCoord.xy - 0.5;',
+
+			'float n = (coordinates.x < u_resolution * 0.5) ? coordinates.x : coordinates.x - u_resolution;',
+			'float m = (coordinates.y < u_resolution * 0.5) ? coordinates.y : coordinates.y - u_resolution;',
+
+			'vec2 K = (2.0 * PI * vec2(n, m)) / u_size;',
+			'float k = length(K);',
+
+			'float l_wind = length(u_wind);',
+
+			'float Omega = 0.84;',
+			'float kp = G * pow2(Omega / l_wind);',
+
+			'float c = omega(k) / k;',
+			'float cp = omega(kp) / kp;',
+
+			'float Lpm = exp(-1.25 * pow2(kp / k));',
+			'float gamma = 1.7;',
+			'float sigma = 0.08 * (1.0 + 4.0 * pow(Omega, -3.0));',
+			'float Gamma = exp(-pow2(sqrt(k / kp) - 1.0) / 2.0 * pow2(sigma));',
+			'float Jp = pow(gamma, Gamma);',
+			'float Fp = Lpm * Jp * exp(-Omega / sqrt(10.0) * (sqrt(k / kp) - 1.0));',
+			'float alphap = 0.006 * sqrt(Omega);',
+			'float Bl = 0.5 * alphap * cp / c * Fp;',
+
+			'float z0 = 0.000037 * pow2(l_wind) / G * pow(l_wind / cp, 0.9);',
+			'float uStar = 0.41 * l_wind / log(10.0 / z0);',
+			'float alpham = 0.01 * ((uStar < CM) ? (1.0 + log(uStar / CM)) : (1.0 + 3.0 * log(uStar / CM)));',
+			'float Fm = exp(-0.25 * pow2(k / KM - 1.0));',
+			'float Bh = 0.5 * alpham * CM / c * Fm * Lpm;',
+
+			'float a0 = log(2.0) / 4.0;',
+			'float am = 0.13 * uStar / CM;',
+			'float Delta = tanh(a0 + 4.0 * pow(c / cp, 2.5) + am * pow(CM / c, 2.5));',
+
+			'float cosPhi = dot(normalize(u_wind), normalize(K));',
+
+			'float S = (1.0 / (2.0 * PI)) * pow(k, -4.0) * (Bl + Bh) * (1.0 + Delta * (2.0 * cosPhi * cosPhi - 1.0));',
+
+			'float dk = 2.0 * PI / u_size;',
+			'float h = sqrt(S / 2.0) * dk;',
+
+			'if (K.x == 0.0 && K.y == 0.0) {',
+				'h = 0.0;', //no DC term
+			'}',
+			'gl_FragColor = vec4(h, 0.0, 0.0, 0.0);',
+		'}'
+	].join( '\n' )
+};
+OceanShaders[ 'ocean_phase' ] = {
+	uniforms: {
+		"u_phases": { value: null },
+		"u_deltaTime": { value: null },
+		"u_resolution": { value: null },
+		"u_size": { value: null }
+	},
+	fragmentShader: [
+		'precision highp float;',
+		'#include <common>',
+
+		'const float G = 9.81;',
+		'const float KM = 370.0;',
+
+		'varying vec2 vUV;',
+
+		'uniform sampler2D u_phases;',
+		'uniform float u_deltaTime;',
+		'uniform float u_resolution;',
+		'uniform float u_size;',
+
+		'float omega (float k) {',
+			'return sqrt(G * k * (1.0 + k * k / KM * KM));',
+		'}',
+
+		'void main (void) {',
+			'float deltaTime = 1.0 / 60.0;',
+			'vec2 coordinates = gl_FragCoord.xy - 0.5;',
+			'float n = (coordinates.x < u_resolution * 0.5) ? coordinates.x : coordinates.x - u_resolution;',
+			'float m = (coordinates.y < u_resolution * 0.5) ? coordinates.y : coordinates.y - u_resolution;',
+			'vec2 waveVector = (2.0 * PI * vec2(n, m)) / u_size;',
+
+			'float phase = texture2D(u_phases, vUV).r;',
+			'float deltaPhase = omega(length(waveVector)) * u_deltaTime;',
+			'phase = mod(phase + deltaPhase, 2.0 * PI);',
+
+			'gl_FragColor = vec4(phase, 0.0, 0.0, 0.0);',
+		'}'
+	].join( '\n' )
+};
+OceanShaders[ 'ocean_spectrum' ] = {
+	uniforms: {
+		"u_size": { value: null },
+		"u_resolution": { value: null },
+		"u_choppiness": { value: null },
+		"u_phases": { value: null },
+		"u_initialSpectrum": { value: null }
+	},
+	fragmentShader: [
+		'precision highp float;',
+		'#include <common>',
+
+		'const float G = 9.81;',
+		'const float KM = 370.0;',
+
+		'varying vec2 vUV;',
+
+		'uniform float u_size;',
+		'uniform float u_resolution;',
+		'uniform float u_choppiness;',
+		'uniform sampler2D u_phases;',
+		'uniform sampler2D u_initialSpectrum;',
+
+		'vec2 multiplyComplex (vec2 a, vec2 b) {',
+			'return vec2(a[0] * b[0] - a[1] * b[1], a[1] * b[0] + a[0] * b[1]);',
+		'}',
+
+		'vec2 multiplyByI (vec2 z) {',
+			'return vec2(-z[1], z[0]);',
+		'}',
+
+		'float omega (float k) {',
+			'return sqrt(G * k * (1.0 + k * k / KM * KM));',
+		'}',
+
+		'void main (void) {',
+			'vec2 coordinates = gl_FragCoord.xy - 0.5;',
+			'float n = (coordinates.x < u_resolution * 0.5) ? coordinates.x : coordinates.x - u_resolution;',
+			'float m = (coordinates.y < u_resolution * 0.5) ? coordinates.y : coordinates.y - u_resolution;',
+			'vec2 waveVector = (2.0 * PI * vec2(n, m)) / u_size;',
+
+			'float phase = texture2D(u_phases, vUV).r;',
+			'vec2 phaseVector = vec2(cos(phase), sin(phase));',
+
+			'vec2 h0 = texture2D(u_initialSpectrum, vUV).rg;',
+			'vec2 h0Star = texture2D(u_initialSpectrum, vec2(1.0 - vUV + 1.0 / u_resolution)).rg;',
+			'h0Star.y *= -1.0;',
+
+			'vec2 h = multiplyComplex(h0, phaseVector) + multiplyComplex(h0Star, vec2(phaseVector.x, -phaseVector.y));',
+
+			'vec2 hX = -multiplyByI(h * (waveVector.x / length(waveVector))) * u_choppiness;',
+			'vec2 hZ = -multiplyByI(h * (waveVector.y / length(waveVector))) * u_choppiness;',
+
+			//no DC term
+			'if (waveVector.x == 0.0 && waveVector.y == 0.0) {',
+				'h = vec2(0.0);',
+				'hX = vec2(0.0);',
+				'hZ = vec2(0.0);',
+			'}',
+
+			'gl_FragColor = vec4(hX + multiplyByI(h), hZ);',
+		'}'
+	].join( '\n' )
+};
+OceanShaders[ 'ocean_normals' ] = {
+	uniforms: {
+		"u_displacementMap": { value: null },
+		"u_resolution": { value: null },
+		"u_size": { value: null }
+	},
+	fragmentShader: [
+		'precision highp float;',
+
+		'varying vec2 vUV;',
+
+		'uniform sampler2D u_displacementMap;',
+		'uniform float u_resolution;',
+		'uniform float u_size;',
+
+		'void main (void) {',
+			'float texel = 1.0 / u_resolution;',
+			'float texelSize = u_size / u_resolution;',
+
+			'vec3 center = texture2D(u_displacementMap, vUV).rgb;',
+			'vec3 right = vec3(texelSize, 0.0, 0.0) + texture2D(u_displacementMap, vUV + vec2(texel, 0.0)).rgb - center;',
+			'vec3 left = vec3(-texelSize, 0.0, 0.0) + texture2D(u_displacementMap, vUV + vec2(-texel, 0.0)).rgb - center;',
+			'vec3 top = vec3(0.0, 0.0, -texelSize) + texture2D(u_displacementMap, vUV + vec2(0.0, -texel)).rgb - center;',
+			'vec3 bottom = vec3(0.0, 0.0, texelSize) + texture2D(u_displacementMap, vUV + vec2(0.0, texel)).rgb - center;',
+
+			'vec3 topRight = cross(right, top);',
+			'vec3 topLeft = cross(top, left);',
+			'vec3 bottomLeft = cross(left, bottom);',
+			'vec3 bottomRight = cross(bottom, right);',
+
+			'gl_FragColor = vec4(normalize(topRight + topLeft + bottomLeft + bottomRight), 1.0);',
+		'}'
+	].join( '\n' )
+};
+OceanShaders[ 'ocean_main' ] = {
+	uniforms: {
+		"u_displacementMap": { value: null },
+		"u_normalMap": { value: null },
+		"u_geometrySize": { value: null },
+		"u_size": { value: null },
+		"u_projectionMatrix": { value: null },
+		"u_viewMatrix": { value: null },
+		"u_cameraPosition": { value: null },
+		"u_skyColor": { value: null },
+		"u_oceanColor": { value: null },
+		"u_sunDirection": { value: null },
+		"u_exposure": { value: null }
+	},
+	vertexShader: [
+		'precision highp float;',
+
+		'varying vec3 vPos;',
+		'varying vec2 vUV;',
+
+		'uniform mat4 u_projectionMatrix;',
+		'uniform mat4 u_viewMatrix;',
+		'uniform float u_size;',
+		'uniform float u_geometrySize;',
+		'uniform sampler2D u_displacementMap;',
+
+		'void main (void) {',
+			'vec3 newPos = position + texture2D(u_displacementMap, uv).rgb * (u_geometrySize / u_size);',
+			'vPos = newPos;',
+			'vUV = uv;',
+			'gl_Position = u_projectionMatrix * u_viewMatrix * vec4(newPos, 1.0);',
+		'}'
+	].join( '\n' ),
+	fragmentShader: [
+		'precision highp float;',
+
+		'varying vec3 vPos;',
+		'varying vec2 vUV;',
+
+		'uniform sampler2D u_displacementMap;',
+		'uniform sampler2D u_normalMap;',
+		'uniform vec3 u_cameraPosition;',
+		'uniform vec3 u_oceanColor;',
+		'uniform vec3 u_skyColor;',
+		'uniform vec3 u_sunDirection;',
+		'uniform float u_exposure;',
+
+		'vec3 hdr (vec3 color, float exposure) {',
+			'return 1.0 - exp(-color * exposure);',
+		'}',
+
+		'void main (void) {',
+			'vec3 normal = texture2D(u_normalMap, vUV).rgb;',
+
+			'vec3 view = normalize(u_cameraPosition - vPos);',
+			'float fresnel = 0.02 + 0.98 * pow(1.0 - dot(normal, view), 5.0);',
+			'vec3 sky = fresnel * u_skyColor;',
+
+			'float diffuse = clamp(dot(normal, normalize(u_sunDirection)), 0.0, 1.0);',
+			'vec3 water = (1.0 - fresnel) * u_oceanColor * u_skyColor * diffuse;',
+
+			'vec3 color = sky + water;',
+
+			'gl_FragColor = vec4(hdr(color, u_exposure), 1.0);',
+		'}'
+	].join( '\n' )
+};
+
+export { OceanShaders };

+ 16 - 0
examples/jsm/shaders/VolumeShader.d.ts

@@ -0,0 +1,16 @@
+import {
+  Uniform
+} from '../../../src/Three';
+
+export interface VolumeShader {
+  uniforms: {
+    u_size: Uniform;
+    u_renderstyle: Uniform;
+    u_renderthreshold: Uniform;
+    u_clim: Uniform;
+    u_data: Uniform;
+    u_cmdata: Uniform;
+  };
+  vertexShader: string;
+  fragmentShader: string;
+}

+ 331 - 0
examples/jsm/shaders/VolumeShader.js

@@ -0,0 +1,331 @@
+/**
+ * @author Almar Klein / http://almarklein.org
+ *
+ * Shaders to render 3D volumes using raycasting.
+ * The applied techniques are based on similar implementations in the Visvis and Vispy projects.
+ * This is not the only possible approach, hence the '1' suffix in VolumeRenderShader1.
+ */
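+
+/*
+	Minimal usage sketch ( illustrative only; assumes a WebGL 2 renderer, a DataTexture3D volume
+	and a 2D colormap texture — none of these are created by this module ):
+
+		var shader = VolumeRenderShader1;
+		var uniforms = UniformsUtils.clone( shader.uniforms );
+
+		uniforms[ 'u_data' ].value = volumeTexture;                   // DataTexture3D with the scalar field
+		uniforms[ 'u_size' ].value.set( xLength, yLength, zLength );  // volume dimensions
+		uniforms[ 'u_clim' ].value.set( 0, 1 );                       // colormap limits
+		uniforms[ 'u_renderstyle' ].value = 0;                        // 0: MIP, 1: ISO
+		uniforms[ 'u_renderthreshold' ].value = 0.15;                 // only used by the ISO style
+		uniforms[ 'u_cmdata' ].value = colormapTexture;               // 2D colormap texture
+
+		var material = new ShaderMaterial( {
+			uniforms: uniforms,
+			vertexShader: shader.vertexShader,
+			fragmentShader: shader.fragmentShader,
+			side: BackSide // the ray starts from the back face of the volume box
+		} );
+*/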
+
+import {
+	Vector2,
+	Vector3
+} from "../../../build/three.module.js";
+
+var VolumeRenderShader1 = {
+	uniforms: {
+				"u_size": { value: new Vector3( 1, 1, 1 ) },
+				"u_renderstyle": { value: 0 },
+				"u_renderthreshold": { value: 0.5 },
+				"u_clim": { value: new Vector2( 1, 1 ) },
+				"u_data": { value: null },
+				"u_cmdata": { value: null }
+		},
+		vertexShader: [
+				'varying vec4 v_nearpos;',
+				'varying vec4 v_farpos;',
+				'varying vec3 v_position;',
+
+				'mat4 inversemat(mat4 m) {',
+						// Taken from https://github.com/stackgl/glsl-inverse/blob/master/index.glsl
+						// This function is licenced by the MIT license to Mikola Lysenko
+						'float',
+						'a00 = m[0][0], a01 = m[0][1], a02 = m[0][2], a03 = m[0][3],',
+						'a10 = m[1][0], a11 = m[1][1], a12 = m[1][2], a13 = m[1][3],',
+						'a20 = m[2][0], a21 = m[2][1], a22 = m[2][2], a23 = m[2][3],',
+						'a30 = m[3][0], a31 = m[3][1], a32 = m[3][2], a33 = m[3][3],',
+
+						'b00 = a00 * a11 - a01 * a10,',
+						'b01 = a00 * a12 - a02 * a10,',
+						'b02 = a00 * a13 - a03 * a10,',
+						'b03 = a01 * a12 - a02 * a11,',
+						'b04 = a01 * a13 - a03 * a11,',
+						'b05 = a02 * a13 - a03 * a12,',
+						'b06 = a20 * a31 - a21 * a30,',
+						'b07 = a20 * a32 - a22 * a30,',
+						'b08 = a20 * a33 - a23 * a30,',
+						'b09 = a21 * a32 - a22 * a31,',
+						'b10 = a21 * a33 - a23 * a31,',
+						'b11 = a22 * a33 - a23 * a32,',
+
+						'det = b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06;',
+
+				'return mat4(',
+						'a11 * b11 - a12 * b10 + a13 * b09,',
+						'a02 * b10 - a01 * b11 - a03 * b09,',
+						'a31 * b05 - a32 * b04 + a33 * b03,',
+						'a22 * b04 - a21 * b05 - a23 * b03,',
+						'a12 * b08 - a10 * b11 - a13 * b07,',
+						'a00 * b11 - a02 * b08 + a03 * b07,',
+						'a32 * b02 - a30 * b05 - a33 * b01,',
+						'a20 * b05 - a22 * b02 + a23 * b01,',
+						'a10 * b10 - a11 * b08 + a13 * b06,',
+						'a01 * b08 - a00 * b10 - a03 * b06,',
+						'a30 * b04 - a31 * b02 + a33 * b00,',
+						'a21 * b02 - a20 * b04 - a23 * b00,',
+						'a11 * b07 - a10 * b09 - a12 * b06,',
+						'a00 * b09 - a01 * b07 + a02 * b06,',
+						'a31 * b01 - a30 * b03 - a32 * b00,',
+						'a20 * b03 - a21 * b01 + a22 * b00) / det;',
+				'}',
+
+
+				'void main() {',
+						// Prepare transforms to map to "camera view". See also:
+						// https://threejs.org/docs/#api/renderers/webgl/WebGLProgram
+						'mat4 viewtransformf = viewMatrix;',
+						'mat4 viewtransformi = inversemat(viewMatrix);',
+
+						// Project local vertex coordinate to camera position. Then do a step
+						// backward (in cam coords) to the near clipping plane, and project back. Do
+						// the same for the far clipping plane. This gives us all the information we
+						// need to calculate the ray and truncate it to the viewing cone.
+						'vec4 position4 = vec4(position, 1.0);',
+						'vec4 pos_in_cam = viewtransformf * position4;',
+
+						// Intersection of ray and near clipping plane (z = -1 in clip coords)
+						'pos_in_cam.z = -pos_in_cam.w;',
+						'v_nearpos = viewtransformi * pos_in_cam;',
+
+						// Intersection of ray and far clipping plane (z = +1 in clip coords)
+						'pos_in_cam.z = pos_in_cam.w;',
+						'v_farpos = viewtransformi * pos_in_cam;',
+
+						// Set varyings and output pos
+						'v_position = position;',
+						'gl_Position = projectionMatrix * viewMatrix * modelMatrix * position4;',
+				'}',
+		].join( '\n' ),
+	fragmentShader: [
+				'precision highp float;',
+				'precision mediump sampler3D;',
+
+				'uniform vec3 u_size;',
+				'uniform int u_renderstyle;',
+				'uniform float u_renderthreshold;',
+				'uniform vec2 u_clim;',
+
+				'uniform sampler3D u_data;',
+				'uniform sampler2D u_cmdata;',
+
+				'varying vec3 v_position;',
+				'varying vec4 v_nearpos;',
+				'varying vec4 v_farpos;',
+
+				// The maximum distance through our rendering volume is sqrt(3).
+				'const int MAX_STEPS = 887;	// 887 for 512^3, 1774 for 1024^3',
+				'const int REFINEMENT_STEPS = 4;',
+				'const float relative_step_size = 1.0;',
+				'const vec4 ambient_color = vec4(0.2, 0.4, 0.2, 1.0);',
+				'const vec4 diffuse_color = vec4(0.8, 0.2, 0.2, 1.0);',
+				'const vec4 specular_color = vec4(1.0, 1.0, 1.0, 1.0);',
+				'const float shininess = 40.0;',
+
+				'void cast_mip(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray);',
+				'void cast_iso(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray);',
+
+				'float sample1(vec3 texcoords);',
+				'vec4 apply_colormap(float val);',
+				'vec4 add_lighting(float val, vec3 loc, vec3 step, vec3 view_ray);',
+
+
+				'void main() {',
+						// Normalize clipping plane info
+						'vec3 farpos = v_farpos.xyz / v_farpos.w;',
+						'vec3 nearpos = v_nearpos.xyz / v_nearpos.w;',
+
+						// Calculate unit vector pointing in the view direction through this fragment.
+						'vec3 view_ray = normalize(nearpos.xyz - farpos.xyz);',
+
+						// Compute the (negative) distance to the front surface or near clipping plane.
+						// v_position is the back face of the cuboid, so the initial distance calculated in the dot
+						// product below is the distance from near clip plane to the back of the cuboid
+						'float distance = dot(nearpos - v_position, view_ray);',
+						'distance = max(distance, min((-0.5 - v_position.x) / view_ray.x,',
+																				'(u_size.x - 0.5 - v_position.x) / view_ray.x));',
+						'distance = max(distance, min((-0.5 - v_position.y) / view_ray.y,',
+																				'(u_size.y - 0.5 - v_position.y) / view_ray.y));',
+						'distance = max(distance, min((-0.5 - v_position.z) / view_ray.z,',
+																				'(u_size.z - 0.5 - v_position.z) / view_ray.z));',
+
+																				// Now we have the starting position on the front surface
+						'vec3 front = v_position + view_ray * distance;',
+
+						// Decide how many steps to take
+						'int nsteps = int(-distance / relative_step_size + 0.5);',
+						'if ( nsteps < 1 )',
+								'discard;',
+
+						// Get starting location and step vector in texture coordinates
+						'vec3 step = ((v_position - front) / u_size) / float(nsteps);',
+						'vec3 start_loc = front / u_size;',
+
+						// For testing: show the number of steps. This helps to establish
+						// whether the rays are correctly oriented
+						//'gl_FragColor = vec4(0.0, float(nsteps) / 1.0 / u_size.x, 1.0, 1.0);',
+						//'return;',
+
+						'if (u_renderstyle == 0)',
+								'cast_mip(start_loc, step, nsteps, view_ray);',
+						'else if (u_renderstyle == 1)',
+								'cast_iso(start_loc, step, nsteps, view_ray);',
+
+						'if (gl_FragColor.a < 0.05)',
+								'discard;',
+				'}',
+
+
+				'float sample1(vec3 texcoords) {',
+						'/* Sample float value from a 3D texture. Assumes intensity data. */',
+						'return texture(u_data, texcoords.xyz).r;',
+				'}',
+
+
+				'vec4 apply_colormap(float val) {',
+						'val = (val - u_clim[0]) / (u_clim[1] - u_clim[0]);',
+						'return texture2D(u_cmdata, vec2(val, 0.5));',
+				'}',
+
+
+				'void cast_mip(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray) {',
+
+						'float max_val = -1e6;',
+						'int max_i = 100;',
+						'vec3 loc = start_loc;',
+
+						// Enter the raycasting loop. In WebGL 1 the loop index cannot be compared with
+						// non-constant expression. So we use a hard-coded max, and an additional condition
+						// inside the loop.
+						'for (int iter=0; iter<MAX_STEPS; iter++) {',
+								'if (iter >= nsteps)',
+										'break;',
+								// Sample from the 3D texture
+								'float val = sample1(loc);',
+								// Apply MIP operation
+								'if (val > max_val) {',
+										'max_val = val;',
+										'max_i = iter;',
+								'}',
+								// Advance location deeper into the volume
+								'loc += step;',
+						'}',
+
+						// Refine location, gives crispier images
+						'vec3 iloc = start_loc + step * (float(max_i) - 0.5);',
+						'vec3 istep = step / float(REFINEMENT_STEPS);',
+						'for (int i=0; i<REFINEMENT_STEPS; i++) {',
+								'max_val = max(max_val, sample1(iloc));',
+								'iloc += istep;',
+						'}',
+
+						// Resolve final color
+						'gl_FragColor = apply_colormap(max_val);',
+				'}',
+
+
+				'void cast_iso(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray) {',
+
+						'gl_FragColor = vec4(0.0);	// init transparent',
+						'vec4 color3 = vec4(0.0);	// final color',
+						'vec3 dstep = 1.5 / u_size;	// step to sample derivative',
+						'vec3 loc = start_loc;',
+
+						'float low_threshold = u_renderthreshold - 0.02 * (u_clim[1] - u_clim[0]);',
+
+						// Enter the raycasting loop. In WebGL 1 the loop index cannot be compared with
+						// non-constant expression. So we use a hard-coded max, and an additional condition
+						// inside the loop.
+						'for (int iter=0; iter<MAX_STEPS; iter++) {',
+								'if (iter >= nsteps)',
+										'break;',
+
+										// Sample from the 3D texture
+								'float val = sample1(loc);',
+
+								'if (val > low_threshold) {',
+								// Take the last interval in smaller steps
+										'vec3 iloc = loc - 0.5 * step;',
+										'vec3 istep = step / float(REFINEMENT_STEPS);',
+										'for (int i=0; i<REFINEMENT_STEPS; i++) {',
+												'val = sample1(iloc);',
+												'if (val > u_renderthreshold) {',
+														'gl_FragColor = add_lighting(val, iloc, dstep, view_ray);',
+														'return;',
+												'}',
+												'iloc += istep;',
+										'}',
+								'}',
+
+								// Advance location deeper into the volume
+								'loc += step;',
+						'}',
+				'}',
+
+
+				'vec4 add_lighting(float val, vec3 loc, vec3 step, vec3 view_ray)',
+				'{',
+						// Calculate color by incorporating lighting
+
+						// View direction
+						'vec3 V = normalize(view_ray);',
+
+						// calculate normal vector from gradient
+						'vec3 N;',
+						'float val1, val2;',
+						'val1 = sample1(loc + vec3(-step[0], 0.0, 0.0));',
+						'val2 = sample1(loc + vec3(+step[0], 0.0, 0.0));',
+						'N[0] = val1 - val2;',
+						'val = max(max(val1, val2), val);',
+						'val1 = sample1(loc + vec3(0.0, -step[1], 0.0));',
+						'val2 = sample1(loc + vec3(0.0, +step[1], 0.0));',
+						'N[1] = val1 - val2;',
+						'val = max(max(val1, val2), val);',
+						'val1 = sample1(loc + vec3(0.0, 0.0, -step[2]));',
+						'val2 = sample1(loc + vec3(0.0, 0.0, +step[2]));',
+						'N[2] = val1 - val2;',
+						'val = max(max(val1, val2), val);',
+
+						'float gm = length(N); // gradient magnitude',
+						'N = normalize(N);',
+
+						// Flip normal so it points towards viewer
+						'float Nselect = float(dot(N, V) > 0.0);',
+						'N = (2.0 * Nselect - 1.0) * N;	// ==	Nselect * N - (1.0-Nselect)*N;',
+
+						// Init colors
+						'vec4 ambient_color = vec4(0.0, 0.0, 0.0, 0.0);',
+						'vec4 diffuse_color = vec4(0.0, 0.0, 0.0, 0.0);',
+						'vec4 specular_color = vec4(0.0, 0.0, 0.0, 0.0);',
+
+						// note: could allow multiple lights
+						'for (int i=0; i<1; i++)',
+						'{',
+								// Get light direction (make sure to prevent zero division)
+								'vec3 L = normalize(view_ray);	//lightDirs[i];',
+								'float lightEnabled = float( length(L) > 0.0 );',
+								'L = normalize(L + (1.0 - lightEnabled));',
+
+								// Calculate lighting properties
+								'float lambertTerm = clamp(dot(N, L), 0.0, 1.0);',
+								'vec3 H = normalize(L+V); // Halfway vector',
+								'float specularTerm = pow(max(dot(H, N), 0.0), shininess);',
+
+								// Calculate mask
+								'float mask1 = lightEnabled;',
+
+								// Calculate colors
+								'ambient_color +=	mask1 * ambient_color;	// * gl_LightSource[i].ambient;',
+								'diffuse_color +=	mask1 * lambertTerm;',
+								'specular_color += mask1 * specularTerm * specular_color;',
+						'}',
+
+						// Calculate final color by combining the different components
+						'vec4 final_color;',
+						'vec4 color = apply_colormap(val);',
+						'final_color = color * (ambient_color + diffuse_color) + specular_color;',
+						'final_color.a = color.a;',
+						'return final_color;',
+				'}',
+	].join( '\n' )
+};
+
+export { VolumeRenderShader1 };
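
Usage note: a minimal sketch of wiring VolumeRenderShader1 into a ShaderMaterial. It assumes module-style imports and that the shader object also defines a matching uniforms template earlier in this file (not shown in this excerpt); dataTexture3D, colormapTexture and the dimension variables are placeholders supplied by the caller.

	import * as THREE from '../build/three.module.js';
	import { VolumeRenderShader1 } from './jsm/shaders/VolumeShader.js';

	var uniforms = THREE.UniformsUtils.clone( VolumeRenderShader1.uniforms );
	uniforms[ 'u_data' ].value = dataTexture3D;             // 3D data texture sampled by sample1()
	uniforms[ 'u_size' ].value.set( xLength, yLength, zLength );
	uniforms[ 'u_clim' ].value.set( 0, 1 );                 // contrast limits used by apply_colormap()
	uniforms[ 'u_cmdata' ].value = colormapTexture;         // colormap lookup texture
	uniforms[ 'u_renderthreshold' ].value = 0.15;           // only used by cast_iso()

	var material = new THREE.ShaderMaterial( {
		uniforms: uniforms,
		vertexShader: VolumeRenderShader1.vertexShader,
		fragmentShader: VolumeRenderShader1.fragmentShader,
		side: THREE.BackSide // rays are cast from the back faces of the volume's box mesh
	} );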

BIN
examples/models/3mf/truck.3mf


BIN
examples/models/lwo/Images/Env_map_sphere_1.jpg


BIN
examples/models/lwo/Images/environments/kiara_8_sunset.jpg


BIN
examples/models/lwo/Images/environments/kloofendal_48d_partly_cloudy.jpg


BIN
examples/models/lwo/Images/material-PBR-abstract/Abstract_008_basecolor.jpg


BIN
examples/models/lwo/Images/material-PBR-abstract/Abstract_008_bump.jpg


BIN
examples/models/lwo/Images/material-PBR-abstract/Abstract_008_metallic.jpg


BIN
examples/models/lwo/Images/material-PBR-abstract/Abstract_008_normal.jpg


BIN
examples/models/lwo/Images/material-PBR-abstract/Abstract_008_roughness.jpg


BIN
examples/models/lwo/Images/material-Phong-metal/167_BUMP.jpg


BIN
examples/models/lwo/Images/material-Phong-metal/167_COLOR.JPG


BIN
examples/models/lwo/Images/material-Phong-metal/167_GLOSS.jpg


BIN
examples/models/lwo/Images/material-Phong-metal/167_SPEC.jpg


BIN
examples/models/lwo/Images/normal_2.jpg


+ 3 - 0
examples/models/lwo/LICENSE.txt

@@ -0,0 +1,3 @@
+Models by on the z. Licensed under Creative Commons Attribution 4.0 International License
+Environment images by HDRI Haven. Licensed under Creative Commons CC0 Public Domain
+Texture images by CC0 Textures. Licensed under Creative Commons CC0 Public Domain

BIN
examples/models/lwo/Objects/LWO3/Demo.lwo


BIN
examples/models/lwo/StandardMaterials.lwo


BIN
examples/textures/compressed/PavingStones.basis


BIN
examples/textures/compressed/kodim20.basis


+ 1 - 1
examples/webgl_gpgpu_birds.html

@@ -28,7 +28,7 @@
 		<script src="js/libs/stats.min.js"></script>
 		<script src="js/libs/dat.gui.min.js"></script>
 
-		<script src="js/GPUComputationRenderer.js"></script>
+		<script src="js/misc/GPUComputationRenderer.js"></script>
 
 		<!--
 		TODO: If you're reading this, you may wish to improve this example by

+ 1 - 1
examples/webgl_gpgpu_protoplanet.html

@@ -25,7 +25,7 @@
 		<script src="js/libs/dat.gui.min.js"></script>
 		<script src="js/controls/OrbitControls.js"></script>
 
-		<script src="js/GPUComputationRenderer.js"></script>
+		<script src="js/misc/GPUComputationRenderer.js"></script>
 
 
 		<!-- Fragment shader for protoplanet's position -->

+ 1 - 1
examples/webgl_gpgpu_water.html

@@ -22,7 +22,7 @@
 		<script src="js/controls/OrbitControls.js"></script>
 		<script src="js/math/SimplexNoise.js"></script>
 
-		<script src="js/GPUComputationRenderer.js"></script>
+		<script src="js/misc/GPUComputationRenderer.js"></script>
 
 
 		<!-- This is the 'compute shader' for the water heightmap: -->

+ 40 - 25
examples/webgl_loader_lwo.html

@@ -4,13 +4,15 @@
 		<title>three.js webgl - LWO loader</title>
 		<meta charset="utf-8">
 		<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
+
 		<link type="text/css" rel="stylesheet" href="main.css">
 	</head>
 
 	<body>
 		<div id="info">
-			<a href="http://threejs.org" target="_blank" rel="noopener">three.js</a> - LWOLoader<br />
-			Loader for Lightwave LWO V3 file format, by <a href="https://discoverthreejs.com/" target="_blank" rel="noopener">Discover three.js</a>
+			<a href="http://threejs.org" target="_blank" rel="noopener">three.js</a> - LWOLoader
+			<P>Lightwave Object loader by <a href="https://discoverthreejs.com/" target="_blank" rel="noopener">Discover three.js</a></P>
+			Models by <a href="https://onthez.com/" target="_blank" rel="noopener">on the z</a> - Environment images by <a href="https://hdrihaven.com/" target="_blank" rel="noopener">HDRI Haven</a>
 		</div>
 
 		<script src="../build/three.js"></script>
@@ -18,7 +20,6 @@
 		<script src="js/controls/OrbitControls.js"></script>
 
 		<script src="js/WebGL.js"></script>
-		<script src="js/libs/stats.min.js"></script>
 
 		<script>
 
@@ -28,7 +29,7 @@
 
 			}
 
-			var container, stats, controls;
+			var container, controls;
 			var camera, scene, renderer;
 
 			function init() {
@@ -37,62 +38,76 @@
 				document.body.appendChild( container );
 
 				camera = new THREE.PerspectiveCamera( 45, window.innerWidth / window.innerHeight, 1, 200 );
-				camera.position.set( 5, 4, 20 );
+				camera.position.set( -.7, 14.6, 43.2 );
 
 				scene = new THREE.Scene();
 				scene.background = new THREE.Color( 0xa0a0a0 );
 
-				var ambientLight = new THREE.HemisphereLight( 0xddeeff, 0x0f0e0d, 2 );
+				var ambientLight = new THREE.AmbientLight( 0xaaaaaa, 1.75 );
+				scene.add( ambientLight );
+
+				var light = new THREE.DirectionalLight( 0xffffff, 1 );
+				light.position.set( 0, 200, 100 );
+				light.castShadow = true;
+				light.shadow.camera.top = 180;
+				light.shadow.camera.bottom = -100;
+				light.shadow.camera.left = -120;
+				light.shadow.camera.right = 120;
+				scene.add( light );
+
+				light = new THREE.DirectionalLight( 0xffffff, 0.7 );
+				light.position.set( -100, 200, -100 );
+				scene.add( light );
 
-				var mainLight = new THREE.DirectionalLight( 0xffffff, 2 );
-				mainLight.position.set( 10, 10, - 10 );
+				light = new THREE.DirectionalLight( 0xffffff, 0.4 );
+				light.position.set( 100, -200, 100 );
+				scene.add( light );
 
-				scene.add( ambientLight, mainLight );
+				light = new THREE.DirectionalLight( 0xffffff, 1 );
+				light.position.set( -100, -100, 100 );
+				scene.add( light );
 
 				var grid = new THREE.GridHelper( 200, 20, 0x000000, 0x000000 );
-				grid.material.opacity = 0.2;
+				grid.material.opacity = 0.3;
 				grid.material.transparent = true;
 				scene.add( grid );
 
 				var loader = new THREE.LWOLoader();
-				loader.load( 'models/lwo/StandardMaterials.lwo', function ( lwo ) {
+				loader.load( 'models/lwo/Objects/LWO3/Demo.lwo', function ( object ) {
+					var phong = object.meshes[0];
+					phong.position.set( -2, 12, 0 );
 
-					const mesh = lwo.meshes[ 0 ];
+					var standard = object.meshes[1];
+					standard.position.set( 2, 12, 0 );
 
-					mesh.position.set( 3, 6, 0 );
+					var rocket = object.meshes[2];
+					rocket.position.set( 0, 10.5, -1 );
 
-					scene.add( mesh );
+					scene.add( phong, standard, rocket );
 
 				} );
 
-				renderer = new THREE.WebGLRenderer( { antialias: true } );
+				renderer = new THREE.WebGLRenderer( { antialias: true, alpha: true } );
 				renderer.setPixelRatio( window.devicePixelRatio );
 				renderer.setSize( window.innerWidth, window.innerHeight );
+				renderer.shadowMap.enabled = true;
 				renderer.physicallyCorrectLights = true;
-				renderer.gammaFactor = 2.2;
+				renderer.gammaFactor = 1.18;
 				renderer.gammaOutput = true;
 				container.appendChild( renderer.domElement );
 
 				controls = new THREE.OrbitControls( camera, renderer.domElement );
-				controls.target.y = 4;
+				controls.target.set( 1.33, 10, -6.7 );
 				controls.update();
 
 				renderer.setAnimationLoop( function () {
 
-					stats.begin();
-
 					renderer.render( scene, camera );
 
-					stats.end();
-
 				} );
 
 				window.addEventListener( 'resize', onWindowResize, false );
 
-				// stats
-				stats = new Stats();
-				container.appendChild( stats.dom );
-
 			}
 
 			function onWindowResize() {
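
A minimal sketch of consuming the loader result without indexing individual meshes; it assumes LWOLoader.load follows the standard ( url, onLoad, onProgress, onError ) callback order and that the returned object exposes the meshes array used above. The positions applied in the example above are omitted here.

	var loader = new THREE.LWOLoader();

	loader.load( 'models/lwo/Objects/LWO3/Demo.lwo', function ( object ) {

		// the result exposes a meshes array, as used in the hunk above
		object.meshes.forEach( function ( mesh ) {

			scene.add( mesh );

		} );

	}, undefined, function ( error ) {

		console.error( error );

	} );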

+ 5 - 1
examples/webgl_loader_texture_basis.html

@@ -51,12 +51,16 @@
 				loader.setTranscoderPath( 'js/libs/basis/' );
 				loader.detectSupport( renderer );
 
-				loader.load( 'textures/compressed/kodim20.basis', function ( texture ) {
+				loader.load( 'textures/compressed/PavingStones.basis', function ( texture ) {
 
 					texture.encoding = THREE.sRGBEncoding;
 					material.map = texture;
 					material.needsUpdate = true;
 
+				}, undefined, function ( error ) {
+
+					console.error( error );
+
 				} );
 
 				window.addEventListener( 'resize', onWindowResize, false );
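
The undefined argument above fills the optional onProgress slot of the ( url, onLoad, onProgress, onError ) signature. A sketch with all four callbacks; the progress handler assumes the loader forwards the underlying FileLoader's ProgressEvent, which is not verified here.

	loader.load( 'textures/compressed/PavingStones.basis', function ( texture ) {

		texture.encoding = THREE.sRGBEncoding;
		material.map = texture;
		material.needsUpdate = true;

	}, function ( event ) {

		// assumed progress callback: a ProgressEvent from the underlying FileLoader
		console.log( ( event.loaded / event.total * 100 ).toFixed( 0 ) + '% loaded' );

	}, function ( error ) {

		console.error( error );

	} );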

+ 1 - 1
examples/webgl_physics_convex_break.html

@@ -20,7 +20,7 @@
 	<script src="js/controls/OrbitControls.js"></script>
 	<script src="js/WebGL.js"></script>
 	<script src="js/libs/stats.min.js"></script>
-	<script src="js/ConvexObjectBreaker.js"></script>
+	<script src="js/misc/ConvexObjectBreaker.js"></script>
 	<script src="js/math/ConvexHull.js"></script>
 	<script src="js/geometries/ConvexGeometry.js"></script>
 

+ 1 - 1
examples/webgl_shaders_ocean2.html

@@ -17,7 +17,7 @@
 		<script src="js/libs/dat.gui.min.js"></script>
 		<script src="js/controls/OrbitControls.js"></script>
 		<script src="js/shaders/OceanShaders.js"></script>
-		<script src="js/Ocean.js"></script>
+		<script src="js/misc/Ocean.js"></script>
 
 		<script>
 			var stats = new Stats();

+ 2 - 0
src/materials/SpriteMaterial.d.ts

@@ -6,6 +6,7 @@ export interface SpriteMaterialParameters extends MaterialParameters {
 	color?: Color | string | number;
 	map?: Texture;
 	rotation?: number;
+	sizeAttenuation?: boolean;
 }
 
 export class SpriteMaterial extends Material {
@@ -15,6 +16,7 @@ export class SpriteMaterial extends Material {
 	color: Color;
 	map: Texture | null;
 	rotation: number;
+	sizeAttenuation: boolean;
 	isSpriteMaterial: true;
 
 	setValues( parameters: SpriteMaterialParameters ): void;
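
A minimal sketch of the new parameter in use; spriteTexture and scene are placeholders. Setting sizeAttenuation to false keeps sprites at a constant screen size, e.g. for use with an orthographic camera.

	var spriteMaterial = new THREE.SpriteMaterial( {
		map: spriteTexture,     // any THREE.Texture
		sizeAttenuation: false  // defaults to true
	} );

	var sprite = new THREE.Sprite( spriteMaterial );
	scene.add( sprite );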

+ 93 - 0
src/renderers/WebGLRenderTargetCube.js

@@ -2,6 +2,7 @@ import { WebGLRenderTarget } from './WebGLRenderTarget.js';
 
 /**
  * @author alteredq / http://alteredqualia.com
+ * @author WestLangley / http://github.com/WestLangley
  */
 
 function WebGLRenderTargetCube( width, height, options ) {
@@ -15,5 +16,97 @@ WebGLRenderTargetCube.prototype.constructor = WebGLRenderTargetCube;
 
 WebGLRenderTargetCube.prototype.isWebGLRenderTargetCube = true;
 
+WebGLRenderTargetCube.prototype.fromEquirectangularTexture = function ( renderer, texture ) {
+
+	this.texture.type = texture.type;
+	this.texture.format = texture.format;
+	this.texture.encoding = texture.encoding;
+
+	var scene = new THREE.Scene();
+
+	var shader = {
+
+		uniforms: {
+			tEquirect: { value: null },
+		},
+
+		vertexShader:
+
+			`
+			varying vec3 vWorldDirection;
+
+			vec3 transformDirection( in vec3 dir, in mat4 matrix ) {
+
+				return normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );
+
+			}
+
+			void main() {
+
+				vWorldDirection = transformDirection( position, modelMatrix );
+
+				#include <begin_vertex>
+				#include <project_vertex>
+
+			}
+			`,
+
+		fragmentShader:
+
+			`
+			uniform sampler2D tEquirect;
+
+			varying vec3 vWorldDirection;
+
+			#define RECIPROCAL_PI 0.31830988618
+			#define RECIPROCAL_PI2 0.15915494
+
+			void main() {
+
+				vec3 direction = normalize( vWorldDirection );
+
+				vec2 sampleUV;
+
+				sampleUV.y = asin( clamp( direction.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
+
+				sampleUV.x = atan( direction.z, direction.x ) * RECIPROCAL_PI2 + 0.5;
+
+				gl_FragColor = texture2D( tEquirect, sampleUV );
+
+			}
+			`
+	};
+
+	var material = new THREE.ShaderMaterial( {
+
+		type: 'CubemapFromEquirect',
+
+		uniforms: THREE.UniformsUtils.clone( shader.uniforms ),
+		vertexShader: shader.vertexShader,
+		fragmentShader: shader.fragmentShader,
+		side: THREE.BackSide,
+		blending: THREE.NoBlending
+
+	} );
+
+	material.uniforms.tEquirect.value = texture;
+
+	var mesh = new THREE.Mesh( new THREE.BoxBufferGeometry( 5, 5, 5 ), material );
+
+	scene.add( mesh );
+
+	var camera = new THREE.CubeCamera( 1, 10, 1 );
+
+	camera.renderTarget = this;
+	camera.renderTarget.texture.name = 'CubeCameraTexture';
+
+	camera.update( renderer, scene );
+
+	mesh.geometry.dispose();
+	mesh.material.dispose();
+
+	return this;
+
+};
 
 export { WebGLRenderTargetCube };
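
A minimal sketch of the new helper in use; the render-target size and the envMap assignment are illustrative, and equirectTexture, renderer and material are placeholders.

	// convert an equirectangular (panorama) texture into a cube render target
	var cubeTarget = new THREE.WebGLRenderTargetCube( 1024, 1024 );
	cubeTarget.fromEquirectangularTexture( renderer, equirectTexture );

	// the resulting cube texture can then be used, for example, as an environment map
	material.envMap = cubeTarget.texture;
	material.needsUpdate = true;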

+ 1 - 1
src/renderers/shaders/ShaderChunk/skinning_pars_vertex.glsl.js

@@ -6,7 +6,7 @@ export default /* glsl */`
 
 	#ifdef BONE_TEXTURE
 
-		uniform sampler2D boneTexture;
+		uniform highp sampler2D boneTexture;
 		uniform int boneTextureSize;
 
 		mat4 getBoneMatrix( const in float i ) {

+ 7 - 0
utils/modularize.js

@@ -10,6 +10,9 @@ var srcFolder = __dirname + '/../examples/js/';
 var dstFolder = __dirname + '/../examples/jsm/';
 
 var files = [
+	{ path: 'animation/AnimationClipCreator.js', dependencies: [], ignoreList: [] },
+	{ path: 'animation/TimelinerController.js', dependencies: [], ignoreList: [] },
+
 	{ path: 'cameras/CinematicCamera.js', dependencies: [ { name: 'BokehShader', path: 'shaders/BokehShader2.js' }, { name: 'BokehDepthShader', path: 'shaders/BokehShader2.js' } ], ignoreList: [] },
 
 	{ path: 'controls/DragControls.js', dependencies: [], ignoreList: [] },
@@ -90,6 +93,7 @@ var files = [
 	{ path: 'loaders/TDSLoader.js', dependencies: [], ignoreList: [] },
 	{ path: 'loaders/TGALoader.js', dependencies: [], ignoreList: [] },
 	{ path: 'loaders/VRMLLoader.js', dependencies: [], ignoreList: [] },
+	{ path: 'loaders/VTKLoader.js', dependencies: [], ignoreList: [] },
 
 	{ path: 'math/ColorConverter.js', dependencies: [], ignoreList: [] },
 	{ path: 'math/ConvexHull.js', dependencies: [], ignoreList: [] },
@@ -98,6 +102,7 @@ var files = [
 	{ path: 'math/SimplexNoise.js', dependencies: [], ignoreList: [] },
 
 	{ path: 'misc/CarControls.js', dependencies: [], ignoreList: [] },
+	{ path: 'misc/Ocean.js', dependencies: [ { name: 'OceanShaders', path: 'shaders/OceanShaders.js' } ], ignoreList: [] },
 
 	{ path: 'modifiers/ExplodeModifier.js', dependencies: [], ignoreList: [] },
 	{ path: 'modifiers/SimplifyModifier.js', dependencies: [], ignoreList: [] },
@@ -180,6 +185,7 @@ var files = [
 	{ path: 'shaders/LuminosityShader.js', dependencies: [], ignoreList: [] },
 	{ path: 'shaders/MirrorShader.js', dependencies: [], ignoreList: [] },
 	{ path: 'shaders/NormalMapShader.js', dependencies: [], ignoreList: [] },
+	{ path: 'shaders/OceanShaders.js', dependencies: [], ignoreList: [] },
 	{ path: 'shaders/ParallaxShader.js', dependencies: [], ignoreList: [] },
 	{ path: 'shaders/PixelShader.js', dependencies: [], ignoreList: [] },
 	{ path: 'shaders/RGBShiftShader.js', dependencies: [], ignoreList: [] },
@@ -199,6 +205,7 @@ var files = [
 	{ path: 'shaders/VerticalBlurShader.js', dependencies: [], ignoreList: [] },
 	{ path: 'shaders/VerticalTiltShiftShader.js', dependencies: [], ignoreList: [] },
 	{ path: 'shaders/VignetteShader.js', dependencies: [], ignoreList: [] },
+	{ path: 'shaders/VolumeShader.js', dependencies: [], ignoreList: [] },
 	{ path: 'shaders/WaterRefractionShader.js', dependencies: [], ignoreList: [] },
 
 	{ path: 'utils/BufferGeometryUtils.js', dependencies: [], ignoreList: [] },
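
Once modularize.js is run, each listed file gains an ES-module counterpart under examples/jsm/. A sketch of the resulting imports; the Ocean and VTKLoader export names are assumed to match their file names, while VolumeRenderShader1 matches the export shown earlier in this diff.

	import { Ocean } from './jsm/misc/Ocean.js';
	import { VTKLoader } from './jsm/loaders/VTKLoader.js';
	import { VolumeRenderShader1 } from './jsm/shaders/VolumeShader.js';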