
Examples: Convert loaders to ES6 Part III. (#21616)

Michael Herzog, 4 years ago
Commit
9ff1484f82
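
The whole series applies one mechanical recipe: a constructor function that calls THREE.Loader.call( this, manager ) plus an Object.assign( Object.create( THREE.Loader.prototype ), { ... } ) method table becomes a class extending THREE.Loader whose constructor calls super( manager ), and every name: function () {} entry becomes a class method. A minimal before/after sketch of that recipe (ExampleLoaderES5 and ExampleLoaderES6 are made-up names, not loaders touched by this commit, and both assume the global THREE namespace used by the examples/js builds):

	// before: constructor function + prototype table
	var ExampleLoaderES5 = function ( manager ) {

		THREE.Loader.call( this, manager );

	};

	ExampleLoaderES5.prototype = Object.assign( Object.create( THREE.Loader.prototype ), {

		constructor: ExampleLoaderES5,

		load: function ( url, onLoad, onProgress, onError ) {

			new THREE.FileLoader( this.manager ).load( url, onLoad, onProgress, onError );

		}

	} );

	// after: ES6 class with the same public API
	class ExampleLoaderES6 extends THREE.Loader {

		constructor( manager ) {

			super( manager ); // replaces THREE.Loader.call( this, manager )

		}

		load( url, onLoad, onProgress, onError ) {

			new THREE.FileLoader( this.manager ).load( url, onLoad, onProgress, onError );

		}

	}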

+ 73 - 55
examples/js/loaders/3DMLoader.js

@@ -1,40 +1,43 @@
 ( function () {
 
-	var Rhino3dmLoader = function ( manager ) {
-
-		THREE.Loader.call( this, manager );
-		this.libraryPath = '';
-		this.libraryPending = null;
-		this.libraryBinary = null;
-		this.libraryConfig = {};
-		this.url = '';
-		this.workerLimit = 4;
-		this.workerPool = [];
-		this.workerNextTaskID = 1;
-		this.workerSourceURL = '';
-		this.workerConfig = {};
-		this.materials = [];
-
-	};
-
-	Rhino3dmLoader.taskCache = new WeakMap();
-	Rhino3dmLoader.prototype = Object.assign( Object.create( THREE.Loader.prototype ), {
-		constructor: Rhino3dmLoader,
-		setLibraryPath: function ( path ) {
+	const _taskCache = new WeakMap();
+
+	class Rhino3dmLoader extends THREE.Loader {
+
+		constructor( manager ) {
+
+			super( manager );
+			this.libraryPath = '';
+			this.libraryPending = null;
+			this.libraryBinary = null;
+			this.libraryConfig = {};
+			this.url = '';
+			this.workerLimit = 4;
+			this.workerPool = [];
+			this.workerNextTaskID = 1;
+			this.workerSourceURL = '';
+			this.workerConfig = {};
+			this.materials = [];
+
+		}
+
+		setLibraryPath( path ) {
 
 			this.libraryPath = path;
 			return this;
 
-		},
-		setWorkerLimit: function ( workerLimit ) {
+		}
+
+		setWorkerLimit( workerLimit ) {
 
 			this.workerLimit = workerLimit;
 			return this;
 
-		},
-		load: function ( url, onLoad, onProgress, onError ) {
+		}
+
+		load( url, onLoad, onProgress, onError ) {
 
-			var loader = new THREE.FileLoader( this.manager );
+			const loader = new THREE.FileLoader( this.manager );
 			loader.setPath( this.path );
 			loader.setResponseType( 'arraybuffer' );
 			loader.setRequestHeader( this.requestHeader );
@@ -43,9 +46,10 @@
 
 				// Check for an existing task using this buffer. A transferred buffer cannot be transferred
 				// again from this thread.
-				if ( Rhino3dmLoader.taskCache.has( buffer ) ) {
+				if ( _taskCache.has( buffer ) ) {
+
+					const cachedTask = _taskCache.get( buffer );
 
-					var cachedTask = Rhino3dmLoader.taskCache.get( buffer );
 					return cachedTask.promise.then( onLoad ).catch( onError );
 
 				}
@@ -54,13 +58,15 @@
 
 			}, onProgress, onError );
 
-		},
-		debug: function () {
+		}
+
+		debug() {
 
 			console.log( 'Task load: ', this.workerPool.map( worker => worker._taskLoad ) );
 
-		},
-		decodeObjects: function ( buffer, url ) {
+		}
+
+		decodeObjects( buffer, url ) {
 
 			var worker;
 			var taskID;
@@ -99,19 +105,22 @@
 
 			} ); // Cache the task result.
 
-			Rhino3dmLoader.taskCache.set( buffer, {
+			_taskCache.set( buffer, {
 				url: url,
 				promise: objectPending
 			} );
+
 			return objectPending;
 
-		},
-		parse: function ( data, onLoad, onError ) {
+		}
+
+		parse( data, onLoad, onError ) {
 
 			this.decodeObjects( data, '' ).then( onLoad ).catch( onError );
 
-		},
-		_compareMaterials: function ( material ) {
+		}
+
+		_compareMaterials( material ) {
 
 			var mat = {};
 			mat.name = material.name;
@@ -143,8 +152,9 @@
 			this.materials.push( material );
 			return material;
 
-		},
-		_createMaterial: function ( material ) {
+		}
+
+		_createMaterial( material ) {
 
 			if ( material === undefined ) {
 
@@ -213,8 +223,9 @@
 
 			return mat;
 
-		},
-		_createGeometry: function ( data ) {
+		}
+
+		_createGeometry( data ) {
 
 			// console.log(data);
 			var object = new THREE.Object3D();
@@ -342,8 +353,9 @@
 			object.userData[ 'materials' ] = this.materials;
 			return object;
 
-		},
-		_createObject: function ( obj, mat ) {
+		}
+
+		_createObject( obj, mat ) {
 
 			var loader = new THREE.BufferGeometryLoader();
 			var attributes = obj.attributes;
@@ -545,8 +557,9 @@
 
 			}
 
-		},
-		_initLibrary: function () {
+		}
+
+		_initLibrary() {
 
 			if ( ! this.libraryPending ) {
 
@@ -571,7 +584,7 @@
 
 					//this.libraryBinary = binaryContent;
 					this.libraryConfig.wasmBinary = binaryContent;
-					var fn = Rhino3dmLoader.Rhino3dmWorker.toString();
+					var fn = Rhino3dmWorker.toString();
 					var body = [ '/* rhino3dm.js */', jsContent, '/* worker */', fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) ) ].join( '\n' );
 					this.workerSourceURL = URL.createObjectURL( new Blob( [ body ] ) );
 
@@ -581,8 +594,9 @@
 
 			return this.libraryPending;
 
-		},
-		_getWorker: function ( taskCost ) {
+		}
+
+		_getWorker( taskCost ) {
 
 			return this._initLibrary().then( () => {
 
@@ -638,15 +652,17 @@
 
 			} );
 
-		},
-		_releaseTask: function ( worker, taskID ) {
+		}
+
+		_releaseTask( worker, taskID ) {
 
 			worker._taskLoad -= worker._taskCosts[ taskID ];
 			delete worker._callbacks[ taskID ];
 			delete worker._taskCosts[ taskID ];
 
-		},
-		dispose: function () {
+		}
+
+		dispose() {
 
 			for ( var i = 0; i < this.workerPool.length; ++ i ) {
 
@@ -658,10 +674,12 @@
 			return this;
 
 		}
-	} );
+
+	}
 	/* WEB WORKER */
 
-	Rhino3dmLoader.Rhino3dmWorker = function () {
+
+	function Rhino3dmWorker() {
 
 		var libraryPending;
 		var libraryConfig;
@@ -1257,7 +1275,7 @@
 
 		}
 
-	};
+	}
 
 	THREE.Rhino3dmLoader = Rhino3dmLoader;
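
A detail worth noting above: the static property Rhino3dmLoader.taskCache becomes a module-scoped const _taskCache = new WeakMap() declared before the class. Every instance created from this file still shares the same cache, but it is no longer reachable from outside the IIFE. A reduced sketch of the pattern (CachedLoader and decode are made-up names used only for illustration):

	( function () {

		const _taskCache = new WeakMap(); // private to this closure, shared by all instances

		class CachedLoader extends THREE.Loader {

			decode( buffer ) {

				// a transferred ArrayBuffer cannot be transferred again, so reuse the pending promise
				if ( _taskCache.has( buffer ) ) return _taskCache.get( buffer ).promise;

				const promise = Promise.resolve( buffer.byteLength ); // stand-in for the real worker round trip
				_taskCache.set( buffer, { promise: promise } );
				return promise;

			}

		}

		THREE.CachedLoader = CachedLoader;

	} )();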
 

+ 135 - 173
examples/js/loaders/DRACOLoader.js

@@ -1,82 +1,66 @@
 ( function () {
 
-	var DRACOLoader = function ( manager ) {
-
-		THREE.Loader.call( this, manager );
-		this.decoderPath = '';
-		this.decoderConfig = {};
-		this.decoderBinary = null;
-		this.decoderPending = null;
-		this.workerLimit = 4;
-		this.workerPool = [];
-		this.workerNextTaskID = 1;
-		this.workerSourceURL = '';
-		this.defaultAttributeIDs = {
-			position: 'POSITION',
-			normal: 'NORMAL',
-			color: 'COLOR',
-			uv: 'TEX_COORD'
-		};
-		this.defaultAttributeTypes = {
-			position: 'Float32Array',
-			normal: 'Float32Array',
-			color: 'Float32Array',
-			uv: 'Float32Array'
-		};
+	const _taskCache = new WeakMap();
+
+	class DRACOLoader extends THREE.Loader {
+
+		constructor( manager ) {
+
+			super( manager );
+			this.decoderPath = '';
+			this.decoderConfig = {};
+			this.decoderBinary = null;
+			this.decoderPending = null;
+			this.workerLimit = 4;
+			this.workerPool = [];
+			this.workerNextTaskID = 1;
+			this.workerSourceURL = '';
+			this.defaultAttributeIDs = {
+				position: 'POSITION',
+				normal: 'NORMAL',
+				color: 'COLOR',
+				uv: 'TEX_COORD'
+			};
+			this.defaultAttributeTypes = {
+				position: 'Float32Array',
+				normal: 'Float32Array',
+				color: 'Float32Array',
+				uv: 'Float32Array'
+			};
 
-	};
+		}
 
-	DRACOLoader.prototype = Object.assign( Object.create( THREE.Loader.prototype ), {
-		constructor: DRACOLoader,
-		setDecoderPath: function ( path ) {
+		setDecoderPath( path ) {
 
 			this.decoderPath = path;
 			return this;
 
-		},
-		setDecoderConfig: function ( config ) {
+		}
+
+		setDecoderConfig( config ) {
 
 			this.decoderConfig = config;
 			return this;
 
-		},
-		setWorkerLimit: function ( workerLimit ) {
+		}
+
+		setWorkerLimit( workerLimit ) {
 
 			this.workerLimit = workerLimit;
 			return this;
 
-		},
-
-		/** @deprecated */
-		setVerbosity: function () {
-
-			console.warn( 'THREE.DRACOLoader: The .setVerbosity() method has been removed.' );
-
-		},
-
-		/** @deprecated */
-		setDrawMode: function () {
-
-			console.warn( 'THREE.DRACOLoader: The .setDrawMode() method has been removed.' );
-
-		},
-
-		/** @deprecated */
-		setSkipDequantization: function () {
-
-			console.warn( 'THREE.DRACOLoader: The .setSkipDequantization() method has been removed.' );
+		}
 
-		},
-		load: function ( url, onLoad, onProgress, onError ) {
+		load( url, onLoad, onProgress, onError ) {
 
-			var loader = new THREE.FileLoader( this.manager );
+			const loader = new THREE.FileLoader( this.manager );
 			loader.setPath( this.path );
 			loader.setResponseType( 'arraybuffer' );
 			loader.setRequestHeader( this.requestHeader );
 			loader.setWithCredentials( this.withCredentials );
 			loader.load( url, buffer => {
 
-				var taskConfig = {
+				const taskConfig = {
 					attributeIDs: this.defaultAttributeIDs,
 					attributeTypes: this.defaultAttributeTypes,
 					useUniqueIDs: false
@@ -85,27 +69,29 @@
 
 			}, onProgress, onError );
 
-		},
-
+		}
 		/** @deprecated Kept for backward-compatibility with previous DRACOLoader versions. */
-		decodeDracoFile: function ( buffer, callback, attributeIDs, attributeTypes ) {
 
-			var taskConfig = {
+
+		decodeDracoFile( buffer, callback, attributeIDs, attributeTypes ) {
+
+			const taskConfig = {
 				attributeIDs: attributeIDs || this.defaultAttributeIDs,
 				attributeTypes: attributeTypes || this.defaultAttributeTypes,
 				useUniqueIDs: !! attributeIDs
 			};
 			this.decodeGeometry( buffer, taskConfig ).then( callback );
 
-		},
-		decodeGeometry: function ( buffer, taskConfig ) {
+		}
+
+		decodeGeometry( buffer, taskConfig ) {
 
 			// TODO: For backward-compatibility, support 'attributeTypes' objects containing
 			// references (rather than names) to typed array constructors. These must be
 			// serialized before sending them to the worker.
-			for ( var attribute in taskConfig.attributeTypes ) {
+			for ( const attribute in taskConfig.attributeTypes ) {
 
-				var type = taskConfig.attributeTypes[ attribute ];
+				const type = taskConfig.attributeTypes[ attribute ];
 
 				if ( type.BYTES_PER_ELEMENT !== undefined ) {
 
@@ -116,12 +102,12 @@
 			} //
 
 
-			var taskKey = JSON.stringify( taskConfig ); // Check for an existing task using this buffer. A transferred buffer cannot be transferred
+			const taskKey = JSON.stringify( taskConfig ); // Check for an existing task using this buffer. A transferred buffer cannot be transferred
 			// again from this thread.
 
-			if ( DRACOLoader.taskCache.has( buffer ) ) {
+			if ( _taskCache.has( buffer ) ) {
 
-				var cachedTask = DRACOLoader.taskCache.get( buffer );
+				const cachedTask = _taskCache.get( buffer );
 
 				if ( cachedTask.key === taskKey ) {
 
@@ -140,12 +126,12 @@
 			} //
 
 
-			var worker;
-			var taskID = this.workerNextTaskID ++;
-			var taskCost = buffer.byteLength; // Obtain a worker and assign a task, and construct a geometry instance
+			let worker;
+			const taskID = this.workerNextTaskID ++;
+			const taskCost = buffer.byteLength; // Obtain a worker and assign a task, and construct a geometry instance
 			// when the task completes.
 
-			var geometryPending = this._getWorker( taskID, taskCost ).then( _worker => {
+			const geometryPending = this._getWorker( taskID, taskCost ).then( _worker => {
 
 				worker = _worker;
 				return new Promise( ( resolve, reject ) => {
@@ -177,16 +163,18 @@
 
 			} ); // Cache the task result.
 
-			DRACOLoader.taskCache.set( buffer, {
+			_taskCache.set( buffer, {
 				key: taskKey,
 				promise: geometryPending
 			} );
+
 			return geometryPending;
 
-		},
-		_createGeometry: function ( geometryData ) {
+		}
+
+		_createGeometry( geometryData ) {
 
-			var geometry = new THREE.BufferGeometry();
+			const geometry = new THREE.BufferGeometry();
 
 			if ( geometryData.index ) {
 
@@ -194,22 +182,23 @@
 
 			}
 
-			for ( var i = 0; i < geometryData.attributes.length; i ++ ) {
+			for ( let i = 0; i < geometryData.attributes.length; i ++ ) {
 
-				var attribute = geometryData.attributes[ i ];
-				var name = attribute.name;
-				var array = attribute.array;
-				var itemSize = attribute.itemSize;
+				const attribute = geometryData.attributes[ i ];
+				const name = attribute.name;
+				const array = attribute.array;
+				const itemSize = attribute.itemSize;
 				geometry.setAttribute( name, new THREE.BufferAttribute( array, itemSize ) );
 
 			}
 
 			return geometry;
 
-		},
-		_loadLibrary: function ( url, responseType ) {
+		}
+
+		_loadLibrary( url, responseType ) {
 
-			var loader = new THREE.FileLoader( this.manager );
+			const loader = new THREE.FileLoader( this.manager );
 			loader.setPath( this.decoderPath );
 			loader.setResponseType( responseType );
 			loader.setWithCredentials( this.withCredentials );
@@ -219,19 +208,21 @@
 
 			} );
 
-		},
-		preload: function () {
+		}
+
+		preload() {
 
 			this._initDecoder();
 
 			return this;
 
-		},
-		_initDecoder: function () {
+		}
+
+		_initDecoder() {
 
 			if ( this.decoderPending ) return this.decoderPending;
-			var useJS = typeof WebAssembly !== 'object' || this.decoderConfig.type === 'js';
-			var librariesPending = [];
+			const useJS = typeof WebAssembly !== 'object' || this.decoderConfig.type === 'js';
+			const librariesPending = [];
 
 			if ( useJS ) {
 
@@ -246,7 +237,7 @@
 
 			this.decoderPending = Promise.all( librariesPending ).then( libraries => {
 
-				var jsContent = libraries[ 0 ];
+				const jsContent = libraries[ 0 ];
 
 				if ( ! useJS ) {
 
@@ -254,21 +245,22 @@
 
 				}
 
-				var fn = DRACOLoader.DRACOWorker.toString();
-				var body = [ '/* draco decoder */', jsContent, '', '/* worker */', fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) ) ].join( '\n' );
+				const fn = DRACOWorker.toString();
+				const body = [ '/* draco decoder */', jsContent, '', '/* worker */', fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) ) ].join( '\n' );
 				this.workerSourceURL = URL.createObjectURL( new Blob( [ body ] ) );
 
 			} );
 			return this.decoderPending;
 
-		},
-		_getWorker: function ( taskID, taskCost ) {
+		}
+
+		_getWorker( taskID, taskCost ) {
 
 			return this._initDecoder().then( () => {
 
 				if ( this.workerPool.length < this.workerLimit ) {
 
-					var worker = new Worker( this.workerSourceURL );
+					const worker = new Worker( this.workerSourceURL );
 					worker._callbacks = {};
 					worker._taskCosts = {};
 					worker._taskLoad = 0;
@@ -279,7 +271,7 @@
 
 					worker.onmessage = function ( e ) {
 
-						var message = e.data;
+						const message = e.data;
 
 						switch ( message.type ) {
 
@@ -312,29 +304,32 @@
 
 				}
 
-				var worker = this.workerPool[ this.workerPool.length - 1 ];
+				const worker = this.workerPool[ this.workerPool.length - 1 ];
 				worker._taskCosts[ taskID ] = taskCost;
 				worker._taskLoad += taskCost;
 				return worker;
 
 			} );
 
-		},
-		_releaseTask: function ( worker, taskID ) {
+		}
+
+		_releaseTask( worker, taskID ) {
 
 			worker._taskLoad -= worker._taskCosts[ taskID ];
 			delete worker._callbacks[ taskID ];
 			delete worker._taskCosts[ taskID ];
 
-		},
-		debug: function () {
+		}
+
+		debug() {
 
 			console.log( 'Task load: ', this.workerPool.map( worker => worker._taskLoad ) );
 
-		},
-		dispose: function () {
+		}
+
+		dispose() {
 
-			for ( var i = 0; i < this.workerPool.length; ++ i ) {
+			for ( let i = 0; i < this.workerPool.length; ++ i ) {
 
 				this.workerPool[ i ].terminate();
 
@@ -344,17 +339,19 @@
 			return this;
 
 		}
-	} );
+
+	}
 	/* WEB WORKER */
 
-	DRACOLoader.DRACOWorker = function () {
 
-		var decoderConfig;
-		var decoderPending;
+	function DRACOWorker() {
+
+		let decoderConfig;
+		let decoderPending;
 
 		onmessage = function ( e ) {
 
-			var message = e.data;
+			const message = e.data;
 
 			switch ( message.type ) {
 
@@ -379,19 +376,19 @@
 					break;
 
 				case 'decode':
-					var buffer = message.buffer;
-					var taskConfig = message.taskConfig;
+					const buffer = message.buffer;
+					const taskConfig = message.taskConfig;
 					decoderPending.then( module => {
 
-						var draco = module.draco;
-						var decoder = new draco.Decoder();
-						var decoderBuffer = new draco.DecoderBuffer();
+						const draco = module.draco;
+						const decoder = new draco.Decoder();
+						const decoderBuffer = new draco.DecoderBuffer();
 						decoderBuffer.Init( new Int8Array( buffer ), buffer.byteLength );
 
 						try {
 
-							var geometry = decodeGeometry( draco, decoder, decoderBuffer, taskConfig );
-							var buffers = geometry.attributes.map( attr => attr.array.buffer );
+							const geometry = decodeGeometry( draco, decoder, decoderBuffer, taskConfig );
+							const buffers = geometry.attributes.map( attr => attr.array.buffer );
 							if ( geometry.index ) buffers.push( geometry.index.array.buffer );
 							self.postMessage( {
 								type: 'decode',
@@ -424,11 +421,11 @@
 
 		function decodeGeometry( draco, decoder, decoderBuffer, taskConfig ) {
 
-			var attributeIDs = taskConfig.attributeIDs;
-			var attributeTypes = taskConfig.attributeTypes;
-			var dracoGeometry;
-			var decodingStatus;
-			var geometryType = decoder.GetEncodedGeometryType( decoderBuffer );
+			const attributeIDs = taskConfig.attributeIDs;
+			const attributeTypes = taskConfig.attributeTypes;
+			let dracoGeometry;
+			let decodingStatus;
+			const geometryType = decoder.GetEncodedGeometryType( decoderBuffer );
 
 			if ( geometryType === draco.TRIANGULAR_MESH ) {
 
@@ -452,16 +449,16 @@
 
 			}
 
-			var geometry = {
+			const geometry = {
 				index: null,
 				attributes: []
 			}; // Gather all vertex attributes.
 
-			for ( var attributeName in attributeIDs ) {
+			for ( const attributeName in attributeIDs ) {
 
-				var attributeType = self[ attributeTypes[ attributeName ] ];
-				var attribute;
-				var attributeID; // A Draco file may be created with default vertex attributes, whose attribute IDs
+				const attributeType = self[ attributeTypes[ attributeName ] ];
+				let attribute;
+				let attributeID; // A Draco file may be created with default vertex attributes, whose attribute IDs
 				// are mapped 1:1 from their semantic name (POSITION, NORMAL, ...). Alternatively,
 				// a Draco file may contain a custom set of attributes, identified by known unique
 				// IDs. glTF files always do the latter, and `.drc` files typically do the former.
@@ -497,14 +494,14 @@
 
 		function decodeIndex( draco, decoder, dracoGeometry ) {
 
-			var numFaces = dracoGeometry.num_faces();
-			var numIndices = numFaces * 3;
-			var byteLength = numIndices * 4;
+			const numFaces = dracoGeometry.num_faces();
+			const numIndices = numFaces * 3;
+			const byteLength = numIndices * 4;
 
-			var ptr = draco._malloc( byteLength );
+			const ptr = draco._malloc( byteLength );
 
 			decoder.GetTrianglesUInt32Array( dracoGeometry, byteLength, ptr );
-			var index = new Uint32Array( draco.HEAPF32.buffer, ptr, numIndices ).slice();
+			const index = new Uint32Array( draco.HEAPF32.buffer, ptr, numIndices ).slice();
 
 			draco._free( ptr );
 
@@ -517,16 +514,16 @@
 
 		function decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute ) {
 
-			var numComponents = attribute.num_components();
-			var numPoints = dracoGeometry.num_points();
-			var numValues = numPoints * numComponents;
-			var byteLength = numValues * attributeType.BYTES_PER_ELEMENT;
-			var dataType = getDracoDataType( draco, attributeType );
+			const numComponents = attribute.num_components();
+			const numPoints = dracoGeometry.num_points();
+			const numValues = numPoints * numComponents;
+			const byteLength = numValues * attributeType.BYTES_PER_ELEMENT;
+			const dataType = getDracoDataType( draco, attributeType );
 
-			var ptr = draco._malloc( byteLength );
+			const ptr = draco._malloc( byteLength );
 
 			decoder.GetAttributeDataArrayForAllPoints( dracoGeometry, attribute, dataType, byteLength, ptr );
-			var array = new attributeType( draco.HEAPF32.buffer, ptr, numValues ).slice();
+			const array = new attributeType( draco.HEAPF32.buffer, ptr, numValues ).slice();
 
 			draco._free( ptr );
 
@@ -567,42 +564,7 @@
 
 		}
 
-	};
-
-	DRACOLoader.taskCache = new WeakMap();
-	/** Deprecated static methods */
-
-	/** @deprecated */
-
-	DRACOLoader.setDecoderPath = function () {
-
-		console.warn( 'THREE.DRACOLoader: The .setDecoderPath() method has been removed. Use instance methods.' );
-
-	};
-	/** @deprecated */
-
-
-	DRACOLoader.setDecoderConfig = function () {
-
-		console.warn( 'THREE.DRACOLoader: The .setDecoderConfig() method has been removed. Use instance methods.' );
-
-	};
-	/** @deprecated */
-
-
-	DRACOLoader.releaseDecoderModule = function () {
-
-		console.warn( 'THREE.DRACOLoader: The .releaseDecoderModule() method has been removed. Use instance methods.' );
-
-	};
-	/** @deprecated */
-
-
-	DRACOLoader.getDecoderModule = function () {
-
-		console.warn( 'THREE.DRACOLoader: The .getDecoderModule() method has been removed. Use instance methods.' );
-
-	};
+	}
 
 	THREE.DRACOLoader = DRACOLoader;
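
The worker plumbing is unchanged in substance: the decoder worker is still a plain function whose body is stringified, concatenated with the decoder library source, and loaded through a Blob URL; it has only moved from a static property ( DRACOLoader.DRACOWorker ) to a module-scoped function ( DRACOWorker ). The deleted statics at the bottom ( setDecoderPath, setDecoderConfig, releaseDecoderModule, getDecoderModule ) and the instance stubs ( setVerbosity, setDrawMode, setSkipDequantization ) were deprecated warnings-only methods, dropped outright. A reduced sketch of the function-to-worker technique (MyWorker is a made-up name):

	function MyWorker() {

		// this body executes inside the worker, not on the page
		onmessage = function ( e ) {

			postMessage( e.data * 2 );

		};

	}

	// take everything between the outermost braces and turn it into a worker script
	const fn = MyWorker.toString();
	const body = fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) );
	const worker = new Worker( URL.createObjectURL( new Blob( [ body ] ) ) );

	worker.onmessage = function ( e ) {

		console.log( e.data ); // logs 42

	};

	worker.postMessage( 21 );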
 

+ 16 - 13
examples/js/loaders/EXRLoader.js

@@ -67,16 +67,16 @@
 	// ///////////////////////////////////////////////////////////////////////////
 	// // End of OpenEXR license -------------------------------------------------
 
-	var EXRLoader = function ( manager ) {
+	class EXRLoader extends THREE.DataTextureLoader {
 
-		THREE.DataTextureLoader.call( this, manager );
-		this.type = THREE.FloatType;
+		constructor( manager ) {
 
-	};
+			super( manager );
+			this.type = THREE.FloatType;
 
-	EXRLoader.prototype = Object.assign( Object.create( THREE.DataTextureLoader.prototype ), {
-		constructor: EXRLoader,
-		parse: function ( buffer ) {
+		}
+
+		parse( buffer ) {
 
 			const USHORT_RANGE = 1 << 16;
 			const BITMAP_SIZE = USHORT_RANGE >> 3;
@@ -2155,14 +2155,16 @@
 				type: this.type
 			};
 
-		},
-		setDataType: function ( value ) {
+		}
+
+		setDataType( value ) {
 
 			this.type = value;
 			return this;
 
-		},
-		load: function ( url, onLoad, onProgress, onError ) {
+		}
+
+		load( url, onLoad, onProgress, onError ) {
 
 			function onLoadCallback( texture, texData ) {
 
@@ -2191,10 +2193,11 @@
 
 			}
 
-			return THREE.DataTextureLoader.prototype.load.call( this, url, onLoadCallback, onProgress, onError );
+			return super.load( url, onLoadCallback, onProgress, onError );
 
 		}
-	} );
+
+	}
 
 	THREE.EXRLoader = EXRLoader;
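
EXRLoader shows the other recurring simplification: calling an overridden parent method. The prototype version needed THREE.DataTextureLoader.prototype.load.call( this, ... ); inside a class the same call is simply super.load( ... ). A minimal sketch, using a made-up WrappedLoader:

	class WrappedLoader extends THREE.DataTextureLoader {

		load( url, onLoad, onProgress, onError ) {

			function onLoadCallback( texture, texData ) {

				if ( onLoad !== undefined ) onLoad( texture, texData );

			}

			// before: return THREE.DataTextureLoader.prototype.load.call( this, url, onLoadCallback, onProgress, onError );
			return super.load( url, onLoadCallback, onProgress, onError );

		}

	}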
 

+ 2463 - 2375
examples/js/loaders/FBXLoader.js

@@ -16,189 +16,190 @@
  *		https://code.blender.org/2013/08/fbx-binary-file-format-specification/
  */
 
-	var FBXLoader = function () {
+	let fbxTree;
+	let connections;
+	let sceneGraph;
 
-		var fbxTree;
-		var connections;
-		var sceneGraph;
+	class FBXLoader extends THREE.Loader {
 
-		function FBXLoader( manager ) {
+		constructor( manager ) {
 
-			THREE.Loader.call( this, manager );
+			super( manager );
 
 		}
 
-		FBXLoader.prototype = Object.assign( Object.create( THREE.Loader.prototype ), {
-			constructor: FBXLoader,
-			load: function ( url, onLoad, onProgress, onError ) {
+		load( url, onLoad, onProgress, onError ) {
 
-				var scope = this;
-				var path = scope.path === '' ? THREE.LoaderUtils.extractUrlBase( url ) : scope.path;
-				var loader = new THREE.FileLoader( this.manager );
-				loader.setPath( scope.path );
-				loader.setResponseType( 'arraybuffer' );
-				loader.setRequestHeader( scope.requestHeader );
-				loader.setWithCredentials( scope.withCredentials );
-				loader.load( url, function ( buffer ) {
+			const scope = this;
+			const path = scope.path === '' ? THREE.LoaderUtils.extractUrlBase( url ) : scope.path;
+			const loader = new THREE.FileLoader( this.manager );
+			loader.setPath( scope.path );
+			loader.setResponseType( 'arraybuffer' );
+			loader.setRequestHeader( scope.requestHeader );
+			loader.setWithCredentials( scope.withCredentials );
+			loader.load( url, function ( buffer ) {
 
-					try {
+				try {
 
-						onLoad( scope.parse( buffer, path ) );
+					onLoad( scope.parse( buffer, path ) );
 
-					} catch ( e ) {
+				} catch ( e ) {
 
-						if ( onError ) {
+					if ( onError ) {
 
-							onError( e );
+						onError( e );
 
-						} else {
+					} else {
 
-							console.error( e );
+						console.error( e );
 
-						}
+					}
 
-						scope.manager.itemError( url );
+					scope.manager.itemError( url );
 
-					}
+				}
 
-				}, onProgress, onError );
+			}, onProgress, onError );
 
-			},
-			parse: function ( FBXBuffer, path ) {
+		}
 
-				if ( isFbxFormatBinary( FBXBuffer ) ) {
+		parse( FBXBuffer, path ) {
 
-					fbxTree = new BinaryParser().parse( FBXBuffer );
+			if ( isFbxFormatBinary( FBXBuffer ) ) {
 
-				} else {
+				fbxTree = new BinaryParser().parse( FBXBuffer );
 
-					var FBXText = convertArrayBufferToString( FBXBuffer );
+			} else {
 
-					if ( ! isFbxFormatASCII( FBXText ) ) {
+				const FBXText = convertArrayBufferToString( FBXBuffer );
 
-						throw new Error( 'THREE.FBXLoader: Unknown format.' );
+				if ( ! isFbxFormatASCII( FBXText ) ) {
 
-					}
+					throw new Error( 'THREE.FBXLoader: Unknown format.' );
 
-					if ( getFbxVersion( FBXText ) < 7000 ) {
+				}
 
-						throw new Error( 'THREE.FBXLoader: FBX version not supported, FileVersion: ' + getFbxVersion( FBXText ) );
+				if ( getFbxVersion( FBXText ) < 7000 ) {
 
-					}
+					throw new Error( 'THREE.FBXLoader: FBX version not supported, FileVersion: ' + getFbxVersion( FBXText ) );
 
-					fbxTree = new TextParser().parse( FBXText );
+				}
 
-				} // console.log( fbxTree );
+				fbxTree = new TextParser().parse( FBXText );
 
+			} // console.log( fbxTree );
 
-				var textureLoader = new THREE.TextureLoader( this.manager ).setPath( this.resourcePath || path ).setCrossOrigin( this.crossOrigin );
-				return new FBXTreeParser( textureLoader, this.manager ).parse( fbxTree );
 
-			}
-		} ); // Parse the FBXTree object returned by the BinaryParser or TextParser and return a THREE.Group
+			const textureLoader = new THREE.TextureLoader( this.manager ).setPath( this.resourcePath || path ).setCrossOrigin( this.crossOrigin );
+			return new FBXTreeParser( textureLoader, this.manager ).parse( fbxTree );
+
+		}
+
+	} // Parse the FBXTree object returned by the BinaryParser or TextParser and return a THREE.Group
+
+
+	class FBXTreeParser {
 
-		function FBXTreeParser( textureLoader, manager ) {
+		constructor( textureLoader, manager ) {
 
 			this.textureLoader = textureLoader;
 			this.manager = manager;
 
 		}
 
-		FBXTreeParser.prototype = {
-			constructor: FBXTreeParser,
-			parse: function () {
+		parse() {
 
-				connections = this.parseConnections();
-				var images = this.parseImages();
-				var textures = this.parseTextures( images );
-				var materials = this.parseMaterials( textures );
-				var deformers = this.parseDeformers();
-				var geometryMap = new GeometryParser().parse( deformers );
-				this.parseScene( deformers, geometryMap, materials );
-				return sceneGraph;
+			connections = this.parseConnections();
+			const images = this.parseImages();
+			const textures = this.parseTextures( images );
+			const materials = this.parseMaterials( textures );
+			const deformers = this.parseDeformers();
+			const geometryMap = new GeometryParser().parse( deformers );
+			this.parseScene( deformers, geometryMap, materials );
+			return sceneGraph;
 
-			},
-			// Parses FBXTree.Connections which holds parent-child connections between objects (e.g. material -> texture, model->geometry )
-			// and details the connection type
-			parseConnections: function () {
+		} // Parses FBXTree.Connections which holds parent-child connections between objects (e.g. material -> texture, model->geometry )
+		// and details the connection type
 
-				var connectionMap = new Map();
 
-				if ( 'Connections' in fbxTree ) {
+		parseConnections() {
 
-					var rawConnections = fbxTree.Connections.connections;
-					rawConnections.forEach( function ( rawConnection ) {
+			const connectionMap = new Map();
 
-						var fromID = rawConnection[ 0 ];
-						var toID = rawConnection[ 1 ];
-						var relationship = rawConnection[ 2 ];
+			if ( 'Connections' in fbxTree ) {
 
-						if ( ! connectionMap.has( fromID ) ) {
+				const rawConnections = fbxTree.Connections.connections;
+				rawConnections.forEach( function ( rawConnection ) {
 
-							connectionMap.set( fromID, {
-								parents: [],
-								children: []
-							} );
+					const fromID = rawConnection[ 0 ];
+					const toID = rawConnection[ 1 ];
+					const relationship = rawConnection[ 2 ];
 
-						}
+					if ( ! connectionMap.has( fromID ) ) {
 
-						var parentRelationship = {
-							ID: toID,
-							relationship: relationship
-						};
-						connectionMap.get( fromID ).parents.push( parentRelationship );
+						connectionMap.set( fromID, {
+							parents: [],
+							children: []
+						} );
 
-						if ( ! connectionMap.has( toID ) ) {
+					}
 
-							connectionMap.set( toID, {
-								parents: [],
-								children: []
-							} );
+					const parentRelationship = {
+						ID: toID,
+						relationship: relationship
+					};
+					connectionMap.get( fromID ).parents.push( parentRelationship );
 
-						}
+					if ( ! connectionMap.has( toID ) ) {
 
-						var childRelationship = {
-							ID: fromID,
-							relationship: relationship
-						};
-						connectionMap.get( toID ).children.push( childRelationship );
+						connectionMap.set( toID, {
+							parents: [],
+							children: []
+						} );
 
-					} );
+					}
 
-				}
+					const childRelationship = {
+						ID: fromID,
+						relationship: relationship
+					};
+					connectionMap.get( toID ).children.push( childRelationship );
+
+				} );
 
-				return connectionMap;
+			}
 
-			},
-			// Parse FBXTree.Objects.Video for embedded image data
-			// These images are connected to textures in FBXTree.Objects.Textures
-			// via FBXTree.Connections.
-			parseImages: function () {
+			return connectionMap;
 
-				var images = {};
-				var blobs = {};
+		} // Parse FBXTree.Objects.Video for embedded image data
+		// These images are connected to textures in FBXTree.Objects.Textures
+		// via FBXTree.Connections.
 
-				if ( 'Video' in fbxTree.Objects ) {
 
-					var videoNodes = fbxTree.Objects.Video;
+		parseImages() {
 
-					for ( var nodeID in videoNodes ) {
+			const images = {};
+			const blobs = {};
 
-						var videoNode = videoNodes[ nodeID ];
-						var id = parseInt( nodeID );
-						images[ id ] = videoNode.RelativeFilename || videoNode.Filename; // raw image data is in videoNode.Content
+			if ( 'Video' in fbxTree.Objects ) {
 
-						if ( 'Content' in videoNode ) {
+				const videoNodes = fbxTree.Objects.Video;
 
-							var arrayBufferContent = videoNode.Content instanceof ArrayBuffer && videoNode.Content.byteLength > 0;
-							var base64Content = typeof videoNode.Content === 'string' && videoNode.Content !== '';
+				for ( const nodeID in videoNodes ) {
 
-							if ( arrayBufferContent || base64Content ) {
+					const videoNode = videoNodes[ nodeID ];
+					const id = parseInt( nodeID );
+					images[ id ] = videoNode.RelativeFilename || videoNode.Filename; // raw image data is in videoNode.Content
 
-								var image = this.parseImage( videoNodes[ nodeID ] );
-								blobs[ videoNode.RelativeFilename || videoNode.Filename ] = image;
+					if ( 'Content' in videoNode ) {
 
-							}
+						const arrayBufferContent = videoNode.Content instanceof ArrayBuffer && videoNode.Content.byteLength > 0;
+						const base64Content = typeof videoNode.Content === 'string' && videoNode.Content !== '';
+
+						if ( arrayBufferContent || base64Content ) {
+
+							const image = this.parseImage( videoNodes[ nodeID ] );
+							blobs[ videoNode.RelativeFilename || videoNode.Filename ] = image;
 
 						}
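
FBXLoader.js is the largest rewrite in the commit because the old file nested everything inside a factory function whose closure held fbxTree, connections and sceneGraph. After the change, FBXLoader, FBXTreeParser and the parsers that follow are ordinary module-level classes, and that shared state becomes the module-level let bindings declared above the class. A reduced sketch of the shared-module-state layout (ParserA and ParserB are made-up names):

	( function () {

		let sharedTree; // module-level state shared by the classes below

		class ParserA {

			parse( text ) {

				sharedTree = { text: text }; // write the shared state
				return new ParserB().build();

			}

		}

		class ParserB {

			build() {

				return sharedTree.text.toUpperCase(); // read the shared state

			}

		}

		console.log( new ParserA().parse( 'fbx' ) ); // logs 'FBX'

	} )();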
 
@@ -206,3582 +207,3669 @@
 
 				}
 
-				for ( var id in images ) {
+			}
 
-					var filename = images[ id ];
-					if ( blobs[ filename ] !== undefined ) images[ id ] = blobs[ filename ]; else images[ id ] = images[ id ].split( '\\' ).pop();
+			for ( const id in images ) {
 
-				}
+				const filename = images[ id ];
+				if ( blobs[ filename ] !== undefined ) images[ id ] = blobs[ filename ]; else images[ id ] = images[ id ].split( '\\' ).pop();
 
-				return images;
+			}
 
-			},
-			// Parse embedded image data in FBXTree.Video.Content
-			parseImage: function ( videoNode ) {
+			return images;
 
-				var content = videoNode.Content;
-				var fileName = videoNode.RelativeFilename || videoNode.Filename;
-				var extension = fileName.slice( fileName.lastIndexOf( '.' ) + 1 ).toLowerCase();
-				var type;
+		} // Parse embedded image data in FBXTree.Video.Content
 
-				switch ( extension ) {
 
-					case 'bmp':
-						type = 'image/bmp';
-						break;
+		parseImage( videoNode ) {
 
-					case 'jpg':
-					case 'jpeg':
-						type = 'image/jpeg';
-						break;
+			const content = videoNode.Content;
+			const fileName = videoNode.RelativeFilename || videoNode.Filename;
+			const extension = fileName.slice( fileName.lastIndexOf( '.' ) + 1 ).toLowerCase();
+			let type;
 
-					case 'png':
-						type = 'image/png';
-						break;
+			switch ( extension ) {
 
-					case 'tif':
-						type = 'image/tiff';
-						break;
+				case 'bmp':
+					type = 'image/bmp';
+					break;
 
-					case 'tga':
-						if ( this.manager.getHandler( '.tga' ) === null ) {
+				case 'jpg':
+				case 'jpeg':
+					type = 'image/jpeg';
+					break;
 
-							console.warn( 'FBXLoader: TGA loader not found, skipping ', fileName );
+				case 'png':
+					type = 'image/png';
+					break;
 
-						}
+				case 'tif':
+					type = 'image/tiff';
+					break;
 
-						type = 'image/tga';
-						break;
+				case 'tga':
+					if ( this.manager.getHandler( '.tga' ) === null ) {
 
-					default:
-						console.warn( 'FBXLoader: Image type "' + extension + '" is not supported.' );
-						return;
+						console.warn( 'FBXLoader: TGA loader not found, skipping ', fileName );
 
-				}
+					}
 
-				if ( typeof content === 'string' ) {
+					type = 'image/tga';
+					break;
 
-					// ASCII format
-					return 'data:' + type + ';base64,' + content;
+				default:
+					console.warn( 'FBXLoader: Image type "' + extension + '" is not supported.' );
+					return;
 
-				} else {
+			}
 
-					// Binary Format
-					var array = new Uint8Array( content );
-					return window.URL.createObjectURL( new Blob( [ array ], {
-						type: type
-					} ) );
+			if ( typeof content === 'string' ) {
 
-				}
+				// ASCII format
+				return 'data:' + type + ';base64,' + content;
 
-			},
-			// Parse nodes in FBXTree.Objects.Texture
-			// These contain details such as UV scaling, cropping, rotation etc and are connected
-			// to images in FBXTree.Objects.Video
-			parseTextures: function ( images ) {
+			} else {
+
+				// Binary Format
+				const array = new Uint8Array( content );
+				return window.URL.createObjectURL( new Blob( [ array ], {
+					type: type
+				} ) );
 
-				var textureMap = new Map();
+			}
 
-				if ( 'Texture' in fbxTree.Objects ) {
+		} // Parse nodes in FBXTree.Objects.Texture
+		// These contain details such as UV scaling, cropping, rotation etc and are connected
+		// to images in FBXTree.Objects.Video
 
-					var textureNodes = fbxTree.Objects.Texture;
 
-					for ( var nodeID in textureNodes ) {
+		parseTextures( images ) {
 
-						var texture = this.parseTexture( textureNodes[ nodeID ], images );
-						textureMap.set( parseInt( nodeID ), texture );
+			const textureMap = new Map();
 
-					}
+			if ( 'Texture' in fbxTree.Objects ) {
+
+				const textureNodes = fbxTree.Objects.Texture;
+
+				for ( const nodeID in textureNodes ) {
+
+					const texture = this.parseTexture( textureNodes[ nodeID ], images );
+					textureMap.set( parseInt( nodeID ), texture );
 
 				}
 
-				return textureMap;
+			}
 
-			},
-			// Parse individual node in FBXTree.Objects.Texture
-			parseTexture: function ( textureNode, images ) {
+			return textureMap;
 
-				var texture = this.loadTexture( textureNode, images );
-				texture.ID = textureNode.id;
-				texture.name = textureNode.attrName;
-				var wrapModeU = textureNode.WrapModeU;
-				var wrapModeV = textureNode.WrapModeV;
-				var valueU = wrapModeU !== undefined ? wrapModeU.value : 0;
-				var valueV = wrapModeV !== undefined ? wrapModeV.value : 0; // http://download.autodesk.com/us/fbx/SDKdocs/FBX_SDK_Help/files/fbxsdkref/class_k_fbx_texture.html#889640e63e2e681259ea81061b85143a
-				// 0: repeat(default), 1: clamp
+		} // Parse individual node in FBXTree.Objects.Texture
 
-				texture.wrapS = valueU === 0 ? THREE.RepeatWrapping : THREE.ClampToEdgeWrapping;
-				texture.wrapT = valueV === 0 ? THREE.RepeatWrapping : THREE.ClampToEdgeWrapping;
 
-				if ( 'Scaling' in textureNode ) {
+		parseTexture( textureNode, images ) {
 
-					var values = textureNode.Scaling.value;
-					texture.repeat.x = values[ 0 ];
-					texture.repeat.y = values[ 1 ];
+			const texture = this.loadTexture( textureNode, images );
+			texture.ID = textureNode.id;
+			texture.name = textureNode.attrName;
+			const wrapModeU = textureNode.WrapModeU;
+			const wrapModeV = textureNode.WrapModeV;
+			const valueU = wrapModeU !== undefined ? wrapModeU.value : 0;
+			const valueV = wrapModeV !== undefined ? wrapModeV.value : 0; // http://download.autodesk.com/us/fbx/SDKdocs/FBX_SDK_Help/files/fbxsdkref/class_k_fbx_texture.html#889640e63e2e681259ea81061b85143a
+			// 0: repeat(default), 1: clamp
 
-				}
+			texture.wrapS = valueU === 0 ? THREE.RepeatWrapping : THREE.ClampToEdgeWrapping;
+			texture.wrapT = valueV === 0 ? THREE.RepeatWrapping : THREE.ClampToEdgeWrapping;
 
-				return texture;
+			if ( 'Scaling' in textureNode ) {
 
-			},
-			// load a texture specified as a blob or data URI, or via an external URL using THREE.TextureLoader
-			loadTexture: function ( textureNode, images ) {
+				const values = textureNode.Scaling.value;
+				texture.repeat.x = values[ 0 ];
+				texture.repeat.y = values[ 1 ];
 
-				var fileName;
-				var currentPath = this.textureLoader.path;
-				var children = connections.get( textureNode.id ).children;
+			}
 
-				if ( children !== undefined && children.length > 0 && images[ children[ 0 ].ID ] !== undefined ) {
+			return texture;
 
-					fileName = images[ children[ 0 ].ID ];
+		} // load a texture specified as a blob or data URI, or via an external URL using THREE.TextureLoader
 
-					if ( fileName.indexOf( 'blob:' ) === 0 || fileName.indexOf( 'data:' ) === 0 ) {
 
-						this.textureLoader.setPath( undefined );
+		loadTexture( textureNode, images ) {
 
-					}
+			let fileName;
+			const currentPath = this.textureLoader.path;
+			const children = connections.get( textureNode.id ).children;
 
-				}
+			if ( children !== undefined && children.length > 0 && images[ children[ 0 ].ID ] !== undefined ) {
 
-				var texture;
-				var extension = textureNode.FileName.slice( - 3 ).toLowerCase();
+				fileName = images[ children[ 0 ].ID ];
 
-				if ( extension === 'tga' ) {
+				if ( fileName.indexOf( 'blob:' ) === 0 || fileName.indexOf( 'data:' ) === 0 ) {
 
-					var loader = this.manager.getHandler( '.tga' );
+					this.textureLoader.setPath( undefined );
 
-					if ( loader === null ) {
+				}
 
-						console.warn( 'FBXLoader: TGA loader not found, creating placeholder texture for', textureNode.RelativeFilename );
-						texture = new THREE.Texture();
+			}
 
-					} else {
+			let texture;
+			const extension = textureNode.FileName.slice( - 3 ).toLowerCase();
 
-						texture = loader.load( fileName );
+			if ( extension === 'tga' ) {
 
-					}
+				const loader = this.manager.getHandler( '.tga' );
 
-				} else if ( extension === 'psd' ) {
+				if ( loader === null ) {
 
-					console.warn( 'FBXLoader: PSD textures are not supported, creating placeholder texture for', textureNode.RelativeFilename );
+					console.warn( 'FBXLoader: TGA loader not found, creating placeholder texture for', textureNode.RelativeFilename );
 					texture = new THREE.Texture();
 
 				} else {
 
-					texture = this.textureLoader.load( fileName );
+					texture = loader.load( fileName );
 
 				}
 
-				this.textureLoader.setPath( currentPath );
-				return texture;
+			} else if ( extension === 'psd' ) {
+
+				console.warn( 'FBXLoader: PSD textures are not supported, creating placeholder texture for', textureNode.RelativeFilename );
+				texture = new THREE.Texture();
 
-			},
-			// Parse nodes in FBXTree.Objects.Material
-			parseMaterials: function ( textureMap ) {
+			} else {
 
-				var materialMap = new Map();
+				texture = this.textureLoader.load( fileName );
 
-				if ( 'Material' in fbxTree.Objects ) {
+			}
 
-					var materialNodes = fbxTree.Objects.Material;
+			this.textureLoader.setPath( currentPath );
+			return texture;
 
-					for ( var nodeID in materialNodes ) {
+		} // Parse nodes in FBXTree.Objects.Material
 
-						var material = this.parseMaterial( materialNodes[ nodeID ], textureMap );
-						if ( material !== null ) materialMap.set( parseInt( nodeID ), material );
 
-					}
+		parseMaterials( textureMap ) {
+
+			const materialMap = new Map();
+
+			if ( 'Material' in fbxTree.Objects ) {
+
+				const materialNodes = fbxTree.Objects.Material;
+
+				for ( const nodeID in materialNodes ) {
+
+					const material = this.parseMaterial( materialNodes[ nodeID ], textureMap );
+					if ( material !== null ) materialMap.set( parseInt( nodeID ), material );
 
 				}
 
-				return materialMap;
+			}
+
+			return materialMap;
+
+		} // Parse single node in FBXTree.Objects.Material
+		// Materials are connected to texture maps in FBXTree.Objects.Textures
+		// FBX format currently only supports Lambert and Phong shading models
+
+
+		parseMaterial( materialNode, textureMap ) {
+
+			const ID = materialNode.id;
+			const name = materialNode.attrName;
+			let type = materialNode.ShadingModel; // Case where FBX wraps shading model in property object.
+
+			if ( typeof type === 'object' ) {
+
+				type = type.value;
+
+			} // Ignore unused materials which don't have any connections.
+
 
-			},
-			// Parse single node in FBXTree.Objects.Material
-			// Materials are connected to texture maps in FBXTree.Objects.Textures
-			// FBX format currently only supports Lambert and Phong shading models
-			parseMaterial: function ( materialNode, textureMap ) {
+			if ( ! connections.has( ID ) ) return null;
+			const parameters = this.parseParameters( materialNode, textureMap, ID );
+			let material;
 
-				var ID = materialNode.id;
-				var name = materialNode.attrName;
-				var type = materialNode.ShadingModel; // Case where FBX wraps shading model in property object.
+			switch ( type.toLowerCase() ) {
 
-				if ( typeof type === 'object' ) {
+				case 'phong':
+					material = new THREE.MeshPhongMaterial();
+					break;
+
+				case 'lambert':
+					material = new THREE.MeshLambertMaterial();
+					break;
+
+				default:
+					console.warn( 'THREE.FBXLoader: unknown material type "%s". Defaulting to THREE.MeshPhongMaterial.', type );
+					material = new THREE.MeshPhongMaterial();
+					break;
+
+			}
+
+			material.setValues( parameters );
+			material.name = name;
+			return material;
+
+		} // Parse FBX material and return parameters suitable for a three.js material
+		// Also parse the texture map and return any textures associated with the material
+
+
+		parseParameters( materialNode, textureMap, ID ) {
+
+			const parameters = {};
 
-					type = type.value;
+			if ( materialNode.BumpFactor ) {
 
-				} // Ignore unused materials which don't have any connections.
+				parameters.bumpScale = materialNode.BumpFactor.value;
+
+			}
+
+			if ( materialNode.Diffuse ) {
+
+				parameters.color = new THREE.Color().fromArray( materialNode.Diffuse.value );
+
+			} else if ( materialNode.DiffuseColor && ( materialNode.DiffuseColor.type === 'Color' || materialNode.DiffuseColor.type === 'ColorRGB' ) ) {
+
+				// The blender exporter exports diffuse here instead of in materialNode.Diffuse
+				parameters.color = new THREE.Color().fromArray( materialNode.DiffuseColor.value );
+
+			}
+
+			if ( materialNode.DisplacementFactor ) {
+
+				parameters.displacementScale = materialNode.DisplacementFactor.value;
+
+			}
 
+			if ( materialNode.Emissive ) {
 
-				if ( ! connections.has( ID ) ) return null;
-				var parameters = this.parseParameters( materialNode, textureMap, ID );
-				var material;
+				parameters.emissive = new THREE.Color().fromArray( materialNode.Emissive.value );
+
+			} else if ( materialNode.EmissiveColor && ( materialNode.EmissiveColor.type === 'Color' || materialNode.EmissiveColor.type === 'ColorRGB' ) ) {
+
+				// The blender exporter exports emissive color here instead of in materialNode.Emissive
+				parameters.emissive = new THREE.Color().fromArray( materialNode.EmissiveColor.value );
+
+			}
+
+			if ( materialNode.EmissiveFactor ) {
+
+				parameters.emissiveIntensity = parseFloat( materialNode.EmissiveFactor.value );
+
+			}
+
+			if ( materialNode.Opacity ) {
+
+				parameters.opacity = parseFloat( materialNode.Opacity.value );
+
+			}
+
+			if ( parameters.opacity < 1.0 ) {
+
+				parameters.transparent = true;
+
+			}
+
+			if ( materialNode.ReflectionFactor ) {
+
+				parameters.reflectivity = materialNode.ReflectionFactor.value;
+
+			}
+
+			if ( materialNode.Shininess ) {
+
+				parameters.shininess = materialNode.Shininess.value;
+
+			}
 
-				switch ( type.toLowerCase() ) {
+			if ( materialNode.Specular ) {
 
-					case 'phong':
-						material = new THREE.MeshPhongMaterial();
+				parameters.specular = new THREE.Color().fromArray( materialNode.Specular.value );
+
+			} else if ( materialNode.SpecularColor && materialNode.SpecularColor.type === 'Color' ) {
+
+				// The blender exporter exports specular color here instead of in materialNode.Specular
+				parameters.specular = new THREE.Color().fromArray( materialNode.SpecularColor.value );
+
+			}
+
+			const scope = this;
+			connections.get( ID ).children.forEach( function ( child ) {
+
+				const type = child.relationship;
+
+				switch ( type ) {
+
+					case 'Bump':
+						parameters.bumpMap = scope.getTexture( textureMap, child.ID );
+						break;
+
+					case 'Maya|TEX_ao_map':
+						parameters.aoMap = scope.getTexture( textureMap, child.ID );
+						break;
+
+					case 'DiffuseColor':
+					case 'Maya|TEX_color_map':
+						parameters.map = scope.getTexture( textureMap, child.ID );
+						parameters.map.encoding = THREE.sRGBEncoding;
+						break;
+
+					case 'DisplacementColor':
+						parameters.displacementMap = scope.getTexture( textureMap, child.ID );
+						break;
+
+					case 'EmissiveColor':
+						parameters.emissiveMap = scope.getTexture( textureMap, child.ID );
+						parameters.emissiveMap.encoding = THREE.sRGBEncoding;
+						break;
+
+					case 'NormalMap':
+					case 'Maya|TEX_normal_map':
+						parameters.normalMap = scope.getTexture( textureMap, child.ID );
+						break;
+
+					case 'ReflectionColor':
+						parameters.envMap = scope.getTexture( textureMap, child.ID );
+						parameters.envMap.mapping = THREE.EquirectangularReflectionMapping;
+						parameters.envMap.encoding = THREE.sRGBEncoding;
 						break;
 
-					case 'lambert':
-						material = new THREE.MeshLambertMaterial();
+					case 'SpecularColor':
+						parameters.specularMap = scope.getTexture( textureMap, child.ID );
+						parameters.specularMap.encoding = THREE.sRGBEncoding;
 						break;
 
+					case 'TransparentColor':
+					case 'TransparencyFactor':
+						parameters.alphaMap = scope.getTexture( textureMap, child.ID );
+						parameters.transparent = true;
+						break;
+
+					case 'AmbientColor':
+					case 'ShininessExponent': // AKA glossiness map
+
+					case 'SpecularFactor': // AKA specularLevel
+
+					case 'VectorDisplacementColor': // NOTE: Seems to be a copy of DisplacementColor
+
 					default:
-						console.warn( 'THREE.FBXLoader: unknown material type "%s". Defaulting to THREE.MeshPhongMaterial.', type );
-						material = new THREE.MeshPhongMaterial();
+						console.warn( 'THREE.FBXLoader: %s map is not supported in three.js, skipping texture.', type );
 						break;
 
 				}
 
-				material.setValues( parameters );
-				material.name = name;
-				return material;
+			} );
+			return parameters;
 
-			},
-			// Parse FBX material and return parameters suitable for a three.js material
-			// Also parse the texture map and return any textures associated with the material
-			parseParameters: function ( materialNode, textureMap, ID ) {
+		} // get a texture from the textureMap for use by a material.
 
-				var parameters = {};
 
-				if ( materialNode.BumpFactor ) {
+		getTexture( textureMap, id ) {
 
-					parameters.bumpScale = materialNode.BumpFactor.value;
+			// if the texture is a layered texture, just use the first layer and issue a warning
+			if ( 'LayeredTexture' in fbxTree.Objects && id in fbxTree.Objects.LayeredTexture ) {
 
-				}
+				console.warn( 'THREE.FBXLoader: layered textures are not supported in three.js. Discarding all but first layer.' );
+				id = connections.get( id ).children[ 0 ].ID;
 
-				if ( materialNode.Diffuse ) {
+			}
 
-					parameters.color = new THREE.Color().fromArray( materialNode.Diffuse.value );
+			return textureMap.get( id );
 
-				} else if ( materialNode.DiffuseColor && ( materialNode.DiffuseColor.type === 'Color' || materialNode.DiffuseColor.type === 'ColorRGB' ) ) {
+		} // Parse nodes in FBXTree.Objects.Deformer
+		// Deformer node can contain skinning or Vertex Cache animation data, however only skinning is supported here
+		// Generates map of THREE.Skeleton-like objects for use later when generating and binding skeletons.
 
-					// The blender exporter exports diffuse here instead of in materialNode.Diffuse
-					parameters.color = new THREE.Color().fromArray( materialNode.DiffuseColor.value );
 
-				}
+		parseDeformers() {
 
-				if ( materialNode.DisplacementFactor ) {
+			const skeletons = {};
+			const morphTargets = {};
 
-					parameters.displacementScale = materialNode.DisplacementFactor.value;
+			if ( 'Deformer' in fbxTree.Objects ) {
 
-				}
+				const DeformerNodes = fbxTree.Objects.Deformer;
 
-				if ( materialNode.Emissive ) {
+				for ( const nodeID in DeformerNodes ) {
 
-					parameters.emissive = new THREE.Color().fromArray( materialNode.Emissive.value );
+					const deformerNode = DeformerNodes[ nodeID ];
+					const relationships = connections.get( parseInt( nodeID ) );
 
-				} else if ( materialNode.EmissiveColor && ( materialNode.EmissiveColor.type === 'Color' || materialNode.EmissiveColor.type === 'ColorRGB' ) ) {
+					if ( deformerNode.attrType === 'Skin' ) {
 
-					// The blender exporter exports emissive color here instead of in materialNode.Emissive
-					parameters.emissive = new THREE.Color().fromArray( materialNode.EmissiveColor.value );
+						const skeleton = this.parseSkeleton( relationships, DeformerNodes );
+						skeleton.ID = nodeID;
+						if ( relationships.parents.length > 1 ) console.warn( 'THREE.FBXLoader: skeleton attached to more than one geometry is not supported.' );
+						skeleton.geometryID = relationships.parents[ 0 ].ID;
+						skeletons[ nodeID ] = skeleton;
 
-				}
+					} else if ( deformerNode.attrType === 'BlendShape' ) {
 
-				if ( materialNode.EmissiveFactor ) {
+						const morphTarget = {
+							id: nodeID
+						};
+						morphTarget.rawTargets = this.parseMorphTargets( relationships, DeformerNodes );
+						morphTarget.id = nodeID;
+						if ( relationships.parents.length > 1 ) console.warn( 'THREE.FBXLoader: morph target attached to more than one geometry is not supported.' );
+						morphTargets[ nodeID ] = morphTarget;
 
-					parameters.emissiveIntensity = parseFloat( materialNode.EmissiveFactor.value );
+					}
 
 				}
 
-				if ( materialNode.Opacity ) {
+			}
 
-					parameters.opacity = parseFloat( materialNode.Opacity.value );
+			return {
+				skeletons: skeletons,
+				morphTargets: morphTargets
+			};
 
-				}
+		} // Parse single nodes in FBXTree.Objects.Deformer
+		// The top level skeleton node has type 'Skin' and sub nodes have type 'Cluster'
+		// Each skin node represents a skeleton and each cluster node represents a bone
 
-				if ( parameters.opacity < 1.0 ) {
 
-					parameters.transparent = true;
+		parseSkeleton( relationships, deformerNodes ) {
 
-				}
+			const rawBones = [];
+			relationships.children.forEach( function ( child ) {
+
+				const boneNode = deformerNodes[ child.ID ];
+				if ( boneNode.attrType !== 'Cluster' ) return;
+				const rawBone = {
+					ID: child.ID,
+					indices: [],
+					weights: [],
+					transformLink: new THREE.Matrix4().fromArray( boneNode.TransformLink.a ) // transform: new THREE.Matrix4().fromArray( boneNode.Transform.a ),
+					// linkMode: boneNode.Mode,
+
+				};
 
-				if ( materialNode.ReflectionFactor ) {
+				if ( 'Indexes' in boneNode ) {
 
-					parameters.reflectivity = materialNode.ReflectionFactor.value;
+					rawBone.indices = boneNode.Indexes.a;
+					rawBone.weights = boneNode.Weights.a;
 
 				}
 
-				if ( materialNode.Shininess ) {
+				rawBones.push( rawBone );
+
+			} );
+			return {
+				rawBones: rawBones,
+				bones: []
+			};
+
+		} // The top level morph deformer node has type "BlendShape" and sub nodes have type "BlendShapeChannel"
+
+
+		parseMorphTargets( relationships, deformerNodes ) {
+
+			const rawMorphTargets = [];
+
+			for ( let i = 0; i < relationships.children.length; i ++ ) {
+
+				const child = relationships.children[ i ];
+				const morphTargetNode = deformerNodes[ child.ID ];
+				const rawMorphTarget = {
+					name: morphTargetNode.attrName,
+					initialWeight: morphTargetNode.DeformPercent,
+					id: morphTargetNode.id,
+					fullWeights: morphTargetNode.FullWeights.a
+				};
+				if ( morphTargetNode.attrType !== 'BlendShapeChannel' ) return;
+				rawMorphTarget.geoID = connections.get( parseInt( child.ID ) ).children.filter( function ( child ) {
+
+					return child.relationship === undefined;
+
+				} )[ 0 ].ID;
+				rawMorphTargets.push( rawMorphTarget );
+
+			}
+
+			return rawMorphTargets;
+
+		} // create the main THREE.Group() to be returned by the loader
 
-					parameters.shininess = materialNode.Shininess.value;
+
+		parseScene( deformers, geometryMap, materialMap ) {
+
+			sceneGraph = new THREE.Group();
+			const modelMap = this.parseModels( deformers.skeletons, geometryMap, materialMap );
+			const modelNodes = fbxTree.Objects.Model;
+			const scope = this;
+			modelMap.forEach( function ( model ) {
+
+				const modelNode = modelNodes[ model.ID ];
+				scope.setLookAtProperties( model, modelNode );
+				const parentConnections = connections.get( model.ID ).parents;
+				parentConnections.forEach( function ( connection ) {
+
+					const parent = modelMap.get( connection.ID );
+					if ( parent !== undefined ) parent.add( model );
+
+				} );
+
+				if ( model.parent === null ) {
+
+					sceneGraph.add( model );
 
 				}
 
-				if ( materialNode.Specular ) {
+			} );
+			this.bindSkeleton( deformers.skeletons, geometryMap, modelMap );
+			this.createAmbientLight();
+			this.setupMorphMaterials();
+			sceneGraph.traverse( function ( node ) {
+
+				if ( node.userData.transformData ) {
+
+					if ( node.parent ) {
 
-					parameters.specular = new THREE.Color().fromArray( materialNode.Specular.value );
+						node.userData.transformData.parentMatrix = node.parent.matrix;
+						node.userData.transformData.parentMatrixWorld = node.parent.matrixWorld;
 
-				} else if ( materialNode.SpecularColor && materialNode.SpecularColor.type === 'Color' ) {
+					}
 
-					// The blender exporter exports specular color here instead of in materialNode.Specular
-					parameters.specular = new THREE.Color().fromArray( materialNode.SpecularColor.value );
+					const transform = generateTransform( node.userData.transformData );
+					node.applyMatrix4( transform );
+					node.updateWorldMatrix();
 
 				}
 
-				var scope = this;
-				connections.get( ID ).children.forEach( function ( child ) {
+			} );
+			const animations = new AnimationParser().parse(); // if all the models where already combined in a single group, just return that
 
-					var type = child.relationship;
+			if ( sceneGraph.children.length === 1 && sceneGraph.children[ 0 ].isGroup ) {
 
-					switch ( type ) {
+				sceneGraph.children[ 0 ].animations = animations;
+				sceneGraph = sceneGraph.children[ 0 ];
 
-						case 'Bump':
-							parameters.bumpMap = scope.getTexture( textureMap, child.ID );
-							break;
+			}
 
-						case 'Maya|TEX_ao_map':
-							parameters.aoMap = scope.getTexture( textureMap, child.ID );
-							break;
+			sceneGraph.animations = animations;
 
-						case 'DiffuseColor':
-						case 'Maya|TEX_color_map':
-							parameters.map = scope.getTexture( textureMap, child.ID );
-							parameters.map.encoding = THREE.sRGBEncoding;
-							break;
+		} // parse nodes in FBXTree.Objects.Model
 
-						case 'DisplacementColor':
-							parameters.displacementMap = scope.getTexture( textureMap, child.ID );
+
+		parseModels( skeletons, geometryMap, materialMap ) {
+
+			const modelMap = new Map();
+			const modelNodes = fbxTree.Objects.Model;
+
+			for ( const nodeID in modelNodes ) {
+
+				const id = parseInt( nodeID );
+				const node = modelNodes[ nodeID ];
+				const relationships = connections.get( id );
+				let model = this.buildSkeleton( relationships, skeletons, id, node.attrName );
+
+				if ( ! model ) {
+
+					switch ( node.attrType ) {
+
+						case 'Camera':
+							model = this.createCamera( relationships );
 							break;
 
-						case 'EmissiveColor':
-							parameters.emissiveMap = scope.getTexture( textureMap, child.ID );
-							parameters.emissiveMap.encoding = THREE.sRGBEncoding;
+						case 'Light':
+							model = this.createLight( relationships );
 							break;
 
-						case 'NormalMap':
-						case 'Maya|TEX_normal_map':
-							parameters.normalMap = scope.getTexture( textureMap, child.ID );
+						case 'Mesh':
+							model = this.createMesh( relationships, geometryMap, materialMap );
 							break;
 
-						case 'ReflectionColor':
-							parameters.envMap = scope.getTexture( textureMap, child.ID );
-							parameters.envMap.mapping = THREE.EquirectangularReflectionMapping;
-							parameters.envMap.encoding = THREE.sRGBEncoding;
+						case 'NurbsCurve':
+							model = this.createCurve( relationships, geometryMap );
 							break;
 
-						case 'SpecularColor':
-							parameters.specularMap = scope.getTexture( textureMap, child.ID );
-							parameters.specularMap.encoding = THREE.sRGBEncoding;
+						case 'LimbNode':
+						case 'Root':
+							model = new THREE.Bone();
 							break;
 
-						case 'TransparentColor':
-						case 'TransparencyFactor':
-							parameters.alphaMap = scope.getTexture( textureMap, child.ID );
-							parameters.transparent = true;
+						case 'Null':
+						default:
+							model = new THREE.Group();
 							break;
 
-						case 'AmbientColor':
-						case 'ShininessExponent': // AKA glossiness map
+					}
 
-						case 'SpecularFactor': // AKA specularLevel
+					model.name = node.attrName ? THREE.PropertyBinding.sanitizeNodeName( node.attrName ) : '';
+					model.ID = id;
 
-						case 'VectorDisplacementColor': // NOTE: Seems to be a copy of DisplacementColor
+				}
 
-						default:
-							console.warn( 'THREE.FBXLoader: %s map is not supported in three.js, skipping texture.', type );
-							break;
+				this.getTransformData( model, node );
+				modelMap.set( id, model );
 
-					}
+			}
 
-				} );
-				return parameters;
+			return modelMap;
+
+		}
+
+		buildSkeleton( relationships, skeletons, id, name ) {
+
+			let bone = null;
+			relationships.parents.forEach( function ( parent ) {
+
+				for ( const ID in skeletons ) {
+
+					const skeleton = skeletons[ ID ];
+					skeleton.rawBones.forEach( function ( rawBone, i ) {
+
+						if ( rawBone.ID === parent.ID ) {
+
+							const subBone = bone;
+							bone = new THREE.Bone();
+							bone.matrixWorld.copy( rawBone.transformLink ); // set name and id here - otherwise in cases where "subBone" is created it will not have a name / id
 
-			},
-			// get a texture from the textureMap for use by a material.
-			getTexture: function ( textureMap, id ) {
+							bone.name = name ? THREE.PropertyBinding.sanitizeNodeName( name ) : '';
+							bone.ID = id;
+							skeleton.bones[ i ] = bone; // In cases where a bone is shared between multiple meshes
+							// duplicate the bone here and add it as a child of the first bone
 
-				// if the texture is a layered texture, just use the first layer and issue a warning
-				if ( 'LayeredTexture' in fbxTree.Objects && id in fbxTree.Objects.LayeredTexture ) {
+							if ( subBone !== null ) {
 
-					console.warn( 'THREE.FBXLoader: layered textures are not supported in three.js. Discarding all but first layer.' );
-					id = connections.get( id ).children[ 0 ].ID;
+								bone.add( subBone );
+
+							}
+
+						}
+
+					} );
 
 				}
 
-				return textureMap.get( id );
+			} );
+			return bone;
 
-			},
-			// Parse nodes in FBXTree.Objects.Deformer
-			// Deformer node can contain skinning or Vertex Cache animation data, however only skinning is supported here
-			// Generates map of THREE.Skeleton-like objects for use later when generating and binding skeletons.
-			parseDeformers: function () {
+		} // create a THREE.PerspectiveCamera or THREE.OrthographicCamera
 
-				var skeletons = {};
-				var morphTargets = {};
 
-				if ( 'Deformer' in fbxTree.Objects ) {
+		createCamera( relationships ) {
 
-					var DeformerNodes = fbxTree.Objects.Deformer;
+			let model;
+			let cameraAttribute;
+			relationships.children.forEach( function ( child ) {
 
-					for ( var nodeID in DeformerNodes ) {
+				const attr = fbxTree.Objects.NodeAttribute[ child.ID ];
 
-						var deformerNode = DeformerNodes[ nodeID ];
-						var relationships = connections.get( parseInt( nodeID ) );
+				if ( attr !== undefined ) {
 
-						if ( deformerNode.attrType === 'Skin' ) {
+					cameraAttribute = attr;
 
-							var skeleton = this.parseSkeleton( relationships, DeformerNodes );
-							skeleton.ID = nodeID;
-							if ( relationships.parents.length > 1 ) console.warn( 'THREE.FBXLoader: skeleton attached to more than one geometry is not supported.' );
-							skeleton.geometryID = relationships.parents[ 0 ].ID;
-							skeletons[ nodeID ] = skeleton;
+				}
 
-						} else if ( deformerNode.attrType === 'BlendShape' ) {
+			} );
 
-							var morphTarget = {
-								id: nodeID
-							};
-							morphTarget.rawTargets = this.parseMorphTargets( relationships, DeformerNodes );
-							morphTarget.id = nodeID;
-							if ( relationships.parents.length > 1 ) console.warn( 'THREE.FBXLoader: morph target attached to more than one geometry is not supported.' );
-							morphTargets[ nodeID ] = morphTarget;
+			if ( cameraAttribute === undefined ) {
 
-						}
+				model = new THREE.Object3D();
 
-					}
+			} else {
+
+				let type = 0;
+
+				if ( cameraAttribute.CameraProjectionType !== undefined && cameraAttribute.CameraProjectionType.value === 1 ) {
+
+					type = 1;
 
 				}
 
-				return {
-					skeletons: skeletons,
-					morphTargets: morphTargets
-				};
+				let nearClippingPlane = 1;
 
-			},
-			// Parse single nodes in FBXTree.Objects.Deformer
-			// The top level skeleton node has type 'Skin' and sub nodes have type 'Cluster'
-			// Each skin node represents a skeleton and each cluster node represents a bone
-			parseSkeleton: function ( relationships, deformerNodes ) {
+				if ( cameraAttribute.NearPlane !== undefined ) {
 
-				var rawBones = [];
-				relationships.children.forEach( function ( child ) {
+					nearClippingPlane = cameraAttribute.NearPlane.value / 1000;
 
-					var boneNode = deformerNodes[ child.ID ];
-					if ( boneNode.attrType !== 'Cluster' ) return;
-					var rawBone = {
-						ID: child.ID,
-						indices: [],
-						weights: [],
-						transformLink: new THREE.Matrix4().fromArray( boneNode.TransformLink.a ) // transform: new THREE.Matrix4().fromArray( boneNode.Transform.a ),
-						// linkMode: boneNode.Mode,
+				}
 
-					};
+				let farClippingPlane = 1000;
 
-					if ( 'Indexes' in boneNode ) {
+				if ( cameraAttribute.FarPlane !== undefined ) {
 
-						rawBone.indices = boneNode.Indexes.a;
-						rawBone.weights = boneNode.Weights.a;
+					farClippingPlane = cameraAttribute.FarPlane.value / 1000;
 
-					}
+				}
 
-					rawBones.push( rawBone );
+				let width = window.innerWidth;
+				let height = window.innerHeight;
 
-				} );
-				return {
-					rawBones: rawBones,
-					bones: []
-				};
+				if ( cameraAttribute.AspectWidth !== undefined && cameraAttribute.AspectHeight !== undefined ) {
 
-			},
-			// The top level morph deformer node has type "BlendShape" and sub nodes have type "BlendShapeChannel"
-			parseMorphTargets: function ( relationships, deformerNodes ) {
+					width = cameraAttribute.AspectWidth.value;
+					height = cameraAttribute.AspectHeight.value;
 
-				var rawMorphTargets = [];
+				}
 
-				for ( var i = 0; i < relationships.children.length; i ++ ) {
+				const aspect = width / height;
+				let fov = 45;
 
-					var child = relationships.children[ i ];
-					var morphTargetNode = deformerNodes[ child.ID ];
-					var rawMorphTarget = {
-						name: morphTargetNode.attrName,
-						initialWeight: morphTargetNode.DeformPercent,
-						id: morphTargetNode.id,
-						fullWeights: morphTargetNode.FullWeights.a
-					};
-					if ( morphTargetNode.attrType !== 'BlendShapeChannel' ) return;
-					rawMorphTarget.geoID = connections.get( parseInt( child.ID ) ).children.filter( function ( child ) {
+				if ( cameraAttribute.FieldOfView !== undefined ) {
+
+					fov = cameraAttribute.FieldOfView.value;
+
+				}
+
+				const focalLength = cameraAttribute.FocalLength ? cameraAttribute.FocalLength.value : null;
 
-						return child.relationship === undefined;
+				switch ( type ) {
+
+					case 0:
+					// Perspective
+						model = new THREE.PerspectiveCamera( fov, aspect, nearClippingPlane, farClippingPlane );
+						if ( focalLength !== null ) model.setFocalLength( focalLength );
+						break;
 
-					} )[ 0 ].ID;
-					rawMorphTargets.push( rawMorphTarget );
+					case 1:
+					// Orthographic
+						model = new THREE.OrthographicCamera( - width / 2, width / 2, height / 2, - height / 2, nearClippingPlane, farClippingPlane );
+						break;
+
+					default:
+						console.warn( 'THREE.FBXLoader: Unknown camera type ' + type + '.' );
+						model = new THREE.Object3D();
+						break;
 
 				}
 
-				return rawMorphTargets;
+			}
 
-			},
-			// create the main THREE.Group() to be returned by the loader
-			parseScene: function ( deformers, geometryMap, materialMap ) {
+			return model;
 
-				sceneGraph = new THREE.Group();
-				var modelMap = this.parseModels( deformers.skeletons, geometryMap, materialMap );
-				var modelNodes = fbxTree.Objects.Model;
-				var scope = this;
-				modelMap.forEach( function ( model ) {
+		} // Create a THREE.DirectionalLight, THREE.PointLight or THREE.SpotLight
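	// Editor's sketch (invented attribute values, not part of this commit): createCamera() maps the
	// FBX NodeAttribute fields onto a three.js camera, dividing the FBX clipping planes by 1000 as
	// the method above does.
	const demoCameraAttribute = {
		CameraProjectionType: { value: 0 }, // 0 = perspective, 1 = orthographic
		NearPlane: { value: 100 }, // becomes 0.1 after / 1000
		FarPlane: { value: 1000000 }, // becomes 1000 after / 1000
		AspectWidth: { value: 1920 },
		AspectHeight: { value: 1080 },
		FieldOfView: { value: 45 }
	};

	const demoCamera = new THREE.PerspectiveCamera(
		demoCameraAttribute.FieldOfView.value,
		demoCameraAttribute.AspectWidth.value / demoCameraAttribute.AspectHeight.value,
		demoCameraAttribute.NearPlane.value / 1000,
		demoCameraAttribute.FarPlane.value / 1000
	);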
 
-					var modelNode = modelNodes[ model.ID ];
-					scope.setLookAtProperties( model, modelNode );
-					var parentConnections = connections.get( model.ID ).parents;
-					parentConnections.forEach( function ( connection ) {
 
-						var parent = modelMap.get( connection.ID );
-						if ( parent !== undefined ) parent.add( model );
+		createLight( relationships ) {
 
-					} );
+			let model;
+			let lightAttribute;
+			relationships.children.forEach( function ( child ) {
 
-					if ( model.parent === null ) {
+				const attr = fbxTree.Objects.NodeAttribute[ child.ID ];
 
-						sceneGraph.add( model );
+				if ( attr !== undefined ) {
 
-					}
+					lightAttribute = attr;
 
-				} );
-				this.bindSkeleton( deformers.skeletons, geometryMap, modelMap );
-				this.createAmbientLight();
-				this.setupMorphMaterials();
-				sceneGraph.traverse( function ( node ) {
+				}
 
-					if ( node.userData.transformData ) {
+			} );
 
-						if ( node.parent ) {
+			if ( lightAttribute === undefined ) {
 
-							node.userData.transformData.parentMatrix = node.parent.matrix;
-							node.userData.transformData.parentMatrixWorld = node.parent.matrixWorld;
+				model = new THREE.Object3D();
 
-						}
+			} else {
 
-						var transform = generateTransform( node.userData.transformData );
-						node.applyMatrix4( transform );
-						node.updateWorldMatrix();
+				let type; // LightType can be undefined for Point lights
 
-					}
+				if ( lightAttribute.LightType === undefined ) {
 
-				} );
-				var animations = new AnimationParser().parse(); // if all the models where already combined in a single group, just return that
+					type = 0;
+
+				} else {
+
+					type = lightAttribute.LightType.value;
+
+				}
 
-				if ( sceneGraph.children.length === 1 && sceneGraph.children[ 0 ].isGroup ) {
+				let color = 0xffffff;
 
-					sceneGraph.children[ 0 ].animations = animations;
-					sceneGraph = sceneGraph.children[ 0 ];
+				if ( lightAttribute.Color !== undefined ) {
+
+					color = new THREE.Color().fromArray( lightAttribute.Color.value );
 
 				}
 
-				sceneGraph.animations = animations;
+				let intensity = lightAttribute.Intensity === undefined ? 1 : lightAttribute.Intensity.value / 100; // light disabled
 
-			},
-			// parse nodes in FBXTree.Objects.Model
-			parseModels: function ( skeletons, geometryMap, materialMap ) {
+				if ( lightAttribute.CastLightOnObject !== undefined && lightAttribute.CastLightOnObject.value === 0 ) {
 
-				var modelMap = new Map();
-				var modelNodes = fbxTree.Objects.Model;
+					intensity = 0;
 
-				for ( var nodeID in modelNodes ) {
+				}
 
-					var id = parseInt( nodeID );
-					var node = modelNodes[ nodeID ];
-					var relationships = connections.get( id );
-					var model = this.buildSkeleton( relationships, skeletons, id, node.attrName );
+				let distance = 0;
 
-					if ( ! model ) {
+				if ( lightAttribute.FarAttenuationEnd !== undefined ) {
 
-						switch ( node.attrType ) {
+					if ( lightAttribute.EnableFarAttenuation !== undefined && lightAttribute.EnableFarAttenuation.value === 0 ) {
 
-							case 'Camera':
-								model = this.createCamera( relationships );
-								break;
+						distance = 0;
 
-							case 'Light':
-								model = this.createLight( relationships );
-								break;
+					} else {
 
-							case 'Mesh':
-								model = this.createMesh( relationships, geometryMap, materialMap );
-								break;
+						distance = lightAttribute.FarAttenuationEnd.value;
+
+					}
+
+				} // TODO: could this be calculated linearly from FarAttenuationStart to FarAttenuationEnd?
 
-							case 'NurbsCurve':
-								model = this.createCurve( relationships, geometryMap );
-								break;
 
-							case 'LimbNode':
-							case 'Root':
-								model = new THREE.Bone();
-								break;
+				const decay = 1;
 
-							case 'Null':
-							default:
-								model = new THREE.Group();
-								break;
+				switch ( type ) {
+
+					case 0:
+					// Point
+						model = new THREE.PointLight( color, intensity, distance, decay );
+						break;
+
+					case 1:
+					// Directional
+						model = new THREE.DirectionalLight( color, intensity );
+						break;
+
+					case 2:
+					// Spot
+						let angle = Math.PI / 3;
+
+						if ( lightAttribute.InnerAngle !== undefined ) {
+
+							angle = THREE.MathUtils.degToRad( lightAttribute.InnerAngle.value );
 
 						}
 
-						model.name = node.attrName ? THREE.PropertyBinding.sanitizeNodeName( node.attrName ) : '';
-						model.ID = id;
+						let penumbra = 0;
 
-					}
+						if ( lightAttribute.OuterAngle !== undefined ) {
 
-					this.getTransformData( model, node );
-					modelMap.set( id, model );
+							// TODO: this is not correct - FBX calculates outer and inner angle in degrees
+							// with OuterAngle > InnerAngle && OuterAngle <= Math.PI
+							// while three.js uses a penumbra between (0, 1) to attenuate the inner angle
+							penumbra = THREE.MathUtils.degToRad( lightAttribute.OuterAngle.value );
+							penumbra = Math.max( penumbra, 1 );
+
+						}
+
+						model = new THREE.SpotLight( color, intensity, distance, angle, penumbra, decay );
+						break;
+
+					default:
+						console.warn( 'THREE.FBXLoader: Unknown light type ' + lightAttribute.LightType.value + ', defaulting to a THREE.PointLight.' );
+						model = new THREE.PointLight( color, intensity );
+						break;
 
 				}
 
-				return modelMap;
+				if ( lightAttribute.CastShadows !== undefined && lightAttribute.CastShadows.value === 1 ) {
 
-			},
-			buildSkeleton: function ( relationships, skeletons, id, name ) {
+					model.castShadow = true;
 
-				var bone = null;
-				relationships.parents.forEach( function ( parent ) {
+				}
 
-					for ( var ID in skeletons ) {
+			}
 
-						var skeleton = skeletons[ ID ];
-						skeleton.rawBones.forEach( function ( rawBone, i ) {
+			return model;
 
-							if ( rawBone.ID === parent.ID ) {
+		}
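	// Editor's sketch (invented attribute values, not part of this commit): createLight() treats the
	// FBX Intensity as a percentage ( / 100 ) and only uses FarAttenuationEnd as the light's distance
	// when far attenuation is enabled, mirroring the branches above.
	const demoLightAttribute = {
		LightType: { value: 0 }, // 0 = point, 1 = directional, 2 = spot
		Color: { value: [ 1, 0.9, 0.8 ] },
		Intensity: { value: 150 }, // becomes 1.5
		EnableFarAttenuation: { value: 1 },
		FarAttenuationEnd: { value: 200 } // becomes the distance
	};

	const demoLight = new THREE.PointLight(
		new THREE.Color().fromArray( demoLightAttribute.Color.value ),
		demoLightAttribute.Intensity.value / 100,
		demoLightAttribute.FarAttenuationEnd.value,
		1 // decay, fixed to 1 by the method above
	);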
 
-								var subBone = bone;
-								bone = new THREE.Bone();
-								bone.matrixWorld.copy( rawBone.transformLink ); // set name and id here - otherwise in cases where "subBone" is created it will not have a name / id
+		createMesh( relationships, geometryMap, materialMap ) {
 
-								bone.name = name ? THREE.PropertyBinding.sanitizeNodeName( name ) : '';
-								bone.ID = id;
-								skeleton.bones[ i ] = bone; // In cases where a bone is shared between multiple meshes
-								// duplicate the bone here and and it as a child of the first bone
+			let model;
+			let geometry = null;
+			let material = null;
+			const materials = []; // get geometry and material(s) from connections
 
-								if ( subBone !== null ) {
+			relationships.children.forEach( function ( child ) {
 
-									bone.add( subBone );
+				if ( geometryMap.has( child.ID ) ) {
 
-								}
+					geometry = geometryMap.get( child.ID );
 
-							}
+				}
 
-						} );
+				if ( materialMap.has( child.ID ) ) {
 
-					}
+					materials.push( materialMap.get( child.ID ) );
 
-				} );
-				return bone;
+				}
 
-			},
-			// create a THREE.PerspectiveCamera or THREE.OrthographicCamera
-			createCamera: function ( relationships ) {
+			} );
 
-				var model;
-				var cameraAttribute;
-				relationships.children.forEach( function ( child ) {
+			if ( materials.length > 1 ) {
 
-					var attr = fbxTree.Objects.NodeAttribute[ child.ID ];
+				material = materials;
 
-					if ( attr !== undefined ) {
+			} else if ( materials.length > 0 ) {
 
-						cameraAttribute = attr;
+				material = materials[ 0 ];
 
-					}
+			} else {
 
+				material = new THREE.MeshPhongMaterial( {
+					color: 0xcccccc
 				} );
+				materials.push( material );
 
-				if ( cameraAttribute === undefined ) {
+			}
 
-					model = new THREE.Object3D();
+			if ( 'color' in geometry.attributes ) {
 
-				} else {
+				materials.forEach( function ( material ) {
 
-					var type = 0;
+					material.vertexColors = true;
 
-					if ( cameraAttribute.CameraProjectionType !== undefined && cameraAttribute.CameraProjectionType.value === 1 ) {
+				} );
 
-						type = 1;
+			}
 
-					}
+			if ( geometry.FBX_Deformer ) {
 
-					var nearClippingPlane = 1;
+				materials.forEach( function ( material ) {
 
-					if ( cameraAttribute.NearPlane !== undefined ) {
+					material.skinning = true;
 
-						nearClippingPlane = cameraAttribute.NearPlane.value / 1000;
+				} );
+				model = new THREE.SkinnedMesh( geometry, material );
+				model.normalizeSkinWeights();
 
-					}
+			} else {
 
-					var farClippingPlane = 1000;
+				model = new THREE.Mesh( geometry, material );
 
-					if ( cameraAttribute.FarPlane !== undefined ) {
+			}
 
-						farClippingPlane = cameraAttribute.FarPlane.value / 1000;
+			return model;
 
-					}
+		}
 
-					var width = window.innerWidth;
-					var height = window.innerHeight;
+		createCurve( relationships, geometryMap ) {
 
-					if ( cameraAttribute.AspectWidth !== undefined && cameraAttribute.AspectHeight !== undefined ) {
+			const geometry = relationships.children.reduce( function ( geo, child ) {
 
-						width = cameraAttribute.AspectWidth.value;
-						height = cameraAttribute.AspectHeight.value;
+				if ( geometryMap.has( child.ID ) ) geo = geometryMap.get( child.ID );
+				return geo;
 
-					}
+			}, null ); // FBX does not list materials for Nurbs lines, so we'll just put our own in here.
 
-					var aspect = width / height;
-					var fov = 45;
+			const material = new THREE.LineBasicMaterial( {
+				color: 0x3300ff,
+				linewidth: 1
+			} );
+			return new THREE.Line( geometry, material );
 
-					if ( cameraAttribute.FieldOfView !== undefined ) {
+		} // parse the model node for transform data
 
-						fov = cameraAttribute.FieldOfView.value;
 
-					}
+		getTransformData( model, modelNode ) {
 
-					var focalLength = cameraAttribute.FocalLength ? cameraAttribute.FocalLength.value : null;
+			const transformData = {};
+			if ( 'InheritType' in modelNode ) transformData.inheritType = parseInt( modelNode.InheritType.value );
+			if ( 'RotationOrder' in modelNode ) transformData.eulerOrder = getEulerOrder( modelNode.RotationOrder.value ); else transformData.eulerOrder = 'ZYX';
+			if ( 'Lcl_Translation' in modelNode ) transformData.translation = modelNode.Lcl_Translation.value;
+			if ( 'PreRotation' in modelNode ) transformData.preRotation = modelNode.PreRotation.value;
+			if ( 'Lcl_Rotation' in modelNode ) transformData.rotation = modelNode.Lcl_Rotation.value;
+			if ( 'PostRotation' in modelNode ) transformData.postRotation = modelNode.PostRotation.value;
+			if ( 'Lcl_Scaling' in modelNode ) transformData.scale = modelNode.Lcl_Scaling.value;
+			if ( 'ScalingOffset' in modelNode ) transformData.scalingOffset = modelNode.ScalingOffset.value;
+			if ( 'ScalingPivot' in modelNode ) transformData.scalingPivot = modelNode.ScalingPivot.value;
+			if ( 'RotationOffset' in modelNode ) transformData.rotationOffset = modelNode.RotationOffset.value;
+			if ( 'RotationPivot' in modelNode ) transformData.rotationPivot = modelNode.RotationPivot.value;
+			model.userData.transformData = transformData;
 
-					switch ( type ) {
+		}
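	// Editor's sketch (invented values, not part of this commit): getTransformData() only records the
	// transform fields present on the model node; the object stored on model.userData then feeds
	// generateTransform() during the parseScene() traversal above. A typical result:
	const demoTransformData = {
		eulerOrder: 'ZYX', // default when RotationOrder is absent
		inheritType: 0,
		translation: [ 0, 10, 0 ], // Lcl_Translation
		rotation: [ 0, 90, 0 ], // Lcl_Rotation (degrees)
		scale: [ 1, 1, 1 ] // Lcl_Scaling
	};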
 
-						case 0:
-						// Perspective
-							model = new THREE.PerspectiveCamera( fov, aspect, nearClippingPlane, farClippingPlane );
-							if ( focalLength !== null ) model.setFocalLength( focalLength );
-							break;
+		setLookAtProperties( model, modelNode ) {
 
-						case 1:
-						// Orthographic
-							model = new THREE.OrthographicCamera( - width / 2, width / 2, height / 2, - height / 2, nearClippingPlane, farClippingPlane );
-							break;
+			if ( 'LookAtProperty' in modelNode ) {
 
-						default:
-							console.warn( 'THREE.FBXLoader: Unknown camera type ' + type + '.' );
-							model = new THREE.Object3D();
-							break;
+				const children = connections.get( model.ID ).children;
+				children.forEach( function ( child ) {
 
-					}
+					if ( child.relationship === 'LookAtProperty' ) {
 
-				}
+						const lookAtTarget = fbxTree.Objects.Model[ child.ID ];
+
+						if ( 'Lcl_Translation' in lookAtTarget ) {
+
+							const pos = lookAtTarget.Lcl_Translation.value; // THREE.DirectionalLight, THREE.SpotLight
 
-				return model;
+							if ( model.target !== undefined ) {
 
-			},
-			// Create a THREE.DirectionalLight, THREE.PointLight or THREE.SpotLight
-			createLight: function ( relationships ) {
+								model.target.position.fromArray( pos );
+								sceneGraph.add( model.target );
 
-				var model;
-				var lightAttribute;
-				relationships.children.forEach( function ( child ) {
+							} else {
 
-					var attr = fbxTree.Objects.NodeAttribute[ child.ID ];
+								// Cameras and other Object3Ds
+								model.lookAt( new THREE.Vector3().fromArray( pos ) );
 
-					if ( attr !== undefined ) {
+							}
 
-						lightAttribute = attr;
+						}
 
 					}
 
 				} );
 
-				if ( lightAttribute === undefined ) {
+			}
 
-					model = new THREE.Object3D();
+		}
 
-				} else {
+		bindSkeleton( skeletons, geometryMap, modelMap ) {
 
-					var type; // LightType can be undefined for Point lights
+			const bindMatrices = this.parsePoseNodes();
 
-					if ( lightAttribute.LightType === undefined ) {
+			for ( const ID in skeletons ) {
 
-						type = 0;
+				const skeleton = skeletons[ ID ];
+				const parents = connections.get( parseInt( skeleton.ID ) ).parents;
+				parents.forEach( function ( parent ) {
 
-					} else {
+					if ( geometryMap.has( parent.ID ) ) {
 
-						type = lightAttribute.LightType.value;
+						const geoID = parent.ID;
+						const geoRelationships = connections.get( geoID );
+						geoRelationships.parents.forEach( function ( geoConnParent ) {
 
-					}
+							if ( modelMap.has( geoConnParent.ID ) ) {
 
-					var color = 0xffffff;
+								const model = modelMap.get( geoConnParent.ID );
+								model.bind( new THREE.Skeleton( skeleton.bones ), bindMatrices[ geoConnParent.ID ] );
 
-					if ( lightAttribute.Color !== undefined ) {
+							}
 
-						color = new THREE.Color().fromArray( lightAttribute.Color.value );
+						} );
 
 					}
 
-					var intensity = lightAttribute.Intensity === undefined ? 1 : lightAttribute.Intensity.value / 100; // light disabled
+				} );
 
-					if ( lightAttribute.CastLightOnObject !== undefined && lightAttribute.CastLightOnObject.value === 0 ) {
+			}
 
-						intensity = 0;
+		}
 
-					}
+		parsePoseNodes() {
 
-					var distance = 0;
+			const bindMatrices = {};
 
-					if ( lightAttribute.FarAttenuationEnd !== undefined ) {
+			if ( 'Pose' in fbxTree.Objects ) {
 
-						if ( lightAttribute.EnableFarAttenuation !== undefined && lightAttribute.EnableFarAttenuation.value === 0 ) {
+				const BindPoseNode = fbxTree.Objects.Pose;
 
-							distance = 0;
+				for ( const nodeID in BindPoseNode ) {
 
-						} else {
+					if ( BindPoseNode[ nodeID ].attrType === 'BindPose' ) {
 
-							distance = lightAttribute.FarAttenuationEnd.value;
+						const poseNodes = BindPoseNode[ nodeID ].PoseNode;
 
-						}
+						if ( Array.isArray( poseNodes ) ) {
 
-					} // TODO: could this be calculated linearly from FarAttenuationStart to FarAttenuationEnd?
+							poseNodes.forEach( function ( poseNode ) {
 
+								bindMatrices[ poseNode.Node ] = new THREE.Matrix4().fromArray( poseNode.Matrix.a );
 
-					var decay = 1;
+							} );
 
-					switch ( type ) {
+						} else {
 
-						case 0:
-						// Point
-							model = new THREE.PointLight( color, intensity, distance, decay );
-							break;
+							bindMatrices[ poseNodes.Node ] = new THREE.Matrix4().fromArray( poseNodes.Matrix.a );
 
-						case 1:
-						// Directional
-							model = new THREE.DirectionalLight( color, intensity );
-							break;
+						}
 
-						case 2:
-						// Spot
-							var angle = Math.PI / 3;
+					}
 
-							if ( lightAttribute.InnerAngle !== undefined ) {
+				}
 
-								angle = THREE.MathUtils.degToRad( lightAttribute.InnerAngle.value );
+			}
 
-							}
+			return bindMatrices;
 
-							var penumbra = 0;
+		} // Parse ambient color in FBXTree.GlobalSettings - if it's not set to black (default), create an ambient light
 
-							if ( lightAttribute.OuterAngle !== undefined ) {
 
-								// TODO: this is not correct - FBX calculates outer and inner angle in degrees
-								// with OuterAngle > InnerAngle && OuterAngle <= Math.PI
-								// while three.js uses a penumbra between (0, 1) to attenuate the inner angle
-								penumbra = THREE.MathUtils.degToRad( lightAttribute.OuterAngle.value );
-								penumbra = Math.max( penumbra, 1 );
+		createAmbientLight() {
 
-							}
+			if ( 'GlobalSettings' in fbxTree && 'AmbientColor' in fbxTree.GlobalSettings ) {
 
-							model = new THREE.SpotLight( color, intensity, distance, angle, penumbra, decay );
-							break;
+				const ambientColor = fbxTree.GlobalSettings.AmbientColor.value;
+				const r = ambientColor[ 0 ];
+				const g = ambientColor[ 1 ];
+				const b = ambientColor[ 2 ];
 
-						default:
-							console.warn( 'THREE.FBXLoader: Unknown light type ' + lightAttribute.LightType.value + ', defaulting to a THREE.PointLight.' );
-							model = new THREE.PointLight( color, intensity );
-							break;
+				if ( r !== 0 || g !== 0 || b !== 0 ) {
 
-					}
+					const color = new THREE.Color( r, g, b );
+					sceneGraph.add( new THREE.AmbientLight( color, 1 ) );
+
+				}
 
-					if ( lightAttribute.CastShadows !== undefined && lightAttribute.CastShadows.value === 1 ) {
+			}
 
-						model.castShadow = true;
+		}
 
-					}
+		setupMorphMaterials() {
 
-				}
+			const scope = this;
+			sceneGraph.traverse( function ( child ) {
 
-				return model;
+				if ( child.isMesh ) {
 
-			},
-			createMesh: function ( relationships, geometryMap, materialMap ) {
+					if ( child.geometry.morphAttributes.position && child.geometry.morphAttributes.position.length ) {
 
-				var model;
-				var geometry = null;
-				var material = null;
-				var materials = []; // get geometry and materials(s) from connections
+						if ( Array.isArray( child.material ) ) {
 
-				relationships.children.forEach( function ( child ) {
+							child.material.forEach( function ( material, i ) {
 
-					if ( geometryMap.has( child.ID ) ) {
+								scope.setupMorphMaterial( child, material, i );
 
-						geometry = geometryMap.get( child.ID );
+							} );
 
-					}
+						} else {
 
-					if ( materialMap.has( child.ID ) ) {
+							scope.setupMorphMaterial( child, child.material );
 
-						materials.push( materialMap.get( child.ID ) );
+						}
 
 					}
 
-				} );
+				}
 
-				if ( materials.length > 1 ) {
+			} );
 
-					material = materials;
+		}
 
-				} else if ( materials.length > 0 ) {
+		setupMorphMaterial( child, material, index ) {
 
-					material = materials[ 0 ];
+			const uuid = child.uuid;
+			const matUuid = material.uuid; // if a geometry has morph targets, it cannot share the material with other geometries
 
-				} else {
+			let sharedMat = false;
+			sceneGraph.traverse( function ( node ) {
 
-					material = new THREE.MeshPhongMaterial( {
-						color: 0xcccccc
-					} );
-					materials.push( material );
+				if ( node.isMesh ) {
 
-				}
+					if ( Array.isArray( node.material ) ) {
 
-				if ( 'color' in geometry.attributes ) {
+						node.material.forEach( function ( mat ) {
 
-					materials.forEach( function ( material ) {
+							if ( mat.uuid === matUuid && node.uuid !== uuid ) sharedMat = true;
 
-						material.vertexColors = true;
+						} );
 
-					} );
+					} else if ( node.material.uuid === matUuid && node.uuid !== uuid ) sharedMat = true;
 
 				}
 
-				if ( geometry.FBX_Deformer ) {
+			} );
 
-					materials.forEach( function ( material ) {
+			if ( sharedMat === true ) {
 
-						material.skinning = true;
+				const clonedMat = material.clone();
+				clonedMat.morphTargets = true;
+				if ( index === undefined ) child.material = clonedMat; else child.material[ index ] = clonedMat;
 
-					} );
-					model = new THREE.SkinnedMesh( geometry, material );
-					model.normalizeSkinWeights();
+			} else material.morphTargets = true;
 
-				} else {
+		}
 
-					model = new THREE.Mesh( geometry, material );
+	} // parse Geometry data from FBXTree and return map of BufferGeometries
 
-				}
 
-				return model;
+	class GeometryParser {
 
-			},
-			createCurve: function ( relationships, geometryMap ) {
+		// Parse nodes in FBXTree.Objects.Geometry
+		parse( deformers ) {
 
-				var geometry = relationships.children.reduce( function ( geo, child ) {
+			const geometryMap = new Map();
 
-					if ( geometryMap.has( child.ID ) ) geo = geometryMap.get( child.ID );
-					return geo;
+			if ( 'Geometry' in fbxTree.Objects ) {
 
-				}, null ); // FBX does not list materials for Nurbs lines, so we'll just put our own in here.
+				const geoNodes = fbxTree.Objects.Geometry;
 
-				var material = new THREE.LineBasicMaterial( {
-					color: 0x3300ff,
-					linewidth: 1
-				} );
-				return new THREE.Line( geometry, material );
+				for ( const nodeID in geoNodes ) {
 
-			},
-			// parse the model node for transform data
-			getTransformData: function ( model, modelNode ) {
+					const relationships = connections.get( parseInt( nodeID ) );
+					const geo = this.parseGeometry( relationships, geoNodes[ nodeID ], deformers );
+					geometryMap.set( parseInt( nodeID ), geo );
 
-				var transformData = {};
-				if ( 'InheritType' in modelNode ) transformData.inheritType = parseInt( modelNode.InheritType.value );
-				if ( 'RotationOrder' in modelNode ) transformData.eulerOrder = getEulerOrder( modelNode.RotationOrder.value ); else transformData.eulerOrder = 'ZYX';
-				if ( 'Lcl_Translation' in modelNode ) transformData.translation = modelNode.Lcl_Translation.value;
-				if ( 'PreRotation' in modelNode ) transformData.preRotation = modelNode.PreRotation.value;
-				if ( 'Lcl_Rotation' in modelNode ) transformData.rotation = modelNode.Lcl_Rotation.value;
-				if ( 'PostRotation' in modelNode ) transformData.postRotation = modelNode.PostRotation.value;
-				if ( 'Lcl_Scaling' in modelNode ) transformData.scale = modelNode.Lcl_Scaling.value;
-				if ( 'ScalingOffset' in modelNode ) transformData.scalingOffset = modelNode.ScalingOffset.value;
-				if ( 'ScalingPivot' in modelNode ) transformData.scalingPivot = modelNode.ScalingPivot.value;
-				if ( 'RotationOffset' in modelNode ) transformData.rotationOffset = modelNode.RotationOffset.value;
-				if ( 'RotationPivot' in modelNode ) transformData.rotationPivot = modelNode.RotationPivot.value;
-				model.userData.transformData = transformData;
+				}
 
-			},
-			setLookAtProperties: function ( model, modelNode ) {
+			}
 
-				if ( 'LookAtProperty' in modelNode ) {
+			return geometryMap;
 
-					var children = connections.get( model.ID ).children;
-					children.forEach( function ( child ) {
+		} // Parse single node in FBXTree.Objects.Geometry
 
-						if ( child.relationship === 'LookAtProperty' ) {
 
-							var lookAtTarget = fbxTree.Objects.Model[ child.ID ];
+		parseGeometry( relationships, geoNode, deformers ) {
 
-							if ( 'Lcl_Translation' in lookAtTarget ) {
+			switch ( geoNode.attrType ) {
 
-								var pos = lookAtTarget.Lcl_Translation.value; // THREE.DirectionalLight, THREE.SpotLight
+				case 'Mesh':
+					return this.parseMeshGeometry( relationships, geoNode, deformers );
+					break;
 
-								if ( model.target !== undefined ) {
+				case 'NurbsCurve':
+					return this.parseNurbsGeometry( geoNode );
+					break;
 
-									model.target.position.fromArray( pos );
-									sceneGraph.add( model.target );
+			}
 
-								} else {
+		} // Parse single node mesh geometry in FBXTree.Objects.Geometry
 
-									// Cameras and other Object3Ds
-									model.lookAt( new THREE.Vector3().fromArray( pos ) );
 
-								}
+		parseMeshGeometry( relationships, geoNode, deformers ) {
 
-							}
+			const skeletons = deformers.skeletons;
+			const morphTargets = [];
+			const modelNodes = relationships.parents.map( function ( parent ) {
 
-						}
+				return fbxTree.Objects.Model[ parent.ID ];
 
-					} );
+			} ); // don't create geometry if it is not associated with any models
 
-				}
+			if ( modelNodes.length === 0 ) return;
+			const skeleton = relationships.children.reduce( function ( skeleton, child ) {
 
-			},
-			bindSkeleton: function ( skeletons, geometryMap, modelMap ) {
+				if ( skeletons[ child.ID ] !== undefined ) skeleton = skeletons[ child.ID ];
+				return skeleton;
 
-				var bindMatrices = this.parsePoseNodes();
+			}, null );
+			relationships.children.forEach( function ( child ) {
 
-				for ( var ID in skeletons ) {
+				if ( deformers.morphTargets[ child.ID ] !== undefined ) {
 
-					var skeleton = skeletons[ ID ];
-					var parents = connections.get( parseInt( skeleton.ID ) ).parents;
-					parents.forEach( function ( parent ) {
+					morphTargets.push( deformers.morphTargets[ child.ID ] );
 
-						if ( geometryMap.has( parent.ID ) ) {
+				}
 
-							var geoID = parent.ID;
-							var geoRelationships = connections.get( geoID );
-							geoRelationships.parents.forEach( function ( geoConnParent ) {
+			} ); // Assume one model and get the preRotation from that
+			// if there is more than one model associated with the geometry this may cause problems
 
-								if ( modelMap.has( geoConnParent.ID ) ) {
+			const modelNode = modelNodes[ 0 ];
+			const transformData = {};
+			if ( 'RotationOrder' in modelNode ) transformData.eulerOrder = getEulerOrder( modelNode.RotationOrder.value );
+			if ( 'InheritType' in modelNode ) transformData.inheritType = parseInt( modelNode.InheritType.value );
+			if ( 'GeometricTranslation' in modelNode ) transformData.translation = modelNode.GeometricTranslation.value;
+			if ( 'GeometricRotation' in modelNode ) transformData.rotation = modelNode.GeometricRotation.value;
+			if ( 'GeometricScaling' in modelNode ) transformData.scale = modelNode.GeometricScaling.value;
+			const transform = generateTransform( transformData );
+			return this.genGeometry( geoNode, skeleton, morphTargets, transform );
 
-									var model = modelMap.get( geoConnParent.ID );
-									model.bind( new THREE.Skeleton( skeleton.bones ), bindMatrices[ geoConnParent.ID ] );
+		} // Generate a THREE.BufferGeometry from a node in FBXTree.Objects.Geometry
 
-								}
 
-							} );
+		genGeometry( geoNode, skeleton, morphTargets, preTransform ) {
 
-						}
+			const geo = new THREE.BufferGeometry();
+			if ( geoNode.attrName ) geo.name = geoNode.attrName;
+			const geoInfo = this.parseGeoNode( geoNode, skeleton );
+			const buffers = this.genBuffers( geoInfo );
+			const positionAttribute = new THREE.Float32BufferAttribute( buffers.vertex, 3 );
+			positionAttribute.applyMatrix4( preTransform );
+			geo.setAttribute( 'position', positionAttribute );
 
-					} );
+			if ( buffers.colors.length > 0 ) {
 
-				}
+				geo.setAttribute( 'color', new THREE.Float32BufferAttribute( buffers.colors, 3 ) );
 
-			},
-			parsePoseNodes: function () {
+			}
 
-				var bindMatrices = {};
+			if ( skeleton ) {
 
-				if ( 'Pose' in fbxTree.Objects ) {
+				geo.setAttribute( 'skinIndex', new THREE.Uint16BufferAttribute( buffers.weightsIndices, 4 ) );
+				geo.setAttribute( 'skinWeight', new THREE.Float32BufferAttribute( buffers.vertexWeights, 4 ) ); // used later to bind the skeleton to the model
 
-					var BindPoseNode = fbxTree.Objects.Pose;
+				geo.FBX_Deformer = skeleton;
 
-					for ( var nodeID in BindPoseNode ) {
+			}
 
-						if ( BindPoseNode[ nodeID ].attrType === 'BindPose' ) {
+			if ( buffers.normal.length > 0 ) {
 
-							var poseNodes = BindPoseNode[ nodeID ].PoseNode;
+				const normalMatrix = new THREE.Matrix3().getNormalMatrix( preTransform );
+				const normalAttribute = new THREE.Float32BufferAttribute( buffers.normal, 3 );
+				normalAttribute.applyNormalMatrix( normalMatrix );
+				geo.setAttribute( 'normal', normalAttribute );
 
-							if ( Array.isArray( poseNodes ) ) {
+			}
 
-								poseNodes.forEach( function ( poseNode ) {
+			buffers.uvs.forEach( function ( uvBuffer, i ) {
 
-									bindMatrices[ poseNode.Node ] = new THREE.Matrix4().fromArray( poseNode.Matrix.a );
+				// subsequent uv buffers are called 'uv1', 'uv2', ...
+				let name = 'uv' + ( i + 1 ).toString(); // the first uv buffer is just called 'uv'
 
-								} );
+				if ( i === 0 ) {
 
-							} else {
+					name = 'uv';
 
-								bindMatrices[ poseNodes.Node ] = new THREE.Matrix4().fromArray( poseNodes.Matrix.a );
+				}
 
-							}
+				geo.setAttribute( name, new THREE.Float32BufferAttribute( buffers.uvs[ i ], 2 ) );
 
-						}
+			} );
 
-					}
+			if ( geoInfo.material && geoInfo.material.mappingType !== 'AllSame' ) {
 
-				}
+				// Convert the material indices of each vertex into rendering groups on the geometry.
+				let prevMaterialIndex = buffers.materialIndex[ 0 ];
+				let startIndex = 0;
+				buffers.materialIndex.forEach( function ( currentIndex, i ) {
+
+					if ( currentIndex !== prevMaterialIndex ) {
 
-				return bindMatrices;
+						geo.addGroup( startIndex, i - startIndex, prevMaterialIndex );
+						prevMaterialIndex = currentIndex;
+						startIndex = i;
+
+					}
 
-			},
-			// Parse ambient color in FBXTree.GlobalSettings - if it's not set to black (default), create an ambient light
-			createAmbientLight: function () {
+				} ); // the loop above doesn't add the last group, do that here.
 
-				if ( 'GlobalSettings' in fbxTree && 'AmbientColor' in fbxTree.GlobalSettings ) {
+				if ( geo.groups.length > 0 ) {
 
-					var ambientColor = fbxTree.GlobalSettings.AmbientColor.value;
-					var r = ambientColor[ 0 ];
-					var g = ambientColor[ 1 ];
-					var b = ambientColor[ 2 ];
+					const lastGroup = geo.groups[ geo.groups.length - 1 ];
+					const lastIndex = lastGroup.start + lastGroup.count;
 
-					if ( r !== 0 || g !== 0 || b !== 0 ) {
+					if ( lastIndex !== buffers.materialIndex.length ) {
 
-						var color = new THREE.Color( r, g, b );
-						sceneGraph.add( new THREE.AmbientLight( color, 1 ) );
+						geo.addGroup( lastIndex, buffers.materialIndex.length - lastIndex, prevMaterialIndex );
 
 					}
 
-				}
+				} // case where there are multiple materials but the whole geometry is only
+				// using one of them
 
-			},
-			setupMorphMaterials: function () {
 
-				var scope = this;
-				sceneGraph.traverse( function ( child ) {
+				if ( geo.groups.length === 0 ) {
 
-					if ( child.isMesh ) {
+					geo.addGroup( 0, buffers.materialIndex.length, buffers.materialIndex[ 0 ] );
 
-						if ( child.geometry.morphAttributes.position && child.geometry.morphAttributes.position.length ) {
+				}
 
-							if ( Array.isArray( child.material ) ) {
+			}
 
-								child.material.forEach( function ( material, i ) {
+			this.addMorphTargets( geo, geoNode, morphTargets, preTransform );
+			return geo;
 
-									scope.setupMorphMaterial( child, material, i );
+		}
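	// Editor's sketch (invented data, not part of this commit): genGeometry() run-length encodes the
	// per-vertex material indices into geometry groups; the loop above emits a group whenever the
	// index changes, and the trailing group is added after the loop.
	const demoMaterialIndex = [ 0, 0, 0, 0, 0, 0, 1, 1, 1 ]; // two triangles of material 0, one of material 1
	const demoGeo = new THREE.BufferGeometry();
	let demoPrev = demoMaterialIndex[ 0 ];
	let demoStart = 0;

	demoMaterialIndex.forEach( function ( current, i ) {

		if ( current !== demoPrev ) {

			demoGeo.addGroup( demoStart, i - demoStart, demoPrev ); // addGroup( 0, 6, 0 )
			demoPrev = current;
			demoStart = i;

		}

	} );

	demoGeo.addGroup( demoStart, demoMaterialIndex.length - demoStart, demoPrev ); // addGroup( 6, 3, 1 )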
 
-								} );
+		parseGeoNode( geoNode, skeleton ) {
 
-							} else {
+			const geoInfo = {};
+			geoInfo.vertexPositions = geoNode.Vertices !== undefined ? geoNode.Vertices.a : [];
+			geoInfo.vertexIndices = geoNode.PolygonVertexIndex !== undefined ? geoNode.PolygonVertexIndex.a : [];
 
-								scope.setupMorphMaterial( child, child.material );
+			if ( geoNode.LayerElementColor ) {
 
-							}
+				geoInfo.color = this.parseVertexColors( geoNode.LayerElementColor[ 0 ] );
 
-						}
+			}
 
-					}
+			if ( geoNode.LayerElementMaterial ) {
 
-				} );
+				geoInfo.material = this.parseMaterialIndices( geoNode.LayerElementMaterial[ 0 ] );
 
-			},
-			setupMorphMaterial: function ( child, material, index ) {
+			}
 
-				var uuid = child.uuid;
-				var matUuid = material.uuid; // if a geometry has morph targets, it cannot share the material with other geometries
+			if ( geoNode.LayerElementNormal ) {
 
-				var sharedMat = false;
-				sceneGraph.traverse( function ( node ) {
+				geoInfo.normal = this.parseNormals( geoNode.LayerElementNormal[ 0 ] );
 
-					if ( node.isMesh ) {
+			}
 
-						if ( Array.isArray( node.material ) ) {
+			if ( geoNode.LayerElementUV ) {
 
-							node.material.forEach( function ( mat ) {
+				geoInfo.uv = [];
+				let i = 0;
 
-								if ( mat.uuid === matUuid && node.uuid !== uuid ) sharedMat = true;
+				while ( geoNode.LayerElementUV[ i ] ) {
 
-							} );
+					if ( geoNode.LayerElementUV[ i ].UV ) {
 
-						} else if ( node.material.uuid === matUuid && node.uuid !== uuid ) sharedMat = true;
+						geoInfo.uv.push( this.parseUVs( geoNode.LayerElementUV[ i ] ) );
 
 					}
 
-				} );
+					i ++;
+
+				}
 
-				if ( sharedMat === true ) {
+			}
 
-					var clonedMat = material.clone();
-					clonedMat.morphTargets = true;
-					if ( index === undefined ) child.material = clonedMat; else child.material[ index ] = clonedMat;
+			geoInfo.weightTable = {};
 
-				} else material.morphTargets = true;
+			if ( skeleton !== null ) {
 
-			}
-		}; // parse Geometry data from FBXTree and return map of BufferGeometries
+				geoInfo.skeleton = skeleton;
+				skeleton.rawBones.forEach( function ( rawBone, i ) {
 
-		function GeometryParser() {}
+					// loop over the bone's vertex indices and weights
+					rawBone.indices.forEach( function ( index, j ) {
 
-		GeometryParser.prototype = {
-			constructor: GeometryParser,
-			// Parse nodes in FBXTree.Objects.Geometry
-			parse: function ( deformers ) {
+						if ( geoInfo.weightTable[ index ] === undefined ) geoInfo.weightTable[ index ] = [];
+						geoInfo.weightTable[ index ].push( {
+							id: i,
+							weight: rawBone.weights[ j ]
+						} );
 
-				var geometryMap = new Map();
+					} );
 
-				if ( 'Geometry' in fbxTree.Objects ) {
+				} );
 
-					var geoNodes = fbxTree.Objects.Geometry;
+			}
 
-					for ( var nodeID in geoNodes ) {
+			return geoInfo;
 
-						var relationships = connections.get( parseInt( nodeID ) );
-						var geo = this.parseGeometry( relationships, geoNodes[ nodeID ], deformers );
-						geometryMap.set( parseInt( nodeID ), geo );
+		}
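	// Editor's sketch (invented values, not part of this commit): parseGeoNode() inverts the per-bone
	// index/weight lists into a table keyed by vertex index, so genBuffers() can look up every
	// influence of a vertex at once. For a vertex influenced by rawBones 0 and 3:
	const demoWeightTable = {
		12: [
			{ id: 0, weight: 0.75 }, // index into skeleton.rawBones
			{ id: 3, weight: 0.25 }
		]
	};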
 
-					}
+		genBuffers( geoInfo ) {
 
-				}
+			const buffers = {
+				vertex: [],
+				normal: [],
+				colors: [],
+				uvs: [],
+				materialIndex: [],
+				vertexWeights: [],
+				weightsIndices: []
+			};
+			let polygonIndex = 0;
+			let faceLength = 0;
+			let displayedWeightsWarning = false; // these will hold data for a single face
 
-				return geometryMap;
+			let facePositionIndexes = [];
+			let faceNormals = [];
+			let faceColors = [];
+			let faceUVs = [];
+			let faceWeights = [];
+			let faceWeightIndices = [];
+			const scope = this;
+			geoInfo.vertexIndices.forEach( function ( vertexIndex, polygonVertexIndex ) {
 
-			},
-			// Parse single node in FBXTree.Objects.Geometry
-			parseGeometry: function ( relationships, geoNode, deformers ) {
+				let materialIndex;
+				let endOfFace = false; // Face index and vertex index arrays are combined in a single array
+				// A cube with quad faces looks like this:
+				// PolygonVertexIndex: *24 {
+				//	a: 0, 1, 3, -3, 2, 3, 5, -5, 4, 5, 7, -7, 6, 7, 1, -1, 1, 7, 5, -4, 6, 0, 2, -5
+				//	}
+				// Negative numbers mark the end of a face - first face here is 0, 1, 3, -3
+				// to recover the index of the last vertex, apply a bitwise NOT via XOR: vertexIndex ^ - 1
 
-				switch ( geoNode.attrType ) {
+				if ( vertexIndex < 0 ) {
 
-					case 'Mesh':
-						return this.parseMeshGeometry( relationships, geoNode, deformers );
-						break;
+					vertexIndex = vertexIndex ^ - 1; // equivalent to ( x * -1 ) - 1
 
-					case 'NurbsCurve':
-						return this.parseNurbsGeometry( geoNode );
-						break;
+					endOfFace = true;
 
 				}
 
-			},
-			// Parse single node mesh geometry in FBXTree.Objects.Geometry
-			parseMeshGeometry: function ( relationships, geoNode, deformers ) {
+				let weightIndices = [];
+				let weights = [];
+				facePositionIndexes.push( vertexIndex * 3, vertexIndex * 3 + 1, vertexIndex * 3 + 2 );
 
-				var skeletons = deformers.skeletons;
-				var morphTargets = [];
-				var modelNodes = relationships.parents.map( function ( parent ) {
+				if ( geoInfo.color ) {
 
-					return fbxTree.Objects.Model[ parent.ID ];
+					const data = getData( polygonVertexIndex, polygonIndex, vertexIndex, geoInfo.color );
+					faceColors.push( data[ 0 ], data[ 1 ], data[ 2 ] );
 
-				} ); // don't create geometry if it is not associated with any models
+				}
 
-				if ( modelNodes.length === 0 ) return;
-				var skeleton = relationships.children.reduce( function ( skeleton, child ) {
+				if ( geoInfo.skeleton ) {
 
-					if ( skeletons[ child.ID ] !== undefined ) skeleton = skeletons[ child.ID ];
-					return skeleton;
+					if ( geoInfo.weightTable[ vertexIndex ] !== undefined ) {
 
-				}, null );
-				relationships.children.forEach( function ( child ) {
+						geoInfo.weightTable[ vertexIndex ].forEach( function ( wt ) {
 
-					if ( deformers.morphTargets[ child.ID ] !== undefined ) {
+							weights.push( wt.weight );
+							weightIndices.push( wt.id );
 
-						morphTargets.push( deformers.morphTargets[ child.ID ] );
+						} );
 
 					}
 
-				} ); // Assume one model and get the preRotation from that
-				// if there is more than one model associated with the geometry this may cause problems
+					if ( weights.length > 4 ) {
 
-				var modelNode = modelNodes[ 0 ];
-				var transformData = {};
-				if ( 'RotationOrder' in modelNode ) transformData.eulerOrder = getEulerOrder( modelNode.RotationOrder.value );
-				if ( 'InheritType' in modelNode ) transformData.inheritType = parseInt( modelNode.InheritType.value );
-				if ( 'GeometricTranslation' in modelNode ) transformData.translation = modelNode.GeometricTranslation.value;
-				if ( 'GeometricRotation' in modelNode ) transformData.rotation = modelNode.GeometricRotation.value;
-				if ( 'GeometricScaling' in modelNode ) transformData.scale = modelNode.GeometricScaling.value;
-				var transform = generateTransform( transformData );
-				return this.genGeometry( geoNode, skeleton, morphTargets, transform );
+						if ( ! displayedWeightsWarning ) {
 
-			},
-			// Generate a THREE.BufferGeometry from a node in FBXTree.Objects.Geometry
-			genGeometry: function ( geoNode, skeleton, morphTargets, preTransform ) {
+							console.warn( 'THREE.FBXLoader: Vertex has more than 4 skinning weights assigned. Deleting additional weights.' );
+							displayedWeightsWarning = true;
 
-				var geo = new THREE.BufferGeometry();
-				if ( geoNode.attrName ) geo.name = geoNode.attrName;
-				var geoInfo = this.parseGeoNode( geoNode, skeleton );
-				var buffers = this.genBuffers( geoInfo );
-				var positionAttribute = new THREE.Float32BufferAttribute( buffers.vertex, 3 );
-				positionAttribute.applyMatrix4( preTransform );
-				geo.setAttribute( 'position', positionAttribute );
+						}
 
-				if ( buffers.colors.length > 0 ) {
+						const wIndex = [ 0, 0, 0, 0 ];
+						const Weight = [ 0, 0, 0, 0 ];
+						weights.forEach( function ( weight, weightIndex ) {
 
-					geo.setAttribute( 'color', new THREE.Float32BufferAttribute( buffers.colors, 3 ) );
+							let currentWeight = weight;
+							let currentIndex = weightIndices[ weightIndex ];
+							Weight.forEach( function ( comparedWeight, comparedWeightIndex, comparedWeightArray ) {
 
-				}
+								if ( currentWeight > comparedWeight ) {
 
-				if ( skeleton ) {
+									comparedWeightArray[ comparedWeightIndex ] = currentWeight;
+									currentWeight = comparedWeight;
+									const tmp = wIndex[ comparedWeightIndex ];
+									wIndex[ comparedWeightIndex ] = currentIndex;
+									currentIndex = tmp;
 
-					geo.setAttribute( 'skinIndex', new THREE.Uint16BufferAttribute( buffers.weightsIndices, 4 ) );
-					geo.setAttribute( 'skinWeight', new THREE.Float32BufferAttribute( buffers.vertexWeights, 4 ) ); // used later to bind the skeleton to the model
+								}
 
-					geo.FBX_Deformer = skeleton;
+							} );
 
-				}
+						} );
+						weightIndices = wIndex;
+						weights = Weight;
 
-				if ( buffers.normal.length > 0 ) {
+					} // if the weight array is shorter than 4 pad with 0s
 
-					var normalMatrix = new THREE.Matrix3().getNormalMatrix( preTransform );
-					var normalAttribute = new THREE.Float32BufferAttribute( buffers.normal, 3 );
-					normalAttribute.applyNormalMatrix( normalMatrix );
-					geo.setAttribute( 'normal', normalAttribute );
 
-				}
+					while ( weights.length < 4 ) {
 
-				buffers.uvs.forEach( function ( uvBuffer, i ) {
+						weights.push( 0 );
+						weightIndices.push( 0 );
 
-					// subsequent uv buffers are called 'uv1', 'uv2', ...
-					var name = 'uv' + ( i + 1 ).toString(); // the first uv buffer is just called 'uv'
+					}
 
-					if ( i === 0 ) {
+					for ( let i = 0; i < 4; ++ i ) {
 
-						name = 'uv';
+						faceWeights.push( weights[ i ] );
+						faceWeightIndices.push( weightIndices[ i ] );
 
 					}
 
-					geo.setAttribute( name, new THREE.Float32BufferAttribute( buffers.uvs[ i ], 2 ) );
+				}
 
-				} );
+				if ( geoInfo.normal ) {
 
-				if ( geoInfo.material && geoInfo.material.mappingType !== 'AllSame' ) {
+					const data = getData( polygonVertexIndex, polygonIndex, vertexIndex, geoInfo.normal );
+					faceNormals.push( data[ 0 ], data[ 1 ], data[ 2 ] );
 
-					// Convert the material indices of each vertex into rendering groups on the geometry.
-					var prevMaterialIndex = buffers.materialIndex[ 0 ];
-					var startIndex = 0;
-					buffers.materialIndex.forEach( function ( currentIndex, i ) {
+				}
 
-						if ( currentIndex !== prevMaterialIndex ) {
+				if ( geoInfo.material && geoInfo.material.mappingType !== 'AllSame' ) {
 
-							geo.addGroup( startIndex, i - startIndex, prevMaterialIndex );
-							prevMaterialIndex = currentIndex;
-							startIndex = i;
+					materialIndex = getData( polygonVertexIndex, polygonIndex, vertexIndex, geoInfo.material )[ 0 ];
 
-						}
+				}
 
-					} ); // the loop above doesn't add the last group, do that here.
+				if ( geoInfo.uv ) {
 
-					if ( geo.groups.length > 0 ) {
+					geoInfo.uv.forEach( function ( uv, i ) {
 
-						var lastGroup = geo.groups[ geo.groups.length - 1 ];
-						var lastIndex = lastGroup.start + lastGroup.count;
+						const data = getData( polygonVertexIndex, polygonIndex, vertexIndex, uv );
 
-						if ( lastIndex !== buffers.materialIndex.length ) {
+						if ( faceUVs[ i ] === undefined ) {
 
-							geo.addGroup( lastIndex, buffers.materialIndex.length - lastIndex, prevMaterialIndex );
+							faceUVs[ i ] = [];
 
 						}
 
-					} // case where there are multiple materials but the whole geometry is only
-					// using one of them
+						faceUVs[ i ].push( data[ 0 ] );
+						faceUVs[ i ].push( data[ 1 ] );
 
-
-					if ( geo.groups.length === 0 ) {
-
-						geo.addGroup( 0, buffers.materialIndex.length, buffers.materialIndex[ 0 ] );
-
-					}
+					} );
 
 				}
 
-				this.addMorphTargets( geo, geoNode, morphTargets, preTransform );
-				return geo;
-
-			},
-			parseGeoNode: function ( geoNode, skeleton ) {
+				faceLength ++;
 
-				var geoInfo = {};
-				geoInfo.vertexPositions = geoNode.Vertices !== undefined ? geoNode.Vertices.a : [];
-				geoInfo.vertexIndices = geoNode.PolygonVertexIndex !== undefined ? geoNode.PolygonVertexIndex.a : [];
+				if ( endOfFace ) {
 
-				if ( geoNode.LayerElementColor ) {
+					scope.genFace( buffers, geoInfo, facePositionIndexes, materialIndex, faceNormals, faceColors, faceUVs, faceWeights, faceWeightIndices, faceLength );
+					polygonIndex ++;
+					faceLength = 0; // reset arrays for the next face
 
-					geoInfo.color = this.parseVertexColors( geoNode.LayerElementColor[ 0 ] );
+					facePositionIndexes = [];
+					faceNormals = [];
+					faceColors = [];
+					faceUVs = [];
+					faceWeights = [];
+					faceWeightIndices = [];
 
 				}
 
-				if ( geoNode.LayerElementMaterial ) {
-
-					geoInfo.material = this.parseMaterialIndices( geoNode.LayerElementMaterial[ 0 ] );
+			} );
+			return buffers;
+
+		} // Generate data for a single face in a geometry. If the face is a quad then split it into 2 tris
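	// Editor's sketch (not part of this commit): genBuffers() relies on the FBX convention that a
	// negative PolygonVertexIndex entry ends a face and encodes the last vertex as ( - index - 1 ),
	// which `vertexIndex ^ - 1` undoes. Decoding the first quad of the cube example above:
	const demoPolygonVertexIndex = [ 0, 1, 3, - 3 ];

	const demoFace = demoPolygonVertexIndex.map( function ( index ) {

		return index < 0 ? index ^ - 1 : index; // - 3 ^ - 1 === 2

	} );
	// demoFace === [ 0, 1, 3, 2 ]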
+
+
+		genFace( buffers, geoInfo, facePositionIndexes, materialIndex, faceNormals, faceColors, faceUVs, faceWeights, faceWeightIndices, faceLength ) {
+
+			for ( let i = 2; i < faceLength; i ++ ) {
+
+				buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ 0 ] ] );
+				buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ 1 ] ] );
+				buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ 2 ] ] );
+				buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ ( i - 1 ) * 3 ] ] );
+				buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ ( i - 1 ) * 3 + 1 ] ] );
+				buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ ( i - 1 ) * 3 + 2 ] ] );
+				buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ i * 3 ] ] );
+				buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ i * 3 + 1 ] ] );
+				buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ i * 3 + 2 ] ] );
+
+				if ( geoInfo.skeleton ) {
+
+					buffers.vertexWeights.push( faceWeights[ 0 ] );
+					buffers.vertexWeights.push( faceWeights[ 1 ] );
+					buffers.vertexWeights.push( faceWeights[ 2 ] );
+					buffers.vertexWeights.push( faceWeights[ 3 ] );
+					buffers.vertexWeights.push( faceWeights[ ( i - 1 ) * 4 ] );
+					buffers.vertexWeights.push( faceWeights[ ( i - 1 ) * 4 + 1 ] );
+					buffers.vertexWeights.push( faceWeights[ ( i - 1 ) * 4 + 2 ] );
+					buffers.vertexWeights.push( faceWeights[ ( i - 1 ) * 4 + 3 ] );
+					buffers.vertexWeights.push( faceWeights[ i * 4 ] );
+					buffers.vertexWeights.push( faceWeights[ i * 4 + 1 ] );
+					buffers.vertexWeights.push( faceWeights[ i * 4 + 2 ] );
+					buffers.vertexWeights.push( faceWeights[ i * 4 + 3 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ 0 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ 1 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ 2 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ 3 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ ( i - 1 ) * 4 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ ( i - 1 ) * 4 + 1 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ ( i - 1 ) * 4 + 2 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ ( i - 1 ) * 4 + 3 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ i * 4 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ i * 4 + 1 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ i * 4 + 2 ] );
+					buffers.weightsIndices.push( faceWeightIndices[ i * 4 + 3 ] );
+
+				}
+
+				if ( geoInfo.color ) {
+
+					buffers.colors.push( faceColors[ 0 ] );
+					buffers.colors.push( faceColors[ 1 ] );
+					buffers.colors.push( faceColors[ 2 ] );
+					buffers.colors.push( faceColors[ ( i - 1 ) * 3 ] );
+					buffers.colors.push( faceColors[ ( i - 1 ) * 3 + 1 ] );
+					buffers.colors.push( faceColors[ ( i - 1 ) * 3 + 2 ] );
+					buffers.colors.push( faceColors[ i * 3 ] );
+					buffers.colors.push( faceColors[ i * 3 + 1 ] );
+					buffers.colors.push( faceColors[ i * 3 + 2 ] );
 
 				}
 
-				if ( geoNode.LayerElementNormal ) {
+				if ( geoInfo.material && geoInfo.material.mappingType !== 'AllSame' ) {
 
-					geoInfo.normal = this.parseNormals( geoNode.LayerElementNormal[ 0 ] );
+					buffers.materialIndex.push( materialIndex );
+					buffers.materialIndex.push( materialIndex );
+					buffers.materialIndex.push( materialIndex );
 
 				}
 
-				if ( geoNode.LayerElementUV ) {
+				if ( geoInfo.normal ) {
 
-					geoInfo.uv = [];
-					var i = 0;
+					buffers.normal.push( faceNormals[ 0 ] );
+					buffers.normal.push( faceNormals[ 1 ] );
+					buffers.normal.push( faceNormals[ 2 ] );
+					buffers.normal.push( faceNormals[ ( i - 1 ) * 3 ] );
+					buffers.normal.push( faceNormals[ ( i - 1 ) * 3 + 1 ] );
+					buffers.normal.push( faceNormals[ ( i - 1 ) * 3 + 2 ] );
+					buffers.normal.push( faceNormals[ i * 3 ] );
+					buffers.normal.push( faceNormals[ i * 3 + 1 ] );
+					buffers.normal.push( faceNormals[ i * 3 + 2 ] );
 
-					while ( geoNode.LayerElementUV[ i ] ) {
-
-						if ( geoNode.LayerElementUV[ i ].UV ) {
+				}
 
-							geoInfo.uv.push( this.parseUVs( geoNode.LayerElementUV[ i ] ) );
+				if ( geoInfo.uv ) {
 
-						}
+					geoInfo.uv.forEach( function ( uv, j ) {
 
-						i ++;
+						if ( buffers.uvs[ j ] === undefined ) buffers.uvs[ j ] = [];
+						buffers.uvs[ j ].push( faceUVs[ j ][ 0 ] );
+						buffers.uvs[ j ].push( faceUVs[ j ][ 1 ] );
+						buffers.uvs[ j ].push( faceUVs[ j ][ ( i - 1 ) * 2 ] );
+						buffers.uvs[ j ].push( faceUVs[ j ][ ( i - 1 ) * 2 + 1 ] );
+						buffers.uvs[ j ].push( faceUVs[ j ][ i * 2 ] );
+						buffers.uvs[ j ].push( faceUVs[ j ][ i * 2 + 1 ] );
 
-					}
+					} );
 
 				}
 
-				geoInfo.weightTable = {};
+			}
 
-				if ( skeleton !== null ) {
+		}
 
-					geoInfo.skeleton = skeleton;
-					skeleton.rawBones.forEach( function ( rawBone, i ) {
+		addMorphTargets( parentGeo, parentGeoNode, morphTargets, preTransform ) {
 
-						// loop over the bone's vertex indices and weights
-						rawBone.indices.forEach( function ( index, j ) {
+			if ( morphTargets.length === 0 ) return;
+			parentGeo.morphTargetsRelative = true;
+			parentGeo.morphAttributes.position = []; // parentGeo.morphAttributes.normal = []; // not implemented
 
-							if ( geoInfo.weightTable[ index ] === undefined ) geoInfo.weightTable[ index ] = [];
-							geoInfo.weightTable[ index ].push( {
-								id: i,
-								weight: rawBone.weights[ j ]
-							} );
+			const scope = this;
+			morphTargets.forEach( function ( morphTarget ) {
 
-						} );
+				morphTarget.rawTargets.forEach( function ( rawTarget ) {
 
-					} );
+					const morphGeoNode = fbxTree.Objects.Geometry[ rawTarget.geoID ];
 
-				}
+					if ( morphGeoNode !== undefined ) {
 
-				return geoInfo;
+						scope.genMorphGeometry( parentGeo, parentGeoNode, morphGeoNode, preTransform, rawTarget.name );
 
-			},
-			genBuffers: function ( geoInfo ) {
+					}
 
-				var buffers = {
-					vertex: [],
-					normal: [],
-					colors: [],
-					uvs: [],
-					materialIndex: [],
-					vertexWeights: [],
-					weightsIndices: []
-				};
-				var polygonIndex = 0;
-				var faceLength = 0;
-				var displayedWeightsWarning = false; // these will hold data for a single face
+				} );
 
-				var facePositionIndexes = [];
-				var faceNormals = [];
-				var faceColors = [];
-				var faceUVs = [];
-				var faceWeights = [];
-				var faceWeightIndices = [];
-				var scope = this;
-				geoInfo.vertexIndices.forEach( function ( vertexIndex, polygonVertexIndex ) {
+			} );
 
-					var endOfFace = false; // Face index and vertex index arrays are combined in a single array
-					// A cube with quad faces looks like this:
-					// PolygonVertexIndex: *24 {
-					//	a: 0, 1, 3, -3, 2, 3, 5, -5, 4, 5, 7, -7, 6, 7, 1, -1, 1, 7, 5, -4, 6, 0, 2, -5
-					//	}
-					// Negative numbers mark the end of a face - first face here is 0, 1, 3, -3
-					// to find index of last vertex bit shift the index: ^ - 1
+		} // a morph geometry node is similar to a standard node, and the node is also contained
+		// in FBXTree.Objects.Geometry, however it can only have attributes for position, normal
+		// and a special attribute Index defining which vertices of the original geometry are affected
+		// Normal and position attributes only have data for the vertices that are affected by the morph
 
-					if ( vertexIndex < 0 ) {
 
-						vertexIndex = vertexIndex ^ - 1; // equivalent to ( x * -1 ) - 1
+		genMorphGeometry( parentGeo, parentGeoNode, morphGeoNode, preTransform, name ) {
 
-						endOfFace = true;
+			const vertexIndices = parentGeoNode.PolygonVertexIndex !== undefined ? parentGeoNode.PolygonVertexIndex.a : [];
+			const morphPositionsSparse = morphGeoNode.Vertices !== undefined ? morphGeoNode.Vertices.a : [];
+			const indices = morphGeoNode.Indexes !== undefined ? morphGeoNode.Indexes.a : [];
+			const length = parentGeo.attributes.position.count * 3;
+			const morphPositions = new Float32Array( length );
 
-					}
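+			// scatter the sparse morph offsets into a full length buffer, leaving unaffected vertices at zero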
+			for ( let i = 0; i < indices.length; i ++ ) {
 
-					var weightIndices = [];
-					var weights = [];
-					facePositionIndexes.push( vertexIndex * 3, vertexIndex * 3 + 1, vertexIndex * 3 + 2 );
+				const morphIndex = indices[ i ] * 3;
+				morphPositions[ morphIndex ] = morphPositionsSparse[ i * 3 ];
+				morphPositions[ morphIndex + 1 ] = morphPositionsSparse[ i * 3 + 1 ];
+				morphPositions[ morphIndex + 2 ] = morphPositionsSparse[ i * 3 + 2 ];
 
-					if ( geoInfo.color ) {
+			} // TODO: add morph normal support
 
-						var data = getData( polygonVertexIndex, polygonIndex, vertexIndex, geoInfo.color );
-						faceColors.push( data[ 0 ], data[ 1 ], data[ 2 ] );
 
-					}
+			const morphGeoInfo = {
+				vertexIndices: vertexIndices,
+				vertexPositions: morphPositions
+			};
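+			// re-triangulate using the parent's polygon indices so the morph positions line up with the parent geometry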
+			const morphBuffers = this.genBuffers( morphGeoInfo );
+			const positionAttribute = new THREE.Float32BufferAttribute( morphBuffers.vertex, 3 );
+			positionAttribute.name = name || morphGeoNode.attrName;
+			positionAttribute.applyMatrix4( preTransform );
+			parentGeo.morphAttributes.position.push( positionAttribute );
 
-					if ( geoInfo.skeleton ) {
+		} // Parse normal from FBXTree.Objects.Geometry.LayerElementNormal if it exists
 
-						if ( geoInfo.weightTable[ vertexIndex ] !== undefined ) {
 
-							geoInfo.weightTable[ vertexIndex ].forEach( function ( wt ) {
+		parseNormals( NormalNode ) {
 
-								weights.push( wt.weight );
-								weightIndices.push( wt.id );
+			const mappingType = NormalNode.MappingInformationType;
+			const referenceType = NormalNode.ReferenceInformationType;
+			const buffer = NormalNode.Normals.a;
+			let indexBuffer = [];
 
-							} );
+			if ( referenceType === 'IndexToDirect' ) {
 
-						}
+				if ( 'NormalIndex' in NormalNode ) {
 
-						if ( weights.length > 4 ) {
+					indexBuffer = NormalNode.NormalIndex.a;
 
-							if ( ! displayedWeightsWarning ) {
+				} else if ( 'NormalsIndex' in NormalNode ) {
 
-								console.warn( 'THREE.FBXLoader: Vertex has more than 4 skinning weights assigned to vertex. Deleting additional weights.' );
-								displayedWeightsWarning = true;
+					indexBuffer = NormalNode.NormalsIndex.a;
 
-							}
+				}
 
-							var wIndex = [ 0, 0, 0, 0 ];
-							var Weight = [ 0, 0, 0, 0 ];
-							weights.forEach( function ( weight, weightIndex ) {
+			}
 
-								var currentWeight = weight;
-								var currentIndex = weightIndices[ weightIndex ];
-								Weight.forEach( function ( comparedWeight, comparedWeightIndex, comparedWeightArray ) {
+			return {
+				dataSize: 3,
+				buffer: buffer,
+				indices: indexBuffer,
+				mappingType: mappingType,
+				referenceType: referenceType
+			};
 
-									if ( currentWeight > comparedWeight ) {
+		} // Parse UVs from FBXTree.Objects.Geometry.LayerElementUV if it exists
 
-										comparedWeightArray[ comparedWeightIndex ] = currentWeight;
-										currentWeight = comparedWeight;
-										var tmp = wIndex[ comparedWeightIndex ];
-										wIndex[ comparedWeightIndex ] = currentIndex;
-										currentIndex = tmp;
 
-									}
+		parseUVs( UVNode ) {
 
-								} );
+			const mappingType = UVNode.MappingInformationType;
+			const referenceType = UVNode.ReferenceInformationType;
+			const buffer = UVNode.UV.a;
+			let indexBuffer = [];
 
-							} );
-							weightIndices = wIndex;
-							weights = Weight;
+			if ( referenceType === 'IndexToDirect' ) {
 
-						} // if the weight array is shorter than 4 pad with 0s
+				indexBuffer = UVNode.UVIndex.a;
 
+			}
 
-						while ( weights.length < 4 ) {
+			return {
+				dataSize: 2,
+				buffer: buffer,
+				indices: indexBuffer,
+				mappingType: mappingType,
+				referenceType: referenceType
+			};
 
-							weights.push( 0 );
-							weightIndices.push( 0 );
+		} // Parse Vertex Colors from FBXTree.Objects.Geometry.LayerElementColor if it exists
 
-						}
 
-						for ( var i = 0; i < 4; ++ i ) {
+		parseVertexColors( ColorNode ) {
 
-							faceWeights.push( weights[ i ] );
-							faceWeightIndices.push( weightIndices[ i ] );
+			const mappingType = ColorNode.MappingInformationType;
+			const referenceType = ColorNode.ReferenceInformationType;
+			const buffer = ColorNode.Colors.a;
+			let indexBuffer = [];
 
-						}
+			if ( referenceType === 'IndexToDirect' ) {
 
-					}
+				indexBuffer = ColorNode.ColorIndex.a;
 
-					if ( geoInfo.normal ) {
+			}
 
-						var data = getData( polygonVertexIndex, polygonIndex, vertexIndex, geoInfo.normal );
-						faceNormals.push( data[ 0 ], data[ 1 ], data[ 2 ] );
+			return {
+				dataSize: 4,
+				buffer: buffer,
+				indices: indexBuffer,
+				mappingType: mappingType,
+				referenceType: referenceType
+			};
 
-					}
+		} // Parse mapping and material data in FBXTree.Objects.Geometry.LayerElementMaterial if it exists
 
-					if ( geoInfo.material && geoInfo.material.mappingType !== 'AllSame' ) {
 
-						var materialIndex = getData( polygonVertexIndex, polygonIndex, vertexIndex, geoInfo.material )[ 0 ];
+		parseMaterialIndices( MaterialNode ) {
 
-					}
+			const mappingType = MaterialNode.MappingInformationType;
+			const referenceType = MaterialNode.ReferenceInformationType;
 
-					if ( geoInfo.uv ) {
+			if ( mappingType === 'NoMappingInformation' ) {
 
-						geoInfo.uv.forEach( function ( uv, i ) {
+				return {
+					dataSize: 1,
+					buffer: [ 0 ],
+					indices: [ 0 ],
+					mappingType: 'AllSame',
+					referenceType: referenceType
+				};
 
-							var data = getData( polygonVertexIndex, polygonIndex, vertexIndex, uv );
+			}
 
-							if ( faceUVs[ i ] === undefined ) {
+			const materialIndexBuffer = MaterialNode.Materials.a; // Since materials are stored as indices, there's a bit of a mismatch between FBX and what
+			// we expect. So we create an intermediate buffer that points to the index in the buffer,
+			// to conform with the other functions we've written for other data.
 
-								faceUVs[ i ] = [];
+			const materialIndices = [];
 
-							}
+			for ( let i = 0; i < materialIndexBuffer.length; ++ i ) {
 
-							faceUVs[ i ].push( data[ 0 ] );
-							faceUVs[ i ].push( data[ 1 ] );
+				materialIndices.push( i );
 
-						} );
+			}
 
-					}
+			return {
+				dataSize: 1,
+				buffer: materialIndexBuffer,
+				indices: materialIndices,
+				mappingType: mappingType,
+				referenceType: referenceType
+			};
 
-					faceLength ++;
+		} // Generate a NURBS geometry from a node in FBXTree.Objects.Geometry
 
-					if ( endOfFace ) {
 
-						scope.genFace( buffers, geoInfo, facePositionIndexes, materialIndex, faceNormals, faceColors, faceUVs, faceWeights, faceWeightIndices, faceLength );
-						polygonIndex ++;
-						faceLength = 0; // reset arrays for the next face
+		parseNurbsGeometry( geoNode ) {
 
-						facePositionIndexes = [];
-						faceNormals = [];
-						faceColors = [];
-						faceUVs = [];
-						faceWeights = [];
-						faceWeightIndices = [];
+			if ( THREE.NURBSCurve === undefined ) {
 
-					}
+				console.error( 'THREE.FBXLoader: The loader relies on THREE.NURBSCurve for any nurbs present in the model. Nurbs will show up as empty geometry.' );
+				return new THREE.BufferGeometry();
 
-				} );
-				return buffers;
-
-			},
-			// Generate data for a single face in a geometry. If the face is a quad then split it into 2 tris
-			genFace: function ( buffers, geoInfo, facePositionIndexes, materialIndex, faceNormals, faceColors, faceUVs, faceWeights, faceWeightIndices, faceLength ) {
-
-				for ( var i = 2; i < faceLength; i ++ ) {
-
-					buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ 0 ] ] );
-					buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ 1 ] ] );
-					buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ 2 ] ] );
-					buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ ( i - 1 ) * 3 ] ] );
-					buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ ( i - 1 ) * 3 + 1 ] ] );
-					buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ ( i - 1 ) * 3 + 2 ] ] );
-					buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ i * 3 ] ] );
-					buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ i * 3 + 1 ] ] );
-					buffers.vertex.push( geoInfo.vertexPositions[ facePositionIndexes[ i * 3 + 2 ] ] );
-
-					if ( geoInfo.skeleton ) {
-
-						buffers.vertexWeights.push( faceWeights[ 0 ] );
-						buffers.vertexWeights.push( faceWeights[ 1 ] );
-						buffers.vertexWeights.push( faceWeights[ 2 ] );
-						buffers.vertexWeights.push( faceWeights[ 3 ] );
-						buffers.vertexWeights.push( faceWeights[ ( i - 1 ) * 4 ] );
-						buffers.vertexWeights.push( faceWeights[ ( i - 1 ) * 4 + 1 ] );
-						buffers.vertexWeights.push( faceWeights[ ( i - 1 ) * 4 + 2 ] );
-						buffers.vertexWeights.push( faceWeights[ ( i - 1 ) * 4 + 3 ] );
-						buffers.vertexWeights.push( faceWeights[ i * 4 ] );
-						buffers.vertexWeights.push( faceWeights[ i * 4 + 1 ] );
-						buffers.vertexWeights.push( faceWeights[ i * 4 + 2 ] );
-						buffers.vertexWeights.push( faceWeights[ i * 4 + 3 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ 0 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ 1 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ 2 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ 3 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ ( i - 1 ) * 4 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ ( i - 1 ) * 4 + 1 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ ( i - 1 ) * 4 + 2 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ ( i - 1 ) * 4 + 3 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ i * 4 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ i * 4 + 1 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ i * 4 + 2 ] );
-						buffers.weightsIndices.push( faceWeightIndices[ i * 4 + 3 ] );
+			}
 
-					}
+			const order = parseInt( geoNode.Order );
 
-					if ( geoInfo.color ) {
+			if ( isNaN( order ) ) {
 
-						buffers.colors.push( faceColors[ 0 ] );
-						buffers.colors.push( faceColors[ 1 ] );
-						buffers.colors.push( faceColors[ 2 ] );
-						buffers.colors.push( faceColors[ ( i - 1 ) * 3 ] );
-						buffers.colors.push( faceColors[ ( i - 1 ) * 3 + 1 ] );
-						buffers.colors.push( faceColors[ ( i - 1 ) * 3 + 2 ] );
-						buffers.colors.push( faceColors[ i * 3 ] );
-						buffers.colors.push( faceColors[ i * 3 + 1 ] );
-						buffers.colors.push( faceColors[ i * 3 + 2 ] );
+				console.error( 'THREE.FBXLoader: Invalid Order %s given for geometry ID: %s', geoNode.Order, geoNode.id );
+				return new THREE.BufferGeometry();
 
-					}
+			}
 
-					if ( geoInfo.material && geoInfo.material.mappingType !== 'AllSame' ) {
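+			// FBX stores the curve order, which is the polynomial degree plus one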
+			const degree = order - 1;
+			const knots = geoNode.KnotVector.a;
+			const controlPoints = [];
+			const pointsValues = geoNode.Points.a;
 
-						buffers.materialIndex.push( materialIndex );
-						buffers.materialIndex.push( materialIndex );
-						buffers.materialIndex.push( materialIndex );
+			for ( let i = 0, l = pointsValues.length; i < l; i += 4 ) {
 
-					}
+				controlPoints.push( new THREE.Vector4().fromArray( pointsValues, i ) );
 
-					if ( geoInfo.normal ) {
+			}
 
-						buffers.normal.push( faceNormals[ 0 ] );
-						buffers.normal.push( faceNormals[ 1 ] );
-						buffers.normal.push( faceNormals[ 2 ] );
-						buffers.normal.push( faceNormals[ ( i - 1 ) * 3 ] );
-						buffers.normal.push( faceNormals[ ( i - 1 ) * 3 + 1 ] );
-						buffers.normal.push( faceNormals[ ( i - 1 ) * 3 + 2 ] );
-						buffers.normal.push( faceNormals[ i * 3 ] );
-						buffers.normal.push( faceNormals[ i * 3 + 1 ] );
-						buffers.normal.push( faceNormals[ i * 3 + 2 ] );
+			let startKnot, endKnot;
 
-					}
+			if ( geoNode.Form === 'Closed' ) {
 
-					if ( geoInfo.uv ) {
+				controlPoints.push( controlPoints[ 0 ] );
 
-						geoInfo.uv.forEach( function ( uv, j ) {
+			} else if ( geoNode.Form === 'Periodic' ) {
 
-							if ( buffers.uvs[ j ] === undefined ) buffers.uvs[ j ] = [];
-							buffers.uvs[ j ].push( faceUVs[ j ][ 0 ] );
-							buffers.uvs[ j ].push( faceUVs[ j ][ 1 ] );
-							buffers.uvs[ j ].push( faceUVs[ j ][ ( i - 1 ) * 2 ] );
-							buffers.uvs[ j ].push( faceUVs[ j ][ ( i - 1 ) * 2 + 1 ] );
-							buffers.uvs[ j ].push( faceUVs[ j ][ i * 2 ] );
-							buffers.uvs[ j ].push( faceUVs[ j ][ i * 2 + 1 ] );
+				startKnot = degree;
+				endKnot = knots.length - 1 - startKnot;
 
-						} );
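+				// wrap the first 'degree' control points to close the periodic curve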
+				for ( let i = 0; i < degree; ++ i ) {
 
-					}
+					controlPoints.push( controlPoints[ i ] );
 
 				}
 
-			},
-			addMorphTargets: function ( parentGeo, parentGeoNode, morphTargets, preTransform ) {
+			}
+
+			const curve = new THREE.NURBSCurve( degree, knots, controlPoints, startKnot, endKnot );
+			const vertices = curve.getPoints( controlPoints.length * 7 );
+			const positions = new Float32Array( vertices.length * 3 );
+			vertices.forEach( function ( vertex, i ) {
 
-				if ( morphTargets.length === 0 ) return;
-				parentGeo.morphTargetsRelative = true;
-				parentGeo.morphAttributes.position = []; // parentGeo.morphAttributes.normal = []; // not implemented
+				vertex.toArray( positions, i * 3 );
 
-				var scope = this;
-				morphTargets.forEach( function ( morphTarget ) {
+			} );
+			const geometry = new THREE.BufferGeometry();
+			geometry.setAttribute( 'position', new THREE.BufferAttribute( positions, 3 ) );
+			return geometry;
 
-					morphTarget.rawTargets.forEach( function ( rawTarget ) {
+		}
 
-						var morphGeoNode = fbxTree.Objects.Geometry[ rawTarget.geoID ];
+	} // parse animation data from FBXTree
 
-						if ( morphGeoNode !== undefined ) {
 
-							scope.genMorphGeometry( parentGeo, parentGeoNode, morphGeoNode, preTransform, rawTarget.name );
+	class AnimationParser {
 
-						}
+		// take raw animation clips and turn them into three.js animation clips
+		parse() {
 
-					} );
+			const animationClips = [];
+			const rawClips = this.parseClips();
 
-				} );
+			if ( rawClips !== undefined ) {
+
+				for ( const key in rawClips ) {
 
-			},
-			// a morph geometry node is similar to a standard	node, and the node is also contained
-			// in FBXTree.Objects.Geometry, however it can only have attributes for position, normal
-			// and a special attribute Index defining which vertices of the original geometry are affected
-			// Normal and position attributes only have data for the vertices that are affected by the morph
-			genMorphGeometry: function ( parentGeo, parentGeoNode, morphGeoNode, preTransform, name ) {
+					const rawClip = rawClips[ key ];
+					const clip = this.addClip( rawClip );
+					animationClips.push( clip );
 
-				var vertexIndices = parentGeoNode.PolygonVertexIndex !== undefined ? parentGeoNode.PolygonVertexIndex.a : [];
-				var morphPositionsSparse = morphGeoNode.Vertices !== undefined ? morphGeoNode.Vertices.a : [];
-				var indices = morphGeoNode.Indexes !== undefined ? morphGeoNode.Indexes.a : [];
-				var length = parentGeo.attributes.position.count * 3;
-				var morphPositions = new Float32Array( length );
+				}
 
-				for ( var i = 0; i < indices.length; i ++ ) {
+			}
 
-					var morphIndex = indices[ i ] * 3;
-					morphPositions[ morphIndex ] = morphPositionsSparse[ i * 3 ];
-					morphPositions[ morphIndex + 1 ] = morphPositionsSparse[ i * 3 + 1 ];
-					morphPositions[ morphIndex + 2 ] = morphPositionsSparse[ i * 3 + 2 ];
+			return animationClips;
 
-				} // TODO: add morph normal support
+		}
 
+		parseClips() {
 
-				var morphGeoInfo = {
-					vertexIndices: vertexIndices,
-					vertexPositions: morphPositions
-				};
-				var morphBuffers = this.genBuffers( morphGeoInfo );
-				var positionAttribute = new THREE.Float32BufferAttribute( morphBuffers.vertex, 3 );
-				positionAttribute.name = name || morphGeoNode.attrName;
-				positionAttribute.applyMatrix4( preTransform );
-				parentGeo.morphAttributes.position.push( positionAttribute );
+			// since the actual transformation data is stored in FBXTree.Objects.AnimationCurve,
+			// if this is undefined we can safely assume there are no animations
+			if ( fbxTree.Objects.AnimationCurve === undefined ) return undefined;
+			const curveNodesMap = this.parseAnimationCurveNodes();
+			this.parseAnimationCurves( curveNodesMap );
+			const layersMap = this.parseAnimationLayers( curveNodesMap );
+			const rawClips = this.parseAnimStacks( layersMap );
+			return rawClips;
 
-			},
-			// Parse normal from FBXTree.Objects.Geometry.LayerElementNormal if it exists
-			parseNormals: function ( NormalNode ) {
+		} // parse nodes in FBXTree.Objects.AnimationCurveNode
+		// each AnimationCurveNode holds data for an animation transform for a model (e.g. left arm rotation )
+		// and is referenced by an AnimationLayer
 
-				var mappingType = NormalNode.MappingInformationType;
-				var referenceType = NormalNode.ReferenceInformationType;
-				var buffer = NormalNode.Normals.a;
-				var indexBuffer = [];
 
-				if ( referenceType === 'IndexToDirect' ) {
+		parseAnimationCurveNodes() {
 
-					if ( 'NormalIndex' in NormalNode ) {
+			const rawCurveNodes = fbxTree.Objects.AnimationCurveNode;
+			const curveNodesMap = new Map();
 
-						indexBuffer = NormalNode.NormalIndex.a;
+			for ( const nodeID in rawCurveNodes ) {
 
-					} else if ( 'NormalsIndex' in NormalNode ) {
+				const rawCurveNode = rawCurveNodes[ nodeID ];
 
-						indexBuffer = NormalNode.NormalsIndex.a;
+				if ( rawCurveNode.attrName.match( /S|R|T|DeformPercent/ ) !== null ) {
 
-					}
+					const curveNode = {
+						id: rawCurveNode.id,
+						attr: rawCurveNode.attrName,
+						curves: {}
+					};
+					curveNodesMap.set( curveNode.id, curveNode );
 
 				}
 
-				return {
-					dataSize: 3,
-					buffer: buffer,
-					indices: indexBuffer,
-					mappingType: mappingType,
-					referenceType: referenceType
-				};
+			}
 
-			},
-			// Parse UVs from FBXTree.Objects.Geometry.LayerElementUV if it exists
-			parseUVs: function ( UVNode ) {
+			return curveNodesMap;
 
-				var mappingType = UVNode.MappingInformationType;
-				var referenceType = UVNode.ReferenceInformationType;
-				var buffer = UVNode.UV.a;
-				var indexBuffer = [];
+		} // parse nodes in FBXTree.Objects.AnimationCurve and connect them up to
+		// previously parsed AnimationCurveNodes. Each AnimationCurve holds data for a single animated
+		// axis ( e.g. times and values of x rotation)
 
-				if ( referenceType === 'IndexToDirect' ) {
 
-					indexBuffer = UVNode.UVIndex.a;
+		parseAnimationCurves( curveNodesMap ) {
 
-				}
+			const rawCurves = fbxTree.Objects.AnimationCurve; // TODO: Many values are identical up to roundoff error, but won't be optimised
+			// e.g. position times: [0, 0.4, 0.8]
+			// position values: [7.23538335023477e-7, 93.67518615722656, -0.9982695579528809, 7.23538335023477e-7, 93.67518615722656, -0.9982695579528809, 7.235384487103147e-7, 93.67520904541016, -0.9982695579528809]
+			// clearly, this should be optimised to
+			// times: [0], positions [7.23538335023477e-7, 93.67518615722656, -0.9982695579528809]
+			// this shows up in nearly every FBX file, and generally the time array has length > 100
 
-				return {
-					dataSize: 2,
-					buffer: buffer,
-					indices: indexBuffer,
-					mappingType: mappingType,
-					referenceType: referenceType
+			for ( const nodeID in rawCurves ) {
+
+				const animationCurve = {
+					id: rawCurves[ nodeID ].id,
+					times: rawCurves[ nodeID ].KeyTime.a.map( convertFBXTimeToSeconds ),
+					values: rawCurves[ nodeID ].KeyValueFloat.a
 				};
+				const relationships = connections.get( animationCurve.id );
 
-			},
-			// Parse Vertex Colors from FBXTree.Objects.Geometry.LayerElementColor if it exists
-			parseVertexColors: function ( ColorNode ) {
+				if ( relationships !== undefined ) {
 
-				var mappingType = ColorNode.MappingInformationType;
-				var referenceType = ColorNode.ReferenceInformationType;
-				var buffer = ColorNode.Colors.a;
-				var indexBuffer = [];
+					const animationCurveID = relationships.parents[ 0 ].ID;
+					const animationCurveRelationship = relationships.parents[ 0 ].relationship;
 
-				if ( referenceType === 'IndexToDirect' ) {
+					if ( animationCurveRelationship.match( /X/ ) ) {
 
-					indexBuffer = ColorNode.ColorIndex.a;
+						curveNodesMap.get( animationCurveID ).curves[ 'x' ] = animationCurve;
 
-				}
+					} else if ( animationCurveRelationship.match( /Y/ ) ) {
 
-				return {
-					dataSize: 4,
-					buffer: buffer,
-					indices: indexBuffer,
-					mappingType: mappingType,
-					referenceType: referenceType
-				};
+						curveNodesMap.get( animationCurveID ).curves[ 'y' ] = animationCurve;
 
-			},
-			// Parse mapping and material data in FBXTree.Objects.Geometry.LayerElementMaterial if it exists
-			parseMaterialIndices: function ( MaterialNode ) {
+					} else if ( animationCurveRelationship.match( /Z/ ) ) {
 
-				var mappingType = MaterialNode.MappingInformationType;
-				var referenceType = MaterialNode.ReferenceInformationType;
+						curveNodesMap.get( animationCurveID ).curves[ 'z' ] = animationCurve;
 
-				if ( mappingType === 'NoMappingInformation' ) {
+					} else if ( animationCurveRelationship.match( /d|DeformPercent/ ) && curveNodesMap.has( animationCurveID ) ) {
 
-					return {
-						dataSize: 1,
-						buffer: [ 0 ],
-						indices: [ 0 ],
-						mappingType: 'AllSame',
-						referenceType: referenceType
-					};
+						curveNodesMap.get( animationCurveID ).curves[ 'morph' ] = animationCurve;
+
+					}
 
 				}
 
-				var materialIndexBuffer = MaterialNode.Materials.a; // Since materials are stored as indices, there's a bit of a mismatch between FBX and what
-				// we expect.So we create an intermediate buffer that points to the index in the buffer,
-				// for conforming with the other functions we've written for other data.
+			}
 
-				var materialIndices = [];
+		} // parse nodes in FBXTree.Objects.AnimationLayer. Each layer holds references
+		// to various AnimationCurveNodes and is referenced by an AnimationStack node
+		// note: theoretically a stack can have multiple layers, however in practice there always seems to be one per stack
 
-				for ( var i = 0; i < materialIndexBuffer.length; ++ i ) {
 
-					materialIndices.push( i );
+		parseAnimationLayers( curveNodesMap ) {
 
-				}
+			const rawLayers = fbxTree.Objects.AnimationLayer;
+			const layersMap = new Map();
 
-				return {
-					dataSize: 1,
-					buffer: materialIndexBuffer,
-					indices: materialIndices,
-					mappingType: mappingType,
-					referenceType: referenceType
-				};
+			for ( const nodeID in rawLayers ) {
 
-			},
-			// Generate a NurbGeometry from a node in FBXTree.Objects.Geometry
-			parseNurbsGeometry: function ( geoNode ) {
+				const layerCurveNodes = [];
+				const connection = connections.get( parseInt( nodeID ) );
 
-				if ( THREE.NURBSCurve === undefined ) {
+				if ( connection !== undefined ) {
 
-					console.error( 'THREE.FBXLoader: The loader relies on THREE.NURBSCurve for any nurbs present in the model. Nurbs will show up as empty geometry.' );
-					return new THREE.BufferGeometry();
+					// all the animationCurveNodes used in the layer
+					const children = connection.children;
+					children.forEach( function ( child, i ) {
 
-				}
+						if ( curveNodesMap.has( child.ID ) ) {
 
-				var order = parseInt( geoNode.Order );
+							const curveNode = curveNodesMap.get( child.ID ); // check that the curves are defined for at least one axis, otherwise ignore the curveNode
 
-				if ( isNaN( order ) ) {
+							if ( curveNode.curves.x !== undefined || curveNode.curves.y !== undefined || curveNode.curves.z !== undefined ) {
 
-					console.error( 'THREE.FBXLoader: Invalid Order %s given for geometry ID: %s', geoNode.Order, geoNode.id );
-					return new THREE.BufferGeometry();
+								if ( layerCurveNodes[ i ] === undefined ) {
 
-				}
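+									// find the model this curve node animates: the parent connection that carries a relationship string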
+									const modelID = connections.get( child.ID ).parents.filter( function ( parent ) {
 
-				var degree = order - 1;
-				var knots = geoNode.KnotVector.a;
-				var controlPoints = [];
-				var pointsValues = geoNode.Points.a;
+										return parent.relationship !== undefined;
 
-				for ( var i = 0, l = pointsValues.length; i < l; i += 4 ) {
+									} )[ 0 ].ID;
 
-					controlPoints.push( new THREE.Vector4().fromArray( pointsValues, i ) );
+									if ( modelID !== undefined ) {
 
-				}
+										const rawModel = fbxTree.Objects.Model[ modelID.toString() ];
 
-				var startKnot, endKnot;
+										if ( rawModel === undefined ) {
 
-				if ( geoNode.Form === 'Closed' ) {
+											console.warn( 'THREE.FBXLoader: Encountered an unused curve.', child );
+											return;
 
-					controlPoints.push( controlPoints[ 0 ] );
+										}
+
+										const node = {
+											modelName: rawModel.attrName ? THREE.PropertyBinding.sanitizeNodeName( rawModel.attrName ) : '',
+											ID: rawModel.id,
+											initialPosition: [ 0, 0, 0 ],
+											initialRotation: [ 0, 0, 0 ],
+											initialScale: [ 1, 1, 1 ]
+										};
+										sceneGraph.traverse( function ( child ) {
 
-				} else if ( geoNode.Form === 'Periodic' ) {
+											if ( child.ID === rawModel.id ) {
 
-					startKnot = degree;
-					endKnot = knots.length - 1 - startKnot;
+												node.transform = child.matrix;
+												if ( child.userData.transformData ) node.eulerOrder = child.userData.transformData.eulerOrder;
 
-					for ( var i = 0; i < degree; ++ i ) {
+											}
 
-						controlPoints.push( controlPoints[ i ] );
+										} );
+										if ( ! node.transform ) node.transform = new THREE.Matrix4(); // if the animated model is pre-rotated, we'll have to apply the pre-rotations to every
+										// animation value as well
 
-					}
+										if ( 'PreRotation' in rawModel ) node.preRotation = rawModel.PreRotation.value;
+										if ( 'PostRotation' in rawModel ) node.postRotation = rawModel.PostRotation.value;
+										layerCurveNodes[ i ] = node;
 
-				}
+									}
 
-				var curve = new THREE.NURBSCurve( degree, knots, controlPoints, startKnot, endKnot );
-				var vertices = curve.getPoints( controlPoints.length * 7 );
-				var positions = new Float32Array( vertices.length * 3 );
-				vertices.forEach( function ( vertex, i ) {
+								}
 
-					vertex.toArray( positions, i * 3 );
+								if ( layerCurveNodes[ i ] ) layerCurveNodes[ i ][ curveNode.attr ] = curveNode;
 
-				} );
-				var geometry = new THREE.BufferGeometry();
-				geometry.setAttribute( 'position', new THREE.BufferAttribute( positions, 3 ) );
-				return geometry;
+							} else if ( curveNode.curves.morph !== undefined ) {
 
-			}
-		}; // parse animation data from FBXTree
+								if ( layerCurveNodes[ i ] === undefined ) {
 
-		function AnimationParser() {}
+									const deformerID = connections.get( child.ID ).parents.filter( function ( parent ) {
 
-		AnimationParser.prototype = {
-			constructor: AnimationParser,
-			// take raw animation clips and turn them into three.js animation clips
-			parse: function () {
+										return parent.relationship !== undefined;
 
-				var animationClips = [];
-				var rawClips = this.parseClips();
+									} )[ 0 ].ID;
+									const morpherID = connections.get( deformerID ).parents[ 0 ].ID;
+									const geoID = connections.get( morpherID ).parents[ 0 ].ID; // assuming geometry is not used in more than one model
 
-				if ( rawClips !== undefined ) {
+									const modelID = connections.get( geoID ).parents[ 0 ].ID;
+									const rawModel = fbxTree.Objects.Model[ modelID ];
+									const node = {
+										modelName: rawModel.attrName ? THREE.PropertyBinding.sanitizeNodeName( rawModel.attrName ) : '',
+										morphName: fbxTree.Objects.Deformer[ deformerID ].attrName
+									};
+									layerCurveNodes[ i ] = node;
 
-					for ( var key in rawClips ) {
+								}
 
-						var rawClip = rawClips[ key ];
-						var clip = this.addClip( rawClip );
-						animationClips.push( clip );
+								layerCurveNodes[ i ][ curveNode.attr ] = curveNode;
 
-					}
+							}
 
-				}
+						}
+
+					} );
+					layersMap.set( parseInt( nodeID ), layerCurveNodes );
 
-				return animationClips;
+				}
 
-			},
-			parseClips: function () {
+			}
 
-				// since the actual transformation data is stored in FBXTree.Objects.AnimationCurve,
-				// if this is undefined we can safely assume there are no animations
-				if ( fbxTree.Objects.AnimationCurve === undefined ) return undefined;
-				var curveNodesMap = this.parseAnimationCurveNodes();
-				this.parseAnimationCurves( curveNodesMap );
-				var layersMap = this.parseAnimationLayers( curveNodesMap );
-				var rawClips = this.parseAnimStacks( layersMap );
-				return rawClips;
+			return layersMap;
 
-			},
-			// parse nodes in FBXTree.Objects.AnimationCurveNode
-			// each AnimationCurveNode holds data for an animation transform for a model (e.g. left arm rotation )
-			// and is referenced by an AnimationLayer
-			parseAnimationCurveNodes: function () {
+		} // parse nodes in FBXTree.Objects.AnimationStack. These are the top level nodes in the animation
+		// hierarchy. Each Stack node will be used to create a THREE.AnimationClip
 
-				var rawCurveNodes = fbxTree.Objects.AnimationCurveNode;
-				var curveNodesMap = new Map();
 
-				for ( var nodeID in rawCurveNodes ) {
+		parseAnimStacks( layersMap ) {
 
-					var rawCurveNode = rawCurveNodes[ nodeID ];
+			const rawStacks = fbxTree.Objects.AnimationStack; // connect the stacks (clips) up to the layers
 
-					if ( rawCurveNode.attrName.match( /S|R|T|DeformPercent/ ) !== null ) {
+			const rawClips = {};
 
-						var curveNode = {
-							id: rawCurveNode.id,
-							attr: rawCurveNode.attrName,
-							curves: {}
-						};
-						curveNodesMap.set( curveNode.id, curveNode );
+			for ( const nodeID in rawStacks ) {
 
-					}
+				const children = connections.get( parseInt( nodeID ) ).children;
 
-				}
+				if ( children.length > 1 ) {
 
-				return curveNodesMap;
+					// it seems like stacks will always be associated with a single layer. But just in case there are files
+					// where there are multiple layers per stack, we'll display a warning
+					console.warn( 'THREE.FBXLoader: Encountered an animation stack with multiple layers, this is currently not supported. Ignoring subsequent layers.' );
 
-			},
-			// parse nodes in FBXTree.Objects.AnimationCurve and connect them up to
-			// previously parsed AnimationCurveNodes. Each AnimationCurve holds data for a single animated
-			// axis ( e.g. times and values of x rotation)
-			parseAnimationCurves: function ( curveNodesMap ) {
+				}
 
-				var rawCurves = fbxTree.Objects.AnimationCurve; // TODO: Many values are identical up to roundoff error, but won't be optimised
-				// e.g. position times: [0, 0.4, 0. 8]
-				// position values: [7.23538335023477e-7, 93.67518615722656, -0.9982695579528809, 7.23538335023477e-7, 93.67518615722656, -0.9982695579528809, 7.235384487103147e-7, 93.67520904541016, -0.9982695579528809]
-				// clearly, this should be optimised to
-				// times: [0], positions [7.23538335023477e-7, 93.67518615722656, -0.9982695579528809]
-				// this shows up in nearly every FBX file, and generally time array is length > 100
+				const layer = layersMap.get( children[ 0 ].ID );
+				rawClips[ nodeID ] = {
+					name: rawStacks[ nodeID ].attrName,
+					layer: layer
+				};
 
-				for ( var nodeID in rawCurves ) {
+			}
 
-					var animationCurve = {
-						id: rawCurves[ nodeID ].id,
-						times: rawCurves[ nodeID ].KeyTime.a.map( convertFBXTimeToSeconds ),
-						values: rawCurves[ nodeID ].KeyValueFloat.a
-					};
-					var relationships = connections.get( animationCurve.id );
+			return rawClips;
 
-					if ( relationships !== undefined ) {
+		}
 
-						var animationCurveID = relationships.parents[ 0 ].ID;
-						var animationCurveRelationship = relationships.parents[ 0 ].relationship;
+		addClip( rawClip ) {
 
-						if ( animationCurveRelationship.match( /X/ ) ) {
+			let tracks = [];
+			const scope = this;
+			rawClip.layer.forEach( function ( rawTracks ) {
 
-							curveNodesMap.get( animationCurveID ).curves[ 'x' ] = animationCurve;
+				tracks = tracks.concat( scope.generateTracks( rawTracks ) );
 
-						} else if ( animationCurveRelationship.match( /Y/ ) ) {
+			} );
+			return new THREE.AnimationClip( rawClip.name, - 1, tracks );
 
-							curveNodesMap.get( animationCurveID ).curves[ 'y' ] = animationCurve;
+		}
 
-						} else if ( animationCurveRelationship.match( /Z/ ) ) {
+		generateTracks( rawTracks ) {
 
-							curveNodesMap.get( animationCurveID ).curves[ 'z' ] = animationCurve;
+			const tracks = [];
+			let initialPosition = new THREE.Vector3();
+			let initialRotation = new THREE.Quaternion();
+			let initialScale = new THREE.Vector3();
+			if ( rawTracks.transform ) rawTracks.transform.decompose( initialPosition, initialRotation, initialScale );
+			initialPosition = initialPosition.toArray();
+			initialRotation = new THREE.Euler().setFromQuaternion( initialRotation, rawTracks.eulerOrder ).toArray();
+			initialScale = initialScale.toArray();
 
-						} else if ( animationCurveRelationship.match( /d|DeformPercent/ ) && curveNodesMap.has( animationCurveID ) ) {
+			if ( rawTracks.T !== undefined && Object.keys( rawTracks.T.curves ).length > 0 ) {
 
-							curveNodesMap.get( animationCurveID ).curves[ 'morph' ] = animationCurve;
+				const positionTrack = this.generateVectorTrack( rawTracks.modelName, rawTracks.T.curves, initialPosition, 'position' );
+				if ( positionTrack !== undefined ) tracks.push( positionTrack );
 
-						}
+			}
 
-					}
+			if ( rawTracks.R !== undefined && Object.keys( rawTracks.R.curves ).length > 0 ) {
 
-				}
+				const rotationTrack = this.generateRotationTrack( rawTracks.modelName, rawTracks.R.curves, initialRotation, rawTracks.preRotation, rawTracks.postRotation, rawTracks.eulerOrder );
+				if ( rotationTrack !== undefined ) tracks.push( rotationTrack );
 
-			},
-			// parse nodes in FBXTree.Objects.AnimationLayer. Each layers holds references
-			// to various AnimationCurveNodes and is referenced by an AnimationStack node
-			// note: theoretically a stack can have multiple layers, however in practice there always seems to be one per stack
-			parseAnimationLayers: function ( curveNodesMap ) {
+			}
 
-				var rawLayers = fbxTree.Objects.AnimationLayer;
-				var layersMap = new Map();
+			if ( rawTracks.S !== undefined && Object.keys( rawTracks.S.curves ).length > 0 ) {
 
-				for ( var nodeID in rawLayers ) {
+				const scaleTrack = this.generateVectorTrack( rawTracks.modelName, rawTracks.S.curves, initialScale, 'scale' );
+				if ( scaleTrack !== undefined ) tracks.push( scaleTrack );
 
-					var layerCurveNodes = [];
-					var connection = connections.get( parseInt( nodeID ) );
+			}
 
-					if ( connection !== undefined ) {
+			if ( rawTracks.DeformPercent !== undefined ) {
 
-						// all the animationCurveNodes used in the layer
-						var children = connection.children;
-						children.forEach( function ( child, i ) {
+				const morphTrack = this.generateMorphTrack( rawTracks );
+				if ( morphTrack !== undefined ) tracks.push( morphTrack );
 
-							if ( curveNodesMap.has( child.ID ) ) {
+			}
 
-								var curveNode = curveNodesMap.get( child.ID ); // check that the curves are defined for at least one axis, otherwise ignore the curveNode
+			return tracks;
 
-								if ( curveNode.curves.x !== undefined || curveNode.curves.y !== undefined || curveNode.curves.z !== undefined ) {
+		}
 
-									if ( layerCurveNodes[ i ] === undefined ) {
+		generateVectorTrack( modelName, curves, initialValue, type ) {
 
-										var modelID = connections.get( child.ID ).parents.filter( function ( parent ) {
+			const times = this.getTimesForAllAxes( curves );
+			const values = this.getKeyframeTrackValues( times, curves, initialValue );
+			return new THREE.VectorKeyframeTrack( modelName + '.' + type, times, values );
 
-											return parent.relationship !== undefined;
+		}
 
-										} )[ 0 ].ID;
+		generateRotationTrack( modelName, curves, initialValue, preRotation, postRotation, eulerOrder ) {
 
-										if ( modelID !== undefined ) {
+			if ( curves.x !== undefined ) {
 
-											var rawModel = fbxTree.Objects.Model[ modelID.toString() ];
+				this.interpolateRotations( curves.x );
+				curves.x.values = curves.x.values.map( THREE.MathUtils.degToRad );
 
-											if ( rawModel === undefined ) {
+			}
 
-												console.warn( 'THREE.FBXLoader: Encountered a unused curve.', child );
-												return;
+			if ( curves.y !== undefined ) {
 
-											}
+				this.interpolateRotations( curves.y );
+				curves.y.values = curves.y.values.map( THREE.MathUtils.degToRad );
 
-											var node = {
-												modelName: rawModel.attrName ? THREE.PropertyBinding.sanitizeNodeName( rawModel.attrName ) : '',
-												ID: rawModel.id,
-												initialPosition: [ 0, 0, 0 ],
-												initialRotation: [ 0, 0, 0 ],
-												initialScale: [ 1, 1, 1 ]
-											};
-											sceneGraph.traverse( function ( child ) {
+			}
 
-												if ( child.ID === rawModel.id ) {
+			if ( curves.z !== undefined ) {
 
-													node.transform = child.matrix;
-													if ( child.userData.transformData ) node.eulerOrder = child.userData.transformData.eulerOrder;
+				this.interpolateRotations( curves.z );
+				curves.z.values = curves.z.values.map( THREE.MathUtils.degToRad );
 
-												}
+			}
 
-											} );
-											if ( ! node.transform ) node.transform = new THREE.Matrix4(); // if the animated model is pre rotated, we'll have to apply the pre rotations to every
-											// animation value as well
+			const times = this.getTimesForAllAxes( curves );
+			const values = this.getKeyframeTrackValues( times, curves, initialValue );
 
-											if ( 'PreRotation' in rawModel ) node.preRotation = rawModel.PreRotation.value;
-											if ( 'PostRotation' in rawModel ) node.postRotation = rawModel.PostRotation.value;
-											layerCurveNodes[ i ] = node;
+			if ( preRotation !== undefined ) {
 
-										}
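+				// convert the pre rotation from euler degrees into a quaternion so it can be premultiplied onto every keyframe below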
+				preRotation = preRotation.map( THREE.MathUtils.degToRad );
+				preRotation.push( eulerOrder );
+				preRotation = new THREE.Euler().fromArray( preRotation );
+				preRotation = new THREE.Quaternion().setFromEuler( preRotation );
 
-									}
+			}
 
-									if ( layerCurveNodes[ i ] ) layerCurveNodes[ i ][ curveNode.attr ] = curveNode;
+			if ( postRotation !== undefined ) {
 
-								} else if ( curveNode.curves.morph !== undefined ) {
+				postRotation = postRotation.map( THREE.MathUtils.degToRad );
+				postRotation.push( eulerOrder );
+				postRotation = new THREE.Euler().fromArray( postRotation );
+				postRotation = new THREE.Quaternion().setFromEuler( postRotation ).invert();
 
-									if ( layerCurveNodes[ i ] === undefined ) {
+			}
 
-										var deformerID = connections.get( child.ID ).parents.filter( function ( parent ) {
+			const quaternion = new THREE.Quaternion();
+			const euler = new THREE.Euler();
+			const quaternionValues = [];
 
-											return parent.relationship !== undefined;
+			for ( let i = 0; i < values.length; i += 3 ) {
 
-										} )[ 0 ].ID;
-										var morpherID = connections.get( deformerID ).parents[ 0 ].ID;
-										var geoID = connections.get( morpherID ).parents[ 0 ].ID; // assuming geometry is not used in more than one model
+				euler.set( values[ i ], values[ i + 1 ], values[ i + 2 ], eulerOrder );
+				quaternion.setFromEuler( euler );
+				if ( preRotation !== undefined ) quaternion.premultiply( preRotation );
+				if ( postRotation !== undefined ) quaternion.multiply( postRotation );
+				quaternion.toArray( quaternionValues, i / 3 * 4 );
 
-										var modelID = connections.get( geoID ).parents[ 0 ].ID;
-										var rawModel = fbxTree.Objects.Model[ modelID ];
-										var node = {
-											modelName: rawModel.attrName ? THREE.PropertyBinding.sanitizeNodeName( rawModel.attrName ) : '',
-											morphName: fbxTree.Objects.Deformer[ deformerID ].attrName
-										};
-										layerCurveNodes[ i ] = node;
+			}
 
-									}
+			return new THREE.QuaternionKeyframeTrack( modelName + '.quaternion', times, quaternionValues );
 
-									layerCurveNodes[ i ][ curveNode.attr ] = curveNode;
+		}
 
-								}
+		generateMorphTrack( rawTracks ) {
 
-							}
+			const curves = rawTracks.DeformPercent.curves.morph;
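+			// FBX stores deform percentages in the range 0-100, three.js morph target influences in the range 0-1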
+			const values = curves.values.map( function ( val ) {
 
-						} );
-						layersMap.set( parseInt( nodeID ), layerCurveNodes );
+				return val / 100;
 
-					}
+			} );
+			const morphNum = sceneGraph.getObjectByName( rawTracks.modelName ).morphTargetDictionary[ rawTracks.morphName ];
+			return new THREE.NumberKeyframeTrack( rawTracks.modelName + '.morphTargetInfluences[' + morphNum + ']', curves.times, values );
 
-				}
+		} // For all animated objects, times are defined separately for each axis
+		// Here we'll combine the times into one sorted array without duplicates
 
-				return layersMap;
 
-			},
-			// parse nodes in FBXTree.Objects.AnimationStack. These are the top level node in the animation
-			// hierarchy. Each Stack node will be used to create a THREE.AnimationClip
-			parseAnimStacks: function ( layersMap ) {
+		getTimesForAllAxes( curves ) {
 
-				var rawStacks = fbxTree.Objects.AnimationStack; // connect the stacks (clips) up to the layers
+			let times = []; // first join together the times for each axis, if defined
 
-				var rawClips = {};
+			if ( curves.x !== undefined ) times = times.concat( curves.x.times );
+			if ( curves.y !== undefined ) times = times.concat( curves.y.times );
+			if ( curves.z !== undefined ) times = times.concat( curves.z.times ); // then sort them
 
-				for ( var nodeID in rawStacks ) {
+			times = times.sort( function ( a, b ) {
 
-					var children = connections.get( parseInt( nodeID ) ).children;
+				return a - b;
 
-					if ( children.length > 1 ) {
+			} ); // and remove duplicates
 
-						// it seems like stacks will always be associated with a single layer. But just in case there are files
-						// where there are multiple layers per stack, we'll display a warning
-						console.warn( 'THREE.FBXLoader: Encountered an animation stack with multiple layers, this is currently not supported. Ignoring subsequent layers.' );
+			if ( times.length > 1 ) {
 
-					}
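+				// compact the sorted array in place, keeping only the first occurrence of each time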
+				let targetIndex = 1;
+				let lastValue = times[ 0 ];
 
-					var layer = layersMap.get( children[ 0 ].ID );
-					rawClips[ nodeID ] = {
-						name: rawStacks[ nodeID ].attrName,
-						layer: layer
-					};
+				for ( let i = 1; i < times.length; i ++ ) {
 
-				}
+					const currentValue = times[ i ];
 
-				return rawClips;
+					if ( currentValue !== lastValue ) {
 
-			},
-			addClip: function ( rawClip ) {
+						times[ targetIndex ] = currentValue;
+						lastValue = currentValue;
+						targetIndex ++;
 
-				var tracks = [];
-				var scope = this;
-				rawClip.layer.forEach( function ( rawTracks ) {
+					}
 
-					tracks = tracks.concat( scope.generateTracks( rawTracks ) );
+				}
 
-				} );
-				return new THREE.AnimationClip( rawClip.name, - 1, tracks );
+				times = times.slice( 0, targetIndex );
 
-			},
-			generateTracks: function ( rawTracks ) {
+			}
 
-				var tracks = [];
-				var initialPosition = new THREE.Vector3();
-				var initialRotation = new THREE.Quaternion();
-				var initialScale = new THREE.Vector3();
-				if ( rawTracks.transform ) rawTracks.transform.decompose( initialPosition, initialRotation, initialScale );
-				initialPosition = initialPosition.toArray();
-				initialRotation = new THREE.Euler().setFromQuaternion( initialRotation, rawTracks.eulerOrder ).toArray();
-				initialScale = initialScale.toArray();
+			return times;
 
-				if ( rawTracks.T !== undefined && Object.keys( rawTracks.T.curves ).length > 0 ) {
+		}
 
-					var positionTrack = this.generateVectorTrack( rawTracks.modelName, rawTracks.T.curves, initialPosition, 'position' );
-					if ( positionTrack !== undefined ) tracks.push( positionTrack );
+		getKeyframeTrackValues( times, curves, initialValue ) {
 
-				}
+			const prevValue = initialValue;
+			const values = [];
+			let xIndex = - 1;
+			let yIndex = - 1;
+			let zIndex = - 1;
+			times.forEach( function ( time ) {
 
-				if ( rawTracks.R !== undefined && Object.keys( rawTracks.R.curves ).length > 0 ) {
+				if ( curves.x ) xIndex = curves.x.times.indexOf( time );
+				if ( curves.y ) yIndex = curves.y.times.indexOf( time );
+				if ( curves.z ) zIndex = curves.z.times.indexOf( time ); // if there is an x value defined for this frame, use that
 
-					var rotationTrack = this.generateRotationTrack( rawTracks.modelName, rawTracks.R.curves, initialRotation, rawTracks.preRotation, rawTracks.postRotation, rawTracks.eulerOrder );
-					if ( rotationTrack !== undefined ) tracks.push( rotationTrack );
+				if ( xIndex !== - 1 ) {
 
-				}
+					const xValue = curves.x.values[ xIndex ];
+					values.push( xValue );
+					prevValue[ 0 ] = xValue;
 
-				if ( rawTracks.S !== undefined && Object.keys( rawTracks.S.curves ).length > 0 ) {
+				} else {
 
-					var scaleTrack = this.generateVectorTrack( rawTracks.modelName, rawTracks.S.curves, initialScale, 'scale' );
-					if ( scaleTrack !== undefined ) tracks.push( scaleTrack );
+					// otherwise use the x value from the previous frame
+					values.push( prevValue[ 0 ] );
 
 				}
 
-				if ( rawTracks.DeformPercent !== undefined ) {
+				if ( yIndex !== - 1 ) {
 
-					var morphTrack = this.generateMorphTrack( rawTracks );
-					if ( morphTrack !== undefined ) tracks.push( morphTrack );
+					const yValue = curves.y.values[ yIndex ];
+					values.push( yValue );
+					prevValue[ 1 ] = yValue;
 
-				}
+				} else {
 
-				return tracks;
+					values.push( prevValue[ 1 ] );
 
-			},
-			generateVectorTrack: function ( modelName, curves, initialValue, type ) {
+				}
 
-				var times = this.getTimesForAllAxes( curves );
-				var values = this.getKeyframeTrackValues( times, curves, initialValue );
-				return new THREE.VectorKeyframeTrack( modelName + '.' + type, times, values );
+				if ( zIndex !== - 1 ) {
 
-			},
-			generateRotationTrack: function ( modelName, curves, initialValue, preRotation, postRotation, eulerOrder ) {
+					const zValue = curves.z.values[ zIndex ];
+					values.push( zValue );
+					prevValue[ 2 ] = zValue;
 
-				if ( curves.x !== undefined ) {
+				} else {
 
-					this.interpolateRotations( curves.x );
-					curves.x.values = curves.x.values.map( THREE.MathUtils.degToRad );
+					values.push( prevValue[ 2 ] );
 
 				}
 
-				if ( curves.y !== undefined ) {
+			} );
+			return values;
 
-					this.interpolateRotations( curves.y );
-					curves.y.values = curves.y.values.map( THREE.MathUtils.degToRad );
+		} // Rotations are defined as THREE.Euler angles which can have values of any size
+		// These will be converted to quaternions which don't support values greater than
+		// PI, so we'll interpolate large rotations
 
-				}
 
-				if ( curves.z !== undefined ) {
+		interpolateRotations( curve ) {
 
-					this.interpolateRotations( curves.z );
-					curves.z.values = curves.z.values.map( THREE.MathUtils.degToRad );
+			for ( let i = 1; i < curve.values.length; i ++ ) {
 
-				}
+				const initialValue = curve.values[ i - 1 ];
+				const valuesSpan = curve.values[ i ] - initialValue;
+				const absoluteSpan = Math.abs( valuesSpan );
 
-				var times = this.getTimesForAllAxes( curves );
-				var values = this.getKeyframeTrackValues( times, curves, initialValue );
+				if ( absoluteSpan >= 180 ) {
 
-				if ( preRotation !== undefined ) {
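+					// insert evenly spaced intermediate keyframes so that no span covers 180 degrees or more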
+					const numSubIntervals = absoluteSpan / 180;
+					const step = valuesSpan / numSubIntervals;
+					let nextValue = initialValue + step;
+					const initialTime = curve.times[ i - 1 ];
+					const timeSpan = curve.times[ i ] - initialTime;
+					const interval = timeSpan / numSubIntervals;
+					let nextTime = initialTime + interval;
+					const interpolatedTimes = [];
+					const interpolatedValues = [];
 
-					preRotation = preRotation.map( THREE.MathUtils.degToRad );
-					preRotation.push( eulerOrder );
-					preRotation = new THREE.Euler().fromArray( preRotation );
-					preRotation = new THREE.Quaternion().setFromEuler( preRotation );
+					while ( nextTime < curve.times[ i ] ) {
 
-				}
+						interpolatedTimes.push( nextTime );
+						nextTime += interval;
+						interpolatedValues.push( nextValue );
+						nextValue += step;
 
-				if ( postRotation !== undefined ) {
+					}
 
-					postRotation = postRotation.map( THREE.MathUtils.degToRad );
-					postRotation.push( eulerOrder );
-					postRotation = new THREE.Euler().fromArray( postRotation );
-					postRotation = new THREE.Quaternion().setFromEuler( postRotation ).invert();
+					curve.times = inject( curve.times, i, interpolatedTimes );
+					curve.values = inject( curve.values, i, interpolatedValues );
 
 				}
 
-				var quaternion = new THREE.Quaternion();
-				var euler = new THREE.Euler();
-				var quaternionValues = [];
+			}
 
-				for ( var i = 0; i < values.length; i += 3 ) {
+		}
 
-					euler.set( values[ i ], values[ i + 1 ], values[ i + 2 ], eulerOrder );
-					quaternion.setFromEuler( euler );
-					if ( preRotation !== undefined ) quaternion.premultiply( preRotation );
-					if ( postRotation !== undefined ) quaternion.multiply( postRotation );
-					quaternion.toArray( quaternionValues, i / 3 * 4 );
+	} // parse an FBX file in ASCII format
 
-				}
 
-				return new THREE.QuaternionKeyframeTrack( modelName + '.quaternion', times, quaternionValues );
+	class TextParser {
 
-			},
-			generateMorphTrack: function ( rawTracks ) {
+		getPrevNode() {
 
-				var curves = rawTracks.DeformPercent.curves.morph;
-				var values = curves.values.map( function ( val ) {
+			return this.nodeStack[ this.currentIndent - 2 ];
 
-					return val / 100;
+		}
 
-				} );
-				var morphNum = sceneGraph.getObjectByName( rawTracks.modelName ).morphTargetDictionary[ rawTracks.morphName ];
-				return new THREE.NumberKeyframeTrack( rawTracks.modelName + '.morphTargetInfluences[' + morphNum + ']', curves.times, values );
+		getCurrentNode() {
 
-			},
-			// For all animated objects, times are defined separately for each axis
-			// Here we'll combine the times into one sorted array without duplicates
-			getTimesForAllAxes: function ( curves ) {
+			return this.nodeStack[ this.currentIndent - 1 ];
 
-				var times = []; // first join together the times for each axis, if defined
+		}
 
-				if ( curves.x !== undefined ) times = times.concat( curves.x.times );
-				if ( curves.y !== undefined ) times = times.concat( curves.y.times );
-				if ( curves.z !== undefined ) times = times.concat( curves.z.times ); // then sort them
+		getCurrentProp() {
 
-				times = times.sort( function ( a, b ) {
+			return this.currentProp;
 
-					return a - b;
+		}
 
-				} ); // and remove duplicates
+		pushStack( node ) {
 
-				if ( times.length > 1 ) {
+			this.nodeStack.push( node );
+			this.currentIndent += 1;
 
-					var targetIndex = 1;
-					var lastValue = times[ 0 ];
+		}
 
-					for ( var i = 1; i < times.length; i ++ ) {
+		popStack() {
 
-						var currentValue = times[ i ];
+			this.nodeStack.pop();
+			this.currentIndent -= 1;
 
-						if ( currentValue !== lastValue ) {
+		}
 
-							times[ targetIndex ] = currentValue;
-							lastValue = currentValue;
-							targetIndex ++;
+		setCurrentProp( val, name ) {
 
-						}
+			this.currentProp = val;
+			this.currentPropName = name;
 
-					}
+		}
 
-					times = times.slice( 0, targetIndex );
+		parse( text ) {
 
-				}
+			this.currentIndent = 0;
+			this.allNodes = new FBXTree();
+			this.nodeStack = [];
+			this.currentProp = [];
+			this.currentPropName = '';
+			const scope = this;
+			const split = text.split( /[\r\n]+/ );
+			split.forEach( function ( line, i ) {
 
-				return times;
+				const matchComment = line.match( /^[\s\t]*;/ );
+				const matchEmpty = line.match( /^[\s\t]*$/ );
+				if ( matchComment || matchEmpty ) return;
+				const matchBeginning = line.match( '^\\t{' + scope.currentIndent + '}(\\w+):(.*){', '' );
+				const matchProperty = line.match( '^\\t{' + scope.currentIndent + '}(\\w+):[\\s\\t\\r\\n](.*)' );
+				const matchEnd = line.match( '^\\t{' + ( scope.currentIndent - 1 ) + '}}' );
 
-			},
-			getKeyframeTrackValues: function ( times, curves, initialValue ) {
+				if ( matchBeginning ) {
 
-				var prevValue = initialValue;
-				var values = [];
-				var xIndex = - 1;
-				var yIndex = - 1;
-				var zIndex = - 1;
-				times.forEach( function ( time ) {
+					scope.parseNodeBegin( line, matchBeginning );
 
-					if ( curves.x ) xIndex = curves.x.times.indexOf( time );
-					if ( curves.y ) yIndex = curves.y.times.indexOf( time );
-					if ( curves.z ) zIndex = curves.z.times.indexOf( time ); // if there is an x value defined for this frame, use that
+				} else if ( matchProperty ) {
 
-					if ( xIndex !== - 1 ) {
+					scope.parseNodeProperty( line, matchProperty, split[ ++ i ] );
 
-						var xValue = curves.x.values[ xIndex ];
-						values.push( xValue );
-						prevValue[ 0 ] = xValue;
+				} else if ( matchEnd ) {
 
-					} else {
+					scope.popStack();
 
-						// otherwise use the x value from the previous frame
-						values.push( prevValue[ 0 ] );
+				} else if ( line.match( /^[^\s\t}]/ ) ) {
 
-					}
+					// large arrays are split over multiple lines terminated with a ',' character
+					// if this is encountered the line needs to be joined to the previous line
+					scope.parseNodePropertyContinued( line );
 
-					if ( yIndex !== - 1 ) {
+				}
 
-						var yValue = curves.y.values[ yIndex ];
-						values.push( yValue );
-						prevValue[ 1 ] = yValue;
+			} );
+			return this.allNodes;
 
-					} else {
+		}
 
-						values.push( prevValue[ 1 ] );
+		parseNodeBegin( line, property ) {
 
-					}
+			const nodeName = property[ 1 ].trim().replace( /^"/, '' ).replace( /"$/, '' );
+			const nodeAttrs = property[ 2 ].split( ',' ).map( function ( attr ) {
 
-					if ( zIndex !== - 1 ) {
+				return attr.trim().replace( /^"/, '' ).replace( /"$/, '' );
 
-						var zValue = curves.z.values[ zIndex ];
-						values.push( zValue );
-						prevValue[ 2 ] = zValue;
+			} );
+			const node = {
+				name: nodeName
+			};
+			const attrs = this.parseNodeAttr( nodeAttrs );
+			const currentNode = this.getCurrentNode(); // a top node
 
-					} else {
+			if ( this.currentIndent === 0 ) {
 
-						values.push( prevValue[ 2 ] );
+				this.allNodes.add( nodeName, node );
 
-					}
+			} else {
 
-				} );
-				return values;
+				// a subnode
+				// if the subnode already exists, append it
+				if ( nodeName in currentNode ) {
 
-			},
-			// Rotations are defined as THREE.Euler angles which can have values	of any size
-			// These will be converted to quaternions which don't support values greater than
-			// PI, so we'll interpolate large rotations
-			interpolateRotations: function ( curve ) {
+					// special case Pose needs PoseNodes as an array
+					if ( nodeName === 'PoseNode' ) {
 
-				for ( var i = 1; i < curve.values.length; i ++ ) {
+						currentNode.PoseNode.push( node );
 
-					var initialValue = curve.values[ i - 1 ];
-					var valuesSpan = curve.values[ i ] - initialValue;
-					var absoluteSpan = Math.abs( valuesSpan );
+					} else if ( currentNode[ nodeName ].id !== undefined ) {
 
-					if ( absoluteSpan >= 180 ) {
+						currentNode[ nodeName ] = {};
+						currentNode[ nodeName ][ currentNode[ nodeName ].id ] = currentNode[ nodeName ];
 
-						var numSubIntervals = absoluteSpan / 180;
-						var step = valuesSpan / numSubIntervals;
-						var nextValue = initialValue + step;
-						var initialTime = curve.times[ i - 1 ];
-						var timeSpan = curve.times[ i ] - initialTime;
-						var interval = timeSpan / numSubIntervals;
-						var nextTime = initialTime + interval;
-						var interpolatedTimes = [];
-						var interpolatedValues = [];
+					}
 
-						while ( nextTime < curve.times[ i ] ) {
+					if ( attrs.id !== '' ) currentNode[ nodeName ][ attrs.id ] = node;
 
-							interpolatedTimes.push( nextTime );
-							nextTime += interval;
-							interpolatedValues.push( nextValue );
-							nextValue += step;
+				} else if ( typeof attrs.id === 'number' ) {
 
-						}
+					currentNode[ nodeName ] = {};
+					currentNode[ nodeName ][ attrs.id ] = node;
 
-						curve.times = inject( curve.times, i, interpolatedTimes );
-						curve.values = inject( curve.values, i, interpolatedValues );
+				} else if ( nodeName !== 'Properties70' ) {
 
-					}
+					if ( nodeName === 'PoseNode' ) currentNode[ nodeName ] = [ node ]; else currentNode[ nodeName ] = node;
 
 				}
 
 			}
-		}; // parse an FBX file in ASCII format
 
-		function TextParser() {}
+			if ( typeof attrs.id === 'number' ) node.id = attrs.id;
+			if ( attrs.name !== '' ) node.attrName = attrs.name;
+			if ( attrs.type !== '' ) node.attrType = attrs.type;
+			this.pushStack( node );
 
-		TextParser.prototype = {
-			constructor: TextParser,
-			getPrevNode: function () {
+		}
 
-				return this.nodeStack[ this.currentIndent - 2 ];
+		parseNodeAttr( attrs ) {
 
-			},
-			getCurrentNode: function () {
+			let id = attrs[ 0 ];
 
-				return this.nodeStack[ this.currentIndent - 1 ];
+			if ( attrs[ 0 ] !== '' ) {
 
-			},
-			getCurrentProp: function () {
+				id = parseInt( attrs[ 0 ] );
 
-				return this.currentProp;
+				if ( isNaN( id ) ) {
 
-			},
-			pushStack: function ( node ) {
+					id = attrs[ 0 ];
 
-				this.nodeStack.push( node );
-				this.currentIndent += 1;
+				}
 
-			},
-			popStack: function () {
+			}
 
-				this.nodeStack.pop();
-				this.currentIndent -= 1;
+			let name = '',
+				type = '';
 
-			},
-			setCurrentProp: function ( val, name ) {
+			if ( attrs.length > 1 ) {
 
-				this.currentProp = val;
-				this.currentPropName = name;
+				name = attrs[ 1 ].replace( /^(\w+)::/, '' );
+				type = attrs[ 2 ];
 
-			},
-			parse: function ( text ) {
+			}
 
-				this.currentIndent = 0;
-				this.allNodes = new FBXTree();
-				this.nodeStack = [];
-				this.currentProp = [];
-				this.currentPropName = '';
-				var scope = this;
-				var split = text.split( /[\r\n]+/ );
-				split.forEach( function ( line, i ) {
+			return {
+				id: id,
+				name: name,
+				type: type
+			};
 
-					var matchComment = line.match( /^[\s\t]*;/ );
-					var matchEmpty = line.match( /^[\s\t]*$/ );
-					if ( matchComment || matchEmpty ) return;
-					var matchBeginning = line.match( '^\\t{' + scope.currentIndent + '}(\\w+):(.*){', '' );
-					var matchProperty = line.match( '^\\t{' + scope.currentIndent + '}(\\w+):[\\s\\t\\r\\n](.*)' );
-					var matchEnd = line.match( '^\\t{' + ( scope.currentIndent - 1 ) + '}}' );
+		}
 
-					if ( matchBeginning ) {
+		parseNodeProperty( line, property, contentLine ) {
 
-						scope.parseNodeBegin( line, matchBeginning );
+			let propName = property[ 1 ].replace( /^"/, '' ).replace( /"$/, '' ).trim();
+			let propValue = property[ 2 ].replace( /^"/, '' ).replace( /"$/, '' ).trim(); // for special case: base64 image data follows "Content: ," line
+			//	Content: ,
+			//	 "/9j/4RDaRXhpZgAATU0A..."
 
-					} else if ( matchProperty ) {
+			if ( propName === 'Content' && propValue === ',' ) {
 
-						scope.parseNodeProperty( line, matchProperty, split[ ++ i ] );
+				propValue = contentLine.replace( /"/g, '' ).replace( /,$/, '' ).trim();
 
-					} else if ( matchEnd ) {
+			}
 
-						scope.popStack();
+			const currentNode = this.getCurrentNode();
+			const parentName = currentNode.name;
 
-					} else if ( line.match( /^[^\s\t}]/ ) ) {
+			if ( parentName === 'Properties70' ) {
 
-						// large arrays are split over multiple lines terminated with a ',' character
-						// if this is encountered the line needs to be joined to the previous line
-						scope.parseNodePropertyContinued( line );
+				this.parseNodeSpecialProperty( line, propName, propValue );
+				return;
 
-					}
+			} // Connections
 
-				} );
-				return this.allNodes;
 
-			},
-			parseNodeBegin: function ( line, property ) {
+			if ( propName === 'C' ) {
 
-				var nodeName = property[ 1 ].trim().replace( /^"/, '' ).replace( /"$/, '' );
-				var nodeAttrs = property[ 2 ].split( ',' ).map( function ( attr ) {
+				const connProps = propValue.split( ',' ).slice( 1 );
+				const from = parseInt( connProps[ 0 ] );
+				const to = parseInt( connProps[ 1 ] );
+				let rest = propValue.split( ',' ).slice( 3 );
+				rest = rest.map( function ( elem ) {
 
-					return attr.trim().replace( /^"/, '' ).replace( /"$/, '' );
+					return elem.trim().replace( /^"/, '' );
 
 				} );
-				var node = {
-					name: nodeName
-				};
-				var attrs = this.parseNodeAttr( nodeAttrs );
-				var currentNode = this.getCurrentNode(); // a top node
-
-				if ( this.currentIndent === 0 ) {
-
-					this.allNodes.add( nodeName, node );
+				propName = 'connections';
+				propValue = [ from, to ];
+				append( propValue, rest );
 
-				} else {
-
-					// a subnode
-					// if the subnode already exists, append it
-					if ( nodeName in currentNode ) {
+				if ( currentNode[ propName ] === undefined ) {
 
-						// special case Pose needs PoseNodes as an array
-						if ( nodeName === 'PoseNode' ) {
+					currentNode[ propName ] = [];
 
-							currentNode.PoseNode.push( node );
+				}
 
-						} else if ( currentNode[ nodeName ].id !== undefined ) {
+			} // Node
 
-							currentNode[ nodeName ] = {};
-							currentNode[ nodeName ][ currentNode[ nodeName ].id ] = currentNode[ nodeName ];
 
-						}
+			if ( propName === 'Node' ) currentNode.id = propValue; // connections
 
-						if ( attrs.id !== '' ) currentNode[ nodeName ][ attrs.id ] = node;
+			if ( propName in currentNode && Array.isArray( currentNode[ propName ] ) ) {
 
-					} else if ( typeof attrs.id === 'number' ) {
+				currentNode[ propName ].push( propValue );
 
-						currentNode[ nodeName ] = {};
-						currentNode[ nodeName ][ attrs.id ] = node;
+			} else {
 
-					} else if ( nodeName !== 'Properties70' ) {
+				if ( propName !== 'a' ) currentNode[ propName ] = propValue; else currentNode.a = propValue;
 
-						if ( nodeName === 'PoseNode' ) currentNode[ nodeName ] = [ node ]; else currentNode[ nodeName ] = node;
+			}
 
-					}
+			this.setCurrentProp( currentNode, propName ); // convert string to array, unless it ends in ',' in which case more will be added to it
 
-				}
+			if ( propName === 'a' && propValue.slice( - 1 ) !== ',' ) {
 
-				if ( typeof attrs.id === 'number' ) node.id = attrs.id;
-				if ( attrs.name !== '' ) node.attrName = attrs.name;
-				if ( attrs.type !== '' ) node.attrType = attrs.type;
-				this.pushStack( node );
+				currentNode.a = parseNumberArray( propValue );
 
-			},
-			parseNodeAttr: function ( attrs ) {
+			}
 
-				var id = attrs[ 0 ];
+		}
 
-				if ( attrs[ 0 ] !== '' ) {
+		parseNodePropertyContinued( line ) {
 
-					id = parseInt( attrs[ 0 ] );
+			const currentNode = this.getCurrentNode();
+			currentNode.a += line; // if the line doesn't end in ',' we have reached the end of the property value
+			// so convert the string to an array
 
-					if ( isNaN( id ) ) {
+			if ( line.slice( - 1 ) !== ',' ) {
 
-						id = attrs[ 0 ];
+				currentNode.a = parseNumberArray( currentNode.a );
 
-					}
+			}
 
-				}
+		} // parse "Property70"
 
-				var name = '',
-					type = '';
 
-				if ( attrs.length > 1 ) {
+		parseNodeSpecialProperty( line, propName, propValue ) {
 
-					name = attrs[ 1 ].replace( /^(\w+)::/, '' );
-					type = attrs[ 2 ];
+			// split this
+			// P: "Lcl Scaling", "Lcl Scaling", "", "A",1,1,1
+			// into array like below
+			// ["Lcl Scaling", "Lcl Scaling", "", "A", "1,1,1" ]
+			const props = propValue.split( '",' ).map( function ( prop ) {
 
-				}
+				return prop.trim().replace( /^\"/, '' ).replace( /\s/, '_' );
 
-				return {
-					id: id,
-					name: name,
-					type: type
-				};
+			} );
+			const innerPropName = props[ 0 ];
+			const innerPropType1 = props[ 1 ];
+			const innerPropType2 = props[ 2 ];
+			const innerPropFlag = props[ 3 ];
+			let innerPropValue = props[ 4 ]; // cast values where needed, otherwise leave as strings
+
+			switch ( innerPropType1 ) {
+
+				case 'int':
+				case 'enum':
+				case 'bool':
+				case 'ULongLong':
+				case 'double':
+				case 'Number':
+				case 'FieldOfView':
+					innerPropValue = parseFloat( innerPropValue );
+					break;
 
-			},
-			parseNodeProperty: function ( line, property, contentLine ) {
+				case 'Color':
+				case 'ColorRGB':
+				case 'Vector3D':
+				case 'Lcl_Translation':
+				case 'Lcl_Rotation':
+				case 'Lcl_Scaling':
+					innerPropValue = parseNumberArray( innerPropValue );
+					break;
 
-				var propName = property[ 1 ].replace( /^"/, '' ).replace( /"$/, '' ).trim();
-				var propValue = property[ 2 ].replace( /^"/, '' ).replace( /"$/, '' ).trim(); // for special case: base64 image data follows "Content: ," line
-				//	Content: ,
-				//	 "/9j/4RDaRXhpZgAATU0A..."
+			} // CAUTION: these props must append to parent's parent
 
-				if ( propName === 'Content' && propValue === ',' ) {
 
-					propValue = contentLine.replace( /"/g, '' ).replace( /,$/, '' ).trim();
+			this.getPrevNode()[ innerPropName ] = {
+				'type': innerPropType1,
+				'type2': innerPropType2,
+				'flag': innerPropFlag,
+				'value': innerPropValue
+			};
+			this.setCurrentProp( this.getPrevNode(), innerPropName );
 
-				}
+		}
 
-				var currentNode = this.getCurrentNode();
-				var parentName = currentNode.name;
+	} // Parse an FBX file in Binary format
 
-				if ( parentName === 'Properties70' ) {
 
-					this.parseNodeSpecialProperty( line, propName, propValue );
-					return;
+	class BinaryParser {
 
-				} // Connections
+		parse( buffer ) {
 
+			const reader = new BinaryReader( buffer );
+			reader.skip( 23 ); // skip magic 23 bytes
 
-				if ( propName === 'C' ) {
+			const version = reader.getUint32();
 
-					var connProps = propValue.split( ',' ).slice( 1 );
-					var from = parseInt( connProps[ 0 ] );
-					var to = parseInt( connProps[ 1 ] );
-					var rest = propValue.split( ',' ).slice( 3 );
-					rest = rest.map( function ( elem ) {
+			if ( version < 6400 ) {
 
-						return elem.trim().replace( /^"/, '' );
+				throw new Error( 'THREE.FBXLoader: FBX version not supported, FileVersion: ' + version );
 
-					} );
-					propName = 'connections';
-					propValue = [ from, to ];
-					append( propValue, rest );
+			}
 
-					if ( currentNode[ propName ] === undefined ) {
+			const allNodes = new FBXTree();
 
-						currentNode[ propName ] = [];
+			while ( ! this.endOfContent( reader ) ) {
 
-					}
+				const node = this.parseNode( reader, version );
+				if ( node !== null ) allNodes.add( node.name, node );
 
-				} // Node
+			}
 
+			return allNodes;
 
-				if ( propName === 'Node' ) currentNode.id = propValue; // connections
+		} // Check if reader has reached the end of content.
 
-				if ( propName in currentNode && Array.isArray( currentNode[ propName ] ) ) {
 
-					currentNode[ propName ].push( propValue );
+		endOfContent( reader ) {
 
-				} else {
+			// footer size: 160bytes + 16-byte alignment padding
+			// - 16bytes: magic
+			// - padding til 16-byte alignment (at least 1byte?)
+			//	(seems like some exporters embed fixed 15 or 16bytes?)
+			// - 4bytes: magic
+			// - 4bytes: version
+			// - 120bytes: zero
+			// - 16bytes: magic
+			if ( reader.size() % 16 === 0 ) {
 
-					if ( propName !== 'a' ) currentNode[ propName ] = propValue; else currentNode.a = propValue;
+				return ( reader.getOffset() + 160 + 16 & ~ 0xf ) >= reader.size();
 
-				}
+			} else {
 
-				this.setCurrentProp( currentNode, propName ); // convert string to array, unless it ends in ',' in which case more will be added to it
+				return reader.getOffset() + 160 + 16 >= reader.size();
 
-				if ( propName === 'a' && propValue.slice( - 1 ) !== ',' ) {
+			}
 
-					currentNode.a = parseNumberArray( propValue );
+		} // recursively parse nodes until the end of the file is reached
 
-				}
 
-			},
-			parseNodePropertyContinued: function ( line ) {
+		parseNode( reader, version ) {
 
-				var currentNode = this.getCurrentNode();
-				currentNode.a += line; // if the line doesn't end in ',' we have reached the end of the property value
-				// so convert the string to an array
+			const node = {}; // The first three data sizes depends on version.
 
-				if ( line.slice( - 1 ) !== ',' ) {
+			const endOffset = version >= 7500 ? reader.getUint64() : reader.getUint32();
+			const numProperties = version >= 7500 ? reader.getUint64() : reader.getUint32();
+			version >= 7500 ? reader.getUint64() : reader.getUint32(); // the returned propertyListLen is not used
 
-					currentNode.a = parseNumberArray( currentNode.a );
+			const nameLen = reader.getUint8();
+			const name = reader.getString( nameLen ); // Regards this node as NULL-record if endOffset is zero
 
-				}
+			if ( endOffset === 0 ) return null;
+			const propertyList = [];
 
-			},
-			// parse "Property70"
-			parseNodeSpecialProperty: function ( line, propName, propValue ) {
+			for ( let i = 0; i < numProperties; i ++ ) {
 
-				// split this
-				// P: "Lcl Scaling", "Lcl Scaling", "", "A",1,1,1
-				// into array like below
-				// ["Lcl Scaling", "Lcl Scaling", "", "A", "1,1,1" ]
-				var props = propValue.split( '",' ).map( function ( prop ) {
+				propertyList.push( this.parseProperty( reader ) );
 
-					return prop.trim().replace( /^\"/, '' ).replace( /\s/, '_' );
+			} // Regards the first three elements in propertyList as id, attrName, and attrType
 
-				} );
-				var innerPropName = props[ 0 ];
-				var innerPropType1 = props[ 1 ];
-				var innerPropType2 = props[ 2 ];
-				var innerPropFlag = props[ 3 ];
-				var innerPropValue = props[ 4 ]; // cast values where needed, otherwise leave as strings
-
-				switch ( innerPropType1 ) {
-
-					case 'int':
-					case 'enum':
-					case 'bool':
-					case 'ULongLong':
-					case 'double':
-					case 'Number':
-					case 'FieldOfView':
-						innerPropValue = parseFloat( innerPropValue );
-						break;
 
-					case 'Color':
-					case 'ColorRGB':
-					case 'Vector3D':
-					case 'Lcl_Translation':
-					case 'Lcl_Rotation':
-					case 'Lcl_Scaling':
-						innerPropValue = parseNumberArray( innerPropValue );
-						break;
+			const id = propertyList.length > 0 ? propertyList[ 0 ] : '';
+			const attrName = propertyList.length > 1 ? propertyList[ 1 ] : '';
+			const attrType = propertyList.length > 2 ? propertyList[ 2 ] : ''; // check if this node represents just a single property
+			// like (name, 0) set or (name2, [0, 1, 2]) set of {name: 0, name2: [0, 1, 2]}
 
-				} // CAUTION: these props must append to parent's parent
+			node.singleProperty = numProperties === 1 && reader.getOffset() === endOffset ? true : false;
 
+			while ( endOffset > reader.getOffset() ) {
 
-				this.getPrevNode()[ innerPropName ] = {
-					'type': innerPropType1,
-					'type2': innerPropType2,
-					'flag': innerPropFlag,
-					'value': innerPropValue
-				};
-				this.setCurrentProp( this.getPrevNode(), innerPropName );
+				const subNode = this.parseNode( reader, version );
+				if ( subNode !== null ) this.parseSubNode( name, node, subNode );
 
 			}
-		}; // Parse an FBX file in Binary format
 
-		function BinaryParser() {}
+			node.propertyList = propertyList; // raw property list used by parent
 
-		BinaryParser.prototype = {
-			constructor: BinaryParser,
-			parse: function ( buffer ) {
+			if ( typeof id === 'number' ) node.id = id;
+			if ( attrName !== '' ) node.attrName = attrName;
+			if ( attrType !== '' ) node.attrType = attrType;
+			if ( name !== '' ) node.name = name;
+			return node;
 
-				var reader = new BinaryReader( buffer );
-				reader.skip( 23 ); // skip magic 23 bytes
+		}
 
-				var version = reader.getUint32();
+		parseSubNode( name, node, subNode ) {
 
-				if ( version < 6400 ) {
+			// special case: child node is single property
+			if ( subNode.singleProperty === true ) {
 
-					throw new Error( 'THREE.FBXLoader: FBX version not supported, FileVersion: ' + version );
+				const value = subNode.propertyList[ 0 ];
 
-				}
+				if ( Array.isArray( value ) ) {
 
-				var allNodes = new FBXTree();
+					node[ subNode.name ] = subNode;
+					subNode.a = value;
 
-				while ( ! this.endOfContent( reader ) ) {
+				} else {
 
-					var node = this.parseNode( reader, version );
-					if ( node !== null ) allNodes.add( node.name, node );
+					node[ subNode.name ] = value;
 
 				}
 
-				return allNodes;
+			} else if ( name === 'Connections' && subNode.name === 'C' ) {
 
-			},
-			// Check if reader has reached the end of content.
-			endOfContent: function ( reader ) {
+				const array = [];
+				subNode.propertyList.forEach( function ( property, i ) {
 
-				// footer size: 160bytes + 16-byte alignment padding
-				// - 16bytes: magic
-				// - padding til 16-byte alignment (at least 1byte?)
-				//	(seems like some exporters embed fixed 15 or 16bytes?)
-				// - 4bytes: magic
-				// - 4bytes: version
-				// - 120bytes: zero
-				// - 16bytes: magic
-				if ( reader.size() % 16 === 0 ) {
+					// first Connection is FBX type (OO, OP, etc.). We'll discard these
+					if ( i !== 0 ) array.push( property );
 
-					return ( reader.getOffset() + 160 + 16 & ~ 0xf ) >= reader.size();
+				} );
 
-				} else {
+				if ( node.connections === undefined ) {
 
-					return reader.getOffset() + 160 + 16 >= reader.size();
+					node.connections = [];
 
 				}
 
-			},
-			// recursively parse nodes until the end of the file is reached
-			parseNode: function ( reader, version ) {
-
-				var node = {}; // The first three data sizes depends on version.
-
-				var endOffset = version >= 7500 ? reader.getUint64() : reader.getUint32();
-				var numProperties = version >= 7500 ? reader.getUint64() : reader.getUint32();
-				version >= 7500 ? reader.getUint64() : reader.getUint32(); // the returned propertyListLen is not used
-
-				var nameLen = reader.getUint8();
-				var name = reader.getString( nameLen ); // Regards this node as NULL-record if endOffset is zero
-
-				if ( endOffset === 0 ) return null;
-				var propertyList = [];
-
-				for ( var i = 0; i < numProperties; i ++ ) {
+				node.connections.push( array );
 
-					propertyList.push( this.parseProperty( reader ) );
+			} else if ( subNode.name === 'Properties70' ) {
 
-				} // Regards the first three elements in propertyList as id, attrName, and attrType
+				const keys = Object.keys( subNode );
+				keys.forEach( function ( key ) {
 
+					node[ key ] = subNode[ key ];
 
-				var id = propertyList.length > 0 ? propertyList[ 0 ] : '';
-				var attrName = propertyList.length > 1 ? propertyList[ 1 ] : '';
-				var attrType = propertyList.length > 2 ? propertyList[ 2 ] : ''; // check if this node represents just a single property
-				// like (name, 0) set or (name2, [0, 1, 2]) set of {name: 0, name2: [0, 1, 2]}
-
-				node.singleProperty = numProperties === 1 && reader.getOffset() === endOffset ? true : false;
+				} );
 
-				while ( endOffset > reader.getOffset() ) {
+			} else if ( name === 'Properties70' && subNode.name === 'P' ) {
 
-					var subNode = this.parseNode( reader, version );
-					if ( subNode !== null ) this.parseSubNode( name, node, subNode );
+				let innerPropName = subNode.propertyList[ 0 ];
+				let innerPropType1 = subNode.propertyList[ 1 ];
+				const innerPropType2 = subNode.propertyList[ 2 ];
+				const innerPropFlag = subNode.propertyList[ 3 ];
+				let innerPropValue;
+				if ( innerPropName.indexOf( 'Lcl ' ) === 0 ) innerPropName = innerPropName.replace( 'Lcl ', 'Lcl_' );
+				if ( innerPropType1.indexOf( 'Lcl ' ) === 0 ) innerPropType1 = innerPropType1.replace( 'Lcl ', 'Lcl_' );
 
-				}
+				if ( innerPropType1 === 'Color' || innerPropType1 === 'ColorRGB' || innerPropType1 === 'Vector' || innerPropType1 === 'Vector3D' || innerPropType1.indexOf( 'Lcl_' ) === 0 ) {
 
-				node.propertyList = propertyList; // raw property list used by parent
+					innerPropValue = [ subNode.propertyList[ 4 ], subNode.propertyList[ 5 ], subNode.propertyList[ 6 ] ];
 
-				if ( typeof id === 'number' ) node.id = id;
-				if ( attrName !== '' ) node.attrName = attrName;
-				if ( attrType !== '' ) node.attrType = attrType;
-				if ( name !== '' ) node.name = name;
-				return node;
+				} else {
 
-			},
-			parseSubNode: function ( name, node, subNode ) {
+					innerPropValue = subNode.propertyList[ 4 ];
 
-				// special case: child node is single property
-				if ( subNode.singleProperty === true ) {
+				} // this will be copied to parent, see above
 
-					var value = subNode.propertyList[ 0 ];
 
-					if ( Array.isArray( value ) ) {
+				node[ innerPropName ] = {
+					'type': innerPropType1,
+					'type2': innerPropType2,
+					'flag': innerPropFlag,
+					'value': innerPropValue
+				};
 
-						node[ subNode.name ] = subNode;
-						subNode.a = value;
+			} else if ( node[ subNode.name ] === undefined ) {
 
-					} else {
+				if ( typeof subNode.id === 'number' ) {
 
-						node[ subNode.name ] = value;
+					node[ subNode.name ] = {};
+					node[ subNode.name ][ subNode.id ] = subNode;
 
-					}
+				} else {
 
-				} else if ( name === 'Connections' && subNode.name === 'C' ) {
+					node[ subNode.name ] = subNode;
 
-					var array = [];
-					subNode.propertyList.forEach( function ( property, i ) {
+				}
 
-						// first Connection is FBX type (OO, OP, etc.). We'll discard these
-						if ( i !== 0 ) array.push( property );
+			} else {
 
-					} );
+				if ( subNode.name === 'PoseNode' ) {
 
-					if ( node.connections === undefined ) {
+					if ( ! Array.isArray( node[ subNode.name ] ) ) {
 
-						node.connections = [];
+						node[ subNode.name ] = [ node[ subNode.name ] ];
 
 					}
 
-					node.connections.push( array );
-
-				} else if ( subNode.name === 'Properties70' ) {
+					node[ subNode.name ].push( subNode );
 
-					var keys = Object.keys( subNode );
-					keys.forEach( function ( key ) {
+				} else if ( node[ subNode.name ][ subNode.id ] === undefined ) {
 
-						node[ key ] = subNode[ key ];
+					node[ subNode.name ][ subNode.id ] = subNode;
 
-					} );
+				}
 
-				} else if ( name === 'Properties70' && subNode.name === 'P' ) {
+			}
 
-					var innerPropName = subNode.propertyList[ 0 ];
-					var innerPropType1 = subNode.propertyList[ 1 ];
-					var innerPropType2 = subNode.propertyList[ 2 ];
-					var innerPropFlag = subNode.propertyList[ 3 ];
-					var innerPropValue;
-					if ( innerPropName.indexOf( 'Lcl ' ) === 0 ) innerPropName = innerPropName.replace( 'Lcl ', 'Lcl_' );
-					if ( innerPropType1.indexOf( 'Lcl ' ) === 0 ) innerPropType1 = innerPropType1.replace( 'Lcl ', 'Lcl_' );
+		}
 
-					if ( innerPropType1 === 'Color' || innerPropType1 === 'ColorRGB' || innerPropType1 === 'Vector' || innerPropType1 === 'Vector3D' || innerPropType1.indexOf( 'Lcl_' ) === 0 ) {
+		parseProperty( reader ) {
 
-						innerPropValue = [ subNode.propertyList[ 4 ], subNode.propertyList[ 5 ], subNode.propertyList[ 6 ] ];
+			const type = reader.getString( 1 );
+			let length;
 
-					} else {
+			switch ( type ) {
 
-						innerPropValue = subNode.propertyList[ 4 ];
+				case 'C':
+					return reader.getBoolean();
 
-					} // this will be copied to parent, see above
+				case 'D':
+					return reader.getFloat64();
 
+				case 'F':
+					return reader.getFloat32();
 
-					node[ innerPropName ] = {
-						'type': innerPropType1,
-						'type2': innerPropType2,
-						'flag': innerPropFlag,
-						'value': innerPropValue
-					};
+				case 'I':
+					return reader.getInt32();
 
-				} else if ( node[ subNode.name ] === undefined ) {
+				case 'L':
+					return reader.getInt64();
 
-					if ( typeof subNode.id === 'number' ) {
+				case 'R':
+					length = reader.getUint32();
+					return reader.getArrayBuffer( length );
 
-						node[ subNode.name ] = {};
-						node[ subNode.name ][ subNode.id ] = subNode;
+				case 'S':
+					length = reader.getUint32();
+					return reader.getString( length );
 
-					} else {
+				case 'Y':
+					return reader.getInt16();
 
-						node[ subNode.name ] = subNode;
+				case 'b':
+				case 'c':
+				case 'd':
+				case 'f':
+				case 'i':
+				case 'l':
+					const arrayLength = reader.getUint32();
+					const encoding = reader.getUint32(); // 0: non-compressed, 1: compressed
 
-					}
+					const compressedLength = reader.getUint32();
 
-				} else {
+					if ( encoding === 0 ) {
 
-					if ( subNode.name === 'PoseNode' ) {
+						switch ( type ) {
 
-						if ( ! Array.isArray( node[ subNode.name ] ) ) {
+							case 'b':
+							case 'c':
+								return reader.getBooleanArray( arrayLength );
 
-							node[ subNode.name ] = [ node[ subNode.name ] ];
+							case 'd':
+								return reader.getFloat64Array( arrayLength );
 
-						}
+							case 'f':
+								return reader.getFloat32Array( arrayLength );
 
-						node[ subNode.name ].push( subNode );
+							case 'i':
+								return reader.getInt32Array( arrayLength );
 
-					} else if ( node[ subNode.name ][ subNode.id ] === undefined ) {
+							case 'l':
+								return reader.getInt64Array( arrayLength );
 
-						node[ subNode.name ][ subNode.id ] = subNode;
+						}
 
 					}
 
-				}
+					if ( typeof fflate === 'undefined' ) {
 
-			},
-			parseProperty: function ( reader ) {
+						console.error( 'THREE.FBXLoader: External library fflate.min.js required.' );
 
-				var type = reader.getString( 1 );
+					}
 
-				switch ( type ) {
+					const data = fflate.unzlibSync( new Uint8Array( reader.getArrayBuffer( compressedLength ) ) ); // eslint-disable-line no-undef
 
-					case 'C':
-						return reader.getBoolean();
+					const reader2 = new BinaryReader( data.buffer );
 
-					case 'D':
-						return reader.getFloat64();
+					switch ( type ) {
 
-					case 'F':
-						return reader.getFloat32();
+						case 'b':
+						case 'c':
+							return reader2.getBooleanArray( arrayLength );
 
-					case 'I':
-						return reader.getInt32();
+						case 'd':
+							return reader2.getFloat64Array( arrayLength );
 
-					case 'L':
-						return reader.getInt64();
+						case 'f':
+							return reader2.getFloat32Array( arrayLength );
 
-					case 'R':
-						var length = reader.getUint32();
-						return reader.getArrayBuffer( length );
+						case 'i':
+							return reader2.getInt32Array( arrayLength );
 
-					case 'S':
-						var length = reader.getUint32();
-						return reader.getString( length );
+						case 'l':
+							return reader2.getInt64Array( arrayLength );
 
-					case 'Y':
-						return reader.getInt16();
+					}
 
-					case 'b':
-					case 'c':
-					case 'd':
-					case 'f':
-					case 'i':
-					case 'l':
-						var arrayLength = reader.getUint32();
-						var encoding = reader.getUint32(); // 0: non-compressed, 1: compressed
+				default:
+					throw new Error( 'THREE.FBXLoader: Unknown property type ' + type );
 
-						var compressedLength = reader.getUint32();
+			}
 
-						if ( encoding === 0 ) {
+		}
 
-							switch ( type ) {
+	}
 
-								case 'b':
-								case 'c':
-									return reader.getBooleanArray( arrayLength );
+	class BinaryReader {
 
-								case 'd':
-									return reader.getFloat64Array( arrayLength );
+		constructor( buffer, littleEndian ) {
 
-								case 'f':
-									return reader.getFloat32Array( arrayLength );
+			this.dv = new DataView( buffer );
+			this.offset = 0;
+			this.littleEndian = littleEndian !== undefined ? littleEndian : true;
 
-								case 'i':
-									return reader.getInt32Array( arrayLength );
+		}
 
-								case 'l':
-									return reader.getInt64Array( arrayLength );
+		getOffset() {
 
-							}
+			return this.offset;
 
-						}
+		}
 
-						if ( typeof fflate === 'undefined' ) {
+		size() {
 
-							console.error( 'THREE.FBXLoader: External library fflate.min.js required.' );
+			return this.dv.buffer.byteLength;
 
-						}
+		}
 
-						var data = fflate.unzlibSync( new Uint8Array( reader.getArrayBuffer( compressedLength ) ) ); // eslint-disable-line no-undef
+		skip( length ) {
 
-						var reader2 = new BinaryReader( data.buffer );
+			this.offset += length;
 
-						switch ( type ) {
+		} // seems like true/false representation depends on exporter.
+		// true: 1 or 'Y'(=0x59), false: 0 or 'T'(=0x54)
+		// then sees LSB.
 
-							case 'b':
-							case 'c':
-								return reader2.getBooleanArray( arrayLength );
 
-							case 'd':
-								return reader2.getFloat64Array( arrayLength );
+		getBoolean() {
 
-							case 'f':
-								return reader2.getFloat32Array( arrayLength );
+			return ( this.getUint8() & 1 ) === 1;
 
-							case 'i':
-								return reader2.getInt32Array( arrayLength );
+		}
 
-							case 'l':
-								return reader2.getInt64Array( arrayLength );
+		getBooleanArray( size ) {
 
-						}
+			const a = [];
 
-					default:
-						throw new Error( 'THREE.FBXLoader: Unknown property type ' + type );
+			for ( let i = 0; i < size; i ++ ) {
 
-				}
+				a.push( this.getBoolean() );
 
 			}
-		};
 
-		function BinaryReader( buffer, littleEndian ) {
-
-			this.dv = new DataView( buffer );
-			this.offset = 0;
-			this.littleEndian = littleEndian !== undefined ? littleEndian : true;
+			return a;
 
 		}
 
-		BinaryReader.prototype = {
-			constructor: BinaryReader,
-			getOffset: function () {
-
-				return this.offset;
+		getUint8() {
 
-			},
-			size: function () {
+			const value = this.dv.getUint8( this.offset );
+			this.offset += 1;
+			return value;
 
-				return this.dv.buffer.byteLength;
+		}
 
-			},
-			skip: function ( length ) {
+		getInt16() {
 
-				this.offset += length;
+			const value = this.dv.getInt16( this.offset, this.littleEndian );
+			this.offset += 2;
+			return value;
 
-			},
-			// seems like true/false representation depends on exporter.
-			// true: 1 or 'Y'(=0x59), false: 0 or 'T'(=0x54)
-			// then sees LSB.
-			getBoolean: function () {
+		}
 
-				return ( this.getUint8() & 1 ) === 1;
+		getInt32() {
 
-			},
-			getBooleanArray: function ( size ) {
+			const value = this.dv.getInt32( this.offset, this.littleEndian );
+			this.offset += 4;
+			return value;
 
-				var a = [];
+		}
 
-				for ( var i = 0; i < size; i ++ ) {
+		getInt32Array( size ) {
 
-					a.push( this.getBoolean() );
+			const a = [];
 
-				}
+			for ( let i = 0; i < size; i ++ ) {
 
-				return a;
+				a.push( this.getInt32() );
 
-			},
-			getUint8: function () {
+			}
 
-				var value = this.dv.getUint8( this.offset );
-				this.offset += 1;
-				return value;
+			return a;
 
-			},
-			getInt16: function () {
+		}
 
-				var value = this.dv.getInt16( this.offset, this.littleEndian );
-				this.offset += 2;
-				return value;
+		getUint32() {
 
-			},
-			getInt32: function () {
+			const value = this.dv.getUint32( this.offset, this.littleEndian );
+			this.offset += 4;
+			return value;
 
-				var value = this.dv.getInt32( this.offset, this.littleEndian );
-				this.offset += 4;
-				return value;
+		} // JavaScript doesn't support 64-bit integer so calculate this here
+		// 1 << 32 will return 1 so using multiply operation instead here.
+		// There's a possibility that this method returns wrong value if the value
+		// is out of the range between Number.MAX_SAFE_INTEGER and Number.MIN_SAFE_INTEGER.
+		// TODO: safely handle 64-bit integer
 
-			},
-			getInt32Array: function ( size ) {
 
-				var a = [];
+		getInt64() {
 
-				for ( var i = 0; i < size; i ++ ) {
+			let low, high;
 
-					a.push( this.getInt32() );
+			if ( this.littleEndian ) {
 
-				}
+				low = this.getUint32();
+				high = this.getUint32();
 
-				return a;
+			} else {
 
-			},
-			getUint32: function () {
+				high = this.getUint32();
+				low = this.getUint32();
 
-				var value = this.dv.getUint32( this.offset, this.littleEndian );
-				this.offset += 4;
-				return value;
+			} // calculate negative value
 
-			},
-			// JavaScript doesn't support 64-bit integer so calculate this here
-			// 1 << 32 will return 1 so using multiply operation instead here.
-			// There's a possibility that this method returns wrong value if the value
-			// is out of the range between Number.MAX_SAFE_INTEGER and Number.MIN_SAFE_INTEGER.
-			// TODO: safely handle 64-bit integer
-			getInt64: function () {
 
-				var low, high;
+			if ( high & 0x80000000 ) {
 
-				if ( this.littleEndian ) {
+				high = ~ high & 0xFFFFFFFF;
+				low = ~ low & 0xFFFFFFFF;
+				if ( low === 0xFFFFFFFF ) high = high + 1 & 0xFFFFFFFF;
+				low = low + 1 & 0xFFFFFFFF;
+				return - ( high * 0x100000000 + low );
 
-					low = this.getUint32();
-					high = this.getUint32();
+			}
 
-				} else {
+			return high * 0x100000000 + low;
 
-					high = this.getUint32();
-					low = this.getUint32();
+		}
 
-				} // calculate negative value
+		getInt64Array( size ) {
 
+			const a = [];
 
-				if ( high & 0x80000000 ) {
+			for ( let i = 0; i < size; i ++ ) {
 
-					high = ~ high & 0xFFFFFFFF;
-					low = ~ low & 0xFFFFFFFF;
-					if ( low === 0xFFFFFFFF ) high = high + 1 & 0xFFFFFFFF;
-					low = low + 1 & 0xFFFFFFFF;
-					return - ( high * 0x100000000 + low );
+				a.push( this.getInt64() );
 
-				}
+			}
 
-				return high * 0x100000000 + low;
+			return a;
 
-			},
-			getInt64Array: function ( size ) {
+		} // Note: see getInt64() comment
 
-				var a = [];
 
-				for ( var i = 0; i < size; i ++ ) {
+		getUint64() {
 
-					a.push( this.getInt64() );
+			let low, high;
 
-				}
+			if ( this.littleEndian ) {
 
-				return a;
+				low = this.getUint32();
+				high = this.getUint32();
 
-			},
-			// Note: see getInt64() comment
-			getUint64: function () {
+			} else {
 
-				var low, high;
+				high = this.getUint32();
+				low = this.getUint32();
 
-				if ( this.littleEndian ) {
+			}
 
-					low = this.getUint32();
-					high = this.getUint32();
+			return high * 0x100000000 + low;
 
-				} else {
+		}
 
-					high = this.getUint32();
-					low = this.getUint32();
+		getFloat32() {
 
-				}
+			const value = this.dv.getFloat32( this.offset, this.littleEndian );
+			this.offset += 4;
+			return value;
 
-				return high * 0x100000000 + low;
+		}
 
-			},
-			getFloat32: function () {
+		getFloat32Array( size ) {
 
-				var value = this.dv.getFloat32( this.offset, this.littleEndian );
-				this.offset += 4;
-				return value;
+			const a = [];
 
-			},
-			getFloat32Array: function ( size ) {
+			for ( let i = 0; i < size; i ++ ) {
 
-				var a = [];
+				a.push( this.getFloat32() );
 
-				for ( var i = 0; i < size; i ++ ) {
+			}
 
-					a.push( this.getFloat32() );
+			return a;
 
-				}
+		}
 
-				return a;
+		getFloat64() {
 
-			},
-			getFloat64: function () {
+			const value = this.dv.getFloat64( this.offset, this.littleEndian );
+			this.offset += 8;
+			return value;
 
-				var value = this.dv.getFloat64( this.offset, this.littleEndian );
-				this.offset += 8;
-				return value;
+		}
 
-			},
-			getFloat64Array: function ( size ) {
+		getFloat64Array( size ) {
 
-				var a = [];
+			const a = [];
 
-				for ( var i = 0; i < size; i ++ ) {
+			for ( let i = 0; i < size; i ++ ) {
 
-					a.push( this.getFloat64() );
+				a.push( this.getFloat64() );
 
-				}
+			}
 
-				return a;
+			return a;
 
-			},
-			getArrayBuffer: function ( size ) {
+		}
 
-				var value = this.dv.buffer.slice( this.offset, this.offset + size );
-				this.offset += size;
-				return value;
+		getArrayBuffer( size ) {
 
-			},
-			getString: function ( size ) {
+			const value = this.dv.buffer.slice( this.offset, this.offset + size );
+			this.offset += size;
+			return value;
 
-				// note: safari 9 doesn't support Uint8Array.indexOf; create intermediate array instead
-				var a = [];
+		}
 
-				for ( var i = 0; i < size; i ++ ) {
+		getString( size ) {
 
-					a[ i ] = this.getUint8();
+			// note: safari 9 doesn't support Uint8Array.indexOf; create intermediate array instead
+			let a = [];
 
-				}
+			for ( let i = 0; i < size; i ++ ) {
 
-				var nullByte = a.indexOf( 0 );
-				if ( nullByte >= 0 ) a = a.slice( 0, nullByte );
-				return THREE.LoaderUtils.decodeText( new Uint8Array( a ) );
+				a[ i ] = this.getUint8();
 
 			}
-		}; // FBXTree holds a representation of the FBX data, returned by the TextParser ( FBX ASCII format)
-		// and BinaryParser( FBX Binary format)
-
-		function FBXTree() {}
 
-		FBXTree.prototype = {
-			constructor: FBXTree,
-			add: function ( key, val ) {
+			const nullByte = a.indexOf( 0 );
+			if ( nullByte >= 0 ) a = a.slice( 0, nullByte );
+			return THREE.LoaderUtils.decodeText( new Uint8Array( a ) );
 
-				this[ key ] = val;
-
-			}
-		}; // ************** UTILITY FUNCTIONS **************
+		}
 
-		function isFbxFormatBinary( buffer ) {
+	} // FBXTree holds a representation of the FBX data, returned by the TextParser ( FBX ASCII format)
+	// and BinaryParser( FBX Binary format)
 
-			var CORRECT = 'Kaydara FBX Binary	\0';
-			return buffer.byteLength >= CORRECT.length && CORRECT === convertArrayBufferToString( buffer, 0, CORRECT.length );
 
-		}
+	class FBXTree {
 
-		function isFbxFormatASCII( text ) {
+		add( key, val ) {
 
-			var CORRECT = [ 'K', 'a', 'y', 'd', 'a', 'r', 'a', '\\', 'F', 'B', 'X', '\\', 'B', 'i', 'n', 'a', 'r', 'y', '\\', '\\' ];
-			var cursor = 0;
+			this[ key ] = val;
 
-			function read( offset ) {
+		}
 
-				var result = text[ offset - 1 ];
-				text = text.slice( cursor + offset );
-				cursor ++;
-				return result;
+	} // ************** UTILITY FUNCTIONS **************
 
-			}
 
-			for ( var i = 0; i < CORRECT.length; ++ i ) {
+	function isFbxFormatBinary( buffer ) {
 
-				var num = read( 1 );
+		const CORRECT = 'Kaydara FBX Binary	\0';
+		return buffer.byteLength >= CORRECT.length && CORRECT === convertArrayBufferToString( buffer, 0, CORRECT.length );
 
-				if ( num === CORRECT[ i ] ) {
+	}
 
-					return false;
+	function isFbxFormatASCII( text ) {
 
-				}
+		const CORRECT = [ 'K', 'a', 'y', 'd', 'a', 'r', 'a', '\\', 'F', 'B', 'X', '\\', 'B', 'i', 'n', 'a', 'r', 'y', '\\', '\\' ];
+		let cursor = 0;
 
-			}
+		function read( offset ) {
 
-			return true;
+			const result = text[ offset - 1 ];
+			text = text.slice( cursor + offset );
+			cursor ++;
+			return result;
 
 		}
 
-		function getFbxVersion( text ) {
+		for ( let i = 0; i < CORRECT.length; ++ i ) {
 
-			var versionRegExp = /FBXVersion: (\d+)/;
-			var match = text.match( versionRegExp );
+			const num = read( 1 );
 
-			if ( match ) {
+			if ( num === CORRECT[ i ] ) {
 
-				var version = parseInt( match[ 1 ] );
-				return version;
+				return false;
 
 			}
 
-			throw new Error( 'THREE.FBXLoader: Cannot find the version number for the file given.' );
+		}
 
-		} // Converts FBX ticks into real time seconds.
+		return true;
 
+	}
 
-		function convertFBXTimeToSeconds( time ) {
+	function getFbxVersion( text ) {
 
-			return time / 46186158000;
+		const versionRegExp = /FBXVersion: (\d+)/;
+		const match = text.match( versionRegExp );
 
-		}
+		if ( match ) {
 
-		var dataArray = []; // extracts the data from the correct position in the FBX array based on indexing type
+			const version = parseInt( match[ 1 ] );
+			return version;
 
-		function getData( polygonVertexIndex, polygonIndex, vertexIndex, infoObject ) {
+		}
 
-			var index;
+		throw new Error( 'THREE.FBXLoader: Cannot find the version number for the file given.' );
 
-			switch ( infoObject.mappingType ) {
+	} // Converts FBX ticks into real time seconds.
 
-				case 'ByPolygonVertex':
-					index = polygonVertexIndex;
-					break;
 
-				case 'ByPolygon':
-					index = polygonIndex;
-					break;
+	function convertFBXTimeToSeconds( time ) {
 
-				case 'ByVertice':
-					index = vertexIndex;
-					break;
+		return time / 46186158000;
 
-				case 'AllSame':
-					index = infoObject.indices[ 0 ];
-					break;
+	}
 
-				default:
-					console.warn( 'THREE.FBXLoader: unknown attribute mapping type ' + infoObject.mappingType );
+	const dataArray = []; // extracts the data from the correct position in the FBX array based on indexing type
 
-			}
+	function getData( polygonVertexIndex, polygonIndex, vertexIndex, infoObject ) {
 
-			if ( infoObject.referenceType === 'IndexToDirect' ) index = infoObject.indices[ index ];
-			var from = index * infoObject.dataSize;
-			var to = from + infoObject.dataSize;
-			return slice( dataArray, infoObject.buffer, from, to );
+		let index;
 
-		}
+		switch ( infoObject.mappingType ) {
 
-		var tempEuler = new THREE.Euler();
-		var tempVec = new THREE.Vector3(); // generate transformation from FBX transform data
-		// ref: https://help.autodesk.com/view/FBX/2017/ENU/?guid=__files_GUID_10CDD63C_79C1_4F2D_BB28_AD2BE65A02ED_htm
-		// ref: http://docs.autodesk.com/FBX/2014/ENU/FBX-SDK-Documentation/index.html?url=cpp_ref/_transformations_2main_8cxx-example.html,topicNumber=cpp_ref__transformations_2main_8cxx_example_htmlfc10a1e1-b18d-4e72-9dc0-70d0f1959f5e
+			case 'ByPolygonVertex':
+				index = polygonVertexIndex;
+				break;
 
-		function generateTransform( transformData ) {
+			case 'ByPolygon':
+				index = polygonIndex;
+				break;
 
-			var lTranslationM = new THREE.Matrix4();
-			var lPreRotationM = new THREE.Matrix4();
-			var lRotationM = new THREE.Matrix4();
-			var lPostRotationM = new THREE.Matrix4();
-			var lScalingM = new THREE.Matrix4();
-			var lScalingPivotM = new THREE.Matrix4();
-			var lScalingOffsetM = new THREE.Matrix4();
-			var lRotationOffsetM = new THREE.Matrix4();
-			var lRotationPivotM = new THREE.Matrix4();
-			var lParentGX = new THREE.Matrix4();
-			var lParentLX = new THREE.Matrix4();
-			var lGlobalT = new THREE.Matrix4();
-			var inheritType = transformData.inheritType ? transformData.inheritType : 0;
-			if ( transformData.translation ) lTranslationM.setPosition( tempVec.fromArray( transformData.translation ) );
+			case 'ByVertice':
+				index = vertexIndex;
+				break;
 
-			if ( transformData.preRotation ) {
+			case 'AllSame':
+				index = infoObject.indices[ 0 ];
+				break;
 
-				var array = transformData.preRotation.map( THREE.MathUtils.degToRad );
-				array.push( transformData.eulerOrder );
-				lPreRotationM.makeRotationFromEuler( tempEuler.fromArray( array ) );
+			default:
+				console.warn( 'THREE.FBXLoader: unknown attribute mapping type ' + infoObject.mappingType );
 
-			}
+		}
 
-			if ( transformData.rotation ) {
+		if ( infoObject.referenceType === 'IndexToDirect' ) index = infoObject.indices[ index ];
+		const from = index * infoObject.dataSize;
+		const to = from + infoObject.dataSize;
+		return slice( dataArray, infoObject.buffer, from, to );
+
+	}
+
+	const tempEuler = new THREE.Euler();
+	const tempVec = new THREE.Vector3(); // generate transformation from FBX transform data
+	// ref: https://help.autodesk.com/view/FBX/2017/ENU/?guid=__files_GUID_10CDD63C_79C1_4F2D_BB28_AD2BE65A02ED_htm
+	// ref: http://docs.autodesk.com/FBX/2014/ENU/FBX-SDK-Documentation/index.html?url=cpp_ref/_transformations_2main_8cxx-example.html,topicNumber=cpp_ref__transformations_2main_8cxx_example_htmlfc10a1e1-b18d-4e72-9dc0-70d0f1959f5e
+
+	function generateTransform( transformData ) {
+
+		const lTranslationM = new THREE.Matrix4();
+		const lPreRotationM = new THREE.Matrix4();
+		const lRotationM = new THREE.Matrix4();
+		const lPostRotationM = new THREE.Matrix4();
+		const lScalingM = new THREE.Matrix4();
+		const lScalingPivotM = new THREE.Matrix4();
+		const lScalingOffsetM = new THREE.Matrix4();
+		const lRotationOffsetM = new THREE.Matrix4();
+		const lRotationPivotM = new THREE.Matrix4();
+		const lParentGX = new THREE.Matrix4();
+		const lParentLX = new THREE.Matrix4();
+		const lGlobalT = new THREE.Matrix4();
+		const inheritType = transformData.inheritType ? transformData.inheritType : 0;
+		if ( transformData.translation ) lTranslationM.setPosition( tempVec.fromArray( transformData.translation ) );
+
+		if ( transformData.preRotation ) {
+
+			const array = transformData.preRotation.map( THREE.MathUtils.degToRad );
+			array.push( transformData.eulerOrder );
+			lPreRotationM.makeRotationFromEuler( tempEuler.fromArray( array ) );
 
-				var array = transformData.rotation.map( THREE.MathUtils.degToRad );
-				array.push( transformData.eulerOrder );
-				lRotationM.makeRotationFromEuler( tempEuler.fromArray( array ) );
+		}
 
-			}
+		if ( transformData.rotation ) {
 
-			if ( transformData.postRotation ) {
+			const array = transformData.rotation.map( THREE.MathUtils.degToRad );
+			array.push( transformData.eulerOrder );
+			lRotationM.makeRotationFromEuler( tempEuler.fromArray( array ) );
 
-				var array = transformData.postRotation.map( THREE.MathUtils.degToRad );
-				array.push( transformData.eulerOrder );
-				lPostRotationM.makeRotationFromEuler( tempEuler.fromArray( array ) );
-				lPostRotationM.invert();
+		}
 
-			}
+		if ( transformData.postRotation ) {
 
-			if ( transformData.scale ) lScalingM.scale( tempVec.fromArray( transformData.scale ) ); // Pivots and offsets
+			const array = transformData.postRotation.map( THREE.MathUtils.degToRad );
+			array.push( transformData.eulerOrder );
+			lPostRotationM.makeRotationFromEuler( tempEuler.fromArray( array ) );
+			lPostRotationM.invert();
 
-			if ( transformData.scalingOffset ) lScalingOffsetM.setPosition( tempVec.fromArray( transformData.scalingOffset ) );
-			if ( transformData.scalingPivot ) lScalingPivotM.setPosition( tempVec.fromArray( transformData.scalingPivot ) );
-			if ( transformData.rotationOffset ) lRotationOffsetM.setPosition( tempVec.fromArray( transformData.rotationOffset ) );
-			if ( transformData.rotationPivot ) lRotationPivotM.setPosition( tempVec.fromArray( transformData.rotationPivot ) ); // parent transform
+		}
 
-			if ( transformData.parentMatrixWorld ) {
+		if ( transformData.scale ) lScalingM.scale( tempVec.fromArray( transformData.scale ) ); // Pivots and offsets
 
-				lParentLX.copy( transformData.parentMatrix );
-				lParentGX.copy( transformData.parentMatrixWorld );
+		if ( transformData.scalingOffset ) lScalingOffsetM.setPosition( tempVec.fromArray( transformData.scalingOffset ) );
+		if ( transformData.scalingPivot ) lScalingPivotM.setPosition( tempVec.fromArray( transformData.scalingPivot ) );
+		if ( transformData.rotationOffset ) lRotationOffsetM.setPosition( tempVec.fromArray( transformData.rotationOffset ) );
+		if ( transformData.rotationPivot ) lRotationPivotM.setPosition( tempVec.fromArray( transformData.rotationPivot ) ); // parent transform
 
-			}
+		if ( transformData.parentMatrixWorld ) {
 
-			var lLRM = new THREE.Matrix4().copy( lPreRotationM ).multiply( lRotationM ).multiply( lPostRotationM ); // Global Rotation
+			lParentLX.copy( transformData.parentMatrix );
+			lParentGX.copy( transformData.parentMatrixWorld );
 
-			var lParentGRM = new THREE.Matrix4();
-			lParentGRM.extractRotation( lParentGX ); // Global Shear*Scaling
+		}
 
-			var lParentTM = new THREE.Matrix4();
-			lParentTM.copyPosition( lParentGX );
-			var lParentGSM = new THREE.Matrix4();
-			var lParentGRSM = new THREE.Matrix4().copy( lParentTM ).invert().multiply( lParentGX );
-			lParentGSM.copy( lParentGRM ).invert().multiply( lParentGRSM );
-			var lLSM = lScalingM;
-			var lGlobalRS = new THREE.Matrix4();
+		const lLRM = new THREE.Matrix4().copy( lPreRotationM ).multiply( lRotationM ).multiply( lPostRotationM ); // Global Rotation
 
-			if ( inheritType === 0 ) {
+		const lParentGRM = new THREE.Matrix4();
+		lParentGRM.extractRotation( lParentGX ); // Global Shear*Scaling
 
-				lGlobalRS.copy( lParentGRM ).multiply( lLRM ).multiply( lParentGSM ).multiply( lLSM );
+		const lParentTM = new THREE.Matrix4();
+		lParentTM.copyPosition( lParentGX );
+		const lParentGSM = new THREE.Matrix4();
+		const lParentGRSM = new THREE.Matrix4().copy( lParentTM ).invert().multiply( lParentGX );
+		lParentGSM.copy( lParentGRM ).invert().multiply( lParentGRSM );
+		const lLSM = lScalingM;
+		const lGlobalRS = new THREE.Matrix4();
 
-			} else if ( inheritType === 1 ) {
+		if ( inheritType === 0 ) {
 
-				lGlobalRS.copy( lParentGRM ).multiply( lParentGSM ).multiply( lLRM ).multiply( lLSM );
+			lGlobalRS.copy( lParentGRM ).multiply( lLRM ).multiply( lParentGSM ).multiply( lLSM );
 
-			} else {
+		} else if ( inheritType === 1 ) {
 
-				var lParentLSM = new THREE.Matrix4().scale( new THREE.Vector3().setFromMatrixScale( lParentLX ) );
-				var lParentLSM_inv = new THREE.Matrix4().copy( lParentLSM ).invert();
-				var lParentGSM_noLocal = new THREE.Matrix4().copy( lParentGSM ).multiply( lParentLSM_inv );
-				lGlobalRS.copy( lParentGRM ).multiply( lLRM ).multiply( lParentGSM_noLocal ).multiply( lLSM );
+			lGlobalRS.copy( lParentGRM ).multiply( lParentGSM ).multiply( lLRM ).multiply( lLSM );
 
-			}
+		} else {
 
-			var lRotationPivotM_inv = new THREE.Matrix4();
-			lRotationPivotM_inv.copy( lRotationPivotM ).invert();
-			var lScalingPivotM_inv = new THREE.Matrix4();
-			lScalingPivotM_inv.copy( lScalingPivotM ).invert(); // Calculate the local transform matrix
+			const lParentLSM = new THREE.Matrix4().scale( new THREE.Vector3().setFromMatrixScale( lParentLX ) );
+			const lParentLSM_inv = new THREE.Matrix4().copy( lParentLSM ).invert();
+			const lParentGSM_noLocal = new THREE.Matrix4().copy( lParentGSM ).multiply( lParentLSM_inv );
+			lGlobalRS.copy( lParentGRM ).multiply( lLRM ).multiply( lParentGSM_noLocal ).multiply( lLSM );
 
-			var lTransform = new THREE.Matrix4();
-			lTransform.copy( lTranslationM ).multiply( lRotationOffsetM ).multiply( lRotationPivotM ).multiply( lPreRotationM ).multiply( lRotationM ).multiply( lPostRotationM ).multiply( lRotationPivotM_inv ).multiply( lScalingOffsetM ).multiply( lScalingPivotM ).multiply( lScalingM ).multiply( lScalingPivotM_inv );
-			var lLocalTWithAllPivotAndOffsetInfo = new THREE.Matrix4().copyPosition( lTransform );
-			var lGlobalTranslation = new THREE.Matrix4().copy( lParentGX ).multiply( lLocalTWithAllPivotAndOffsetInfo );
-			lGlobalT.copyPosition( lGlobalTranslation );
-			lTransform = new THREE.Matrix4().copy( lGlobalT ).multiply( lGlobalRS ); // from global to local
+		}
 
-			lTransform.premultiply( lParentGX.invert() );
-			return lTransform;
+		const lRotationPivotM_inv = new THREE.Matrix4();
+		lRotationPivotM_inv.copy( lRotationPivotM ).invert();
+		const lScalingPivotM_inv = new THREE.Matrix4();
+		lScalingPivotM_inv.copy( lScalingPivotM ).invert(); // Calculate the local transform matrix
 
-		} // Returns the three.js intrinsic THREE.Euler order corresponding to FBX extrinsic THREE.Euler order
-		// ref: http://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_euler_html
+		let lTransform = new THREE.Matrix4();
+		lTransform.copy( lTranslationM ).multiply( lRotationOffsetM ).multiply( lRotationPivotM ).multiply( lPreRotationM ).multiply( lRotationM ).multiply( lPostRotationM ).multiply( lRotationPivotM_inv ).multiply( lScalingOffsetM ).multiply( lScalingPivotM ).multiply( lScalingM ).multiply( lScalingPivotM_inv );
+		const lLocalTWithAllPivotAndOffsetInfo = new THREE.Matrix4().copyPosition( lTransform );
+		const lGlobalTranslation = new THREE.Matrix4().copy( lParentGX ).multiply( lLocalTWithAllPivotAndOffsetInfo );
+		lGlobalT.copyPosition( lGlobalTranslation );
+		lTransform = new THREE.Matrix4().copy( lGlobalT ).multiply( lGlobalRS ); // from global to local
 
+		lTransform.premultiply( lParentGX.invert() );
+		return lTransform;
 
-		function getEulerOrder( order ) {
+	} // Returns the three.js intrinsic THREE.Euler order corresponding to FBX extrinsic THREE.Euler order
+	// ref: http://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_euler_html
 
-			order = order || 0;
-			var enums = [ 'ZYX', // -> XYZ extrinsic
-				'YZX', // -> XZY extrinsic
-				'XZY', // -> YZX extrinsic
-				'ZXY', // -> YXZ extrinsic
-				'YXZ', // -> ZXY extrinsic
-				'XYZ' // -> ZYX extrinsic
-				//'SphericXYZ', // not possible to support
-			];
 
-			if ( order === 6 ) {
+	function getEulerOrder( order ) {
 
-				console.warn( 'THREE.FBXLoader: unsupported THREE.Euler Order: Spherical XYZ. Animations and rotations may be incorrect.' );
-				return enums[ 0 ];
+		order = order || 0;
+		const enums = [ 'ZYX', // -> XYZ extrinsic
+			'YZX', // -> XZY extrinsic
+			'XZY', // -> YZX extrinsic
+			'ZXY', // -> YXZ extrinsic
+			'YXZ', // -> ZXY extrinsic
+			'XYZ' // -> ZYX extrinsic
+			//'SphericXYZ', // not possible to support
+		];
 
-			}
+		if ( order === 6 ) {
 
-			return enums[ order ];
+			console.warn( 'THREE.FBXLoader: unsupported THREE.Euler Order: Spherical XYZ. Animations and rotations may be incorrect.' );
+			return enums[ 0 ];
 
-		} // Parses comma separated list of numbers and returns them an array.
-		// Used internally by the TextParser
+		}
 
+		return enums[ order ];
 
-		function parseNumberArray( value ) {
+	} // Parses a comma-separated list of numbers and returns them as an array.
+	// Used internally by the TextParser
 
-			var array = value.split( ',' ).map( function ( val ) {
 
-				return parseFloat( val );
+	function parseNumberArray( value ) {
 
-			} );
-			return array;
+		const array = value.split( ',' ).map( function ( val ) {
 
-		}
+			return parseFloat( val );
 
-		function convertArrayBufferToString( buffer, from, to ) {
+		} );
+		return array;
 
-			if ( from === undefined ) from = 0;
-			if ( to === undefined ) to = buffer.byteLength;
-			return THREE.LoaderUtils.decodeText( new Uint8Array( buffer, from, to ) );
+	}
 
-		}
+	function convertArrayBufferToString( buffer, from, to ) {
 
-		function append( a, b ) {
+		if ( from === undefined ) from = 0;
+		if ( to === undefined ) to = buffer.byteLength;
+		return THREE.LoaderUtils.decodeText( new Uint8Array( buffer, from, to ) );
 
-			for ( var i = 0, j = a.length, l = b.length; i < l; i ++, j ++ ) {
+	}
 
-				a[ j ] = b[ i ];
+	function append( a, b ) {
 
-			}
+		for ( let i = 0, j = a.length, l = b.length; i < l; i ++, j ++ ) {
 
-		}
+			a[ j ] = b[ i ];
 
-		function slice( a, b, from, to ) {
+		}
 
-			for ( var i = from, j = 0; i < to; i ++, j ++ ) {
+	}
 
-				a[ j ] = b[ i ];
+	function slice( a, b, from, to ) {
 
-			}
+		for ( let i = from, j = 0; i < to; i ++, j ++ ) {
 
-			return a;
+			a[ j ] = b[ i ];
 
-		} // inject array a2 into array a1 at index
+		}
 
+		return a;
 
-		function inject( a1, index, a2 ) {
+	} // inject array a2 into array a1 at index
 
-			return a1.slice( 0, index ).concat( a2 ).concat( a1.slice( index ) );
 
-		}
+	function inject( a1, index, a2 ) {
 
-		return FBXLoader;
+		return a1.slice( 0, index ).concat( a2 ).concat( a1.slice( index ) );
 
-	}();
+	}
 
 	THREE.FBXLoader = FBXLoader;
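
Note: the getEulerOrder() helper moved to module scope above maps FBX's extrinsic rotation orders onto the reversed intrinsic strings that THREE.Euler expects (see the inline comments on the enums array). A minimal sketch of how it is consumed; the enum value and angles below are illustrative and not taken from this commit:

	// FBX RotationOrder 0 is extrinsic XYZ, which corresponds to intrinsic 'ZYX'.
	const order = getEulerOrder( 0 ); // -> 'ZYX'
	const euler = new THREE.Euler(
		THREE.MathUtils.degToRad( 30 ), // rotation about X
		THREE.MathUtils.degToRad( 45 ), // rotation about Y
		THREE.MathUtils.degToRad( 60 ), // rotation about Z
		order
	);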
 

+ 14 - 12
examples/js/loaders/GCodeLoader.js

@@ -9,19 +9,19 @@
  * @param {Manager} manager Loading manager.
  */
 
-	var GCodeLoader = function ( manager ) {
+	class GCodeLoader extends THREE.Loader {
 
-		THREE.Loader.call( this, manager );
-		this.splitLayer = false;
+		constructor( manager ) {
 
-	};
+			super( manager );
+			this.splitLayer = false;
 
-	GCodeLoader.prototype = Object.assign( Object.create( THREE.Loader.prototype ), {
-		constructor: GCodeLoader,
-		load: function ( url, onLoad, onProgress, onError ) {
+		}
+
+		load( url, onLoad, onProgress, onError ) {
 
-			var scope = this;
-			var loader = new THREE.FileLoader( scope.manager );
+			const scope = this;
+			const loader = new THREE.FileLoader( scope.manager );
 			loader.setPath( scope.path );
 			loader.setRequestHeader( scope.requestHeader );
 			loader.setWithCredentials( scope.withCredentials );
@@ -49,8 +49,9 @@
 
 			}, onProgress, onError );
 
-		},
-		parse: function ( data ) {
+		}
+
+		parse( data ) {
 
 			var state = {
 				x: 0,
@@ -248,7 +249,8 @@
 			return object;
 
 		}
-	} );
+
+	}
 
 	THREE.GCodeLoader = GCodeLoader;
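
Note: the class conversion leaves the public API of the global-script build untouched. A usage sketch; the model path and the surrounding scene variable are assumptions for illustration:

	const loader = new THREE.GCodeLoader();
	loader.splitLayer = true; // flag initialised to false in the constructor above
	loader.load( 'models/gcode/benchy.gcode', function ( object ) {

		scene.add( object ); // `scene` is assumed to exist in the host application

	} );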
 

File diff suppressed because it is too large
+ 915 - 869
examples/js/loaders/LDrawLoader.js


File diff suppressed because it is too large
+ 170 - 161
examples/js/loaders/MMDLoader.js


+ 172 - 176
examples/js/loaders/NRRDLoader.js

@@ -1,17 +1,17 @@
 ( function () {
 
-	var NRRDLoader = function ( manager ) {
+	class NRRDLoader extends THREE.Loader {
 
-		THREE.Loader.call( this, manager );
+		constructor( manager ) {
 
-	};
+			super( manager );
+
+		}
 
-	NRRDLoader.prototype = Object.assign( Object.create( THREE.Loader.prototype ), {
-		constructor: NRRDLoader,
-		load: function ( url, onLoad, onProgress, onError ) {
+		load( url, onLoad, onProgress, onError ) {
 
-			var scope = this;
-			var loader = new THREE.FileLoader( scope.manager );
+			const scope = this;
+			const loader = new THREE.FileLoader( scope.manager );
 			loader.setPath( scope.path );
 			loader.setResponseType( 'arraybuffer' );
 			loader.setRequestHeader( scope.requestHeader );
@@ -40,17 +40,18 @@
 
 			}, onProgress, onError );
 
-		},
-		parse: function ( data ) {
+		}
+
+		parse( data ) {
 
 			// this parser is largely inspired from the XTK NRRD parser : https://github.com/xtk/X
-			var _data = data;
-			var _dataPointer = 0;
+			let _data = data;
+			let _dataPointer = 0;
 
-			var _nativeLittleEndian = new Int8Array( new Int16Array( [ 1 ] ).buffer )[ 0 ] > 0;
+			const _nativeLittleEndian = new Int8Array( new Int16Array( [ 1 ] ).buffer )[ 0 ] > 0;
 
-			var _littleEndian = true;
-			var headerObject = {};
+			const _littleEndian = true;
+			const headerObject = {};
 
 			function scan( type, chunks ) {
 
@@ -60,8 +61,8 @@
 
 				}
 
-				var _chunkSize = 1;
-				var _array_type = Uint8Array;
+				let _chunkSize = 1;
+				let _array_type = Uint8Array;
 
 				switch ( type ) {
 
@@ -113,7 +114,7 @@
 				} // increase the data pointer in-place
 
 
-				var _bytes = new _array_type( _data.slice( _dataPointer, _dataPointer += chunks * _chunkSize ) ); // if required, flip the endianness of the bytes
+				let _bytes = new _array_type( _data.slice( _dataPointer, _dataPointer += chunks * _chunkSize ) ); // if required, flip the endianness of the bytes
 
 
 				if ( _nativeLittleEndian != _littleEndian ) {
@@ -138,13 +139,13 @@
 
 			function flipEndianness( array, chunkSize ) {
 
-				var u8 = new Uint8Array( array.buffer, array.byteOffset, array.byteLength );
+				const u8 = new Uint8Array( array.buffer, array.byteOffset, array.byteLength );
 
-				for ( var i = 0; i < array.byteLength; i += chunkSize ) {
+				for ( let i = 0; i < array.byteLength; i += chunkSize ) {
 
-					for ( var j = i + chunkSize - 1, k = i; j > k; j --, k ++ ) {
+					for ( let j = i + chunkSize - 1, k = i; j > k; j --, k ++ ) {
 
-						var tmp = u8[ k ];
+						const tmp = u8[ k ];
 						u8[ k ] = u8[ j ];
 						u8[ j ] = tmp;
 
@@ -159,9 +160,9 @@
 
 			function parseHeader( header ) {
 
-				var data, field, fn, i, l, lines, m, _i, _len;
+				let data, field, fn, i, l, m, _i, _len;
 
-				lines = header.split( /\r?\n/ );
+				const lines = header.split( /\r?\n/ );
 
 				for ( _i = 0, _len = lines.length; _i < _len; _i ++ ) {
 
@@ -175,7 +176,7 @@
 
 						field = m[ 1 ].trim();
 						data = m[ 2 ].trim();
-						fn = NRRDLoader.prototype.fieldFunctions[ field ];
+						fn = _fieldFunctions[ field ];
 
 						if ( fn ) {
 
@@ -229,17 +230,17 @@
 
 			function parseDataAsText( data, start, end ) {
 
-				var number = '';
+				let number = '';
 				start = start || 0;
 				end = end || data.length;
-				var value; //length of the result is the product of the sizes
+				let value; //length of the result is the product of the sizes
 
-				var lengthOfTheResult = headerObject.sizes.reduce( function ( previous, current ) {
+				const lengthOfTheResult = headerObject.sizes.reduce( function ( previous, current ) {
 
 					return previous * current;
 
 				}, 1 );
-				var base = 10;
+				let base = 10;
 
 				if ( headerObject.encoding === 'hex' ) {
 
@@ -247,9 +248,9 @@
 
 				}
 
-				var result = new headerObject.__array( lengthOfTheResult );
-				var resultIndex = 0;
-				var parsingFunction = parseInt;
+				const result = new headerObject.__array( lengthOfTheResult );
+				let resultIndex = 0;
+				let parsingFunction = parseInt;
 
 				if ( headerObject.__array === Float32Array || headerObject.__array === Float64Array ) {
 
@@ -257,7 +258,7 @@
 
 				}
 
-				for ( var i = start; i < end; i ++ ) {
+				for ( let i = start; i < end; i ++ ) {
 
 					value = data[ i ]; //if value is not a space
 
@@ -291,12 +292,12 @@
 
 			}
 
-			var _bytes = scan( 'uchar', data.byteLength );
+			const _bytes = scan( 'uchar', data.byteLength );
 
-			var _length = _bytes.length;
-			var _header = null;
-			var _data_start = 0;
-			var i;
+			const _length = _bytes.length;
+			let _header = null;
+			let _data_start = 0;
+			let i;
 
 			for ( i = 1; i < _length; i ++ ) {
 
@@ -315,9 +316,7 @@
 
 
 			parseHeader( _header );
-
-			var _data = _bytes.subarray( _data_start ); // the data without header
-
+			_data = _bytes.subarray( _data_start ); // the data without header
 
 			if ( headerObject.encoding.substring( 0, 2 ) === 'gz' ) {
 
@@ -332,9 +331,9 @@
 			} else if ( headerObject.encoding === 'raw' ) {
 
 				//we need to copy the array to create a new array buffer, else we retrieve the original arraybuffer with the header
-				var _copy = new Uint8Array( _data.length );
+				const _copy = new Uint8Array( _data.length );
 
-				for ( var i = 0; i < _data.length; i ++ ) {
+				for ( let i = 0; i < _data.length; i ++ ) {
 
 					_copy[ i ] = _data[ i ];
 
@@ -346,16 +345,16 @@
 
 
 			_data = _data.buffer;
-			var volume = new THREE.Volume();
+			const volume = new THREE.Volume();
 			volume.header = headerObject; //
 			// parse the (unzipped) data to a datastream of the correct type
 			//
 
 			volume.data = new headerObject.__array( _data ); // get the min and max intensities
 
-			var min_max = volume.computeMinMax();
-			var min = min_max[ 0 ];
-			var max = min_max[ 1 ]; // attach the scalar range to the volume
+			const min_max = volume.computeMinMax();
+			const min = min_max[ 0 ];
+			const max = min_max[ 1 ]; // attach the scalar range to the volume
 
 			volume.windowLow = min;
 			volume.windowHigh = max; // get the image dimensions
@@ -365,15 +364,15 @@
 			volume.yLength = volume.dimensions[ 1 ];
 			volume.zLength = volume.dimensions[ 2 ]; // spacing
 
-			var spacingX = new THREE.Vector3( headerObject.vectors[ 0 ][ 0 ], headerObject.vectors[ 0 ][ 1 ], headerObject.vectors[ 0 ][ 2 ] ).length();
-			var spacingY = new THREE.Vector3( headerObject.vectors[ 1 ][ 0 ], headerObject.vectors[ 1 ][ 1 ], headerObject.vectors[ 1 ][ 2 ] ).length();
-			var spacingZ = new THREE.Vector3( headerObject.vectors[ 2 ][ 0 ], headerObject.vectors[ 2 ][ 1 ], headerObject.vectors[ 2 ][ 2 ] ).length();
+			const spacingX = new THREE.Vector3( headerObject.vectors[ 0 ][ 0 ], headerObject.vectors[ 0 ][ 1 ], headerObject.vectors[ 0 ][ 2 ] ).length();
+			const spacingY = new THREE.Vector3( headerObject.vectors[ 1 ][ 0 ], headerObject.vectors[ 1 ][ 1 ], headerObject.vectors[ 1 ][ 2 ] ).length();
+			const spacingZ = new THREE.Vector3( headerObject.vectors[ 2 ][ 0 ], headerObject.vectors[ 2 ][ 1 ], headerObject.vectors[ 2 ][ 2 ] ).length();
 			volume.spacing = [ spacingX, spacingY, spacingZ ]; // Create IJKtoRAS matrix
 
 			volume.matrix = new THREE.Matrix4();
-			var _spaceX = 1;
-			var _spaceY = 1;
-			var _spaceZ = 1;
+			let _spaceX = 1;
+			let _spaceY = 1;
+			const _spaceZ = 1;
 
 			if ( headerObject.space == 'left-posterior-superior' ) {
 
@@ -392,7 +391,7 @@
 
 			} else {
 
-				var v = headerObject.vectors;
+				const v = headerObject.vectors;
 				volume.matrix.set( _spaceX * v[ 0 ][ 0 ], _spaceX * v[ 1 ][ 0 ], _spaceX * v[ 2 ][ 0 ], 0, _spaceY * v[ 0 ][ 1 ], _spaceY * v[ 1 ][ 1 ], _spaceY * v[ 2 ][ 1 ], 0, _spaceZ * v[ 0 ][ 2 ], _spaceZ * v[ 1 ][ 2 ], _spaceZ * v[ 2 ][ 2 ], 0, 0, 0, 0, 1 );
 
 			}
@@ -416,8 +415,9 @@
 
 			return volume;
 
-		},
-		parseChars: function ( array, start, end ) {
+		}
+
+		parseChars( array, start, end ) {
 
 			// without borders, use the whole array
 			if ( start === undefined ) {
@@ -432,9 +432,9 @@
 
 			}
 
-			var output = ''; // create and append the chars
+			let output = ''; // create and append the chars
 
-			var i = 0;
+			let i = 0;
 
 			for ( i = start; i < end; ++ i ) {
 
@@ -444,185 +444,181 @@
 
 			return output;
 
-		},
-		fieldFunctions: {
-			type: function ( data ) {
+		}
 
-				switch ( data ) {
+	}
 
-					case 'uchar':
-					case 'unsigned char':
-					case 'uint8':
-					case 'uint8_t':
-						this.__array = Uint8Array;
-						break;
+	const _fieldFunctions = {
+		type: function ( data ) {
 
-					case 'signed char':
-					case 'int8':
-					case 'int8_t':
-						this.__array = Int8Array;
-						break;
+			switch ( data ) {
 
-					case 'short':
-					case 'short int':
-					case 'signed short':
-					case 'signed short int':
-					case 'int16':
-					case 'int16_t':
-						this.__array = Int16Array;
-						break;
+				case 'uchar':
+				case 'unsigned char':
+				case 'uint8':
+				case 'uint8_t':
+					this.__array = Uint8Array;
+					break;
 
-					case 'ushort':
-					case 'unsigned short':
-					case 'unsigned short int':
-					case 'uint16':
-					case 'uint16_t':
-						this.__array = Uint16Array;
-						break;
+				case 'signed char':
+				case 'int8':
+				case 'int8_t':
+					this.__array = Int8Array;
+					break;
 
-					case 'int':
-					case 'signed int':
-					case 'int32':
-					case 'int32_t':
-						this.__array = Int32Array;
-						break;
+				case 'short':
+				case 'short int':
+				case 'signed short':
+				case 'signed short int':
+				case 'int16':
+				case 'int16_t':
+					this.__array = Int16Array;
+					break;
 
-					case 'uint':
-					case 'unsigned int':
-					case 'uint32':
-					case 'uint32_t':
-						this.__array = Uint32Array;
-						break;
+				case 'ushort':
+				case 'unsigned short':
+				case 'unsigned short int':
+				case 'uint16':
+				case 'uint16_t':
+					this.__array = Uint16Array;
+					break;
 
-					case 'float':
-						this.__array = Float32Array;
-						break;
+				case 'int':
+				case 'signed int':
+				case 'int32':
+				case 'int32_t':
+					this.__array = Int32Array;
+					break;
 
-					case 'double':
-						this.__array = Float64Array;
-						break;
+				case 'uint':
+				case 'unsigned int':
+				case 'uint32':
+				case 'uint32_t':
+					this.__array = Uint32Array;
+					break;
 
-					default:
-						throw new Error( 'Unsupported NRRD data type: ' + data );
+				case 'float':
+					this.__array = Float32Array;
+					break;
 
-				}
+				case 'double':
+					this.__array = Float64Array;
+					break;
 
-				return this.type = data;
+				default:
+					throw new Error( 'Unsupported NRRD data type: ' + data );
 
-			},
-			endian: function ( data ) {
+			}
 
-				return this.endian = data;
+			return this.type = data;
 
-			},
-			encoding: function ( data ) {
+		},
+		endian: function ( data ) {
 
-				return this.encoding = data;
+			return this.endian = data;
 
-			},
-			dimension: function ( data ) {
+		},
+		encoding: function ( data ) {
 
-				return this.dim = parseInt( data, 10 );
+			return this.encoding = data;
 
-			},
-			sizes: function ( data ) {
+		},
+		dimension: function ( data ) {
 
-				var i;
-				return this.sizes = function () {
+			return this.dim = parseInt( data, 10 );
 
-					var _i, _len, _ref, _results;
+		},
+		sizes: function ( data ) {
 
-					_ref = data.split( /\s+/ );
-					_results = [];
+			let i;
+			return this.sizes = function () {
 
-					for ( _i = 0, _len = _ref.length; _i < _len; _i ++ ) {
+				const _ref = data.split( /\s+/ );
 
-						i = _ref[ _i ];
+				const _results = [];
 
-						_results.push( parseInt( i, 10 ) );
+				for ( let _i = 0, _len = _ref.length; _i < _len; _i ++ ) {
 
-					}
+					i = _ref[ _i ];
 
-					return _results;
+					_results.push( parseInt( i, 10 ) );
 
-				}();
+				}
 
-			},
-			space: function ( data ) {
+				return _results;
 
-				return this.space = data;
+			}();
 
-			},
-			'space origin': function ( data ) {
+		},
+		space: function ( data ) {
 
-				return this.space_origin = data.split( '(' )[ 1 ].split( ')' )[ 0 ].split( ',' );
+			return this.space = data;
 
-			},
-			'space directions': function ( data ) {
+		},
+		'space origin': function ( data ) {
 
-				var f, parts, v;
-				parts = data.match( /\(.*?\)/g );
-				return this.vectors = function () {
+			return this.space_origin = data.split( '(' )[ 1 ].split( ')' )[ 0 ].split( ',' );
 
-					var _i, _len, _results;
+		},
+		'space directions': function ( data ) {
 
-					_results = [];
+			let f, v;
+			const parts = data.match( /\(.*?\)/g );
+			return this.vectors = function () {
 
-					for ( _i = 0, _len = parts.length; _i < _len; _i ++ ) {
+				const _results = [];
 
-						v = parts[ _i ];
+				for ( let _i = 0, _len = parts.length; _i < _len; _i ++ ) {
 
-						_results.push( function () {
+					v = parts[ _i ];
 
-							var _j, _len2, _ref, _results2;
+					_results.push( function () {
 
-							_ref = v.slice( 1, - 1 ).split( /,/ );
-							_results2 = [];
+						const _ref = v.slice( 1, - 1 ).split( /,/ );
 
-							for ( _j = 0, _len2 = _ref.length; _j < _len2; _j ++ ) {
+						const _results2 = [];
 
-								f = _ref[ _j ];
+						for ( let _j = 0, _len2 = _ref.length; _j < _len2; _j ++ ) {
 
-								_results2.push( parseFloat( f ) );
+							f = _ref[ _j ];
 
-							}
+							_results2.push( parseFloat( f ) );
 
-							return _results2;
+						}
 
-						}() );
+						return _results2;
 
-					}
+					}() );
 
-					return _results;
+				}
 
-				}();
+				return _results;
 
-			},
-			spacings: function ( data ) {
+			}();
 
-				var f, parts;
-				parts = data.split( /\s+/ );
-				return this.spacings = function () {
+		},
+		spacings: function ( data ) {
 
-					var _i,
-						_len,
-						_results = [];
+			let f;
+			const parts = data.split( /\s+/ );
+			return this.spacings = function () {
 
-					for ( _i = 0, _len = parts.length; _i < _len; _i ++ ) {
+				const _results = [];
 
-						f = parts[ _i ];
+				for ( let _i = 0, _len = parts.length; _i < _len; _i ++ ) {
 
-						_results.push( parseFloat( f ) );
+					f = parts[ _i ];
 
-					}
+					_results.push( parseFloat( f ) );
+
+				}
 
-					return _results;
+				return _results;
 
-				}();
+			}();
 
-			}
 		}
-	} );
+	};
 
 	THREE.NRRDLoader = NRRDLoader;
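
Note: the header field handlers moved from NRRDLoader.prototype.fieldFunctions to the module-scoped _fieldFunctions table are still invoked with the header object bound as `this` (fn.call( headerObject, data ) in parseHeader), so assignments such as this.__array populate the header directly. A small sketch with made-up header values:

	const headerObject = {};
	_fieldFunctions[ 'type' ].call( headerObject, 'uint16' );    // headerObject.__array === Uint16Array
	_fieldFunctions[ 'sizes' ].call( headerObject, '64 64 32' ); // headerObject.sizes -> [ 64, 64, 32 ]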
 

+ 14 - 12
examples/js/loaders/VTKLoader.js

@@ -1,17 +1,17 @@
 ( function () {
 
-	var VTKLoader = function ( manager ) {
+	class VTKLoader extends THREE.Loader {
 
-		THREE.Loader.call( this, manager );
+		constructor( manager ) {
 
-	};
+			super( manager );
 
-	VTKLoader.prototype = Object.assign( Object.create( THREE.Loader.prototype ), {
-		constructor: VTKLoader,
-		load: function ( url, onLoad, onProgress, onError ) {
+		}
+
+		load( url, onLoad, onProgress, onError ) {
 
-			var scope = this;
-			var loader = new THREE.FileLoader( scope.manager );
+			const scope = this;
+			const loader = new THREE.FileLoader( scope.manager );
 			loader.setPath( scope.path );
 			loader.setResponseType( 'arraybuffer' );
 			loader.setRequestHeader( scope.requestHeader );
@@ -40,8 +40,9 @@
 
 			}, onProgress, onError );
 
-		},
-		parse: function ( data ) {
+		}
+
+		parse( data ) {
 
 			function parseASCII( data ) {
 
@@ -511,7 +512,7 @@
 
 			function Float32Concat( first, second ) {
 
-				var firstLength = first.length,
+				const firstLength = first.length,
 					result = new Float32Array( firstLength + second.length );
 				result.set( first );
 				result.set( second, firstLength );
@@ -1121,7 +1122,8 @@
 			}
 
 		}
-	} );
+
+	}
 
 	THREE.VTKLoader = VTKLoader;
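
Note: Float32Concat, visible in the hunk above, is a small typed-array helper inside parse(). An illustration of its intended behaviour with made-up inputs:

	const first = new Float32Array( [ 0, 1, 2 ] );
	const second = new Float32Array( [ 3, 4 ] );
	const joined = Float32Concat( first, second ); // Float32Array [ 0, 1, 2, 3, 4 ]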
 

+ 51 - 53
examples/jsm/loaders/3DMLoader.js

@@ -24,52 +24,50 @@ import {
 	TextureLoader
 } from '../../../build/three.module.js';
 
-var Rhino3dmLoader = function ( manager ) {
+const _taskCache = new WeakMap();
 
-	Loader.call( this, manager );
+class Rhino3dmLoader extends Loader {
 
-	this.libraryPath = '';
-	this.libraryPending = null;
-	this.libraryBinary = null;
-	this.libraryConfig = {};
+	constructor( manager ) {
 
-	this.url = '';
+		super( manager );
 
-	this.workerLimit = 4;
-	this.workerPool = [];
-	this.workerNextTaskID = 1;
-	this.workerSourceURL = '';
-	this.workerConfig = {};
+		this.libraryPath = '';
+		this.libraryPending = null;
+		this.libraryBinary = null;
+		this.libraryConfig = {};
 
-	this.materials = [];
+		this.url = '';
 
-};
+		this.workerLimit = 4;
+		this.workerPool = [];
+		this.workerNextTaskID = 1;
+		this.workerSourceURL = '';
+		this.workerConfig = {};
 
-Rhino3dmLoader.taskCache = new WeakMap();
+		this.materials = [];
 
-Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
-
-	constructor: Rhino3dmLoader,
+	}
 
-	setLibraryPath: function ( path ) {
+	setLibraryPath( path ) {
 
 		this.libraryPath = path;
 
 		return this;
 
-	},
+	}
 
-	setWorkerLimit: function ( workerLimit ) {
+	setWorkerLimit( workerLimit ) {
 
 		this.workerLimit = workerLimit;
 
 		return this;
 
-	},
+	}
 
-	load: function ( url, onLoad, onProgress, onError ) {
+	load( url, onLoad, onProgress, onError ) {
 
-		var loader = new FileLoader( this.manager );
+		const loader = new FileLoader( this.manager );
 
 		loader.setPath( this.path );
 		loader.setResponseType( 'arraybuffer' );
@@ -81,9 +79,9 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 			// Check for an existing task using this buffer. A transferred buffer cannot be transferred
 			// again from this thread.
-			if ( Rhino3dmLoader.taskCache.has( buffer ) ) {
+			if ( _taskCache.has( buffer ) ) {
 
-				var cachedTask = Rhino3dmLoader.taskCache.get( buffer );
+				const cachedTask = _taskCache.get( buffer );
 
 				return cachedTask.promise.then( onLoad ).catch( onError );
 
@@ -96,15 +94,15 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		}, onProgress, onError );
 
 
-	},
+	}
 
-	debug: function () {
+	debug() {
 
 		console.log( 'Task load: ', this.workerPool.map( ( worker ) => worker._taskLoad ) );
 
-	},
+	}
 
-	decodeObjects: function ( buffer, url ) {
+	decodeObjects( buffer, url ) {
 
 		var worker;
 		var taskID;
@@ -147,7 +145,7 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 			} );
 
 		// Cache the task result.
-		Rhino3dmLoader.taskCache.set( buffer, {
+		_taskCache.set( buffer, {
 
 			url: url,
 			promise: objectPending
@@ -156,17 +154,17 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		return objectPending;
 
-	},
+	}
 
-	parse: function ( data, onLoad, onError ) {
+	parse( data, onLoad, onError ) {
 
 		this.decodeObjects( data, '' )
 			.then( onLoad )
 			.catch( onError );
 
-	},
+	}
 
-	_compareMaterials: function ( material ) {
+	_compareMaterials( material ) {
 
 		var mat = {};
 		mat.name = material.name;
@@ -199,9 +197,9 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		return material;
 
-	},
+	}
 
-	_createMaterial: function ( material ) {
+	_createMaterial( material ) {
 
 		if ( material === undefined ) {
 
@@ -281,9 +279,9 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		return mat;
 
-	},
+	}
 
-	_createGeometry: function ( data ) {
+	_createGeometry( data ) {
 
 		// console.log(data);
 
@@ -420,9 +418,9 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		object.userData[ 'materials' ] = this.materials;
 		return object;
 
-	},
+	}
 
-	_createObject: function ( obj, mat ) {
+	_createObject( obj, mat ) {
 
 		var loader = new BufferGeometryLoader();
 
@@ -635,9 +633,9 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		}
 
-	},
+	}
 
-	_initLibrary: function () {
+	_initLibrary() {
 
 		if ( ! this.libraryPending ) {
 
@@ -666,7 +664,7 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 					//this.libraryBinary = binaryContent;
 					this.libraryConfig.wasmBinary = binaryContent;
 
-					var fn = Rhino3dmLoader.Rhino3dmWorker.toString();
+					var fn = Rhino3dmWorker.toString();
 
 					var body = [
 						'/* rhino3dm.js */',
@@ -683,9 +681,9 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		return this.libraryPending;
 
-	},
+	}
 
-	_getWorker: function ( taskCost ) {
+	_getWorker( taskCost ) {
 
 		return this._initLibrary().then( () => {
 
@@ -743,17 +741,17 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		} );
 
-	},
+	}
 
-	_releaseTask: function ( worker, taskID ) {
+	_releaseTask( worker, taskID ) {
 
 		worker._taskLoad -= worker._taskCosts[ taskID ];
 		delete worker._callbacks[ taskID ];
 		delete worker._taskCosts[ taskID ];
 
-	},
+	}
 
-	dispose: function () {
+	dispose() {
 
 		for ( var i = 0; i < this.workerPool.length; ++ i ) {
 
@@ -767,11 +765,11 @@ Rhino3dmLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 	}
 
-} );
+}
 
 /* WEB WORKER */
 
-Rhino3dmLoader.Rhino3dmWorker = function () {
+function Rhino3dmWorker() {
 
 	var libraryPending;
 	var libraryConfig;
@@ -1416,6 +1414,6 @@ Rhino3dmLoader.Rhino3dmWorker = function () {
 
 	}
 
-};
+}
 
 export { Rhino3dmLoader };
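
Note: the static Rhino3dmLoader.taskCache becomes a module-scoped WeakMap (_taskCache) keyed by the incoming ArrayBuffer, and the worker body is now a plain Rhino3dmWorker function whose source is stringified into the worker script. A minimal sketch of the caching pattern; the helper name is invented for illustration:

	const _cache = new WeakMap();

	function decode( buffer ) {

		// Reuse the pending promise if this exact buffer was already queued;
		// a transferred buffer cannot be transferred to a worker a second time.
		if ( _cache.has( buffer ) ) return _cache.get( buffer );

		const promise = postToWorker( buffer ); // invented stand-in for the worker round-trip
		_cache.set( buffer, promise );
		return promise;

	}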

+ 116 - 169
examples/jsm/loaders/DRACOLoader.js

@@ -5,87 +5,66 @@ import {
 	Loader
 } from '../../../build/three.module.js';
 
-var DRACOLoader = function ( manager ) {
+const _taskCache = new WeakMap();
 
-	Loader.call( this, manager );
+class DRACOLoader extends Loader {
 
-	this.decoderPath = '';
-	this.decoderConfig = {};
-	this.decoderBinary = null;
-	this.decoderPending = null;
+	constructor( manager ) {
 
-	this.workerLimit = 4;
-	this.workerPool = [];
-	this.workerNextTaskID = 1;
-	this.workerSourceURL = '';
+		super( manager );
 
-	this.defaultAttributeIDs = {
-		position: 'POSITION',
-		normal: 'NORMAL',
-		color: 'COLOR',
-		uv: 'TEX_COORD'
-	};
-	this.defaultAttributeTypes = {
-		position: 'Float32Array',
-		normal: 'Float32Array',
-		color: 'Float32Array',
-		uv: 'Float32Array'
-	};
+		this.decoderPath = '';
+		this.decoderConfig = {};
+		this.decoderBinary = null;
+		this.decoderPending = null;
 
-};
+		this.workerLimit = 4;
+		this.workerPool = [];
+		this.workerNextTaskID = 1;
+		this.workerSourceURL = '';
 
-DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
+		this.defaultAttributeIDs = {
+			position: 'POSITION',
+			normal: 'NORMAL',
+			color: 'COLOR',
+			uv: 'TEX_COORD'
+		};
+		this.defaultAttributeTypes = {
+			position: 'Float32Array',
+			normal: 'Float32Array',
+			color: 'Float32Array',
+			uv: 'Float32Array'
+		};
 
-	constructor: DRACOLoader,
+	}
 
-	setDecoderPath: function ( path ) {
+	setDecoderPath( path ) {
 
 		this.decoderPath = path;
 
 		return this;
 
-	},
+	}
 
-	setDecoderConfig: function ( config ) {
+	setDecoderConfig( config ) {
 
 		this.decoderConfig = config;
 
 		return this;
 
-	},
+	}
 
-	setWorkerLimit: function ( workerLimit ) {
+	setWorkerLimit( workerLimit ) {
 
 		this.workerLimit = workerLimit;
 
 		return this;
 
-	},
-
-	/** @deprecated */
-	setVerbosity: function () {
-
-		console.warn( 'THREE.DRACOLoader: The .setVerbosity() method has been removed.' );
-
-	},
-
-	/** @deprecated */
-	setDrawMode: function () {
-
-		console.warn( 'THREE.DRACOLoader: The .setDrawMode() method has been removed.' );
-
-	},
-
-	/** @deprecated */
-	setSkipDequantization: function () {
-
-		console.warn( 'THREE.DRACOLoader: The .setSkipDequantization() method has been removed.' );
-
-	},
+	}
 
-	load: function ( url, onLoad, onProgress, onError ) {
+	load( url, onLoad, onProgress, onError ) {
 
-		var loader = new FileLoader( this.manager );
+		const loader = new FileLoader( this.manager );
 
 		loader.setPath( this.path );
 		loader.setResponseType( 'arraybuffer' );
@@ -94,7 +73,7 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		loader.load( url, ( buffer ) => {
 
-			var taskConfig = {
+			const taskConfig = {
 				attributeIDs: this.defaultAttributeIDs,
 				attributeTypes: this.defaultAttributeTypes,
 				useUniqueIDs: false
@@ -106,12 +85,12 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		}, onProgress, onError );
 
-	},
+	}
 
 	/** @deprecated Kept for backward-compatibility with previous DRACOLoader versions. */
-	decodeDracoFile: function ( buffer, callback, attributeIDs, attributeTypes ) {
+	decodeDracoFile( buffer, callback, attributeIDs, attributeTypes ) {
 
-		var taskConfig = {
+		const taskConfig = {
 			attributeIDs: attributeIDs || this.defaultAttributeIDs,
 			attributeTypes: attributeTypes || this.defaultAttributeTypes,
 			useUniqueIDs: !! attributeIDs
@@ -119,16 +98,16 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		this.decodeGeometry( buffer, taskConfig ).then( callback );
 
-	},
+	}
 
-	decodeGeometry: function ( buffer, taskConfig ) {
+	decodeGeometry( buffer, taskConfig ) {
 
 		// TODO: For backward-compatibility, support 'attributeTypes' objects containing
 		// references (rather than names) to typed array constructors. These must be
 		// serialized before sending them to the worker.
-		for ( var attribute in taskConfig.attributeTypes ) {
+		for ( const attribute in taskConfig.attributeTypes ) {
 
-			var type = taskConfig.attributeTypes[ attribute ];
+			const type = taskConfig.attributeTypes[ attribute ];
 
 			if ( type.BYTES_PER_ELEMENT !== undefined ) {
 
@@ -140,13 +119,13 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		//
 
-		var taskKey = JSON.stringify( taskConfig );
+		const taskKey = JSON.stringify( taskConfig );
 
 		// Check for an existing task using this buffer. A transferred buffer cannot be transferred
 		// again from this thread.
-		if ( DRACOLoader.taskCache.has( buffer ) ) {
+		if ( _taskCache.has( buffer ) ) {
 
-			var cachedTask = DRACOLoader.taskCache.get( buffer );
+			const cachedTask = _taskCache.get( buffer );
 
 			if ( cachedTask.key === taskKey ) {
 
@@ -171,13 +150,13 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		//
 
-		var worker;
-		var taskID = this.workerNextTaskID ++;
-		var taskCost = buffer.byteLength;
+		let worker;
+		const taskID = this.workerNextTaskID ++;
+		const taskCost = buffer.byteLength;
 
 		// Obtain a worker and assign a task, and construct a geometry instance
 		// when the task completes.
-		var geometryPending = this._getWorker( taskID, taskCost )
+		const geometryPending = this._getWorker( taskID, taskCost )
 			.then( ( _worker ) => {
 
 				worker = _worker;
@@ -212,7 +191,7 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 			} );
 
 		// Cache the task result.
-		DRACOLoader.taskCache.set( buffer, {
+		_taskCache.set( buffer, {
 
 			key: taskKey,
 			promise: geometryPending
@@ -221,11 +200,11 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		return geometryPending;
 
-	},
+	}
 
-	_createGeometry: function ( geometryData ) {
+	_createGeometry( geometryData ) {
 
-		var geometry = new BufferGeometry();
+		const geometry = new BufferGeometry();
 
 		if ( geometryData.index ) {
 
@@ -233,12 +212,12 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		}
 
-		for ( var i = 0; i < geometryData.attributes.length; i ++ ) {
+		for ( let i = 0; i < geometryData.attributes.length; i ++ ) {
 
-			var attribute = geometryData.attributes[ i ];
-			var name = attribute.name;
-			var array = attribute.array;
-			var itemSize = attribute.itemSize;
+			const attribute = geometryData.attributes[ i ];
+			const name = attribute.name;
+			const array = attribute.array;
+			const itemSize = attribute.itemSize;
 
 			geometry.setAttribute( name, new BufferAttribute( array, itemSize ) );
 
@@ -246,11 +225,11 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		return geometry;
 
-	},
+	}
 
-	_loadLibrary: function ( url, responseType ) {
+	_loadLibrary( url, responseType ) {
 
-		var loader = new FileLoader( this.manager );
+		const loader = new FileLoader( this.manager );
 		loader.setPath( this.decoderPath );
 		loader.setResponseType( responseType );
 		loader.setWithCredentials( this.withCredentials );
@@ -261,22 +240,22 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		} );
 
-	},
+	}
 
-	preload: function () {
+	preload() {
 
 		this._initDecoder();
 
 		return this;
 
-	},
+	}
 
-	_initDecoder: function () {
+	_initDecoder() {
 
 		if ( this.decoderPending ) return this.decoderPending;
 
-		var useJS = typeof WebAssembly !== 'object' || this.decoderConfig.type === 'js';
-		var librariesPending = [];
+		const useJS = typeof WebAssembly !== 'object' || this.decoderConfig.type === 'js';
+		const librariesPending = [];
 
 		if ( useJS ) {
 
@@ -292,7 +271,7 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		this.decoderPending = Promise.all( librariesPending )
 			.then( ( libraries ) => {
 
-				var jsContent = libraries[ 0 ];
+				const jsContent = libraries[ 0 ];
 
 				if ( ! useJS ) {
 
@@ -300,9 +279,9 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 				}
 
-				var fn = DRACOLoader.DRACOWorker.toString();
+				const fn = DRACOWorker.toString();
 
-				var body = [
+				const body = [
 					'/* draco decoder */',
 					jsContent,
 					'',
@@ -316,15 +295,15 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		return this.decoderPending;
 
-	},
+	}
 
-	_getWorker: function ( taskID, taskCost ) {
+	_getWorker( taskID, taskCost ) {
 
 		return this._initDecoder().then( () => {
 
 			if ( this.workerPool.length < this.workerLimit ) {
 
-				var worker = new Worker( this.workerSourceURL );
+				const worker = new Worker( this.workerSourceURL );
 
 				worker._callbacks = {};
 				worker._taskCosts = {};
@@ -334,7 +313,7 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 				worker.onmessage = function ( e ) {
 
-					var message = e.data;
+					const message = e.data;
 
 					switch ( message.type ) {
 
@@ -365,32 +344,32 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 			}
 
-			var worker = this.workerPool[ this.workerPool.length - 1 ];
+			const worker = this.workerPool[ this.workerPool.length - 1 ];
 			worker._taskCosts[ taskID ] = taskCost;
 			worker._taskLoad += taskCost;
 			return worker;
 
 		} );
 
-	},
+	}
 
-	_releaseTask: function ( worker, taskID ) {
+	_releaseTask( worker, taskID ) {
 
 		worker._taskLoad -= worker._taskCosts[ taskID ];
 		delete worker._callbacks[ taskID ];
 		delete worker._taskCosts[ taskID ];
 
-	},
+	}
 
-	debug: function () {
+	debug() {
 
 		console.log( 'Task load: ', this.workerPool.map( ( worker ) => worker._taskLoad ) );
 
-	},
+	}
 
-	dispose: function () {
+	dispose() {
 
-		for ( var i = 0; i < this.workerPool.length; ++ i ) {
+		for ( let i = 0; i < this.workerPool.length; ++ i ) {
 
 			this.workerPool[ i ].terminate();
 
@@ -402,18 +381,18 @@ DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 	}
 
-} );
+}
 
 /* WEB WORKER */
 
-DRACOLoader.DRACOWorker = function () {
+function DRACOWorker() {
 
-	var decoderConfig;
-	var decoderPending;
+	let decoderConfig;
+	let decoderPending;
 
 	onmessage = function ( e ) {
 
-		var message = e.data;
+		const message = e.data;
 
 		switch ( message.type ) {
 
@@ -434,20 +413,20 @@ DRACOLoader.DRACOWorker = function () {
 				break;
 
 			case 'decode':
-				var buffer = message.buffer;
-				var taskConfig = message.taskConfig;
+				const buffer = message.buffer;
+				const taskConfig = message.taskConfig;
 				decoderPending.then( ( module ) => {
 
-					var draco = module.draco;
-					var decoder = new draco.Decoder();
-					var decoderBuffer = new draco.DecoderBuffer();
+					const draco = module.draco;
+					const decoder = new draco.Decoder();
+					const decoderBuffer = new draco.DecoderBuffer();
 					decoderBuffer.Init( new Int8Array( buffer ), buffer.byteLength );
 
 					try {
 
-						var geometry = decodeGeometry( draco, decoder, decoderBuffer, taskConfig );
+						const geometry = decodeGeometry( draco, decoder, decoderBuffer, taskConfig );
 
-						var buffers = geometry.attributes.map( ( attr ) => attr.array.buffer );
+						const buffers = geometry.attributes.map( ( attr ) => attr.array.buffer );
 
 						if ( geometry.index ) buffers.push( geometry.index.array.buffer );
 
@@ -475,13 +454,13 @@ DRACOLoader.DRACOWorker = function () {
 
 	function decodeGeometry( draco, decoder, decoderBuffer, taskConfig ) {
 
-		var attributeIDs = taskConfig.attributeIDs;
-		var attributeTypes = taskConfig.attributeTypes;
+		const attributeIDs = taskConfig.attributeIDs;
+		const attributeTypes = taskConfig.attributeTypes;
 
-		var dracoGeometry;
-		var decodingStatus;
+		let dracoGeometry;
+		let decodingStatus;
 
-		var geometryType = decoder.GetEncodedGeometryType( decoderBuffer );
+		const geometryType = decoder.GetEncodedGeometryType( decoderBuffer );
 
 		if ( geometryType === draco.TRIANGULAR_MESH ) {
 
@@ -505,15 +484,15 @@ DRACOLoader.DRACOWorker = function () {
 
 		}
 
-		var geometry = { index: null, attributes: [] };
+		const geometry = { index: null, attributes: [] };
 
 		// Gather all vertex attributes.
-		for ( var attributeName in attributeIDs ) {
+		for ( const attributeName in attributeIDs ) {
 
-			var attributeType = self[ attributeTypes[ attributeName ] ];
+			const attributeType = self[ attributeTypes[ attributeName ] ];
 
-			var attribute;
-			var attributeID;
+			let attribute;
+			let attributeID;
 
 			// A Draco file may be created with default vertex attributes, whose attribute IDs
 			// are mapped 1:1 from their semantic name (POSITION, NORMAL, ...). Alternatively,
@@ -553,13 +532,13 @@ DRACOLoader.DRACOWorker = function () {
 
 	function decodeIndex( draco, decoder, dracoGeometry ) {
 
-		var numFaces = dracoGeometry.num_faces();
-		var numIndices = numFaces * 3;
-		var byteLength = numIndices * 4;
+		const numFaces = dracoGeometry.num_faces();
+		const numIndices = numFaces * 3;
+		const byteLength = numIndices * 4;
 
-		var ptr = draco._malloc( byteLength );
+		const ptr = draco._malloc( byteLength );
 		decoder.GetTrianglesUInt32Array( dracoGeometry, byteLength, ptr );
-		var index = new Uint32Array( draco.HEAPF32.buffer, ptr, numIndices ).slice();
+		const index = new Uint32Array( draco.HEAPF32.buffer, ptr, numIndices ).slice();
 		draco._free( ptr );
 
 		return { array: index, itemSize: 1 };
@@ -568,15 +547,15 @@ DRACOLoader.DRACOWorker = function () {
 
 	function decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute ) {
 
-		var numComponents = attribute.num_components();
-		var numPoints = dracoGeometry.num_points();
-		var numValues = numPoints * numComponents;
-		var byteLength = numValues * attributeType.BYTES_PER_ELEMENT;
-		var dataType = getDracoDataType( draco, attributeType );
+		const numComponents = attribute.num_components();
+		const numPoints = dracoGeometry.num_points();
+		const numValues = numPoints * numComponents;
+		const byteLength = numValues * attributeType.BYTES_PER_ELEMENT;
+		const dataType = getDracoDataType( draco, attributeType );
 
-		var ptr = draco._malloc( byteLength );
+		const ptr = draco._malloc( byteLength );
 		decoder.GetAttributeDataArrayForAllPoints( dracoGeometry, attribute, dataType, byteLength, ptr );
-		var array = new attributeType( draco.HEAPF32.buffer, ptr, numValues ).slice();
+		const array = new attributeType( draco.HEAPF32.buffer, ptr, numValues ).slice();
 		draco._free( ptr );
 
 		return {
@@ -603,38 +582,6 @@ DRACOLoader.DRACOWorker = function () {
 
 	}
 
-};
-
-DRACOLoader.taskCache = new WeakMap();
-
-/** Deprecated static methods */
-
-/** @deprecated */
-DRACOLoader.setDecoderPath = function () {
-
-	console.warn( 'THREE.DRACOLoader: The .setDecoderPath() method has been removed. Use instance methods.' );
-
-};
-
-/** @deprecated */
-DRACOLoader.setDecoderConfig = function () {
-
-	console.warn( 'THREE.DRACOLoader: The .setDecoderConfig() method has been removed. Use instance methods.' );
-
-};
-
-/** @deprecated */
-DRACOLoader.releaseDecoderModule = function () {
-
-	console.warn( 'THREE.DRACOLoader: The .releaseDecoderModule() method has been removed. Use instance methods.' );
-
-};
-
-/** @deprecated */
-DRACOLoader.getDecoderModule = function () {
-
-	console.warn( 'THREE.DRACOLoader: The .getDecoderModule() method has been removed. Use instance methods.' );
-
-};
+}
 
 export { DRACOLoader };
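
Note: usage of the converted DRACOLoader is unchanged; the deprecated static helpers removed at the bottom of the diff were console.warn stubs only. A short sketch; the decoder path and model URL are assumptions:

	import { DRACOLoader } from './jsm/loaders/DRACOLoader.js';

	const dracoLoader = new DRACOLoader();
	dracoLoader.setDecoderPath( 'js/libs/draco/' ); // folder containing the Draco decoder files
	dracoLoader.preload();                          // optional: start fetching the decoder up front
	dracoLoader.load( 'models/draco/bunny.drc', function ( geometry ) {

		geometry.computeVertexNormals();
		// build a THREE.Mesh from the decoded BufferGeometry here

	} );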

+ 12 - 14
examples/jsm/loaders/EXRLoader.js

@@ -87,19 +87,17 @@ import * as fflate from '../libs/fflate.module.min.js';
 
 // // End of OpenEXR license -------------------------------------------------
 
-var EXRLoader = function ( manager ) {
+class EXRLoader extends DataTextureLoader {
 
-	DataTextureLoader.call( this, manager );
+	constructor( manager ) {
 
-	this.type = FloatType;
+		super( manager );
 
-};
+		this.type = FloatType;
 
-EXRLoader.prototype = Object.assign( Object.create( DataTextureLoader.prototype ), {
-
-	constructor: EXRLoader,
+	}
 
-	parse: function ( buffer ) {
+	parse( buffer ) {
 
 		const USHORT_RANGE = ( 1 << 16 );
 		const BITMAP_SIZE = ( USHORT_RANGE >> 3 );
@@ -2366,16 +2364,16 @@ EXRLoader.prototype = Object.assign( Object.create( DataTextureLoader.prototype
 			type: this.type
 		};
 
-	},
+	}
 
-	setDataType: function ( value ) {
+	setDataType( value ) {
 
 		this.type = value;
 		return this;
 
-	},
+	}
 
-	load: function ( url, onLoad, onProgress, onError ) {
+	load( url, onLoad, onProgress, onError ) {
 
 		function onLoadCallback( texture, texData ) {
 
@@ -2406,10 +2404,10 @@ EXRLoader.prototype = Object.assign( Object.create( DataTextureLoader.prototype
 
 		}
 
-		return DataTextureLoader.prototype.load.call( this, url, onLoadCallback, onProgress, onError );
+		return super.load( url, onLoadCallback, onProgress, onError );
 
 	}
 
-} );
+}
 
 export { EXRLoader };
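
Note: EXRLoader keeps its chainable setDataType() after the conversion, and load() now delegates via super.load() instead of DataTextureLoader.prototype.load.call(). A usage sketch; the texture path is an assumption:

	import { EXRLoader } from './jsm/loaders/EXRLoader.js';
	import { HalfFloatType } from '../../../build/three.module.js';

	new EXRLoader()
		.setDataType( HalfFloatType ) // default is FloatType, set in the constructor above
		.load( 'textures/memorial.exr', function ( texture, textureData ) {

			// texture is ready to be assigned to a material map or used as an environment

		} );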

File diff suppressed because it is too large
+ 776 - 784
examples/jsm/loaders/FBXLoader.js


+ 11 - 13
examples/jsm/loaders/GCodeLoader.js

@@ -18,23 +18,21 @@ import {
  * @param {Manager} manager Loading manager.
  */
 
-var GCodeLoader = function ( manager ) {
+class GCodeLoader extends Loader {
 
-	Loader.call( this, manager );
+	constructor( manager ) {
 
-	this.splitLayer = false;
+		super( manager );
 
-};
+		this.splitLayer = false;
 
-GCodeLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
-
-	constructor: GCodeLoader,
+	}
 
-	load: function ( url, onLoad, onProgress, onError ) {
+	load( url, onLoad, onProgress, onError ) {
 
-		var scope = this;
+		const scope = this;
 
-		var loader = new FileLoader( scope.manager );
+		const loader = new FileLoader( scope.manager );
 		loader.setPath( scope.path );
 		loader.setRequestHeader( scope.requestHeader );
 		loader.setWithCredentials( scope.withCredentials );
@@ -62,9 +60,9 @@ GCodeLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		}, onProgress, onError );
 
-	},
+	}
 
-	parse: function ( data ) {
+	parse( data ) {
 
 		var state = { x: 0, y: 0, z: 0, e: 0, f: 0, extruding: false, relative: false };
 		var layers = [];
@@ -260,6 +258,6 @@ GCodeLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 	}
 
-} );
+}
 
 export { GCodeLoader };
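
Note: the parse() state object shown at the top of the hunk tracks absolute position, extrusion and feed rate between G-code lines; the move handling itself sits outside the hunks shown here. A hypothetical walk-through of a single move, based on the loader's general behaviour rather than on this diff:

	// initial state:  { x: 0, y: 0, z: 0, e: 0, f: 0, extruding: false, relative: false }
	// after 'G1 X10 Y20 E0.5 F1500' the state becomes approximately
	//                 { x: 10, y: 20, z: 0, e: 0.5, f: 1500, extruding: true, relative: false }
	// extruding is true because E increased, so this move contributes a visible line segment.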

File diff suppressed because it is too large
+ 897 - 879
examples/jsm/loaders/LDrawLoader.js


File diff suppressed because it is too large
+ 619 - 636
examples/jsm/loaders/MMDLoader.js


+ 169 - 173
examples/jsm/loaders/NRRDLoader.js

@@ -7,21 +7,19 @@ import {
 import * as fflate from '../libs/fflate.module.min.js';
 import { Volume } from '../misc/Volume.js';
 
-var NRRDLoader = function ( manager ) {
+class NRRDLoader extends Loader {
 
-	Loader.call( this, manager );
+	constructor( manager ) {
 
-};
-
-NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
+		super( manager );
 
-	constructor: NRRDLoader,
+	}
 
-	load: function ( url, onLoad, onProgress, onError ) {
+	load( url, onLoad, onProgress, onError ) {
 
-		var scope = this;
+		const scope = this;
 
-		var loader = new FileLoader( scope.manager );
+		const loader = new FileLoader( scope.manager );
 		loader.setPath( scope.path );
 		loader.setResponseType( 'arraybuffer' );
 		loader.setRequestHeader( scope.requestHeader );
@@ -50,21 +48,21 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		}, onProgress, onError );
 
-	},
+	}
 
-	parse: function ( data ) {
+	parse( data ) {
 
 		// this parser is largely inspired from the XTK NRRD parser : https://github.com/xtk/X
 
-		var _data = data;
+		let _data = data;
 
-		var _dataPointer = 0;
+		let _dataPointer = 0;
 
-		var _nativeLittleEndian = new Int8Array( new Int16Array( [ 1 ] ).buffer )[ 0 ] > 0;
+		const _nativeLittleEndian = new Int8Array( new Int16Array( [ 1 ] ).buffer )[ 0 ] > 0;
 
-		var _littleEndian = true;
+		const _littleEndian = true;
 
-		var headerObject = {};
+		const headerObject = {};
 
 		function scan( type, chunks ) {
 
@@ -74,8 +72,8 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 			}
 
-			var _chunkSize = 1;
-			var _array_type = Uint8Array;
+			let _chunkSize = 1;
+			let _array_type = Uint8Array;
 
 			switch ( type ) {
 
@@ -119,7 +117,7 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 			}
 
 			// increase the data pointer in-place
-			var _bytes = new _array_type( _data.slice( _dataPointer,
+			let _bytes = new _array_type( _data.slice( _dataPointer,
 				_dataPointer += chunks * _chunkSize ) );
 
 			// if required, flip the endianness of the bytes
@@ -146,12 +144,12 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		function flipEndianness( array, chunkSize ) {
 
-			var u8 = new Uint8Array( array.buffer, array.byteOffset, array.byteLength );
-			for ( var i = 0; i < array.byteLength; i += chunkSize ) {
+			const u8 = new Uint8Array( array.buffer, array.byteOffset, array.byteLength );
+			for ( let i = 0; i < array.byteLength; i += chunkSize ) {
 
-				for ( var j = i + chunkSize - 1, k = i; j > k; j --, k ++ ) {
+				for ( let j = i + chunkSize - 1, k = i; j > k; j --, k ++ ) {
 
-					var tmp = u8[ k ];
+					const tmp = u8[ k ];
 					u8[ k ] = u8[ j ];
 					u8[ j ] = tmp;
 
@@ -166,8 +164,8 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		//parse the header
 		function parseHeader( header ) {
 
-			var data, field, fn, i, l, lines, m, _i, _len;
-			lines = header.split( /\r?\n/ );
+			let data, field, fn, i, l, m, _i, _len;
+			const lines = header.split( /\r?\n/ );
 			for ( _i = 0, _len = lines.length; _i < _len; _i ++ ) {
 
 				l = lines[ _i ];
@@ -180,7 +178,7 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 					field = m[ 1 ].trim();
 					data = m[ 2 ].trim();
-					fn = NRRDLoader.prototype.fieldFunctions[ field ];
+					fn = _fieldFunctions[ field ];
 					if ( fn ) {
 
 						fn.call( headerObject, data );
@@ -233,34 +231,34 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		//parse the data when registred as one of this type : 'text', 'ascii', 'txt'
 		function parseDataAsText( data, start, end ) {
 
-			var number = '';
+			let number = '';
 			start = start || 0;
 			end = end || data.length;
-			var value;
+			let value;
 			//length of the result is the product of the sizes
-			var lengthOfTheResult = headerObject.sizes.reduce( function ( previous, current ) {
+			const lengthOfTheResult = headerObject.sizes.reduce( function ( previous, current ) {
 
 				return previous * current;
 
 			}, 1 );
 
-			var base = 10;
+			let base = 10;
 			if ( headerObject.encoding === 'hex' ) {
 
 				base = 16;
 
 			}
 
-			var result = new headerObject.__array( lengthOfTheResult );
-			var resultIndex = 0;
-			var parsingFunction = parseInt;
+			const result = new headerObject.__array( lengthOfTheResult );
+			let resultIndex = 0;
+			let parsingFunction = parseInt;
 			if ( headerObject.__array === Float32Array || headerObject.__array === Float64Array ) {
 
 				parsingFunction = parseFloat;
 
 			}
 
-			for ( var i = start; i < end; i ++ ) {
+			for ( let i = start; i < end; i ++ ) {
 
 				value = data[ i ];
 				//if value is not a space
@@ -294,11 +292,11 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		}
 
-		var _bytes = scan( 'uchar', data.byteLength );
-		var _length = _bytes.length;
-		var _header = null;
-		var _data_start = 0;
-		var i;
+		const _bytes = scan( 'uchar', data.byteLength );
+		const _length = _bytes.length;
+		let _header = null;
+		let _data_start = 0;
+		let i;
 		for ( i = 1; i < _length; i ++ ) {
 
 			if ( _bytes[ i - 1 ] == 10 && _bytes[ i ] == 10 ) {
@@ -317,7 +315,7 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		// parse the header
 		parseHeader( _header );
 
-		var _data = _bytes.subarray( _data_start ); // the data without header
+		_data = _bytes.subarray( _data_start ); // the data without header
 		if ( headerObject.encoding.substring( 0, 2 ) === 'gz' ) {
 
 			// we need to decompress the datastream
@@ -331,9 +329,9 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		} else if ( headerObject.encoding === 'raw' ) {
 
 			//we need to copy the array to create a new array buffer, else we retrieve the original arraybuffer with the header
-			var _copy = new Uint8Array( _data.length );
+			const _copy = new Uint8Array( _data.length );
 
-			for ( var i = 0; i < _data.length; i ++ ) {
+			for ( let i = 0; i < _data.length; i ++ ) {
 
 				_copy[ i ] = _data[ i ];
 
@@ -346,16 +344,16 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		// .. let's use the underlying array buffer
 		_data = _data.buffer;
 
-		var volume = new Volume();
+		const volume = new Volume();
 		volume.header = headerObject;
 		//
 		// parse the (unzipped) data to a datastream of the correct type
 		//
 		volume.data = new headerObject.__array( _data );
 		// get the min and max intensities
-		var min_max = volume.computeMinMax();
-		var min = min_max[ 0 ];
-		var max = min_max[ 1 ];
+		const min_max = volume.computeMinMax();
+		const min = min_max[ 0 ];
+		const max = min_max[ 1 ];
 		// attach the scalar range to the volume
 		volume.windowLow = min;
 		volume.windowHigh = max;
@@ -366,11 +364,11 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		volume.yLength = volume.dimensions[ 1 ];
 		volume.zLength = volume.dimensions[ 2 ];
 		// spacing
-		var spacingX = ( new Vector3( headerObject.vectors[ 0 ][ 0 ], headerObject.vectors[ 0 ][ 1 ],
+		const spacingX = ( new Vector3( headerObject.vectors[ 0 ][ 0 ], headerObject.vectors[ 0 ][ 1 ],
 			headerObject.vectors[ 0 ][ 2 ] ) ).length();
-		var spacingY = ( new Vector3( headerObject.vectors[ 1 ][ 0 ], headerObject.vectors[ 1 ][ 1 ],
+		const spacingY = ( new Vector3( headerObject.vectors[ 1 ][ 0 ], headerObject.vectors[ 1 ][ 1 ],
 			headerObject.vectors[ 1 ][ 2 ] ) ).length();
-		var spacingZ = ( new Vector3( headerObject.vectors[ 2 ][ 0 ], headerObject.vectors[ 2 ][ 1 ],
+		const spacingZ = ( new Vector3( headerObject.vectors[ 2 ][ 0 ], headerObject.vectors[ 2 ][ 1 ],
 			headerObject.vectors[ 2 ][ 2 ] ) ).length();
 		volume.spacing = [ spacingX, spacingY, spacingZ ];
 
@@ -378,9 +376,9 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 		// Create IJKtoRAS matrix
 		volume.matrix = new Matrix4();
 
-		var _spaceX = 1;
-		var _spaceY = 1;
-		var _spaceZ = 1;
+		let _spaceX = 1;
+		let _spaceY = 1;
+		const _spaceZ = 1;
 
 		if ( headerObject.space == 'left-posterior-superior' ) {
 
@@ -404,7 +402,7 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		} else {
 
-			var v = headerObject.vectors;
+			const v = headerObject.vectors;
 
 			volume.matrix.set(
 				_spaceX * v[ 0 ][ 0 ], _spaceX * v[ 1 ][ 0 ], _spaceX * v[ 2 ][ 0 ], 0,
@@ -434,9 +432,9 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		return volume;
 
-	},
+	}
 
-	parseChars: function ( array, start, end ) {
+	parseChars( array, start, end ) {
 
 		// without borders, use the whole array
 		if ( start === undefined ) {
@@ -451,9 +449,9 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		}
 
-		var output = '';
+		let output = '';
 		// create and append the chars
-		var i = 0;
+		let i = 0;
 		for ( i = start; i < end; ++ i ) {
 
 			output += String.fromCharCode( array[ i ] );
@@ -462,178 +460,176 @@ NRRDLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		return output;
 
-	},
+	}
 
-	fieldFunctions: {
+}
 
-		type: function ( data ) {
+const _fieldFunctions = {
 
-			switch ( data ) {
+	type: function ( data ) {
 
-				case 'uchar':
-				case 'unsigned char':
-				case 'uint8':
-				case 'uint8_t':
-					this.__array = Uint8Array;
-					break;
-				case 'signed char':
-				case 'int8':
-				case 'int8_t':
-					this.__array = Int8Array;
-					break;
-				case 'short':
-				case 'short int':
-				case 'signed short':
-				case 'signed short int':
-				case 'int16':
-				case 'int16_t':
-					this.__array = Int16Array;
-					break;
-				case 'ushort':
-				case 'unsigned short':
-				case 'unsigned short int':
-				case 'uint16':
-				case 'uint16_t':
-					this.__array = Uint16Array;
-					break;
-				case 'int':
-				case 'signed int':
-				case 'int32':
-				case 'int32_t':
-					this.__array = Int32Array;
-					break;
-				case 'uint':
-				case 'unsigned int':
-				case 'uint32':
-				case 'uint32_t':
-					this.__array = Uint32Array;
-					break;
-				case 'float':
-					this.__array = Float32Array;
-					break;
-				case 'double':
-					this.__array = Float64Array;
-					break;
-				default:
-					throw new Error( 'Unsupported NRRD data type: ' + data );
+		switch ( data ) {
 
-			}
+			case 'uchar':
+			case 'unsigned char':
+			case 'uint8':
+			case 'uint8_t':
+				this.__array = Uint8Array;
+				break;
+			case 'signed char':
+			case 'int8':
+			case 'int8_t':
+				this.__array = Int8Array;
+				break;
+			case 'short':
+			case 'short int':
+			case 'signed short':
+			case 'signed short int':
+			case 'int16':
+			case 'int16_t':
+				this.__array = Int16Array;
+				break;
+			case 'ushort':
+			case 'unsigned short':
+			case 'unsigned short int':
+			case 'uint16':
+			case 'uint16_t':
+				this.__array = Uint16Array;
+				break;
+			case 'int':
+			case 'signed int':
+			case 'int32':
+			case 'int32_t':
+				this.__array = Int32Array;
+				break;
+			case 'uint':
+			case 'unsigned int':
+			case 'uint32':
+			case 'uint32_t':
+				this.__array = Uint32Array;
+				break;
+			case 'float':
+				this.__array = Float32Array;
+				break;
+			case 'double':
+				this.__array = Float64Array;
+				break;
+			default:
+				throw new Error( 'Unsupported NRRD data type: ' + data );
 
-			return this.type = data;
+		}
 
-		},
+		return this.type = data;
 
-		endian: function ( data ) {
+	},
 
-			return this.endian = data;
+	endian: function ( data ) {
 
-		},
+		return this.endian = data;
 
-		encoding: function ( data ) {
+	},
 
-			return this.encoding = data;
+	encoding: function ( data ) {
 
-		},
+		return this.encoding = data;
 
-		dimension: function ( data ) {
+	},
 
-			return this.dim = parseInt( data, 10 );
+	dimension: function ( data ) {
 
-		},
+		return this.dim = parseInt( data, 10 );
 
-		sizes: function ( data ) {
+	},
 
-			var i;
-			return this.sizes = ( function () {
+	sizes: function ( data ) {
 
-				var _i, _len, _ref, _results;
-				_ref = data.split( /\s+/ );
-				_results = [];
+		let i;
+		return this.sizes = ( function () {
 
-				for ( _i = 0, _len = _ref.length; _i < _len; _i ++ ) {
+			const _ref = data.split( /\s+/ );
+			const _results = [];
 
-					i = _ref[ _i ];
-					_results.push( parseInt( i, 10 ) );
+			for ( let _i = 0, _len = _ref.length; _i < _len; _i ++ ) {
 
-				}
+				i = _ref[ _i ];
+				_results.push( parseInt( i, 10 ) );
 
-				return _results;
+			}
 
-			} )();
+			return _results;
 
-		},
+		} )();
 
-		space: function ( data ) {
+	},
 
-			return this.space = data;
+	space: function ( data ) {
 
-		},
+		return this.space = data;
 
-		'space origin': function ( data ) {
+	},
 
-			return this.space_origin = data.split( '(' )[ 1 ].split( ')' )[ 0 ].split( ',' );
+	'space origin': function ( data ) {
 
-		},
+		return this.space_origin = data.split( '(' )[ 1 ].split( ')' )[ 0 ].split( ',' );
 
-		'space directions': function ( data ) {
+	},
 
-			var f, parts, v;
-			parts = data.match( /\(.*?\)/g );
-			return this.vectors = ( function () {
+	'space directions': function ( data ) {
 
-				var _i, _len, _results;
-				_results = [];
+		let f, v;
+		const parts = data.match( /\(.*?\)/g );
+		return this.vectors = ( function () {
 
-				for ( _i = 0, _len = parts.length; _i < _len; _i ++ ) {
+			const _results = [];
 
-					v = parts[ _i ];
-					_results.push( ( function () {
+			for ( let _i = 0, _len = parts.length; _i < _len; _i ++ ) {
 
-						var _j, _len2, _ref, _results2;
-						_ref = v.slice( 1, - 1 ).split( /,/ );
-						_results2 = [];
+				v = parts[ _i ];
+				_results.push( ( function () {
 
-						for ( _j = 0, _len2 = _ref.length; _j < _len2; _j ++ ) {
+					const _ref = v.slice( 1, - 1 ).split( /,/ );
+					const _results2 = [];
 
-							f = _ref[ _j ];
-							_results2.push( parseFloat( f ) );
+					for ( let _j = 0, _len2 = _ref.length; _j < _len2; _j ++ ) {
 
-						}
+						f = _ref[ _j ];
+						_results2.push( parseFloat( f ) );
 
-						return _results2;
+					}
 
-					} )() );
+					return _results2;
 
-				}
+				} )() );
+
+			}
 
-				return _results;
+			return _results;
 
-			} )();
+		} )();
 
-		},
+	},
 
-		spacings: function ( data ) {
+	spacings: function ( data ) {
 
-			var f, parts;
-			parts = data.split( /\s+/ );
-			return this.spacings = ( function () {
+		let f;
+		const parts = data.split( /\s+/ );
+		return this.spacings = ( function () {
 
-				var _i, _len, _results = [];
+			const _results = [];
 
-				for ( _i = 0, _len = parts.length; _i < _len; _i ++ ) {
+			for ( let _i = 0, _len = parts.length; _i < _len; _i ++ ) {
 
-					f = parts[ _i ];
-					_results.push( parseFloat( f ) );
+				f = parts[ _i ];
+				_results.push( parseFloat( f ) );
 
-				}
+			}
 
-				return _results;
+			return _results;
 
-			} )();
+		} )();
 
-		}
 	}
 
-} );
+};
 
 export { NRRDLoader };
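
For reference, a minimal usage sketch of the converted NRRDLoader (the import path, asset filename, and the logged Volume properties are illustrative assumptions, not part of this commit):

import { NRRDLoader } from 'three/examples/jsm/loaders/NRRDLoader.js';

const loader = new NRRDLoader();

// The header fields parsed above ( type, sizes, spacings, ... ) end up on the
// Volume object handed to onLoad.
loader.load( 'models/nrrd/example.nrrd', function ( volume ) {

	console.log( volume.xLength, volume.yLength, volume.zLength );

} );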

+ 82 - 89
examples/jsm/loaders/NodeMaterialLoader.js

@@ -1,81 +1,29 @@
 import {
-	DefaultLoadingManager,
+	Loader,
 	FileLoader
 } from '../../../build/three.module.js';
 
 import * as Nodes from '../nodes/Nodes.js';
 
-var NodeMaterialLoader = function ( manager, library ) {
+class NodeMaterialLoader extends Loader {
 
-	this.manager = ( manager !== undefined ) ? manager : DefaultLoadingManager;
+	constructor( manager, library = {} ) {
 
-	this.nodes = {};
-	this.materials = {};
-	this.passes = {};
-	this.names = {};
-	this.library = library || {};
+		super( manager );
 
-};
-
-var NodeMaterialLoaderUtils = {
-
-	replaceUUIDObject: function ( object, uuid, value, recursive ) {
-
-		recursive = recursive !== undefined ? recursive : true;
-
-		if ( typeof uuid === 'object' ) uuid = uuid.uuid;
-
-		if ( typeof object === 'object' ) {
-
-			var keys = Object.keys( object );
-
-			for ( var i = 0; i < keys.length; i ++ ) {
-
-				var key = keys[ i ];
-
-				if ( recursive ) {
-
-					object[ key ] = this.replaceUUIDObject( object[ key ], uuid, value );
-
-				}
-
-				if ( key === uuid ) {
-
-					object[ uuid ] = object[ key ];
-
-					delete object[ key ];
-
-				}
-
-			}
-
-		}
-
-		return object === uuid ? value : object;
-
-	},
-
-	replaceUUID: function ( json, uuid, value ) {
-
-		this.replaceUUIDObject( json, uuid, value, false );
-		this.replaceUUIDObject( json.nodes, uuid, value );
-		this.replaceUUIDObject( json.materials, uuid, value );
-		this.replaceUUIDObject( json.passes, uuid, value );
-		this.replaceUUIDObject( json.library, uuid, value, false );
-
-		return json;
+		this.nodes = {};
+		this.materials = {};
+		this.passes = {};
+		this.names = {};
+		this.library = library;
 
 	}
 
-};
-
-Object.assign( NodeMaterialLoader.prototype, {
-
-	load: function ( url, onLoad, onProgress, onError ) {
+	load( url, onLoad, onProgress, onError ) {
 
-		var scope = this;
+		const scope = this;
 
-		var loader = new FileLoader( scope.manager );
+		const loader = new FileLoader( scope.manager );
 		loader.setPath( scope.path );
 		loader.load( url, function ( text ) {
 
@@ -85,22 +33,15 @@ Object.assign( NodeMaterialLoader.prototype, {
 
 		return this;
 
-	},
-
-	setPath: function ( value ) {
-
-		this.path = value;
-		return this;
-
-	},
+	}
 
-	getObjectByName: function ( uuid ) {
+	getObjectByName( uuid ) {
 
 		return this.names[ uuid ];
 
-	},
+	}
 
-	getObjectById: function ( uuid ) {
+	getObjectById( uuid ) {
 
 		return this.library[ uuid ] ||
 			this.nodes[ uuid ] ||
@@ -108,11 +49,11 @@ Object.assign( NodeMaterialLoader.prototype, {
 			this.passes[ uuid ] ||
 			this.names[ uuid ];
 
-	},
+	}
 
-	getNode: function ( uuid ) {
+	getNode( uuid ) {
 
-		var object = this.getObjectById( uuid );
+		const object = this.getObjectById( uuid );
 
 		if ( ! object ) {
 
@@ -122,9 +63,9 @@ Object.assign( NodeMaterialLoader.prototype, {
 
 		return object;
 
-	},
+	}
 
-	resolve: function ( json ) {
+	resolve( json ) {
 
 		switch ( typeof json ) {
 
@@ -147,7 +88,7 @@ Object.assign( NodeMaterialLoader.prototype, {
 
 				if ( Array.isArray( json ) ) {
 
-					for ( var i = 0; i < json.length; i ++ ) {
+					for ( let i = 0; i < json.length; i ++ ) {
 
 						json[ i ] = this.resolve( json[ i ] );
 
@@ -155,7 +96,7 @@ Object.assign( NodeMaterialLoader.prototype, {
 
 				} else {
 
-					for ( var prop in json ) {
+					for ( const prop in json ) {
 
 						if ( prop === 'uuid' ) continue;
 
@@ -169,11 +110,11 @@ Object.assign( NodeMaterialLoader.prototype, {
 
 		return json;
 
-	},
+	}
 
-	declare: function ( json ) {
+	declare( json ) {
 
-		var uuid, node, object;
+		let uuid, node, object;
 
 		for ( uuid in json.nodes ) {
 
@@ -235,11 +176,11 @@ Object.assign( NodeMaterialLoader.prototype, {
 
 		return json;
 
-	},
+	}
 
-	parse: function ( json ) {
+	parse( json ) {
 
-		var uuid;
+		let uuid;
 
 		json = this.resolve( this.declare( json ) );
 
@@ -265,6 +206,58 @@ Object.assign( NodeMaterialLoader.prototype, {
 
 	}
 
-} );
+}
+
+class NodeMaterialLoaderUtils {
+
+	static replaceUUIDObject( object, uuid, value, recursive ) {
+
+		recursive = recursive !== undefined ? recursive : true;
+
+		if ( typeof uuid === 'object' ) uuid = uuid.uuid;
+
+		if ( typeof object === 'object' ) {
+
+			const keys = Object.keys( object );
+
+			for ( let i = 0; i < keys.length; i ++ ) {
+
+				const key = keys[ i ];
+
+				if ( recursive ) {
+
+					object[ key ] = this.replaceUUIDObject( object[ key ], uuid, value );
+
+				}
+
+				if ( key === uuid ) {
+
+					object[ uuid ] = object[ key ];
+
+					delete object[ key ];
+
+				}
+
+			}
+
+		}
+
+		return object === uuid ? value : object;
+
+	}
+
+	static replaceUUID( json, uuid, value ) {
+
+		this.replaceUUIDObject( json, uuid, value, false );
+		this.replaceUUIDObject( json.nodes, uuid, value );
+		this.replaceUUIDObject( json.materials, uuid, value );
+		this.replaceUUIDObject( json.passes, uuid, value );
+		this.replaceUUIDObject( json.library, uuid, value, false );
+
+		return json;
+
+	}
+
+}
 
 export { NodeMaterialLoader, NodeMaterialLoaderUtils };
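
The NodeMaterialLoaderUtils rewrite above shows the pattern this commit applies to namespace-style helpers: an object literal of functions becomes a class with static methods, so call sites such as NodeMaterialLoaderUtils.replaceUUID( json, uuid, value ) keep working unchanged. A minimal sketch of the same transformation with placeholder names (LegacyUtils, ModernUtils and double are hypothetical, not from this commit):

// ES5 style: a plain object used as a namespace.
var LegacyUtils = {

	double: function ( x ) {

		return x * 2;

	}

};

// ES6 style: the same API exposed as static class methods.
class ModernUtils {

	static double( x ) {

		return x * 2;

	}

}

console.log( LegacyUtils.double( 21 ), ModernUtils.double( 21 ) ); // 42 42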

+ 18 - 20
examples/jsm/loaders/VTKLoader.js

@@ -8,21 +8,19 @@ import {
 } from '../../../build/three.module.js';
 import * as fflate from '../libs/fflate.module.min.js';
 
-var VTKLoader = function ( manager ) {
+class VTKLoader extends Loader {
 
-	Loader.call( this, manager );
+	constructor( manager ) {
 
-};
+		super( manager );
 
-VTKLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
-
-	constructor: VTKLoader,
+	}
 
-	load: function ( url, onLoad, onProgress, onError ) {
+	load( url, onLoad, onProgress, onError ) {
 
-		var scope = this;
+		const scope = this;
 
-		var loader = new FileLoader( scope.manager );
+		const loader = new FileLoader( scope.manager );
 		loader.setPath( scope.path );
 		loader.setResponseType( 'arraybuffer' );
 		loader.setRequestHeader( scope.requestHeader );
@@ -51,9 +49,9 @@ VTKLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		}, onProgress, onError );
 
-	},
+	}
 
-	parse: function ( data ) {
+	parse( data ) {
 
 		function parseASCII( data ) {
 
@@ -553,23 +551,23 @@ VTKLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 		function Float32Concat( first, second ) {
 
-		    var firstLength = first.length, result = new Float32Array( firstLength + second.length );
+			const firstLength = first.length, result = new Float32Array( firstLength + second.length );
 
-		    result.set( first );
-		    result.set( second, firstLength );
+			result.set( first );
+			result.set( second, firstLength );
 
-		    return result;
+			return result;
 
 		}
 
 		function Int32Concat( first, second ) {
 
-		    var firstLength = first.length, result = new Int32Array( firstLength + second.length );
+			const firstLength = first.length, result = new Int32Array( firstLength + second.length );
 
-		    result.set( first );
-		    result.set( second, firstLength );
+			result.set( first );
+			result.set( second, firstLength );
 
-		    return result;
+			return result;
 
 		}
 
@@ -1179,6 +1177,6 @@ VTKLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
 
 	}
 
-} );
+}
 
 export { VTKLoader };
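
Float32Concat and Int32Concat above use the standard idiom for joining typed arrays, which have no concat method: allocate a buffer of the combined length and copy both inputs with set(). A generic sketch of the same idea (concatTyped is a hypothetical helper, not part of VTKLoader):

function concatTyped( TypedArrayCtor, first, second ) {

	// New array sized for both inputs; set() copies each input at the right offset.
	const result = new TypedArrayCtor( first.length + second.length );
	result.set( first );
	result.set( second, first.length );
	return result;

}

console.log( concatTyped( Float32Array, new Float32Array( [ 1, 2 ] ), new Float32Array( [ 3, 4 ] ) ) ); // Float32Array [ 1, 2, 3, 4 ]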

Because a large number of files were changed in this diff, some files are not shown.