@@ -365,7 +365,7 @@ THREE.GLTFExporter.prototype = {
 		 */
 		function applyTextureTransform( mapDef, texture ) {
 
-			var didTransform = false
+			var didTransform = false;
 			var transformDef = {};
 
 			if ( texture.offset.x !== 0 || texture.offset.y !== 0 ) {
@@ -385,7 +385,7 @@ THREE.GLTFExporter.prototype = {
 			if ( texture.repeat.x !== 1 || texture.repeat.y !== 1 ) {
 
 				transformDef.scale = texture.repeat.toArray();
-				didTransform = true;
+				didTransform = true;
 
 			}
 
@@ -981,7 +981,10 @@ THREE.GLTFExporter.prototype = {
 			// occlusionTexture
 			if ( material.aoMap ) {
 
-				var occlusionMapDef = { index: processTexture( material.aoMap ) };
+				var occlusionMapDef = {
+					index: processTexture( material.aoMap ),
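+					// aoMap is sampled from the second UV set ( uv2 ) in three.js, so the
+					// occlusion texture is pointed at TEXCOORD_1.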
+					texCoord: 1
+				};
 
 				if ( material.aoMapIntensity !== 1.0 ) {
 
@@ -1076,6 +1079,8 @@ THREE.GLTFExporter.prototype = {
 
 			if ( ! geometry.isBufferGeometry ) {
 
+				console.warn( 'GLTFExporter: Exporting THREE.Geometry will increase file size. Use THREE.BufferGeometry instead.' );
+
 				var geometryTemp = new THREE.BufferGeometry();
 				geometryTemp.fromGeometry( geometry );
 				geometry = geometryTemp;
@@ -1227,9 +1232,9 @@ THREE.GLTFExporter.prototype = {
 
 						var baseAttribute = geometry.attributes[ attributeName ];
 
-						if ( cachedData.attributes.has( baseAttribute ) ) {
+						if ( cachedData.attributes.has( attribute ) ) {
 
-							target[ gltfAttributeName ] = cachedData.attributes.get( baseAttribute );
+							target[ gltfAttributeName ] = cachedData.attributes.get( attribute );
 							continue;
 
 						}
 
@@ -1405,7 +1410,7 @@ THREE.GLTFExporter.prototype = {
 				gltfCamera.perspective = {
 
 					aspectRatio: camera.aspect,
-					yfov: THREE.Math.degToRad( camera.fov ) / camera.aspect,
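+					// glTF expects yfov to be the vertical field of view in radians,
+					// not divided by the aspect ratio.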
+					yfov: THREE.Math.degToRad( camera.fov ),
 					zfar: camera.far <= 0 ? 0.001 : camera.far,
 					znear: camera.near < 0 ? 0 : camera.near
 
@@ -1443,12 +1448,15 @@ THREE.GLTFExporter.prototype = {
 
 			}
 
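+			// Merge tracks that animate individual morph targets into a single track per
+			// mesh, since a glTF weights sampler animates all morph target weights at once.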
+			clip = THREE.GLTFExporter.Utils.mergeMorphTargetTracks( clip.clone(), root );
+
+			var tracks = clip.tracks;
 			var channels = [];
 			var samplers = [];
 
-			for ( var i = 0; i < clip.tracks.length; ++ i ) {
+			for ( var i = 0; i < tracks.length; ++ i ) {
 
-				var track = clip.tracks[ i ];
+				var track = tracks[ i ];
 				var trackBinding = THREE.PropertyBinding.parseTrackName( track.name );
 				var trackNode = THREE.PropertyBinding.findNode( root, trackBinding.nodeName );
 				var trackProperty = PATH_PROPERTIES[ trackBinding.propertyName ];
@@ -1479,16 +1487,6 @@ THREE.GLTFExporter.prototype = {
 
 				if ( trackProperty === PATH_PROPERTIES.morphTargetInfluences ) {
 
-					if ( trackNode.morphTargetInfluences.length !== 1 &&
-						trackBinding.propertyIndex !== undefined ) {
-
-						console.warn( 'THREE.GLTFExporter: Skipping animation track "%s". ' +
-							'Morph target keyframe tracks must target all available morph targets ' +
-							'for the given mesh.', track.name );
-						continue;
-
-					}
-
 					outputItemSize /= trackNode.morphTargetInfluences.length;
 
 				}
@@ -1591,6 +1589,59 @@ THREE.GLTFExporter.prototype = {
 
 		}
 
+		function processLight( light ) {
+
+			var lightDef = {};
+
+			if ( light.name ) lightDef.name = light.name;
+
+			lightDef.color = light.color.toArray();
+
+			lightDef.intensity = light.intensity;
+
+			if ( light.isDirectionalLight ) {
+
+				lightDef.type = 'directional';
+
+			} else if ( light.isPointLight ) {
+
+				lightDef.type = 'point';
+				if ( light.distance > 0 ) lightDef.range = light.distance;
+
+			} else if ( light.isSpotLight ) {
+
+				lightDef.type = 'spot';
+				if ( light.distance > 0 ) lightDef.range = light.distance;
+				lightDef.spot = {};
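+				// THREE.SpotLight stores an outer angle plus a penumbra fraction, while glTF
+				// wants explicit cone angles: innerConeAngle = ( 1 - penumbra ) * angle.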
+				lightDef.spot.innerConeAngle = ( light.penumbra - 1.0 ) * light.angle * - 1.0;
+				lightDef.spot.outerConeAngle = light.angle;
+
+			}
+
+			if ( light.decay !== undefined && light.decay !== 2 ) {
+
+				console.warn( 'THREE.GLTFExporter: Light decay may be lost. glTF is physically-based, '
+					+ 'and expects light.decay=2.' );
+
+			}
+
+			if ( light.target
+					&& ( light.target.parent !== light
+					|| light.target.position.x !== 0
+					|| light.target.position.y !== 0
+					|| light.target.position.z !== - 1 ) ) {
+
+				console.warn( 'THREE.GLTFExporter: Light direction may be lost. For best results, '
+					+ 'make light.target a child of the light with position 0,0,-1.' );
+
+			}
+
+			var lights = outputJSON.extensions[ 'KHR_lights_punctual' ].lights;
+			lights.push( lightDef );
+			return lights.length - 1;
+
+		}
+
 		/**
 		 * Process Object3D node
 		 * @param {THREE.Object3D} node Object3D to processNode
@@ -1598,13 +1649,6 @@ THREE.GLTFExporter.prototype = {
 		 */
 		function processNode( object ) {
 
-			if ( object.isLight ) {
-
-				console.warn( 'GLTFExporter: Unsupported node type:', object.constructor.name );
-				return null;
-
-			}
-
 			if ( ! outputJSON.nodes ) {
 
 				outputJSON.nodes = [];
@@ -1675,6 +1719,24 @@ THREE.GLTFExporter.prototype = {
 
 				gltfNode.camera = processCamera( object );
 
+			} else if ( object.isDirectionalLight || object.isPointLight || object.isSpotLight ) {
+
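+				// Punctual lights are exported through the KHR_lights_punctual node extension;
+				// the extension is declared on the document the first time a light is exported.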
+				if ( ! extensionsUsed[ 'KHR_lights_punctual' ] ) {
+
+					outputJSON.extensions = outputJSON.extensions || {};
+					outputJSON.extensions[ 'KHR_lights_punctual' ] = { lights: [] };
+					extensionsUsed[ 'KHR_lights_punctual' ] = true;
+
+				}
+
+				gltfNode.extensions = gltfNode.extensions || {};
+				gltfNode.extensions[ 'KHR_lights_punctual' ] = { light: processLight( object ) };
+
+			} else if ( object.isLight ) {
+
+				console.warn( 'THREE.GLTFExporter: Only directional, point, and spot lights are supported.' );
+				return null;
+
 			}
 
 			if ( object.isSkinnedMesh ) {
@@ -1942,3 +2004,196 @@ THREE.GLTFExporter.prototype = {
 	}
 
 };
+
+THREE.GLTFExporter.Utils = {
+
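+	// Inserts a keyframe into the track at the given time, interpolating its value
+	// from the surrounding keyframes, and returns the index of the new keyframe
+	// (or of an existing keyframe within the 1 ms tolerance).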
+	insertKeyframe: function ( track, time ) {
+
+		var tolerance = 0.001; // 1ms
+		var valueSize = track.getValueSize();
+
+		var times = new track.TimeBufferType( track.times.length + 1 );
+		var values = new track.ValueBufferType( track.values.length + valueSize );
+		var interpolant = track.createInterpolant( new track.ValueBufferType( valueSize ) );
+
+		var index;
+
+		if ( track.times.length === 0 ) {
+
+			times[ 0 ] = time;
+
+			for ( var i = 0; i < valueSize; i ++ ) {
+
+				values[ i ] = 0;
+
+			}
+
+			index = 0;
+
+		} else if ( time < track.times[ 0 ] ) {
+
+			if ( Math.abs( track.times[ 0 ] - time ) < tolerance ) return 0;
+
+			times[ 0 ] = time;
+			times.set( track.times, 1 );
+
+			values.set( interpolant.evaluate( time ), 0 );
+			values.set( track.values, valueSize );
+
+			index = 0;
+
+		} else if ( time > track.times[ track.times.length - 1 ] ) {
+
+			if ( Math.abs( track.times[ track.times.length - 1 ] - time ) < tolerance ) {
+
+				return track.times.length - 1;
+
+			}
+
+			times[ times.length - 1 ] = time;
+			times.set( track.times, 0 );
+
+			values.set( track.values, 0 );
+			values.set( interpolant.evaluate( time ), track.values.length );
+
+			index = times.length - 1;
+
+		} else {
+
+			for ( var i = 0; i < track.times.length; i ++ ) {
+
+				if ( Math.abs( track.times[ i ] - time ) < tolerance ) return i;
+
+				if ( track.times[ i ] < time && track.times[ i + 1 ] > time ) {
+
+					times.set( track.times.slice( 0, i + 1 ), 0 );
+					times[ i + 1 ] = time;
+					times.set( track.times.slice( i + 1 ), i + 2 );
+
+					values.set( track.values.slice( 0, ( i + 1 ) * valueSize ), 0 );
+					values.set( interpolant.evaluate( time ), ( i + 1 ) * valueSize );
+					values.set( track.values.slice( ( i + 1 ) * valueSize ), ( i + 2 ) * valueSize );
+
+					index = i + 1;
+
+					break;
+
+				}
+
+			}
+
+		}
+
+		track.times = times;
+		track.values = values;
+
+		return index;
+
+	},
+
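+	// Merges tracks that each animate a single named morph target
+	// ( '.morphTargetInfluences[ name ]' ) into one '.morphTargetInfluences' track
+	// per mesh, because glTF animates all morph target weights with a single sampler.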
+	mergeMorphTargetTracks: function ( clip, root ) {
+
+		var tracks = [];
+		var mergedTracks = {};
+		var sourceTracks = clip.tracks;
+
+		for ( var i = 0; i < sourceTracks.length; ++ i ) {
+
+			var sourceTrack = sourceTracks[ i ];
+			var sourceTrackBinding = THREE.PropertyBinding.parseTrackName( sourceTrack.name );
+			var sourceTrackNode = THREE.PropertyBinding.findNode( root, sourceTrackBinding.nodeName );
+
+			if ( sourceTrackBinding.propertyName !== 'morphTargetInfluences' || sourceTrackBinding.propertyIndex === undefined ) {
+
+				// Tracks that don't affect morph targets, or that affect all morph targets together, can be left as-is.
+				tracks.push( sourceTrack );
+				continue;
+
+			}
+
+			if ( sourceTrack.createInterpolant !== sourceTrack.InterpolantFactoryMethodDiscrete
+					&& sourceTrack.createInterpolant !== sourceTrack.InterpolantFactoryMethodLinear ) {
+
+				if ( sourceTrack.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline ) {
+
+					// This should never happen, because glTF morph target animations
+					// affect all targets already.
+					throw new Error( 'THREE.GLTFExporter: Cannot merge tracks with glTF CUBICSPLINE interpolation.' );
+
+				}
+
+				console.warn( 'THREE.GLTFExporter: Morph target interpolation mode not yet supported. Using LINEAR instead.' );
+
+				sourceTrack = sourceTrack.clone();
+				sourceTrack.setInterpolation( THREE.InterpolateLinear );
+
+			}
+
+			var targetCount = sourceTrackNode.morphTargetInfluences.length;
+			var targetIndex = sourceTrackNode.morphTargetDictionary[ sourceTrackBinding.propertyIndex ];
+
+			if ( targetIndex === undefined ) {
+
+				throw new Error( 'THREE.GLTFExporter: Morph target name not found: ' + sourceTrackBinding.propertyIndex );
+
+			}
+
+			var mergedTrack;
+
+			// If this is the first time we've seen this object, create a new
+			// track to store merged keyframe data for each morph target.
+			if ( mergedTracks[ sourceTrackNode.uuid ] === undefined ) {
+
+				mergedTrack = sourceTrack.clone();
+
+				var values = new mergedTrack.ValueBufferType( targetCount * mergedTrack.times.length );
+
+				for ( var j = 0; j < mergedTrack.times.length; j ++ ) {
+
+					values[ j * targetCount + targetIndex ] = mergedTrack.values[ j ];
+
+				}
+
+				mergedTrack.name = '.morphTargetInfluences';
+				mergedTrack.values = values;
+
+				mergedTracks[ sourceTrackNode.uuid ] = mergedTrack;
+				tracks.push( mergedTrack );
+
+				continue;
+
+			}
+
+			var mergedKeyframeIndex = 0;
+			var sourceKeyframeIndex = 0;
+			var sourceInterpolant = sourceTrack.createInterpolant( new sourceTrack.ValueBufferType( 1 ) );
+
+			mergedTrack = mergedTracks[ sourceTrackNode.uuid ];
+
+			// For every existing keyframe of the merged track, write a (possibly
+			// interpolated) value from the source track.
+			for ( var j = 0; j < mergedTrack.times.length; j ++ ) {
+
+				mergedTrack.values[ j * targetCount + targetIndex ] = sourceInterpolant.evaluate( mergedTrack.times[ j ] );
+
+			}
+
+			// For every existing keyframe of the source track, write a (possibly
+			// new) keyframe to the merged track. Values from the previous loop may
+			// be written again, but keyframes are de-duplicated.
+			for ( var j = 0; j < sourceTrack.times.length; j ++ ) {
+
+				var keyframeIndex = this.insertKeyframe( mergedTrack, sourceTrack.times[ j ] );
+				mergedTrack.values[ keyframeIndex * targetCount + targetIndex ] = sourceTrack.values[ j ];
+
+			}
+
+		}
+
+		clip.tracks = tracks;
+
+		return clip;
+
+	}
+
+};