@@ -149,6 +149,32 @@ def get_up_vector(scene):
     tmp = [0,0,0]
     tmp[up_vector[0] - 1] = up_vector[1] * 1
     return FbxVector4(tmp[0], tmp[1], tmp[2], 1)
+
+def generate_bounding_box(vertices):
+    minx = 0
+    miny = 0
+    minz = 0
+    maxx = 0
+    maxy = 0
+    maxz = 0
+
+    for vertex in vertices:
+        if vertex[0] < minx:
+            minx = vertex[0]
+        if vertex[1] < miny:
+            miny = vertex[1]
+        if vertex[2] < minz:
+            minz = vertex[2]
+
+        if vertex[0] > maxx:
+            maxx = vertex[0]
+        if vertex[1] > maxy:
+            maxy = vertex[1]
+        if vertex[2] > maxz:
+            maxz = vertex[2]
+
+    return [minx, miny, minz], [maxx, maxy, maxz]
+
 
 # #####################################################
 # Generate - Triangles
@@ -466,6 +492,19 @@ def extract_fbx_vertex_positions(mesh):
     for i in range(control_points_count):
         positions.append(convert_fbx_vec3(control_points[i]))
 
+    node = mesh.GetNode()
+    if node and option_geometry:
+        # FbxMeshes are local to their node, we need the vertices in global space
+        # when scene nodes are not exported
+        transform = node.EvaluateGlobalTransform()
+        transform = FbxMatrix(transform)
+
+        for i in range(len(positions)):
+            v = positions[i]
+            position = FbxVector4(v[0], v[1], v[2])
+            position = transform.MultNormalize(position)
+            positions[i] = convert_fbx_vec3(position)
+
     return positions
 
 def extract_fbx_vertex_normals(mesh):
@@ -501,6 +540,20 @@ def extract_fbx_vertex_normals(mesh):
             normal = convert_fbx_vec3(normals_array.GetAt(i))
             normal_values.append(normal)
 
+        node = mesh.GetNode()
+        if node and option_geometry:
+            # FbxMeshes are local to their node, we need the normals in global space
+            # when scene nodes are not exported
+            transform = node.EvaluateGlobalTransform()
+            transform.SetT(FbxVector4(0,0,0,0))
+            transform = FbxMatrix(transform)
+
+            for i in range(len(normal_values)):
+                n = normal_values[i]
+                normal = FbxVector4(n[0], n[1], n[2])
+                normal = transform.MultNormalize(normal)
+                normal_values[i] = convert_fbx_vec3(normal)
+
         # indices
         vertexId = 0
         for p in range(poly_count):
@@ -682,52 +735,100 @@ def extract_fbx_vertex_uvs(mesh):
     return layered_uv_values, layered_uv_indices
 
 # #####################################################
-# Generate - Mesh String
+# Generate - Mesh String (for scene output)
 # #####################################################
-def generate_mesh_bounding_box(mesh):
-    control_points_count = mesh.GetControlPointsCount()
-    control_points = mesh.GetControlPoints()
+def generate_mesh_string_for_scene_output(node):
+    mesh = node.GetNodeAttribute()
+    mesh_list = [ mesh ]
 
-    minx = 0
-    miny = 0
-    minz = 0
-    maxx = 0
-    maxy = 0
-    maxz = 0
+    vertices, vertex_offsets = process_mesh_vertices(mesh_list)
+    materials, material_offsets = process_mesh_materials(mesh_list)
 
-    for i in range(control_points_count):
-        vertex = control_points[i]
+    normals_to_indices = generate_unique_normals_dictionary(mesh_list)
+    colors_to_indices = generate_unique_colors_dictionary(mesh_list)
+    uvs_to_indices_list = generate_unique_uvs_dictionary_layers(mesh_list)
+
+    normal_values = generate_normals_from_dictionary(normals_to_indices)
+    color_values = generate_colors_from_dictionary(colors_to_indices)
+    uv_values = generate_uvs_from_dictionary_layers(uvs_to_indices_list)
 
-        if vertex[0] < minx:
-            minx = vertex[0]
-        if vertex[1] < miny:
-            miny = vertex[1]
-        if vertex[2] < minz:
-            minz = vertex[2]
+    faces = process_mesh_polygons(mesh_list,
+        normals_to_indices,
+        colors_to_indices,
+        uvs_to_indices_list,
+        vertex_offsets,
+        material_offsets)
 
-        if vertex[0] > maxx:
-            maxx = vertex[0]
-        if vertex[1] > maxy:
-            maxy = vertex[1]
-        if vertex[2] > maxz:
-            maxz = vertex[2]
+    nuvs = []
+    for layer_index, uvs in enumerate(uv_values):
+        nuvs.append(str(len(uvs)))
 
-    return [minx, miny, minz], [maxx, maxy, maxz]
+    nvertices = len(vertices)
+    nnormals = len(normal_values)
+    ncolors = len(color_values)
+    nfaces = len(faces)
+    nuvs = ",".join(nuvs)
+
+    aabb_min, aabb_max = generate_bounding_box(vertices)
+    aabb_min = ",".join(str(f) for f in aabb_min)
+    aabb_max = ",".join(str(f) for f in aabb_max)
 
-def generate_scene_mesh_string(node):
-    mesh = node.GetNodeAttribute()
-    mesh_list = [ mesh ]
+    vertices = ",".join(Vector3String(v, True) for v in vertices)
+    normals = ",".join(Vector3String(v, True) for v in normal_values)
+    colors = ",".join(Vector3String(v, True) for v in color_values)
+    faces = ",".join(faces)
+    uvs = generate_uvs(uv_values)
+
+    output = [
+
+        '\t' + LabelString( getEmbedName( node, True ) ) + ' : {',
+        ' "metadata" : {',
+        ' "vertices" : ' + str(nvertices) + ',',
+        ' "normals" : ' + str(nnormals) + ',',
+        ' "colors" : ' + str(ncolors) + ',',
+        ' "faces" : ' + str(nfaces) + ',',
+        ' "uvs" : ' + ArrayString(nuvs),
+        ' },',
+        ' "boundingBox" : {',
+        ' "min" : ' + ArrayString(aabb_min) + ',',
+        ' "max" : ' + ArrayString(aabb_max),
+        ' },',
+        ' "scale" : ' + str( 1 ) + ',',
+        ' "materials" : ' + ArrayString("") + ',',
+        ' "vertices" : ' + ArrayString(vertices) + ',',
+        ' "normals" : ' + ArrayString(normals) + ',',
+        ' "colors" : ' + ArrayString(colors) + ',',
+        ' "uvs" : ' + ArrayString(uvs) + ',',
+        ' "faces" : ' + ArrayString(faces),
+        '}'
+
+    ]
+
+    return generateMultiLineString( output, '\n\t\t', 0 )
+
+# #####################################################
+# Generate - Mesh String (for non-scene output)
+# #####################################################
+def generate_mesh_string_for_non_scene_output(scene):
+    mesh_list = generate_mesh_list(scene)
 
-    # These functions merge multiple meshes into one
     vertices, vertex_offsets = process_mesh_vertices(mesh_list)
     materials, material_offsets = process_mesh_materials(mesh_list)
 
-    # These functions merge meshes and remove duplicate data
-    normal_values, normal_indices = process_mesh_normals(mesh_list)
-    color_values, color_indices = process_mesh_colors(mesh_list)
-    uv_values, uv_indices = process_mesh_uv_layers(mesh_list)
+    normals_to_indices = generate_unique_normals_dictionary(mesh_list)
+    colors_to_indices = generate_unique_colors_dictionary(mesh_list)
+    uvs_to_indices_list = generate_unique_uvs_dictionary_layers(mesh_list)
+
+    normal_values = generate_normals_from_dictionary(normals_to_indices)
+    color_values = generate_colors_from_dictionary(colors_to_indices)
+    uv_values = generate_uvs_from_dictionary_layers(uvs_to_indices_list)
 
-    faces = process_mesh_polygons(mesh_list, normal_indices, color_indices, uv_indices, vertex_offsets, material_offsets)
+    faces = process_mesh_polygons(mesh_list,
+        normals_to_indices,
+        colors_to_indices,
+        uvs_to_indices_list,
+        vertex_offsets,
+        material_offsets)
 
     nuvs = []
     for layer_index, uvs in enumerate(uv_values):
@@ -739,21 +840,23 @@ def generate_scene_mesh_string(node):
     nfaces = len(faces)
     nuvs = ",".join(nuvs)
 
+    aabb_min, aabb_max = generate_bounding_box(vertices)
+    aabb_min = ",".join(str(f) for f in aabb_min)
+    aabb_max = ",".join(str(f) for f in aabb_max)
+
     vertices = ",".join(Vector3String(v, True) for v in vertices)
     normals = ",".join(Vector3String(v, True) for v in normal_values)
     colors = ",".join(Vector3String(v, True) for v in color_values)
     faces = ",".join(faces)
     uvs = generate_uvs(uv_values)
 
-    #TODO: this should take in the vertices array
-    aabb_min, aabb_max = generate_mesh_bounding_box(mesh)
-    aabb_min = ",".join(str(f) for f in aabb_min)
-    aabb_max = ",".join(str(f) for f in aabb_max)
-
     output = [
 
-        '\t' + LabelString( getEmbedName( node, True ) ) + ' : {',
+        '{',
         ' "metadata" : {',
+        ' "formatVersion" : 3.2,',
+        ' "type" : "geometry",',
+        ' "generatedBy" : "convert-to-threejs.py"' + ',',
         ' "vertices" : ' + str(nvertices) + ',',
        ' "normals" : ' + str(nnormals) + ',',
        ' "colors" : ' + str(ncolors) + ',',
@@ -774,8 +877,8 @@ def generate_scene_mesh_string(node):
         '}'
 
     ]
-
-    return generateMultiLineString( output, '\n\t\t', 0 )
+
+    return generateMultiLineString( output, '\n', 0 )
 
 # #####################################################
 # Process - Mesh Geometry
@@ -788,28 +891,9 @@ def generate_color_key(color):
 
 def generate_uv_key(uv):
     return (round(uv[0], 6), round(uv[1], 6))
-
-def append_non_duplicate_normals(source_normals, dest_normals, count):
-    for normal in source_normals:
-        key = generate_normal_key(normal)
-        if key not in dest_normals:
-            dest_normals[key] = count
-            count += 1
-
-    return count
-
-def append_non_duplicate_colors(source_colors, dest_colors, count):
-    for color in source_colors:
-        key = generate_color_key(color)
-        if key not in dest_colors:
-            dest_colors[key] = count
-            count += 1
-
-    return count
 
 def append_non_duplicate_uvs(source_uvs, dest_uvs, counts):
     source_layer_count = len(source_uvs)
-
    for layer_index in range(source_layer_count):
 
        dest_layer_count = len(dest_uvs)
@@ -821,7 +905,7 @@ def append_non_duplicate_uvs(source_uvs, dest_uvs, counts):
             counts.append(count)
         else:
             dest_uv_layer = dest_uvs[layer_index]
-            count = nuvs[layer_index]
+            count = counts[layer_index]
 
         source_uv_layer = source_uvs[layer_index]
 
@@ -835,146 +919,135 @@ def append_non_duplicate_uvs(source_uvs, dest_uvs, counts):
 
     return counts
 
-def process_mesh_normals(mesh_list):
+def generate_unique_normals_dictionary(mesh_list):
     normals_dictionary = {}
     nnormals = 0
 
     # Merge meshes, remove duplicate data
     for mesh in mesh_list:
+        node = mesh.GetNode()
         normal_values, normal_indices = extract_fbx_vertex_normals(mesh)
 
+        if len(normal_values) > 0:
+            for normal in normal_values:
+                key = generate_normal_key(normal)
+                if key not in normals_dictionary:
+                    normals_dictionary[key] = nnormals
+                    nnormals += 1
 
-        # Remove the Fbx indices, we will make our own
-        mesh_normals = []
-        for poly in normal_indices:
-            for index in poly:
-                mesh_normals.append(normal_values[index])
+    return normals_dictionary
 
-        if len(mesh_normals) > 0:
-            nnormals = append_non_duplicate_normals(mesh_normals, normals_dictionary, nnormals)
-
-    # Build index list
-    merged_normal_indices = []
+def generate_unique_colors_dictionary(mesh_list):
+    colors_dictionary = {}
+    ncolors = 0
+
+    # Merge meshes, remove duplicate data
     for mesh in mesh_list:
-        normal_values, normal_indices = extract_fbx_vertex_normals(mesh)
+        color_values, color_indices = extract_fbx_vertex_colors(mesh)
 
-        for source_poly in normal_indices:
-            dest_poly = []
+        if len(color_values) > 0:
+            for color in color_values:
+                key = generate_color_key(color)
+                if key not in colors_dictionary:
+                    colors_dictionary[key] = ncolors
+                    ncolors += 1
 
-            for source_index in source_poly:
-                normal = normal_values[source_index]
-                key = generate_normal_key(normal)
+    return colors_dictionary
 
-                dest_index = normals_dictionary[key]
-                dest_poly.append(dest_index)
+def generate_unique_uvs_dictionary_layers(mesh_list):
+    uvs_dictionary_layers = []
+    nuvs_list = []
 
-            merged_normal_indices.append(dest_poly)
+    # Merge meshes, remove duplicate data
+    for mesh in mesh_list:
+        uv_values, uv_indices = extract_fbx_vertex_uvs(mesh)
 
-    # Build values array
-    merged_normal_values = []
-    for key, index in sorted(normals_dictionary.items(), key = operator.itemgetter(1)):
-        merged_normal_values.append(key)
+        if len(uv_values) > 0:
+            nuvs_list = append_non_duplicate_uvs(uv_values, uvs_dictionary_layers, nuvs_list)
 
-    return merged_normal_values, merged_normal_indices
+    return uvs_dictionary_layers
 
-def process_mesh_colors(mesh_list):
-    colors_dictionary = {}
-    ncolors = 0
+def generate_normals_from_dictionary(normals_dictionary):
+    normal_values = []
+    for key, index in sorted(normals_dictionary.items(), key = operator.itemgetter(1)):
+        normal_values.append(key)
 
-    # Merge meshes, remove duplicate data
-    for mesh in mesh_list:
-        color_values, color_indices = extract_fbx_vertex_colors(mesh)
+    return normal_values
 
-        # Remove the Fbx indices, we will make our own
-        mesh_colors = []
-        for poly in color_indices:
-            for index in poly:
-                mesh_colors.append(color_values[index])
+def generate_colors_from_dictionary(colors_dictionary):
+    color_values = []
+    for key, index in sorted(colors_dictionary.items(), key = operator.itemgetter(1)):
+        color_values.append(key)
 
-        if len(mesh_colors) > 0:
-            ncolors = append_non_duplicate_colors(mesh_colors, colors_dictionary, ncolors)
+    return color_values
 
-    # Build index list
-    merged_color_indices = []
-    for mesh in mesh_list:
-        color_values, color_indices = extract_fbx_vertex_colors(mesh)
+def generate_uvs_from_dictionary_layers(uvs_dictionary_layers):
+    uv_values = []
+    for uvs_dictionary in uvs_dictionary_layers:
+        uv_values_layer = []
+        for key, index in sorted(uvs_dictionary.items(), key = operator.itemgetter(1)):
+            uv_values_layer.append(key)
+        uv_values.append(uv_values_layer)
 
-        for source_poly in color_indices:
-            dest_poly = []
+    return uv_values
 
-            for source_index in source_poly:
-                color = color_values[source_index]
-                key = generate_color_key(color)
+def generate_normal_indices_for_poly(poly_index, mesh_normal_values, mesh_normal_indices, normals_to_indices):
+    if len(mesh_normal_indices) <= 0:
+        return []
 
-                dest_index = colors_dictionary[key]
-                dest_poly.append(dest_index)
+    poly_normal_indices = mesh_normal_indices[poly_index]
+    poly_size = len(poly_normal_indices)
 
-            merged_color_indices.append(dest_poly)
+    output_poly_normal_indices = []
+    for v in range(poly_size):
+        normal_index = poly_normal_indices[v]
+        normal_value = mesh_normal_values[normal_index]
 
-    # Build values array
-    merged_color_values = []
-    for key, index in sorted(colors_dictionary.items(), key = operator.itemgetter(1)):
-        merged_color_values.append(key)
+        key = generate_normal_key(normal_value)
 
-    return merged_color_values, merged_color_indices
+        output_index = normals_to_indices[key]
+        output_poly_normal_indices.append(output_index)
 
-def process_mesh_uv_layers(mesh_list):
-    uvs_dictionary_layers = []
-    nuvs_list = []
+    return output_poly_normal_indices
 
-    # Merge meshes, remove duplicate data
-    for mesh in mesh_list:
-        uv_values, uv_indices = extract_fbx_vertex_uvs(mesh)
+def generate_color_indices_for_poly(poly_index, mesh_color_values, mesh_color_indices, colors_to_indices):
+    if len(mesh_color_indices) <= 0:
+        return []
 
-        # Remove the Fbx indices, we will make our own
-        mesh_uvs = []
-        for l in range(len(uv_indices)):
-            dest_uv_indices_layer = []
-            source_uv_values_layer = uv_values[l]
-            source_uv_indices_layer = uv_indices[l]
+    poly_color_indices = mesh_color_indices[poly_index]
+    poly_size = len(poly_color_indices)
 
-            for source_poly in source_uv_indices_layer:
-                for source_index in source_poly:
-                    dest_uv_indices_layer.append(source_uv_values_layer[source_index])
-            mesh_uvs.append(dest_uv_indices_layer)
+    output_poly_color_indices = []
+    for v in range(poly_size):
+        color_index = poly_color_indices[v]
+        color_value = mesh_color_values[color_index]
 
-        if len(mesh_uvs) > 0:
-            nuvs_list = append_non_duplicate_uvs(mesh_uvs, uvs_dictionary_layers, nuvs_list)
+        key = generate_color_key(color_value)
 
-    # Build index list
-    merged_uv_indices = []
-    for mesh in mesh_list:
-        uv_values, uv_indices = extract_fbx_vertex_uvs(mesh)
+        output_index = colors_to_indices[key]
+        output_poly_color_indices.append(output_index)
 
-        for layer_index in range(len(uv_indices)):
-            dest_uv_indices_layer = []
-            source_uv_values_layer = uv_values[layer_index]
-            source_uv_indices_layer = uv_indices[layer_index]
-            uvs_dictionary = uvs_dictionary_layers[layer_index]
+    return output_poly_color_indices
 
-            for source_poly in source_uv_indices_layer:
-                dest_poly = []
+def generate_uv_indices_for_poly(poly_index, mesh_uv_values, mesh_uv_indices, uvs_to_indices):
+    if len(mesh_uv_indices) <= 0:
+        return []
 
-                for source_index in source_poly:
-                    uv = source_uv_values_layer[source_index]
-                    key = generate_uv_key(uv)
+    poly_uv_indices = mesh_uv_indices[poly_index]
+    poly_size = len(poly_uv_indices)
 
-                    dest_index = uvs_dictionary[key]
-                    dest_poly.append(dest_index)
+    output_poly_uv_indices = []
+    for v in range(poly_size):
+        uv_index = poly_uv_indices[v]
+        uv_value = mesh_uv_values[uv_index]
 
-                dest_uv_indices_layer.append(dest_poly)
-            merged_uv_indices.append(dest_uv_indices_layer)
+        key = generate_uv_key(uv_value)
 
-    # Build values array
-    merged_uv_values = []
-    for uvs_dictionary in uvs_dictionary_layers:
-        merged_uv_values_layer = []
-        for key, index in sorted(uvs_dictionary.items(), key = operator.itemgetter(1)):
-            merged_uv_values_layer.append(key)
-        merged_uv_values.append(merged_uv_values_layer)
+        output_index = uvs_to_indices[key]
+        output_poly_uv_indices.append(output_index)
+
+    return output_poly_uv_indices
 
-    return merged_uv_values, merged_uv_indices
-
 def process_mesh_vertices(mesh_list):
     vertex_offset = 0
     vertex_offset_list = [0]
@@ -982,18 +1055,6 @@ def process_mesh_vertices(mesh_list):
     for mesh in mesh_list:
         node = mesh.GetNode()
         mesh_vertices = extract_fbx_vertex_positions(mesh)
-
-        if option_geometry:
-            # FbxMeshes are local to their node, we need the vertices in global space
-            # when scene nodes are not exported
-            transform = node.EvaluateGlobalTransform()
-            transform = FbxMatrix(transform)
-
-            for i in range(len(mesh_vertices)):
-                v = mesh_vertices[i]
-                position = FbxVector4(v[0], v[1], v[2])
-                position = transform.MultNormalize(position)
-                mesh_vertices[i] = convert_fbx_vec3(position)
 
         vertices.extend(mesh_vertices[:])
         vertex_offset += len(mesh_vertices)
@@ -1028,34 +1089,55 @@ def process_mesh_materials(mesh_list):
 
     return materials_list, material_offset_list
 
-def process_mesh_polygons(mesh_list, normals, colors, uv_layers, vertex_offset_list, material_offset_list):
+def process_mesh_polygons(mesh_list, normals_to_indices, colors_to_indices, uvs_to_indices_list, vertex_offset_list, material_offset_list):
     faces = []
     for mesh_index in range(len(mesh_list)):
         mesh = mesh_list[mesh_index]
         poly_count = mesh.GetPolygonCount()
         control_points = mesh.GetControlPoints()
 
+        normal_values, normal_indices = extract_fbx_vertex_normals(mesh)
+        color_values, color_indices = extract_fbx_vertex_colors(mesh)
+        uv_values_layers, uv_indices_layers = extract_fbx_vertex_uvs(mesh)
+
         for poly_index in range(poly_count):
             poly_size = mesh.GetPolygonSize(poly_index)
-            vertex_indices = []
 
+            face_normals = generate_normal_indices_for_poly(poly_index, normal_values, normal_indices, normals_to_indices)
+            face_colors = generate_color_indices_for_poly(poly_index, color_values, color_indices, colors_to_indices)
+
+            face_uv_layers = []
+            for l in range(len(uv_indices_layers)):
+                uv_values = uv_values_layers[l]
+                uv_indices = uv_indices_layers[l]
+                face_uv_indices = generate_uv_indices_for_poly(poly_index, uv_values, uv_indices, uvs_to_indices_list[l])
+                face_uv_layers.append(face_uv_indices)
+
+            face_vertices = []
             for vertex_index in range(poly_size):
                 control_point_index = mesh.GetPolygonVertex(poly_index, vertex_index)
-                vertex_indices.append(control_point_index)
+                face_vertices.append(control_point_index)
+
+            #TODO: assign a default material to any mesh without one
+            if len(material_offset_list) <= mesh_index:
+                material_offset = 0
+            else:
+                material_offset = material_offset_list[mesh_index]
 
             vertex_offset = vertex_offset_list[mesh_index]
-            material_offset = material_offset_list[mesh_index]
 
             face = generate_mesh_face(mesh,
                 poly_index,
-                vertex_indices,
-                normals, colors,
-                uv_layers,
+                face_vertices,
+                face_normals,
+                face_colors,
+                face_uv_layers,
                 vertex_offset,
                 material_offset)
 
             faces.append(face)
 
+
     return faces
 
 def generate_mesh_face(mesh, polygon_index, vertex_indices, normals, colors, uv_layers, vertex_offset, material_offset):
@@ -1100,7 +1182,9 @@ def generate_mesh_face(mesh, polygon_index, vertex_indices, normals, colors, uv_
 
     faceData.append(faceType)
 
+    tmp = []
     for i in range(nVertices):
+        tmp.append(vertex_indices[i])
         index = vertex_indices[i] + vertex_offset
         faceData.append(index)
 
@@ -1115,22 +1199,19 @@ def generate_mesh_face(mesh, polygon_index, vertex_indices, normals, colors, uv_
         faceData.append( material_id )
 
     if hasFaceVertexUvs:
-        for layer_index, uvs in enumerate(uv_layers):
-            polygon_uvs = uvs[polygon_index]
+        for polygon_uvs in uv_layers:
             for i in range(nVertices):
                 index = polygon_uvs[i]
                 faceData.append(index)
 
     if hasFaceVertexNormals:
-        polygon_normals = normals[polygon_index]
         for i in range(nVertices):
-            index = polygon_normals[i]
+            index = normals[i]
             faceData.append(index)
 
     if hasFaceVertexColors:
-        polygon_colors = colors[polygon_index]
         for i in range(nVertices):
-            index = polygon_colors[i]
+            index = colors[i]
             faceData.append(index)
 
     return ",".join( map(str, faceData) )
@@ -1152,7 +1233,7 @@ def generate_mesh_list_from_hierarchy(node, mesh_list):
             if attribute_type != FbxNodeAttribute.eMesh:
                 converter.TriangulateInPlace(node);
 
-            mesh_list.append(node)
+            mesh_list.append(node.GetNodeAttribute())
 
     for i in range(node.GetChildCount()):
         generate_embed_list_from_hierarchy(node.GetChild(i), mesh_list)
@@ -1163,7 +1244,7 @@ def generate_mesh_list(scene):
     if node:
         for i in range(node.GetChildCount()):
             generate_mesh_list_from_hierarchy(node.GetChild(i), mesh_list)
-    return embed_list
+    return mesh_list
 
 # #####################################################
 # Generate - Embeds
@@ -1181,7 +1262,7 @@ def generate_embed_list_from_hierarchy(node, embed_list):
             if attribute_type != FbxNodeAttribute.eMesh:
                 converter.TriangulateInPlace(node);
 
-            embed_string = generate_scene_mesh_string(node)
+            embed_string = generate_mesh_string_for_scene_output(node)
             embed_list.append(embed_string)
 
     for i in range(node.GetChildCount()):
@@ -1593,34 +1674,14 @@ def generate_scene_objects_string(scene):
     return "\n".join(object_list), object_count
 
 # #####################################################
-# Parse - Geometry
+# Parse - Geometry (non-scene output)
 # #####################################################
 def extract_geometry(scene, filename):
-
-    embeds_list = generate_embed_list(scene)
-
-    #TODO: update the Three.js Geometry Format to support multiple geometries?
-    embeds = [ embeds_list[0] ] if len(embeds_list) > 0 else []
-    embeds = generateMultiLineString( embeds_list, ",\n\n\t", 0 )
-
-    output = [
-
-        '{',
-        ' "metadata": {',
-        ' "formatVersion" : 3.2,',
-        ' "type" : "geometry",',
-        ' "generatedBy" : "convert-to-threejs.py"',
-        ' },',
-        '',
-        '\t' + embeds,
-        '}'
-
-    ]
-
-    return "\n".join(output)
+    mesh_string = generate_mesh_string_for_non_scene_output(scene)
+    return mesh_string
 
 # #####################################################
-# Parse - Scene
+# Parse - Scene (scene output)
 # #####################################################
 def extract_scene(scene, filename):
     global_settings = scene.GetGlobalSettings()