|
@@ -25,1423 +25,138 @@ http://www.godotengine.org
|
|
|
"""
|
|
|
|
|
|
import os
|
|
|
-import time
|
|
|
-import math
|
|
|
-import shutil
|
|
|
+import logging
|
|
|
import bpy
|
|
|
-import bmesh
|
|
|
-from mathutils import Vector, Matrix
|
|
|
|
|
|
-#sections (in this order)
|
|
|
-S_EXTERNAL_RES = 0
|
|
|
-S_INTERNAL_RES = 1
|
|
|
-S_NODES = 2
|
|
|
+from . import structures
|
|
|
+from . import converters
|
|
|
|
|
|
-CMP_EPSILON = 0.0001
|
|
|
+logging.basicConfig(level=logging.INFO, format="[%(levelname)s]: %(message)s")
|
|
|
|
|
|
|
|
|
-def snap_tup(tup):
|
|
|
- ret = ()
|
|
|
- for x in tup:
|
|
|
- ret += (x - math.fmod(x, 0.0001), )
|
|
|
|
|
|
- return tup
|
|
|
+def find_godot_project_dir(export_path):
|
|
|
+ """Finds the project.godot file assuming that the export path
|
|
|
+ is inside a project (looks for a project.godot file)"""
|
|
|
+ project_dir = export_path
|
|
|
|
|
|
-
|
|
|
-def fix_matrix(mtx):
|
|
|
-
|
|
|
- tr = Matrix(mtx)
|
|
|
- up_axis = 2
|
|
|
-
|
|
|
- for i in range(3):
|
|
|
- tr[1][i], tr[up_axis][i] = tr[up_axis][i], tr[1][i]
|
|
|
- for i in range(3):
|
|
|
- tr[i][1], tr[i][up_axis] = tr[i][up_axis], tr[i][1]
|
|
|
-
|
|
|
- tr[1][3], tr[up_axis][3] = tr[up_axis][3], tr[1][3]
|
|
|
-
|
|
|
- tr[up_axis][0] = -tr[up_axis][0];
|
|
|
- tr[up_axis][1] = -tr[up_axis][1];
|
|
|
- tr[0][up_axis] = -tr[0][up_axis];
|
|
|
- tr[1][up_axis] = -tr[1][up_axis];
|
|
|
- tr[up_axis][3] = -tr[up_axis][3]
|
|
|
-
|
|
|
- return tr
|
|
|
-
|
|
|
-def fix_vertex(vtx):
|
|
|
- return Vector((vtx.x,vtx.z,-vtx.y))
|
|
|
-
|
|
|
-
|
|
|
-def strmtx(mtx):
|
|
|
- mtx = fix_matrix(mtx)
|
|
|
- s = ""
|
|
|
- for x in range(3):
|
|
|
- for y in range(3):
|
|
|
- if (x!=0 or y!=0):
|
|
|
- s+=", "
|
|
|
- s += "{} ".format(mtx[x][y])
|
|
|
-
|
|
|
- for x in range(3):
|
|
|
- s += ",{} ".format(mtx[x][3])
|
|
|
-
|
|
|
- s = "Transform( {} )".format(s)
|
|
|
- return s
|
|
|
-
|
|
|
-
|
|
|
-def numarr(a, mult=1.0):
|
|
|
- s = " "
|
|
|
- for x in a:
|
|
|
- s += " {}".format(x * mult)
|
|
|
- s += " "
|
|
|
- return s
|
|
|
-
|
|
|
-
|
|
|
-def numarr_alpha(a, mult=1.0):
|
|
|
- s = " "
|
|
|
- for x in a:
|
|
|
- s += " {}".format(x * mult)
|
|
|
- if len(a) == 3:
|
|
|
- s += " 1.0"
|
|
|
- s += " "
|
|
|
- return s
|
|
|
-
|
|
|
-
|
|
|
-def strarr(arr):
|
|
|
- s = " "
|
|
|
- for x in arr:
|
|
|
- s += " {}".format(x)
|
|
|
- s += " "
|
|
|
- return s
|
|
|
+ while not os.path.isfile(os.path.join(project_dir, "project.godot")):
|
|
|
+ project_dir = os.path.split(project_dir)[0]
|
|
|
+ if project_dir == "/" or len(project_dir) < 3:
|
|
|
+ logging.error("Unable to find godot project file")
|
|
|
+ return None
|
|
|
+ logging.info("Found godot project directory at %s", project_dir)
|
|
|
+ return project_dir
|
|
|
|
|
|
|
|
|
class GodotExporter:
|
|
|
-
|
|
|
- def validate_id(self, d):
|
|
|
- if (d.find("id-") == 0):
|
|
|
- return "z{}".format(d)
|
|
|
- return d
|
|
|
-
|
|
|
-
|
|
|
- def new_resource_id(self):
|
|
|
- self.last_res_id += 1
|
|
|
- return self.last_res_id
|
|
|
-
|
|
|
- def new_external_resource_id(self):
|
|
|
- self.last_ext_res_id += 1
|
|
|
- return self.last_ext_res_id
|
|
|
-
|
|
|
- class Vertex:
|
|
|
-
|
|
|
- def close_to(self, v):
|
|
|
- if self.vertex - v.vertex.length() > CMP_EPSILON:
|
|
|
- return False
|
|
|
- if self.normal - v.normal.length() > CMP_EPSILON:
|
|
|
- return False
|
|
|
- if self.uv - v.uv.length() > CMP_EPSILON:
|
|
|
- return False
|
|
|
- if self.uv2 - v.uv2.length() > CMP_EPSILON:
|
|
|
- return False
|
|
|
-
|
|
|
- return True
|
|
|
-
|
|
|
- def get_tup(self):
|
|
|
- tup = (self.vertex.x, self.vertex.y, self.vertex.z, self.normal.x,
|
|
|
- self.normal.y, self.normal.z)
|
|
|
- for t in self.uv:
|
|
|
- tup = tup + (t.x, t.y)
|
|
|
- if self.color is not None:
|
|
|
- tup = tup + (self.color.x, self.color.y, self.color.z)
|
|
|
- if self.tangent is not None:
|
|
|
- tup = tup + (self.tangent.x, self.tangent.y, self.tangent.z)
|
|
|
- if self.bitangent is not None:
|
|
|
- tup = tup + (self.bitangent.x, self.bitangent.y,
|
|
|
- self.bitangent.z)
|
|
|
- for t in self.bones:
|
|
|
- tup = tup + (float(t), )
|
|
|
- for t in self.weights:
|
|
|
- tup = tup + (float(t), )
|
|
|
-
|
|
|
- return tup
|
|
|
-
|
|
|
- __slots__ = ("vertex", "normal", "tangent", "bitangent", "color", "uv",
|
|
|
- "uv2", "bones", "weights")
|
|
|
-
|
|
|
- def __init__(self):
|
|
|
- self.vertex = Vector((0.0, 0.0, 0.0))
|
|
|
- self.normal = Vector((0.0, 0.0, 0.0))
|
|
|
- self.tangent = None
|
|
|
- self.bitangent = None
|
|
|
- self.color = None
|
|
|
- self.uv = []
|
|
|
- self.uv2 = Vector((0.0, 0.0))
|
|
|
- self.bones = []
|
|
|
- self.weights = []
|
|
|
-
|
|
|
- def writel(self, section, indent, text):
|
|
|
- if (not (section in self.sections)):
|
|
|
- self.sections[section] = []
|
|
|
- line = "{}{}".format(indent * "\t", text)
|
|
|
- self.sections[section].append(line)
|
|
|
-
|
|
|
- def purge_empty_nodes(self):
|
|
|
- sections = {}
|
|
|
- for k, v in self.sections.items():
|
|
|
- if not (len(v) == 2 and v[0][1:] == v[1][2:]):
|
|
|
- sections[k] = v
|
|
|
- self.sections = sections
|
|
|
-
|
|
|
- def to_color(self,color):
|
|
|
- return "Color( {}, {}, {}, 1.0 )".format(color[0],color[1],color[2])
|
|
|
-
|
|
|
- def export_image(self, image):
|
|
|
- img_id = self.image_cache.get(image)
|
|
|
- if img_id:
|
|
|
- return img_id
|
|
|
-
|
|
|
- imgpath = image.filepath
|
|
|
- if imgpath.startswith("//"):
|
|
|
- imgpath = bpy.path.abspath(imgpath)
|
|
|
-
|
|
|
- try:
|
|
|
- imgpath = os.path.relpath(imgpath, os.path.dirname(self.path)).replace("\\", "/")
|
|
|
- except:
|
|
|
- # TODO: Review, not sure why it fails
|
|
|
- pass
|
|
|
-
|
|
|
- imgid = str(self.new_external_resource_id())
|
|
|
-
|
|
|
- self.image_cache[image]=imgid
|
|
|
- self.writel(S_EXTERNAL_RES, 0,'[ext_resource path="'+imgpath+'" type="Texture" id='+imgid+']')
|
|
|
- return imgid
|
|
|
-
|
|
|
- def export_material(self, material, double_sided_hint=True):
|
|
|
- material_id = self.material_cache.get(material)
|
|
|
- if material_id:
|
|
|
- return material_id
|
|
|
-
|
|
|
- material_id = str(self.new_resource_id())
|
|
|
- self.material_cache[material]=material_id
|
|
|
-
|
|
|
- self.writel(S_INTERNAL_RES,0,'\n[sub_resource type="SpatialMaterial" id='+material_id+']\n')
|
|
|
- return material_id
|
|
|
-
|
|
|
-
|
|
|
- class Surface:
|
|
|
- def __init__(self):
|
|
|
- self.vertices = []
|
|
|
- self.vertex_map = {}
|
|
|
- self.indices = []
|
|
|
-
|
|
|
- def make_arrays(self, node, armature, mesh_lines, ret_materials, skeyindex=-1):
|
|
|
-
|
|
|
- mesh = node.to_mesh(self.scene, self.config["use_mesh_modifiers"],
|
|
|
- "RENDER") # TODO: Review
|
|
|
- self.temp_meshes.add(mesh)
|
|
|
-
|
|
|
- if (True): # Triangulate, always
|
|
|
- bm = bmesh.new()
|
|
|
- bm.from_mesh(mesh)
|
|
|
- bmesh.ops.triangulate(bm, faces=bm.faces)
|
|
|
- bm.to_mesh(mesh)
|
|
|
- bm.free()
|
|
|
-
|
|
|
-
|
|
|
- surfaces = []
|
|
|
- material_to_surface = {}
|
|
|
-
|
|
|
- mesh.update(calc_tessface=True)
|
|
|
-
|
|
|
- si = None
|
|
|
- if armature is not None:
|
|
|
- si = self.skeleton_info[armature]
|
|
|
-
|
|
|
- # TODO: Implement automatic tangent detection
|
|
|
- has_tangents = True # always use tangents, we are grown up now.
|
|
|
-
|
|
|
- has_colors = len(mesh.vertex_colors)
|
|
|
- mat_assign = []
|
|
|
-
|
|
|
- uv_layer_count = len(mesh.uv_textures)
|
|
|
- if (uv_layer_count>2):
|
|
|
- uv_layer_count=2
|
|
|
-
|
|
|
- if has_tangents and len(mesh.uv_textures):
|
|
|
- try:
|
|
|
- mesh.calc_tangents()
|
|
|
- except:
|
|
|
- self.operator.report(
|
|
|
- {"WARNING"},
|
|
|
- "CalcTangets failed for mesh \"{}\", no tangets will be "
|
|
|
- "exported.".format(mesh.name))
|
|
|
- mesh.calc_normals_split()
|
|
|
- has_tangents = False
|
|
|
-
|
|
|
- else:
|
|
|
- mesh.calc_normals_split()
|
|
|
- has_tangents = False
|
|
|
-
|
|
|
-
|
|
|
- for fi in range(len(mesh.polygons)):
|
|
|
- f = mesh.polygons[fi]
|
|
|
-
|
|
|
- if not (f.material_index in material_to_surface):
|
|
|
- material_to_surface[f.material_index] = len(surfaces)
|
|
|
- surfaces.append( self.Surface() )
|
|
|
-
|
|
|
- try:
|
|
|
- # TODO: Review, understand why it throws
|
|
|
- mat = mesh.materials[f.material_index]
|
|
|
- except:
|
|
|
- mat = None
|
|
|
-
|
|
|
- if (mat is not None):
|
|
|
- ret_materials.append(self.export_material(
|
|
|
- mat, mesh.show_double_sided))
|
|
|
- else:
|
|
|
- ret_materials.append(None)
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
- surface = surfaces[material_to_surface[f.material_index]]
|
|
|
- vi = []
|
|
|
-
|
|
|
- for lt in range(f.loop_total):
|
|
|
- loop_index = f.loop_start + lt
|
|
|
- ml = mesh.loops[loop_index]
|
|
|
- mv = mesh.vertices[ml.vertex_index]
|
|
|
-
|
|
|
- v = self.Vertex()
|
|
|
- v.vertex = fix_vertex(Vector(mv.co))
|
|
|
-
|
|
|
- for xt in mesh.uv_layers:
|
|
|
- v.uv.append(Vector(xt.data[loop_index].uv))
|
|
|
-
|
|
|
- if (has_colors):
|
|
|
- v.color = Vector(
|
|
|
- mesh.vertex_colors[0].data[loop_index].color)
|
|
|
-
|
|
|
- v.normal = fix_vertex(Vector(ml.normal))
|
|
|
-
|
|
|
- if (has_tangents):
|
|
|
- v.tangent = fix_vertex(Vector(ml.tangent))
|
|
|
- v.bitangent = fix_vertex(Vector(ml.bitangent))
|
|
|
-
|
|
|
- if armature is not None:
|
|
|
- wsum = 0.0
|
|
|
-
|
|
|
- for vg in mv.groups:
|
|
|
- if vg.group >= len(node.vertex_groups):
|
|
|
- continue
|
|
|
- name = node.vertex_groups[vg.group].name
|
|
|
-
|
|
|
- if (name in si["bone_index"]):
|
|
|
- # TODO: Try using 0.0001 since Blender uses
|
|
|
- # zero weight
|
|
|
- if (vg.weight > 0.001):
|
|
|
- v.bones.append(si["bone_index"][name])
|
|
|
- v.weights.append(vg.weight)
|
|
|
- wsum += vg.weight
|
|
|
- if (wsum == 0.0):
|
|
|
- if not self.wrongvtx_report:
|
|
|
- self.operator.report(
|
|
|
- {"WARNING"},
|
|
|
- "Mesh for object \"{}\" has unassigned "
|
|
|
- "weights. This may look wrong in exported "
|
|
|
- "model.".format(node.name))
|
|
|
- self.wrongvtx_report = True
|
|
|
-
|
|
|
- # TODO: Explore how to deal with zero-weight bones,
|
|
|
- # which remain local
|
|
|
- v.bones.append(0)
|
|
|
- v.weights.append(1)
|
|
|
-
|
|
|
- tup = v.get_tup()
|
|
|
- idx = 0
|
|
|
- # Do not optmize if using shapekeys
|
|
|
- if (skeyindex == -1 and tup in surface.vertex_map):
|
|
|
- idx = surface.vertex_map[tup]
|
|
|
- else:
|
|
|
- idx = len(surface.vertices)
|
|
|
- surface.vertices.append(v)
|
|
|
- surface.vertex_map[tup] = idx
|
|
|
-
|
|
|
- vi.append(idx)
|
|
|
-
|
|
|
- if (len(vi) > 2): # Only triangles and above
|
|
|
- surface.indices.append(vi)
|
|
|
-
|
|
|
-
|
|
|
- for s in surfaces:
|
|
|
- surface_lines=[]
|
|
|
-
|
|
|
- #Vertices
|
|
|
- float_values = "Vector3Array("
|
|
|
- first=""
|
|
|
- for v in s.vertices:
|
|
|
- float_values += first+" {}, {}, {}".format(
|
|
|
- v.vertex.x, v.vertex.y, v.vertex.z)
|
|
|
- first=","
|
|
|
- float_values+="),"
|
|
|
- surface_lines.append(float_values)
|
|
|
-
|
|
|
- # Normals Array
|
|
|
- float_values = "Vector3Array("
|
|
|
- first=""
|
|
|
- for v in s.vertices:
|
|
|
- float_values += first+" {}, {}, {}".format(
|
|
|
- v.normal.x, v.normal.y, v.normal.z)
|
|
|
- first=","
|
|
|
- float_values+="),"
|
|
|
- surface_lines.append(float_values)
|
|
|
-
|
|
|
-
|
|
|
- if (has_tangents):
|
|
|
- float_values = "FloatArray("
|
|
|
- first=""
|
|
|
- for v in s.vertices:
|
|
|
- cr = [(v.normal.y * v.tangent.z) - (v.normal.z * v.tangent.y),
|
|
|
- (v.normal.z * v.tangent.x) - (v.normal.x * v.tangent.z),
|
|
|
- (v.normal.x * v.tangent.y) - (v.normal.y * v.tangent.x)]
|
|
|
- dp = cr[0]*v.bitangent.x + cr[1]*v.bitangent.y + cr[2]*v.bitangent.z
|
|
|
- if (dp>0):
|
|
|
- dp=1.0
|
|
|
- else:
|
|
|
- dp=-1.0
|
|
|
-
|
|
|
- float_values += first+" {}, {}, {}, {}".format(
|
|
|
- v.tangent.x, v.tangent.y, v.tangent.z,dp)
|
|
|
- first=","
|
|
|
- float_values+="),"
|
|
|
- surface_lines.append(float_values)
|
|
|
- else:
|
|
|
- surface_lines.append("null, ; No Tangents")
|
|
|
-
|
|
|
- # Color Arrays
|
|
|
- if (has_colors):
|
|
|
- float_values = "ColorArray("
|
|
|
- first=""
|
|
|
- for v in s.vertices:
|
|
|
- float_values += first+" {}, {}, {}".format(
|
|
|
- v.color.x, v.color.y, v.color.z)
|
|
|
- first=","
|
|
|
- float_values+="),"
|
|
|
- surface_lines.append(float_values)
|
|
|
- else:
|
|
|
- surface_lines.append("null, ; No Colors")
|
|
|
-
|
|
|
- # UV Arrays
|
|
|
- for i in range(2):
|
|
|
- if (i >= uv_layer_count):
|
|
|
- surface_lines.append("null, ; No UV"+str(i+1))
|
|
|
- continue
|
|
|
- float_values = "Vector2Array("
|
|
|
- first=","
|
|
|
- for v in s.vertices:
|
|
|
- try:
|
|
|
- float_values += " {}, {}".format(v.uv[i].x, v.uv[i].y)+first
|
|
|
- except:
|
|
|
- # TODO: Review, understand better the multi-uv-layer API
|
|
|
- float_values += " 0, 0 "
|
|
|
-
|
|
|
- first=""
|
|
|
- float_values+="),"
|
|
|
- surface_lines.append(float_values)
|
|
|
-
|
|
|
- # Bones and Weights
|
|
|
- # Export armature data (if armature exists)
|
|
|
- if (armature is not None):
|
|
|
- # Skin Weights!
|
|
|
- float_values = "FloatArray("
|
|
|
- float_valuesw = "FloatArray("
|
|
|
- first=True
|
|
|
- for v in s.vertices:
|
|
|
- skin_weights_total += len(v.weights)
|
|
|
- w = []
|
|
|
- for i in len(v.bones):
|
|
|
- w += (v.bones[i],v.weights[i])
|
|
|
-
|
|
|
- w = sorted( w, key=lambda x: -x[1])
|
|
|
- totalw = 0.0
|
|
|
- for x in w:
|
|
|
- totalw+=x[1]
|
|
|
- if (totalw==0.0):
|
|
|
- totalw=0.000000001
|
|
|
-
|
|
|
-
|
|
|
- for i in range(4):
|
|
|
- if (i>0):
|
|
|
- float_values+=","
|
|
|
- float_valuesw+=","
|
|
|
- if (i<len(w)):
|
|
|
- float_values+=" {}".format(w[i][0])
|
|
|
- float_valuesw+=" {}".format(w[i][1]/totalw)
|
|
|
- else:
|
|
|
- float_values+=" 0"
|
|
|
- float_valuesw+=" 0.0"
|
|
|
-
|
|
|
- if (not first):
|
|
|
- float_values+=","
|
|
|
- float_valuesw+=","
|
|
|
- else:
|
|
|
- first=False
|
|
|
-
|
|
|
- float_values+="),"
|
|
|
- surface_lines.append(float_values)
|
|
|
- float_valuesw+="),"
|
|
|
- surface_lines.append(float_valuesw)
|
|
|
-
|
|
|
- else:
|
|
|
- surface_lines.append("null, ; No Bones")
|
|
|
- surface_lines.append("null, ; No Weights")
|
|
|
-
|
|
|
-
|
|
|
- # Indices
|
|
|
- int_values = "IntArray("
|
|
|
- first=""
|
|
|
- for v in s.indices:
|
|
|
- int_values += first+" {}, {}, {} ".format(v[0],v[2],v[1]) #flip order as godot uses front is clockwise
|
|
|
- first=","
|
|
|
-
|
|
|
- int_values+="),"
|
|
|
- surface_lines.append(int_values)
|
|
|
- mesh_lines.append(surface_lines)
|
|
|
-
|
|
|
-
|
|
|
- def export_mesh(self, node, armature=None, skeyindex=-1, skel_source=None,
|
|
|
- custom_name=None):
|
|
|
- mesh = node.data
|
|
|
-
|
|
|
- if (node.data in self.mesh_cache):
|
|
|
- return self.mesh_cache[mesh]
|
|
|
-
|
|
|
- morph_target_arrays=[]
|
|
|
- morph_target_names= []
|
|
|
-
|
|
|
- if (mesh.shape_keys is not None and len(
|
|
|
- mesh.shape_keys.key_blocks)):
|
|
|
- values = []
|
|
|
- morph_targets = []
|
|
|
- md = None
|
|
|
- for k in range(0, len(mesh.shape_keys.key_blocks)):
|
|
|
- shape = node.data.shape_keys.key_blocks[k]
|
|
|
- values += [shape.value]
|
|
|
- shape.value = 0
|
|
|
-
|
|
|
- mid = self.new_id("morph")
|
|
|
-
|
|
|
- for k in range(0, len(mesh.shape_keys.key_blocks)):
|
|
|
- shape = node.data.shape_keys.key_blocks[k]
|
|
|
- node.show_only_shape_key = True
|
|
|
- node.active_shape_key_index = k
|
|
|
- shape.value = 1.0
|
|
|
- mesh.update()
|
|
|
- p = node.data
|
|
|
- v = node.to_mesh(bpy.context.scene, True, "RENDER")
|
|
|
- self.temp_meshes.add(v)
|
|
|
- node.data = v
|
|
|
- node.data.update()
|
|
|
-
|
|
|
- morph_target_lines = []
|
|
|
- md = self.make_arrays(node, None, morph_target_lines, [], k)
|
|
|
-
|
|
|
- morph_target_names.append(shape.name)
|
|
|
- morph_target_arrays.append(morph_target_lines)
|
|
|
-
|
|
|
- morph_targ
|
|
|
- node.data = p
|
|
|
- node.data.update()
|
|
|
- shape.value = 0.0
|
|
|
-
|
|
|
- node.show_only_shape_key = False
|
|
|
- node.active_shape_key_index = 0
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
- mesh_lines = []
|
|
|
- mesh_materials = []
|
|
|
- self.make_arrays(node, armature, mesh_lines, mesh_materials)
|
|
|
-
|
|
|
- mesh_id = str(self.new_resource_id())
|
|
|
- self.mesh_cache[mesh]=mesh_id
|
|
|
-
|
|
|
- self.writel(S_INTERNAL_RES,0,'\n[sub_resource type="ArrayMesh" id='+mesh_id+']\n')
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
- for i in range(len(mesh_lines)):
|
|
|
- pfx = "surfaces/"+str(i)+"/"
|
|
|
- self.writel(S_INTERNAL_RES,0,"surfaces/"+str(i)+"={")
|
|
|
- if (mesh_materials[i]!=None):
|
|
|
- self.writel(S_INTERNAL_RES,1,"\"material\":SubResource("+str(mesh_materials[i])+"),")
|
|
|
- self.writel(S_INTERNAL_RES,1,"\"primitive\":4,")
|
|
|
- self.writel(S_INTERNAL_RES,1,"\"arrays\":[")
|
|
|
- for sline in mesh_lines[i]:
|
|
|
- self.writel(S_INTERNAL_RES,2,sline)
|
|
|
- self.writel(S_INTERNAL_RES,1,"],")
|
|
|
- self.writel(S_INTERNAL_RES,1,"\"morph_arrays\":[]")
|
|
|
- self.writel(S_INTERNAL_RES,0,"}")
|
|
|
-
|
|
|
- return mesh_id
|
|
|
-
|
|
|
- def export_mesh_node(self, node, parent_path):
|
|
|
- if (node.data is None):
|
|
|
- return
|
|
|
-
|
|
|
- armature = None
|
|
|
- armcount = 0
|
|
|
- for n in node.modifiers:
|
|
|
- if (n.type == "ARMATURE"):
|
|
|
- armcount += 1
|
|
|
-
|
|
|
- self.writel(S_NODES,0, '\n[node name="'+node.name+'" type="MeshInstance" parent="'+parent_path+'"]\n')
|
|
|
-
|
|
|
- """ Armature should happen just by direct relationship, since godot supports it the same way as Blender now
|
|
|
- if (node.parent is not None):
|
|
|
- if (node.parent.type == "ARMATURE"):
|
|
|
- armature = node.parent
|
|
|
- if (armcount > 1):
|
|
|
- self.operator.report(
|
|
|
- {"WARNING"}, "Object \"{}\" refers "
|
|
|
- "to more than one armature! "
|
|
|
- "This is unsupported.".format(node.name))
|
|
|
- if (armcount == 0):
|
|
|
- self.operator.report(
|
|
|
- {"WARNING"}, "Object \"{}\" is child "
|
|
|
- "of an armature, but has no armature modifier.".format(
|
|
|
- node.name))
|
|
|
-
|
|
|
- if (armcount > 0 and not armature):
|
|
|
- self.operator.report(
|
|
|
- {"WARNING"},
|
|
|
- "Object \"{}\" has armature modifier, but is not a child of "
|
|
|
- "an armature. This is unsupported.".format(node.name))
|
|
|
- """
|
|
|
-
|
|
|
- if (node.data.shape_keys is not None):
|
|
|
- sk = node.data.shape_keys
|
|
|
- if (sk.animation_data):
|
|
|
- for d in sk.animation_data.drivers:
|
|
|
- if (d.driver):
|
|
|
- for v in d.driver.variables:
|
|
|
- for t in v.targets:
|
|
|
- if (t.id is not None and
|
|
|
- t.id.name in self.scene.objects):
|
|
|
- self.armature_for_morph[
|
|
|
- node] = self.scene.objects[t.id.name]
|
|
|
-
|
|
|
- meshdata = self.export_mesh(node, armature)
|
|
|
-
|
|
|
- self.writel(S_NODES,0, 'mesh=SubResource('+str(meshdata)+")")
|
|
|
-
|
|
|
- close_controller = False
|
|
|
-
|
|
|
- """
|
|
|
- Rest of armature/morph stuff
|
|
|
- if ("skin_id" in meshdata):
|
|
|
- close_controller = True
|
|
|
- self.writel(
|
|
|
- S_NODES, il, "<instance_controller url=\"#{}\">".format(
|
|
|
- meshdata["skin_id"]))
|
|
|
- for sn in self.skeleton_info[armature]["skeleton_nodes"]:
|
|
|
- self.writel(
|
|
|
- S_NODES, il + 1, "<skeleton>#{}</skeleton>".format(sn))
|
|
|
- elif ("morph_id" in meshdata):
|
|
|
- self.writel(
|
|
|
- S_NODES, il, "<instance_controller url=\"#{}\">".format(
|
|
|
- meshdata["morph_id"]))
|
|
|
- close_controller = True
|
|
|
- elif (armature is None):
|
|
|
- self.writel(S_NODES, il, "<instance_geometry url=\"#{}\">".format(
|
|
|
- meshdata["id"]))
|
|
|
-
|
|
|
- if (len(meshdata["material_assign"]) > 0):
|
|
|
- self.writel(S_NODES, il + 1, "<bind_material>")
|
|
|
- self.writel(S_NODES, il + 2, "<technique_common>")
|
|
|
- for m in meshdata["material_assign"]:
|
|
|
- self.writel(
|
|
|
- S_NODES, il + 3,
|
|
|
- "<instance_material symbol=\"{}\" target=\"#{}\"/>".format(
|
|
|
- m[1], m[0]))
|
|
|
-
|
|
|
- self.writel(S_NODES, il + 2, "</technique_common>")
|
|
|
- self.writel(S_NODES, il + 1, "</bind_material>")
|
|
|
-
|
|
|
- if (close_controller):
|
|
|
- self.writel(S_NODES, il, "</instance_controller>")
|
|
|
- else:
|
|
|
- self.writel(S_NODES, il, "</instance_geometry>")
|
|
|
- """
|
|
|
-
|
|
|
- """
|
|
|
- def export_armature_bone(self, bone, il, si):
|
|
|
- is_ctrl_bone = (
|
|
|
- bone.name.startswith("ctrl") and
|
|
|
- self.config["use_exclude_ctrl_bones"])
|
|
|
- if (bone.parent is None and is_ctrl_bone is True):
|
|
|
- self.operator.report(
|
|
|
- {"WARNING"}, "Root bone cannot be a control bone.")
|
|
|
- is_ctrl_bone = False
|
|
|
-
|
|
|
- if (is_ctrl_bone is False):
|
|
|
- boneid = self.new_id("bone")
|
|
|
- boneidx = si["bone_count"]
|
|
|
- si["bone_count"] += 1
|
|
|
- bonesid = "{}-{}".format(si["id"], boneidx)
|
|
|
- if (bone.name in self.used_bones):
|
|
|
- if (self.config["use_anim_action_all"]):
|
|
|
- self.operator.report(
|
|
|
- {"WARNING"}, "Bone name \"{}\" used in more than one "
|
|
|
- "skeleton. Actions might export wrong.".format(
|
|
|
- bone.name))
|
|
|
- else:
|
|
|
- self.used_bones.append(bone.name)
|
|
|
-
|
|
|
- si["bone_index"][bone.name] = boneidx
|
|
|
- si["bone_ids"][bone] = boneid
|
|
|
- si["bone_names"].append(bonesid)
|
|
|
- self.writel(
|
|
|
- S_NODES, il, "<node id=\"{}\" sid=\"{}\" name=\"{}\" "
|
|
|
- "type=\"JOINT\">".format(boneid, bonesid, bone.name))
|
|
|
-
|
|
|
- if (is_ctrl_bone is False):
|
|
|
- il += 1
|
|
|
-
|
|
|
- xform = bone.matrix_local
|
|
|
- if (is_ctrl_bone is False):
|
|
|
- si["bone_bind_poses"].append(
|
|
|
- (si["armature_xform"] * xform).inverted_safe())
|
|
|
-
|
|
|
- if (bone.parent is not None):
|
|
|
- xform = bone.parent.matrix_local.inverted_safe() * xform
|
|
|
- else:
|
|
|
- si["skeleton_nodes"].append(boneid)
|
|
|
-
|
|
|
- if (is_ctrl_bone is False):
|
|
|
- self.writel(
|
|
|
- S_NODES, il, "<matrix sid=\"transform\">{}</matrix>".format(
|
|
|
- strmtx(xform)))
|
|
|
-
|
|
|
- for c in bone.children:
|
|
|
- self.export_armature_bone(c, il, si)
|
|
|
-
|
|
|
- if (is_ctrl_bone is False):
|
|
|
- il -= 1
|
|
|
- self.writel(S_NODES, il, "</node>")
|
|
|
-
|
|
|
- def export_armature_node(self, node, il, parent_path):
|
|
|
- if (node.data is None):
|
|
|
- return
|
|
|
-
|
|
|
- self.skeletons.append(node)
|
|
|
-
|
|
|
- armature = node.data
|
|
|
- self.skeleton_info[node] = {
|
|
|
- "bone_count": 0,
|
|
|
- "id": self.new_id("skelbones"),
|
|
|
- "name": node.name,
|
|
|
- "bone_index": {},
|
|
|
- "bone_ids": {},
|
|
|
- "bone_names": [],
|
|
|
- "bone_bind_poses": [],
|
|
|
- "skeleton_nodes": [],
|
|
|
- "armature_xform": node.matrix_world
|
|
|
- }
|
|
|
-
|
|
|
- for b in armature.bones:
|
|
|
- if (b.parent is not None):
|
|
|
- continue
|
|
|
- self.export_armature_bone(b, il, self.skeleton_info[node])
|
|
|
-
|
|
|
- if (node.pose):
|
|
|
- for b in node.pose.bones:
|
|
|
- for x in b.constraints:
|
|
|
- if (x.type == "ACTION"):
|
|
|
- self.action_constraints.append(x.action)
|
|
|
- """
|
|
|
- def export_camera_node(self, node, parent_path):
|
|
|
- if (node.data is None):
|
|
|
- return
|
|
|
-
|
|
|
- self.writel(S_NODES,0, '\n[node name="'+node.name+'" type="Camera" parent="'+parent_path+'"]\n')
|
|
|
- camera = node.data
|
|
|
-
|
|
|
- if (camera.type == "PERSP"):
|
|
|
- self.writel(S_NODES,0, "projection=0")
|
|
|
- self.writel(S_NODES,0, "fov="+str(math.degrees(camera.angle)))
|
|
|
- self.writel(S_NODES,0, "far="+str(math.degrees(camera.clip_end)))
|
|
|
- self.writel(S_NODES,0, "near="+str(math.degrees(camera.clip_start)))
|
|
|
-
|
|
|
- else:
|
|
|
- self.writel(S_NODES,0, "projection=1")
|
|
|
- self.writel(S_NODES,0, "size="+str(math.degrees(camera.ortho_scale * 0.5)))
|
|
|
- self.writel(S_NODES,0, "far="+str(math.degrees(camera.clip_end)))
|
|
|
- self.writel(S_NODES,0, "near="+str(math.degrees(camera.clip_start)))
|
|
|
-
|
|
|
-
|
|
|
- def export_lamp_node(self, node, parent_path):
|
|
|
- if (node.data is None):
|
|
|
- return
|
|
|
-
|
|
|
- light = node.data
|
|
|
-
|
|
|
- if (light.type == "POINT"):
|
|
|
- self.writel(S_NODES,0, '\n[node name="'+node.name+'" type="OmniLight" parent="'+parent_path+'"]\n')
|
|
|
-
|
|
|
- self.writel(S_NODES, 0,"light_color="+self.to_color(light.color))
|
|
|
- if (light.use_sphere):
|
|
|
- self.writel(S_NODES, "omni_range={}".format(strarr(light.distance)))
|
|
|
-
|
|
|
- elif (light.type == "SPOT"):
|
|
|
- self.writel(S_NODES,0, '\n[node name="'+node.name+'" type="SpotLight" parent="'+parent_path+'"]\n')
|
|
|
-
|
|
|
- self.writel(S_NODES,0, "light_color="+self.to_color(light.color))
|
|
|
-
|
|
|
- else: # Write a sun lamp for everything else (not supported)
|
|
|
- self.writel(S_NODES,0, '\n[node name="'+node.name+'" type="DirectionalLight" parent="'+parent_path+'"]\n')
|
|
|
- self.writel(S_NODES,0, "light_color="+self.to_color(light.color))
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
- def export_empty_node(self, node, il, parent_path):
|
|
|
- self.writel(S_NODES, '\n[node name="'+node.name+'" type="Position3D" parent="'+parent_path+'"]\n')
|
|
|
-
|
|
|
- """
|
|
|
- def export_curve(self, curve):
|
|
|
- splineid = self.new_id("spline")
|
|
|
-
|
|
|
- self.writel(
|
|
|
- S_GEOM, 1, "<geometry id=\"{}\" name=\"{}\">".format(
|
|
|
- splineid, curve.name))
|
|
|
- self.writel(S_GEOM, 2, "<spline closed=\"0\">")
|
|
|
-
|
|
|
- points = []
|
|
|
- interps = []
|
|
|
- handles_in = []
|
|
|
- handles_out = []
|
|
|
- tilts = []
|
|
|
-
|
|
|
- for cs in curve.splines:
|
|
|
-
|
|
|
- if (cs.type == "BEZIER"):
|
|
|
- for s in cs.bezier_points:
|
|
|
- points.append(s.co[0])
|
|
|
- points.append(s.co[1])
|
|
|
- points.append(s.co[2])
|
|
|
-
|
|
|
- handles_in.append(s.handle_left[0])
|
|
|
- handles_in.append(s.handle_left[1])
|
|
|
- handles_in.append(s.handle_left[2])
|
|
|
-
|
|
|
- handles_out.append(s.handle_right[0])
|
|
|
- handles_out.append(s.handle_right[1])
|
|
|
- handles_out.append(s.handle_right[2])
|
|
|
-
|
|
|
- tilts.append(s.tilt)
|
|
|
- interps.append("BEZIER")
|
|
|
- else:
|
|
|
-
|
|
|
- for s in cs.points:
|
|
|
- points.append(s.co[0])
|
|
|
- points.append(s.co[1])
|
|
|
- points.append(s.co[2])
|
|
|
- handles_in.append(s.co[0])
|
|
|
- handles_in.append(s.co[1])
|
|
|
- handles_in.append(s.co[2])
|
|
|
- handles_out.append(s.co[0])
|
|
|
- handles_out.append(s.co[1])
|
|
|
- handles_out.append(s.co[2])
|
|
|
- tilts.append(s.tilt)
|
|
|
- interps.append("LINEAR")
|
|
|
-
|
|
|
- self.writel(S_GEOM, 3, "<source id=\"{}-positions\">".format(splineid))
|
|
|
- position_values = ""
|
|
|
- for x in points:
|
|
|
- position_values += " {}".format(x)
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<float_array id=\"{}-positions-array\" "
|
|
|
- "count=\"{}\">{}</float_array>".format(
|
|
|
- splineid, len(points), position_values))
|
|
|
- self.writel(S_GEOM, 4, "<technique_common>")
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<accessor source=\"#{}-positions-array\" "
|
|
|
- "count=\"{}\" stride=\"3\">".format(splineid, len(points) / 3))
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"X\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"Y\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"Z\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 4, "</accessor>")
|
|
|
- self.writel(S_GEOM, 3, "</source>")
|
|
|
-
|
|
|
- self.writel(
|
|
|
- S_GEOM, 3, "<source id=\"{}-intangents\">".format(splineid))
|
|
|
- intangent_values = ""
|
|
|
- for x in handles_in:
|
|
|
- intangent_values += " {}".format(x)
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<float_array id=\"{}-intangents-array\" "
|
|
|
- "count=\"{}\">{}</float_array>".format(
|
|
|
- splineid, len(points), intangent_values))
|
|
|
- self.writel(S_GEOM, 4, "<technique_common>")
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<accessor source=\"#{}-intangents-array\" "
|
|
|
- "count=\"{}\" stride=\"3\">".format(splineid, len(points) / 3))
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"X\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"Y\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"Z\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 4, "</accessor>")
|
|
|
- self.writel(S_GEOM, 3, "</source>")
|
|
|
-
|
|
|
- self.writel(S_GEOM, 3, "<source id=\"{}-outtangents\">".format(
|
|
|
- splineid))
|
|
|
- outtangent_values = ""
|
|
|
- for x in handles_out:
|
|
|
- outtangent_values += " {}".format(x)
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<float_array id=\"{}-outtangents-array\" "
|
|
|
- "count=\"{}\">{}</float_array>".format(
|
|
|
- splineid, len(points), outtangent_values))
|
|
|
- self.writel(S_GEOM, 4, "<technique_common>")
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<accessor source=\"#{}-outtangents-array\" "
|
|
|
- "count=\"{}\" stride=\"3\">".format(splineid, len(points) / 3))
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"X\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"Y\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"Z\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 4, "</accessor>")
|
|
|
- self.writel(S_GEOM, 3, "</source>")
|
|
|
-
|
|
|
- self.writel(
|
|
|
- S_GEOM, 3, "<source id=\"{}-interpolations\">".format(splineid))
|
|
|
- interpolation_values = ""
|
|
|
- for x in interps:
|
|
|
- interpolation_values += " {}".format(x)
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<Name_array id=\"{}-interpolations-array\" "
|
|
|
- "count=\"{}\">{}</Name_array>"
|
|
|
- .format(splineid, len(interps), interpolation_values))
|
|
|
- self.writel(S_GEOM, 4, "<technique_common>")
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<accessor source=\"#{}-interpolations-array\" "
|
|
|
- "count=\"{}\" stride=\"1\">".format(splineid, len(interps)))
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"INTERPOLATION\" type=\"name\"/>")
|
|
|
- self.writel(S_GEOM, 4, "</accessor>")
|
|
|
- self.writel(S_GEOM, 3, "</source>")
|
|
|
-
|
|
|
- self.writel(S_GEOM, 3, "<source id=\"{}-tilts\">".format(splineid))
|
|
|
- tilt_values = ""
|
|
|
- for x in tilts:
|
|
|
- tilt_values += " {}".format(x)
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4,
|
|
|
- "<float_array id=\"{}-tilts-array\" count=\"{}\">{}</float_array>"
|
|
|
- .format(splineid, len(tilts), tilt_values))
|
|
|
- self.writel(S_GEOM, 4, "<technique_common>")
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<accessor source=\"#{}-tilts-array\" "
|
|
|
- "count=\"{}\" stride=\"1\">".format(splineid, len(tilts)))
|
|
|
- self.writel(S_GEOM, 5, "<param name=\"TILT\" type=\"float\"/>")
|
|
|
- self.writel(S_GEOM, 4, "</accessor>")
|
|
|
- self.writel(S_GEOM, 3, "</source>")
|
|
|
-
|
|
|
- self.writel(S_GEOM, 3, "<control_vertices>")
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4,
|
|
|
- "<input semantic=\"POSITION\" source=\"#{}-positions\"/>"
|
|
|
- .format(splineid))
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4,
|
|
|
- "<input semantic=\"IN_TANGENT\" source=\"#{}-intangents\"/>"
|
|
|
- .format(splineid))
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<input semantic=\"OUT_TANGENT\" "
|
|
|
- "source=\"#{}-outtangents\"/>".format(splineid))
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<input semantic=\"INTERPOLATION\" "
|
|
|
- "source=\"#{}-interpolations\"/>".format(splineid))
|
|
|
- self.writel(
|
|
|
- S_GEOM, 4, "<input semantic=\"TILT\" source=\"#{}-tilts\"/>"
|
|
|
- .format(splineid))
|
|
|
- self.writel(S_GEOM, 3, "</control_vertices>")
|
|
|
-
|
|
|
- self.writel(S_GEOM, 2, "</spline>")
|
|
|
- self.writel(S_GEOM, 1, "</geometry>")
|
|
|
-
|
|
|
- return splineid
|
|
|
- def export_curve_node(self, node, il):
|
|
|
- if (node.data is None):
|
|
|
- return
|
|
|
-
|
|
|
- curveid = self.export_curve(node.data)
|
|
|
-
|
|
|
- self.writel(S_NODES, il, "<instance_geometry url=\"#{}\">".format(
|
|
|
- curveid))
|
|
|
- self.writel(S_NODES, il, "</instance_geometry>")
|
|
|
- """
|
|
|
+ """Handles picking what nodes to export and kicks off the export process"""
|
|
|
|
|
|
def export_node(self, node, parent_path):
|
|
|
- if (node not in self.valid_nodes):
|
|
|
+ """Recursively export a node. It calls the export_node function on
|
|
|
+ all of the node's children. If you have hierarchies more than 1000 nodes
|
|
|
+ deep, this will fail with a recursion error"""
|
|
|
+ if node not in self.valid_nodes:
|
|
|
return
|
|
|
+ logging.info("Exporting Blender Object: %s", node.name)
|
|
|
|
|
|
-
|
|
|
prev_node = bpy.context.scene.objects.active
|
|
|
bpy.context.scene.objects.active = node
|
|
|
-
|
|
|
- node_name = node.name
|
|
|
-
|
|
|
|
|
|
- if (node.type == "MESH"):
|
|
|
- self.export_mesh_node(node, parent_path)
|
|
|
- #elif (node.type == "CURVE"):
|
|
|
- # self.export_curve_node(node, il)
|
|
|
- #elif (node.type == "ARMATURE"):
|
|
|
- # self.export_armature_node(node, il, node_name, parent_path)
|
|
|
- elif (node.type == "CAMERA"):
|
|
|
- self.export_camera_node(node, parent_path)
|
|
|
- elif (node.type == "LAMP"):
|
|
|
- self.export_lamp_node(node, parent_path)
|
|
|
- elif (node.type == "EMPTY"):
|
|
|
- self.export_empty_node(node, parent_path)
|
|
|
+ # Figure out what function will perform the export of this object
|
|
|
+ if node.type in converters.BLENDER_TYPE_TO_EXPORTER:
|
|
|
+ exporter = converters.BLENDER_TYPE_TO_EXPORTER[node.type]
|
|
|
else:
|
|
|
- self.writel(S_NODES,0, '\n[node name="'+node.name+'" type="Spatial" parent="'+parent_path+'"]\n')
|
|
|
-
|
|
|
+ logging.warning(
|
|
|
+ "Unknown object type. Treating as empty: %s", node.name
|
|
|
+ )
|
|
|
+ exporter = converters.BLENDER_TYPE_TO_EXPORTER["EMPTY"]
|
|
|
|
|
|
- self.writel(
|
|
|
- S_NODES, 0, "transform="+strmtx(node.matrix_local))
|
|
|
+ # Perform the export
|
|
|
+ parent_path = exporter(self.escn_file, self.config, node, parent_path)
|
|
|
|
|
|
- if (parent_path=="."):
|
|
|
- parent_path = node_name
|
|
|
- else:
|
|
|
- parent_path = parent_path+"/"+node_name
|
|
|
-
|
|
|
- for x in node.children:
|
|
|
- self.export_node(x, parent_path)
|
|
|
+ for child in node.children:
|
|
|
+ self.export_node(child, parent_path)
|
|
|
|
|
|
bpy.context.scene.objects.active = prev_node
|
|
|
|
|
|
- def is_node_valid(self, node):
|
|
|
- if (node.type not in self.config["object_types"]):
|
|
|
+ def should_export_node(self, node):
|
|
|
+ """Checks if a node should be exported."""
|
|
|
+ if node.type not in self.config["object_types"]:
|
|
|
return False
|
|
|
|
|
|
- if (self.config["use_active_layers"]):
|
|
|
+ if self.config["use_active_layers"]:
|
|
|
valid = False
|
|
|
for i in range(20):
|
|
|
- if (node.layers[i] and self.scene.layers[i]):
|
|
|
+ if node.layers[i] and self.scene.layers[i]:
|
|
|
valid = True
|
|
|
break
|
|
|
- if (not valid):
|
|
|
+ if not valid:
|
|
|
return False
|
|
|
|
|
|
- if (self.config["use_export_selected"] and not node.select):
|
|
|
+ if self.config["use_export_selected"] and not node.select:
|
|
|
return False
|
|
|
|
|
|
return True
|
|
|
|
|
|
def export_scene(self):
|
|
|
+ """Decide what objects to export, and export them!"""
|
|
|
+ self.escn_file.add_node(structures.SectionHeading(
|
|
|
+ "node", type="Spatial", name=self.scene.name
|
|
|
+ ))
|
|
|
+ logging.info("Exporting scene: %s", self.scene.name)
|
|
|
|
|
|
- print("esporting scene "+str(len(self.scene.objects)))
|
|
|
+ # Decide what objects to export
|
|
|
for obj in self.scene.objects:
|
|
|
- print("OBJ: "+obj.name)
|
|
|
- if (obj in self.valid_nodes):
|
|
|
+ if obj in self.valid_nodes:
|
|
|
continue
|
|
|
- if (self.is_node_valid(obj)):
|
|
|
- n = obj
|
|
|
- while (n is not None):
|
|
|
- if (n not in self.valid_nodes):
|
|
|
- self.valid_nodes.append(n)
|
|
|
- print("VALID: "+n.name)
|
|
|
- n = n.parent
|
|
|
+ if self.should_export_node(obj):
|
|
|
+ # Ensure all parents are also going to be exported
|
|
|
+ node = obj
|
|
|
+ while node is not None:
|
|
|
+ if node not in self.valid_nodes:
|
|
|
+ self.valid_nodes.append(node)
|
|
|
+ node = node.parent
|
|
|
|
|
|
- self.writel(S_NODES,0, '\n[node name="scene" type="Spatial"]\n')
|
|
|
+ logging.info("Exporting %d objects", len(self.valid_nodes))
|
|
|
|
|
|
for obj in self.scene.objects:
|
|
|
- if (obj in self.valid_nodes and obj.parent is None):
|
|
|
- self.export_node(obj,".")
|
|
|
-
|
|
|
-
|
|
|
- """
|
|
|
- def export_animation_transform_channel(self, target, keys, matrices=True):
|
|
|
- frame_total = len(keys)
|
|
|
- anim_id = self.new_id("anim")
|
|
|
- self.writel(S_ANIM, 1, "<animation id=\"{}\">".format(anim_id))
|
|
|
- source_frames = ""
|
|
|
- source_transforms = ""
|
|
|
- source_interps = ""
|
|
|
-
|
|
|
- for k in keys:
|
|
|
- source_frames += " {}".format(k[0])
|
|
|
- if (matrices):
|
|
|
- source_transforms += " {}".format(strmtx(k[1]))
|
|
|
- else:
|
|
|
- source_transforms += " {}".format(k[1])
|
|
|
-
|
|
|
- source_interps += " LINEAR"
|
|
|
-
|
|
|
- # Time Source
|
|
|
- self.writel(S_ANIM, 2, "<source id=\"{}-input\">".format(anim_id))
|
|
|
- self.writel(
|
|
|
- S_ANIM, 3, "<float_array id=\"{}-input-array\" "
|
|
|
- "count=\"{}\">{}</float_array>".format(
|
|
|
- anim_id, frame_total, source_frames))
|
|
|
- self.writel(S_ANIM, 3, "<technique_common>")
|
|
|
- self.writel(
|
|
|
- S_ANIM, 4, "<accessor source=\"#{}-input-array\" "
|
|
|
- "count=\"{}\" stride=\"1\">".format(anim_id, frame_total))
|
|
|
- self.writel(S_ANIM, 5, "<param name=\"TIME\" type=\"float\"/>")
|
|
|
- self.writel(S_ANIM, 4, "</accessor>")
|
|
|
- self.writel(S_ANIM, 3, "</technique_common>")
|
|
|
- self.writel(S_ANIM, 2, "</source>")
|
|
|
-
|
|
|
- if (matrices):
|
|
|
- # Transform Source
|
|
|
- self.writel(
|
|
|
- S_ANIM, 2, "<source id=\"{}-transform-output\">".format(
|
|
|
- anim_id))
|
|
|
- self.writel(
|
|
|
- S_ANIM, 3, "<float_array id=\"{}-transform-output-array\" "
|
|
|
- "count=\"{}\">{}</float_array>".format(
|
|
|
- anim_id, frame_total * 16, source_transforms))
|
|
|
- self.writel(S_ANIM, 3, "<technique_common>")
|
|
|
- self.writel(
|
|
|
- S_ANIM, 4,
|
|
|
- "<accessor source=\"#{}-transform-output-array\" count=\"{}\" "
|
|
|
- "stride=\"16\">".format(anim_id, frame_total))
|
|
|
- self.writel(
|
|
|
- S_ANIM, 5, "<param name=\"TRANSFORM\" type=\"float4x4\"/>")
|
|
|
- self.writel(S_ANIM, 4, "</accessor>")
|
|
|
- self.writel(S_ANIM, 3, "</technique_common>")
|
|
|
- self.writel(S_ANIM, 2, "</source>")
|
|
|
- else:
|
|
|
- # Value Source
|
|
|
- self.writel(
|
|
|
- S_ANIM, 2,
|
|
|
- "<source id=\"{}-transform-output\">".format(anim_id))
|
|
|
- self.writel(
|
|
|
- S_ANIM, 3, "<float_array id=\"{}-transform-output-array\" "
|
|
|
- "count=\"{}\">{}</float_array>".format(
|
|
|
- anim_id, frame_total, source_transforms))
|
|
|
- self.writel(S_ANIM, 3, "<technique_common>")
|
|
|
- self.writel(
|
|
|
- S_ANIM, 4, "<accessor source=\"#{}-transform-output-array\" "
|
|
|
- "count=\"{}\" stride=\"1\">".format(anim_id, frame_total))
|
|
|
- self.writel(S_ANIM, 5, "<param name=\"X\" type=\"float\"/>")
|
|
|
- self.writel(S_ANIM, 4, "</accessor>")
|
|
|
- self.writel(S_ANIM, 3, "</technique_common>")
|
|
|
- self.writel(S_ANIM, 2, "</source>")
|
|
|
-
|
|
|
- # Interpolation Source
|
|
|
- self.writel(
|
|
|
- S_ANIM, 2, "<source id=\"{}-interpolation-output\">".format(
|
|
|
- anim_id))
|
|
|
- self.writel(
|
|
|
- S_ANIM, 3, "<Name_array id=\"{}-interpolation-output-array\" "
|
|
|
- "count=\"{}\">{}</Name_array>".format(
|
|
|
- anim_id, frame_total, source_interps))
|
|
|
- self.writel(S_ANIM, 3, "<technique_common>")
|
|
|
- self.writel(
|
|
|
- S_ANIM, 4, "<accessor source=\"#{}-interpolation-output-array\" "
|
|
|
- "count=\"{}\" stride=\"1\">".format(anim_id, frame_total))
|
|
|
- self.writel(S_ANIM, 5, "<param name=\"INTERPOLATION\" type=\"Name\"/>")
|
|
|
- self.writel(S_ANIM, 4, "</accessor>")
|
|
|
- self.writel(S_ANIM, 3, "</technique_common>")
|
|
|
- self.writel(S_ANIM, 2, "</source>")
|
|
|
-
|
|
|
- self.writel(S_ANIM, 2, "<sampler id=\"{}-sampler\">".format(anim_id))
|
|
|
- self.writel(
|
|
|
- S_ANIM, 3,
|
|
|
- "<input semantic=\"INPUT\" source=\"#{}-input\"/>".format(anim_id))
|
|
|
- self.writel(
|
|
|
- S_ANIM, 3, "<input semantic=\"OUTPUT\" "
|
|
|
- "source=\"#{}-transform-output\"/>".format(anim_id))
|
|
|
- self.writel(
|
|
|
- S_ANIM, 3, "<input semantic=\"INTERPOLATION\" "
|
|
|
- "source=\"#{}-interpolation-output\"/>".format(anim_id))
|
|
|
- self.writel(S_ANIM, 2, "</sampler>")
|
|
|
- if (matrices):
|
|
|
- self.writel(
|
|
|
- S_ANIM, 2, "<channel source=\"#{}-sampler\" "
|
|
|
- "target=\"{}/transform\"/>".format(anim_id, target))
|
|
|
- else:
|
|
|
- self.writel(
|
|
|
- S_ANIM, 2, "<channel source=\"#{}-sampler\" "
|
|
|
- "target=\"{}\"/>".format(anim_id, target))
|
|
|
- self.writel(S_ANIM, 1, "</animation>")
|
|
|
-
|
|
|
- return [anim_id]
|
|
|
-
|
|
|
- def export_animation(self, start, end, allowed=None):
|
|
|
- # TODO: Blender -> Collada frames needs a little work
|
|
|
- # Collada starts from 0, blender usually from 1.
|
|
|
- # The last frame must be included also
|
|
|
-
|
|
|
- frame_orig = self.scene.frame_current
|
|
|
+ if obj in self.valid_nodes and obj.parent is None:
|
|
|
+ self.export_node(obj, ".")
|
|
|
|
|
|
- frame_len = 1.0 / self.scene.render.fps
|
|
|
- frame_sub = 0
|
|
|
- if (start > 0):
|
|
|
- frame_sub = start * frame_len
|
|
|
-
|
|
|
- tcn = []
|
|
|
- xform_cache = {}
|
|
|
- blend_cache = {}
|
|
|
-
|
|
|
- # Change frames first, export objects last, boosts performance
|
|
|
- for t in range(start, end + 1):
|
|
|
- self.scene.frame_set(t)
|
|
|
- key = t * frame_len - frame_sub
|
|
|
-
|
|
|
- for node in self.scene.objects:
|
|
|
- if (node not in self.valid_nodes):
|
|
|
- continue
|
|
|
- if (allowed is not None and not (node in allowed)):
|
|
|
- if (node.type == "MESH" and node.data is not None and
|
|
|
- (node in self.armature_for_morph) and (
|
|
|
- self.armature_for_morph[node] in allowed)):
|
|
|
- pass
|
|
|
- else:
|
|
|
- continue
|
|
|
- if (node.type == "MESH" and node.data is not None and
|
|
|
- node.data.shape_keys is not None and (
|
|
|
- node.data in self.mesh_cache) and len(
|
|
|
- node.data.shape_keys.key_blocks)):
|
|
|
- target = self.mesh_cache[node.data]["morph_id"]
|
|
|
- for i in range(len(node.data.shape_keys.key_blocks)):
|
|
|
-
|
|
|
- if (i == 0):
|
|
|
- continue
|
|
|
-
|
|
|
- name = "{}-morph-weights({})".format(target, i - 1)
|
|
|
- if (not (name in blend_cache)):
|
|
|
- blend_cache[name] = []
|
|
|
-
|
|
|
- blend_cache[name].append(
|
|
|
- (key, node.data.shape_keys.key_blocks[i].value))
|
|
|
-
|
|
|
- if (node.type == "MESH" and node.parent and
|
|
|
- node.parent.type == "ARMATURE"):
|
|
|
- # In Collada, nodes that have skin modifier must not export
|
|
|
- # animation, animate the skin instead
|
|
|
- continue
|
|
|
-
|
|
|
- if (len(node.constraints) > 0 or
|
|
|
- node.animation_data is not None):
|
|
|
- # If the node has constraints, or animation data, then
|
|
|
- # export a sampled animation track
|
|
|
- name = self.validate_id(node.name)
|
|
|
- if (not (name in xform_cache)):
|
|
|
- xform_cache[name] = []
|
|
|
-
|
|
|
- mtx = node.matrix_world.copy()
|
|
|
- if (node.parent):
|
|
|
- mtx = node.parent.matrix_world.inverted_safe() * mtx
|
|
|
-
|
|
|
- xform_cache[name].append((key, mtx))
|
|
|
-
|
|
|
- if (node.type == "ARMATURE"):
|
|
|
- # All bones exported for now
|
|
|
- for bone in node.data.bones:
|
|
|
- if((bone.name.startswith("ctrl") and
|
|
|
- self.config["use_exclude_ctrl_bones"])):
|
|
|
- continue
|
|
|
-
|
|
|
- bone_name = self.skeleton_info[node]["bone_ids"][bone]
|
|
|
-
|
|
|
- if (not (bone_name in xform_cache)):
|
|
|
- xform_cache[bone_name] = []
|
|
|
-
|
|
|
- posebone = node.pose.bones[bone.name]
|
|
|
- parent_posebone = None
|
|
|
-
|
|
|
- mtx = posebone.matrix.copy()
|
|
|
- if (bone.parent):
|
|
|
- if (self.config["use_exclude_ctrl_bones"]):
|
|
|
- current_parent_posebone = bone.parent
|
|
|
- while (current_parent_posebone.name
|
|
|
- .startswith("ctrl") and
|
|
|
- current_parent_posebone.parent):
|
|
|
- current_parent_posebone = (
|
|
|
- current_parent_posebone.parent)
|
|
|
- parent_posebone = node.pose.bones[
|
|
|
- current_parent_posebone.name]
|
|
|
- else:
|
|
|
- parent_posebone = node.pose.bones[
|
|
|
- bone.parent.name]
|
|
|
- parent_invisible = False
|
|
|
-
|
|
|
- for i in range(3):
|
|
|
- if (parent_posebone.scale[i] == 0.0):
|
|
|
- parent_invisible = True
|
|
|
-
|
|
|
- if (not parent_invisible):
|
|
|
- mtx = (
|
|
|
- parent_posebone.matrix
|
|
|
- .inverted_safe() * mtx)
|
|
|
-
|
|
|
- xform_cache[bone_name].append((key, mtx))
|
|
|
-
|
|
|
- self.scene.frame_set(frame_orig)
|
|
|
-
|
|
|
- # Export animation XML
|
|
|
- for nid in xform_cache:
|
|
|
- tcn += self.export_animation_transform_channel(
|
|
|
- nid, xform_cache[nid], True)
|
|
|
- for nid in blend_cache:
|
|
|
- tcn += self.export_animation_transform_channel(
|
|
|
- nid, blend_cache[nid], False)
|
|
|
-
|
|
|
- return tcn
|
|
|
-
|
|
|
- def export_animations(self):
|
|
|
- tmp_mat = []
|
|
|
- for s in self.skeletons:
|
|
|
- tmp_bone_mat = []
|
|
|
- for bone in s.pose.bones:
|
|
|
- tmp_bone_mat.append(Matrix(bone.matrix_basis))
|
|
|
- bone.matrix_basis = Matrix()
|
|
|
- tmp_mat.append([Matrix(s.matrix_local), tmp_bone_mat])
|
|
|
-
|
|
|
- self.writel(S_ANIM, 0, "<library_animations>")
|
|
|
-
|
|
|
- if (self.config["use_anim_action_all"] and len(self.skeletons)):
|
|
|
-
|
|
|
- cached_actions = {}
|
|
|
-
|
|
|
- for s in self.skeletons:
|
|
|
- if s.animation_data and s.animation_data.action:
|
|
|
- cached_actions[s] = s.animation_data.action.name
|
|
|
-
|
|
|
- self.writel(S_ANIM_CLIPS, 0, "<library_animation_clips>")
|
|
|
-
|
|
|
- for x in bpy.data.actions[:]:
|
|
|
- if x.users == 0 or x in self.action_constraints:
|
|
|
- continue
|
|
|
- if (self.config["use_anim_skip_noexp"] and
|
|
|
- x.name.endswith("-noexp")):
|
|
|
- continue
|
|
|
-
|
|
|
- bones = []
|
|
|
- # Find bones used
|
|
|
- for p in x.fcurves:
|
|
|
- dp = p.data_path
|
|
|
- base = "pose.bones[\""
|
|
|
- if dp.startswith(base):
|
|
|
- dp = dp[len(base):]
|
|
|
- if (dp.find("\"") != -1):
|
|
|
- dp = dp[:dp.find("\"")]
|
|
|
- if (dp not in bones):
|
|
|
- bones.append(dp)
|
|
|
-
|
|
|
- allowed_skeletons = []
|
|
|
- for i, y in enumerate(self.skeletons):
|
|
|
- if (y.animation_data):
|
|
|
- for z in y.pose.bones:
|
|
|
- if (z.bone.name in bones):
|
|
|
- if (y not in allowed_skeletons):
|
|
|
- allowed_skeletons.append(y)
|
|
|
- y.animation_data.action = x
|
|
|
-
|
|
|
- y.matrix_local = tmp_mat[i][0]
|
|
|
- for j, bone in enumerate(s.pose.bones):
|
|
|
- bone.matrix_basis = Matrix()
|
|
|
-
|
|
|
- tcn = self.export_animation(int(x.frame_range[0]), int(
|
|
|
- x.frame_range[1] + 0.5), allowed_skeletons)
|
|
|
- framelen = (1.0 / self.scene.render.fps)
|
|
|
- start = x.frame_range[0] * framelen
|
|
|
- end = x.frame_range[1] * framelen
|
|
|
- self.writel(
|
|
|
- S_ANIM_CLIPS, 1, "<animation_clip name=\"{}\" "
|
|
|
- "start=\"{}\" end=\"{}\">".format(x.name, start, end))
|
|
|
- for z in tcn:
|
|
|
- self.writel(S_ANIM_CLIPS, 2,
|
|
|
- "<instance_animation url=\"#{}\"/>".format(z))
|
|
|
- self.writel(S_ANIM_CLIPS, 1, "</animation_clip>")
|
|
|
- if (len(tcn) == 0):
|
|
|
- self.operator.report(
|
|
|
- {"WARNING"}, "Animation clip \"{}\" contains no "
|
|
|
- "tracks.".format(x.name))
|
|
|
-
|
|
|
- self.writel(S_ANIM_CLIPS, 0, "</library_animation_clips>")
|
|
|
-
|
|
|
- for i, s in enumerate(self.skeletons):
|
|
|
- if (s.animation_data is None):
|
|
|
- continue
|
|
|
- if s in cached_actions:
|
|
|
- s.animation_data.action = bpy.data.actions[
|
|
|
- cached_actions[s]]
|
|
|
- else:
|
|
|
- s.animation_data.action = None
|
|
|
- for j, bone in enumerate(s.pose.bones):
|
|
|
- bone.matrix_basis = tmp_mat[i][1][j]
|
|
|
-
|
|
|
- else:
|
|
|
- self.export_animation(self.scene.frame_start, self.scene.frame_end)
|
|
|
-
|
|
|
- self.writel(S_ANIM, 0, "</library_animations>")
|
|
|
- """
|
|
|
def export(self):
|
|
|
+ """Begin the export"""
|
|
|
+ self.escn_file = structures.ESCNFile(
|
|
|
+ structures.SectionHeading("gd_scene", load_steps=1, format=2)
|
|
|
+ )
|
|
|
|
|
|
self.export_scene()
|
|
|
- self.purge_empty_nodes()
|
|
|
-
|
|
|
- #if (self.config["use_anim"]):
|
|
|
- # self.export_animations()
|
|
|
-
|
|
|
- try:
|
|
|
- f = open(self.path, "wb")
|
|
|
- except:
|
|
|
- return False
|
|
|
-
|
|
|
-
|
|
|
- f.write(bytes("[gd_scene load_steps=1 format=2]\n\n", "UTF-8")) # TOODO count nodes and resources written for proper steps, though this is kinda useless on import anyway
|
|
|
-
|
|
|
- if (S_EXTERNAL_RES in self.sections):
|
|
|
- for l in self.sections[S_EXTERNAL_RES]:
|
|
|
- f.write(bytes(l + "\n", "UTF-8"))
|
|
|
-
|
|
|
- if (S_INTERNAL_RES in self.sections):
|
|
|
- for l in self.sections[S_INTERNAL_RES]:
|
|
|
- f.write(bytes(l + "\n", "UTF-8"))
|
|
|
-
|
|
|
- for l in self.sections[S_NODES]:
|
|
|
- f.write(bytes(l + "\n", "UTF-8"))
|
|
|
+ self.escn_file.fix_paths(self.config)
|
|
|
+ with open(self.path, 'w') as out_file:
|
|
|
+ out_file.write(self.escn_file.to_string())
|
|
|
|
|
|
return True
|
|
|
|
|
|
- __slots__ = ("operator", "scene", "last_res_id", "last_ext_res_id", "sections",
|
|
|
- "path", "mesh_cache", "curve_cache", "material_cache",
|
|
|
- "image_cache", "skeleton_info", "config", "valid_nodes",
|
|
|
- "armature_for_morph", "used_bones", "wrongvtx_report",
|
|
|
- "skeletons", "action_constraints", "temp_meshes")
|
|
|
-
|
|
|
def __init__(self, path, kwargs, operator):
|
|
|
+ self.path = path
|
|
|
self.operator = operator
|
|
|
self.scene = bpy.context.scene
|
|
|
- self.last_res_id = 0
|
|
|
- self.last_ext_res_id = 0
|
|
|
- self.sections = {}
|
|
|
- self.path = path
|
|
|
- self.mesh_cache = {}
|
|
|
- self.temp_meshes = set()
|
|
|
- self.curve_cache = {}
|
|
|
- self.material_cache = {}
|
|
|
- self.image_cache = {}
|
|
|
- self.skeleton_info = {}
|
|
|
self.config = kwargs
|
|
|
+ self.config["path"] = path
|
|
|
+ self.config["project_path"] = find_godot_project_dir(path)
|
|
|
self.valid_nodes = []
|
|
|
- self.armature_for_morph = {}
|
|
|
- self.used_bones = []
|
|
|
- self.wrongvtx_report = False
|
|
|
- self.skeletons = []
|
|
|
- self.action_constraints = []
|
|
|
+
|
|
|
+ self.escn_file = None
|
|
|
|
|
|
def __enter__(self):
|
|
|
return self
|
|
|
|
|
|
def __exit__(self, *exc):
|
|
|
- for mesh in self.temp_meshes:
|
|
|
- bpy.data.meshes.remove(mesh)
|
|
|
+ pass
|
|
|
|
|
|
|
|
|
-def save(operator, context, filepath="", use_selection=False, **kwargs):
|
|
|
+def save(operator, context, filepath="", **kwargs):
|
|
|
+ """Begin the export"""
|
|
|
with GodotExporter(filepath, kwargs, operator) as exp:
|
|
|
exp.export()
|
|
|
|