2
0
Эх сурвалжийг харах

Merge branch 'master' into master

Kim Kulling 6 жил өмнө
parent
commit
5a85ceaf18

+ 545 - 547
port/PyAssimp/pyassimp/core.py

@@ -1,547 +1,545 @@
-"""
-PyAssimp
-
-This is the main-module of PyAssimp.
-"""
-
-import sys
-if sys.version_info < (2,6):
-    raise 'pyassimp: need python 2.6 or newer'
-
-# xrange was renamed range in Python 3 and the original range from Python 2 was removed.
-# To keep compatibility with both Python 2 and 3, xrange is set to range for version 3.0 and up.
-if sys.version_info >= (3,0):
-    xrange = range
-
-import ctypes
-import os
-
-try: import numpy
-except: numpy = None
-
-import logging
-logger = logging.getLogger("pyassimp")
-# attach default null handler to logger so it doesn't complain
-# even if you don't attach another handler to logger
-logger.addHandler(logging.NullHandler())
-
-from . import structs
-from . import helper
-from . import postprocess
-from .errors import AssimpError
-
-class AssimpLib(object):
-    """
-    Assimp-Singleton
-    """
-    load, load_mem, export, export_blob, release, dll = helper.search_library()
-_assimp_lib = AssimpLib()
-
-def make_tuple(ai_obj, type = None):
-    res = None
-
-    #notes:
-    # ai_obj._fields_ = [ ("attr", c_type), ... ]
-    # getattr(ai_obj, e[0]).__class__ == float
-
-    if isinstance(ai_obj, structs.Matrix4x4):
-        if numpy:
-            res = numpy.array([getattr(ai_obj, e[0]) for e in ai_obj._fields_]).reshape((4,4))
-            #import pdb;pdb.set_trace()
-        else:
-            res = [getattr(ai_obj, e[0]) for e in ai_obj._fields_]
-            res = [res[i:i+4] for i in xrange(0,16,4)]
-    elif isinstance(ai_obj, structs.Matrix3x3):
-        if numpy:
-            res = numpy.array([getattr(ai_obj, e[0]) for e in ai_obj._fields_]).reshape((3,3))
-        else:
-            res = [getattr(ai_obj, e[0]) for e in ai_obj._fields_]
-            res = [res[i:i+3] for i in xrange(0,9,3)]
-    else:
-        if numpy:
-            res = numpy.array([getattr(ai_obj, e[0]) for e in ai_obj._fields_])
-        else:
-            res = [getattr(ai_obj, e[0]) for e in ai_obj._fields_]
-
-    return res
-
-# Returns unicode object for Python 2, and str object for Python 3.
-def _convert_assimp_string(assimp_string):
-    try:
-        return unicode(assimp_string.data, errors='ignore')
-    except:
-        return str(assimp_string.data, errors='ignore')
-
-# It is faster and more correct to have an init function for each assimp class
-def _init_face(aiFace):
-    aiFace.indices = [aiFace.mIndices[i] for i in range(aiFace.mNumIndices)]
-assimp_struct_inits =  { structs.Face : _init_face }
-
-def call_init(obj, caller = None):
-    if helper.hasattr_silent(obj,'contents'): #pointer
-        _init(obj.contents, obj, caller)
-    else:
-        _init(obj,parent=caller)
-
-def _is_init_type(obj):
-    if helper.hasattr_silent(obj,'contents'): #pointer
-        return _is_init_type(obj[0])
-    # null-pointer case that arises when we reach a mesh attribute
-    # like mBitangents which use mNumVertices rather than mNumBitangents
-    # so it breaks the 'is iterable' check.
-    # Basically:
-    # FIXME!
-    elif not bool(obj):
-        return False
-    tname = obj.__class__.__name__
-    return not (tname[:2] == 'c_' or tname == 'Structure' \
-            or tname == 'POINTER') and not isinstance(obj,int)
-
-def _init(self, target = None, parent = None):
-    """
-    Custom initialize() for C structs, adds safely accessible member functionality.
-
-    :param target: set the object which receive the added methods. Useful when manipulating
-    pointers, to skip the intermediate 'contents' deferencing.
-    """
-    if not target:
-        target = self
-
-    dirself = dir(self)
-    for m in dirself:
-
-        if m.startswith("_"):
-            continue
-
-        if m.startswith('mNum'):
-            if 'm' + m[4:] in dirself:
-                continue # will be processed later on
-            else:
-                name = m[1:].lower()
-
-                obj = getattr(self, m)
-                setattr(target, name, obj)
-                continue
-
-        if m == 'mName':
-            target.name = str(_convert_assimp_string(self.mName))
-            target.__class__.__repr__ = lambda x: str(x.__class__) + "(" + getattr(x, 'name','') + ")"
-            target.__class__.__str__ = lambda x: getattr(x, 'name', '')
-            continue
-
-        name = m[1:].lower()
-
-        obj = getattr(self, m)
-
-        # Create tuples
-        if isinstance(obj, structs.assimp_structs_as_tuple):
-            setattr(target, name, make_tuple(obj))
-            logger.debug(str(self) + ": Added array " + str(getattr(target, name)) +  " as self." + name.lower())
-            continue
-
-        if m.startswith('m'):
-
-            if name == "parent":
-                setattr(target, name, parent)
-                logger.debug("Added a parent as self." + name)
-                continue
-
-            if helper.hasattr_silent(self, 'mNum' + m[1:]):
-
-                length =  getattr(self, 'mNum' + m[1:])
-
-                # -> special case: properties are
-                # stored as a dict.
-                if m == 'mProperties':
-                    setattr(target, name, _get_properties(obj, length))
-                    continue
-
-
-                if not length: # empty!
-                    setattr(target, name, [])
-                    logger.debug(str(self) + ": " + name + " is an empty list.")
-                    continue
-
-
-                try:
-                    if obj._type_ in structs.assimp_structs_as_tuple:
-                        if numpy:
-                            setattr(target, name, numpy.array([make_tuple(obj[i]) for i in range(length)], dtype=numpy.float32))
-
-                            logger.debug(str(self) + ": Added an array of numpy arrays (type "+ str(type(obj)) + ") as self." + name)
-                        else:
-                            setattr(target, name, [make_tuple(obj[i]) for i in range(length)])
-
-                            logger.debug(str(self) + ": Added a list of lists (type "+ str(type(obj)) + ") as self." + name)
-
-                    else:
-                        setattr(target, name, [obj[i] for i in range(length)]) #TODO: maybe not necessary to recreate an array?
-
-                        logger.debug(str(self) + ": Added list of " + str(obj) + " " + name + " as self." + name + " (type: " + str(type(obj)) + ")")
-
-                        # initialize array elements
-                        try:
-                            init = assimp_struct_inits[type(obj[0])]
-                        except KeyError:
-                            if _is_init_type(obj[0]):
-                                for e in getattr(target, name):
-                                    call_init(e, target)
-                        else:
-                            for e in getattr(target, name):
-                                init(e)
-
-
-                except IndexError:
-                    logger.error("in " + str(self) +" : mismatch between mNum" + name + " and the actual amount of data in m" + name + ". This may be due to version mismatch between libassimp and pyassimp. Quitting now.")
-                    sys.exit(1)
-
-                except ValueError as e:
-
-                    logger.error("In " + str(self) +  "->" + name + ": " + str(e) + ". Quitting now.")
-                    if "setting an array element with a sequence" in str(e):
-                        logger.error("Note that pyassimp does not currently "
-                                     "support meshes with mixed triangles "
-                                     "and quads. Try to load your mesh with"
-                                     " a post-processing to triangulate your"
-                                     " faces.")
-                    raise e
-
-
-
-            else: # starts with 'm' but not iterable
-                setattr(target, name, obj)
-                logger.debug("Added " + name + " as self." + name + " (type: " + str(type(obj)) + ")")
-
-                if _is_init_type(obj):
-                    call_init(obj, target)
-
-    if isinstance(self, structs.Mesh):
-        _finalize_mesh(self, target)
-
-    if isinstance(self, structs.Texture):
-        _finalize_texture(self, target)
-
-    if isinstance(self, structs.Metadata):
-        _finalize_metadata(self, target)
-
-
-    return self
-
-
-def pythonize_assimp(type, obj, scene):
-    """ This method modify the Assimp data structures
-    to make them easier to work with in Python.
-
-    Supported operations:
-     - MESH: replace a list of mesh IDs by reference to these meshes
-     - ADDTRANSFORMATION: add a reference to an object's transformation taken from their associated node.
-
-    :param type: the type of modification to operate (cf above)
-    :param obj: the input object to modify
-    :param scene: a reference to the whole scene
-    """
-
-    if type == "MESH":
-        meshes = []
-        for i in obj:
-            meshes.append(scene.meshes[i])
-        return meshes
-
-    if type == "ADDTRANSFORMATION":
-        def getnode(node, name):
-            if node.name == name: return node
-            for child in node.children:
-                n = getnode(child, name)
-                if n: return n
-
-        node = getnode(scene.rootnode, obj.name)
-        if not node:
-            raise AssimpError("Object " + str(obj) + " has no associated node!")
-        setattr(obj, "transformation", node.transformation)
-
-def recur_pythonize(node, scene):
-    '''
-    Recursively call pythonize_assimp on
-    nodes tree to apply several post-processing to
-    pythonize the assimp datastructures.
-    '''
-    node.meshes = pythonize_assimp("MESH", node.meshes, scene)
-    for mesh in node.meshes:
-        mesh.material = scene.materials[mesh.materialindex]
-    for cam in scene.cameras:
-        pythonize_assimp("ADDTRANSFORMATION", cam, scene)
-    for c in node.children:
-        recur_pythonize(c, scene)
-
-def load(filename,
-         file_type  = None,
-         processing = postprocess.aiProcess_Triangulate):
-    '''
-    Load a model into a scene. On failure throws AssimpError.
-
-    Arguments
-    ---------
-    filename:   Either a filename or a file object to load model from.
-                If a file object is passed, file_type MUST be specified
-                Otherwise Assimp has no idea which importer to use.
-                This is named 'filename' so as to not break legacy code.
-    processing: assimp postprocessing parameters. Verbose keywords are imported
-                from postprocessing, and the parameters can be combined bitwise to
-                generate the final processing value. Note that the default value will
-                triangulate quad faces. Example of generating other possible values:
-                processing = (pyassimp.postprocess.aiProcess_Triangulate |
-                              pyassimp.postprocess.aiProcess_OptimizeMeshes)
-    file_type:  string of file extension, such as 'stl'
-
-    Returns
-    ---------
-    Scene object with model data
-    '''
-
-    if hasattr(filename, 'read'):
-        # This is the case where a file object has been passed to load.
-        # It is calling the following function:
-        # const aiScene* aiImportFileFromMemory(const char* pBuffer,
-        #                                      unsigned int pLength,
-        #                                      unsigned int pFlags,
-        #                                      const char* pHint)
-        if file_type == None:
-            raise AssimpError('File type must be specified when passing file objects!')
-        data  = filename.read()
-        model = _assimp_lib.load_mem(data,
-                                     len(data),
-                                     processing,
-                                     file_type)
-    else:
-        # a filename string has been passed
-        model = _assimp_lib.load(filename.encode(sys.getfilesystemencoding()), processing)
-
-    if not model:
-        raise AssimpError('Could not import file!')
-    scene = _init(model.contents)
-    recur_pythonize(scene.rootnode, scene)
-    return scene
-
-def export(scene,
-           filename,
-           file_type  = None,
-           processing = postprocess.aiProcess_Triangulate):
-    '''
-    Export a scene. On failure throws AssimpError.
-
-    Arguments
-    ---------
-    scene: scene to export.
-    filename: Filename that the scene should be exported to.
-    file_type: string of file exporter to use. For example "collada".
-    processing: assimp postprocessing parameters. Verbose keywords are imported
-                from postprocessing, and the parameters can be combined bitwise to
-                generate the final processing value. Note that the default value will
-                triangulate quad faces. Example of generating other possible values:
-                processing = (pyassimp.postprocess.aiProcess_Triangulate |
-                              pyassimp.postprocess.aiProcess_OptimizeMeshes)
-
-    '''
-
-    from ctypes import pointer
-    exportStatus = _assimp_lib.export(pointer(scene), file_type.encode("ascii"), filename.encode(sys.getfilesystemencoding()), processing)
-
-    if exportStatus != 0:
-        raise AssimpError('Could not export scene!')
-
-def export_blob(scene,
-                file_type = None,
-                processing = postprocess.aiProcess_Triangulate):
-    '''
-    Export a scene and return a blob in the correct format. On failure throws AssimpError.
-
-    Arguments
-    ---------
-    scene: scene to export.
-    file_type: string of file exporter to use. For example "collada".
-    processing: assimp postprocessing parameters. Verbose keywords are imported
-                from postprocessing, and the parameters can be combined bitwise to
-                generate the final processing value. Note that the default value will
-                triangulate quad faces. Example of generating other possible values:
-                processing = (pyassimp.postprocess.aiProcess_Triangulate |
-                              pyassimp.postprocess.aiProcess_OptimizeMeshes)
-    Returns
-    ---------
-    Pointer to structs.ExportDataBlob
-    '''
-    from ctypes import pointer
-    exportBlobPtr = _assimp_lib.export_blob(pointer(scene), file_type.encode("ascii"), processing)
-
-    if exportBlobPtr == 0:
-        raise AssimpError('Could not export scene to blob!')
-    return exportBlobPtr
-
-def release(scene):
-    from ctypes import pointer
-    _assimp_lib.release(pointer(scene))
-
-def _finalize_texture(tex, target):
-    setattr(target, "achformathint", tex.achFormatHint)
-    if numpy:
-        data = numpy.array([make_tuple(getattr(tex, "pcData")[i]) for i in range(tex.mWidth * tex.mHeight)])
-    else:
-        data = [make_tuple(getattr(tex, "pcData")[i]) for i in range(tex.mWidth * tex.mHeight)]
-    setattr(target, "data", data)
-
-def _finalize_mesh(mesh, target):
-    """ Building of meshes is a bit specific.
-
-    We override here the various datasets that can
-    not be process as regular fields.
-
-    For instance, the length of the normals array is
-    mNumVertices (no mNumNormals is available)
-    """
-    nb_vertices = getattr(mesh, "mNumVertices")
-
-    def fill(name):
-        mAttr = getattr(mesh, name)
-        if numpy:
-            if mAttr:
-                data = numpy.array([make_tuple(getattr(mesh, name)[i]) for i in range(nb_vertices)], dtype=numpy.float32)
-                setattr(target, name[1:].lower(), data)
-            else:
-                setattr(target, name[1:].lower(), numpy.array([], dtype="float32"))
-        else:
-            if mAttr:
-                data = [make_tuple(getattr(mesh, name)[i]) for i in range(nb_vertices)]
-                setattr(target, name[1:].lower(), data)
-            else:
-                setattr(target, name[1:].lower(), [])
-
-    def fillarray(name):
-        mAttr = getattr(mesh, name)
-
-        data = []
-        for index, mSubAttr in enumerate(mAttr):
-            if mSubAttr:
-                data.append([make_tuple(getattr(mesh, name)[index][i]) for i in range(nb_vertices)])
-
-        if numpy:
-            setattr(target, name[1:].lower(), numpy.array(data, dtype=numpy.float32))
-        else:
-            setattr(target, name[1:].lower(), data)
-
-    fill("mNormals")
-    fill("mTangents")
-    fill("mBitangents")
-
-    fillarray("mColors")
-    fillarray("mTextureCoords")
-
-    # prepare faces
-    if numpy:
-        faces = numpy.array([f.indices for f in target.faces], dtype=numpy.int32)
-    else:
-        faces = [f.indices for f in target.faces]
-    setattr(target, 'faces', faces)
-
-def _init_metadata_entry(entry):
-    from ctypes import POINTER, c_bool, c_int32, c_uint64, c_float, c_double, cast
-
-    entry.type = entry.mType
-    if entry.type == structs.MetadataEntry.AI_BOOL:
-        entry.data = cast(entry.mData, POINTER(c_bool)).contents.value
-    elif entry.type == structs.MetadataEntry.AI_INT32:
-        entry.data = cast(entry.mData, POINTER(c_int32)).contents.value
-    elif entry.type == structs.MetadataEntry.AI_UINT64:
-        entry.data = cast(entry.mData, POINTER(c_uint64)).contents.value
-    elif entry.type == structs.MetadataEntry.AI_FLOAT:
-        entry.data = cast(entry.mData, POINTER(c_float)).contents.value
-    elif entry.type == structs.MetadataEntry.AI_DOUBLE:
-        entry.data = cast(entry.mData, POINTER(c_double)).contents.value
-    elif entry.type == structs.MetadataEntry.AI_AISTRING:
-        assimp_string = cast(entry.mData, POINTER(structs.String)).contents
-        entry.data = _convert_assimp_string(assimp_string)
-    elif entry.type == structs.MetadataEntry.AI_AIVECTOR3D:
-        assimp_vector = cast(entry.mData, POINTER(structs.Vector3D)).contents
-        entry.data = make_tuple(assimp_vector)
-
-    return entry
-
-def _finalize_metadata(metadata, target):
-    """ Building the metadata object is a bit specific.
-
-    Firstly, there are two separate arrays: one with metadata keys and one
-    with metadata values, and there are no corresponding mNum* attributes,
-    so the C arrays are not converted to Python arrays using the generic
-    code in the _init function.
-
-    Secondly, a metadata entry value has to be cast according to declared
-    metadata entry type.
-    """
-    length = metadata.mNumProperties
-    setattr(target, 'keys', [str(_convert_assimp_string(metadata.mKeys[i])) for i in range(length)])
-    setattr(target, 'values', [_init_metadata_entry(metadata.mValues[i]) for i in range(length)])
-
-class PropertyGetter(dict):
-    def __getitem__(self, key):
-        semantic = 0
-        if isinstance(key, tuple):
-            key, semantic = key
-
-        return dict.__getitem__(self, (key, semantic))
-
-    def keys(self):
-        for k in dict.keys(self):
-            yield k[0]
-
-    def __iter__(self):
-        return self.keys()
-
-    def items(self):
-        for k, v in dict.items(self):
-            yield k[0], v
-
-
-def _get_properties(properties, length):
-    """
-    Convenience Function to get the material properties as a dict
-    and values in a python format.
-    """
-    result = {}
-    #read all properties
-    for p in [properties[i] for i in range(length)]:
-        #the name
-        p = p.contents
-        key = str(_convert_assimp_string(p.mKey))
-        key = (key.split('.')[1], p.mSemantic)
-
-        #the data
-        from ctypes import POINTER, cast, c_int, c_float, sizeof
-        if p.mType == 1:
-            arr = cast(p.mData, POINTER(c_float * int(p.mDataLength/sizeof(c_float)) )).contents
-            value = [x for x in arr]
-        elif p.mType == 3: #string can't be an array
-            value = _convert_assimp_string(cast(p.mData, POINTER(structs.MaterialPropertyString)).contents)
-
-        elif p.mType == 4:
-            arr = cast(p.mData, POINTER(c_int * int(p.mDataLength/sizeof(c_int)) )).contents
-            value = [x for x in arr]
-        else:
-            value = p.mData[:p.mDataLength]
-
-        if len(value) == 1:
-            [value] = value
-
-        result[key] = value
-
-    return PropertyGetter(result)
-
-def decompose_matrix(matrix):
-    if not isinstance(matrix, structs.Matrix4x4):
-        raise AssimpError("pyassimp.decompose_matrix failed: Not a Matrix4x4!")
-
-    scaling = structs.Vector3D()
-    rotation = structs.Quaternion()
-    position = structs.Vector3D()
-
-    from ctypes import byref, pointer
-    _assimp_lib.dll.aiDecomposeMatrix(pointer(matrix), byref(scaling), byref(rotation), byref(position))
-    return scaling._init(), rotation._init(), position._init()
-    
+"""
+PyAssimp
+
+This is the main-module of PyAssimp.
+"""
+
import sys
if sys.version_info < (2,6):
    raise RuntimeError('pyassimp: need python 2.6 or newer')

# xrange was renamed range in Python 3 and the original range from Python 2 was removed.
# To keep compatibility with both Python 2 and 3, xrange is set to range for version 3.0 and up.
if sys.version_info >= (3,0):
    xrange = range


# numpy is an optional dependency: when it is missing, pyassimp falls back
# to plain Python lists (see make_tuple and the _finalize_* helpers).
try: import numpy
except ImportError: numpy = None
import logging
import ctypes
logger = logging.getLogger("pyassimp")
# attach default null handler to logger so it doesn't complain
# even if you don't attach another handler to logger
logger.addHandler(logging.NullHandler())
+
+from . import structs
+from . import helper
+from . import postprocess
+from .errors import AssimpError
+
class AssimpLib(object):
    """
    Assimp-Singleton

    Resolves the shared assimp library once at import time via
    helper.search_library(); the bound entry points (load, load_mem,
    export, export_blob, release) and the raw dll handle are exposed
    as class attributes.
    """
    load, load_mem, export, export_blob, release, dll = helper.search_library()
# module-level singleton used by every API function below
_assimp_lib = AssimpLib()
+
def make_tuple(ai_obj, type = None):
    """Convert an assimp ctypes struct into a plain array of its fields.

    Returns a numpy array when numpy is available, otherwise nested
    Python lists.  Matrix4x4/Matrix3x3 come back shaped (4,4)/(3,3);
    every other struct becomes a flat sequence of its field values.
    """
    # Field values in declaration order:
    # ai_obj._fields_ = [ ("attr", c_type), ... ]
    values = [getattr(ai_obj, field[0]) for field in ai_obj._fields_]

    if isinstance(ai_obj, structs.Matrix4x4):
        if numpy:
            return numpy.array(values).reshape((4,4))
        # 16 scalars -> four rows of four
        return [values[i:i+4] for i in xrange(0,16,4)]

    if isinstance(ai_obj, structs.Matrix3x3):
        if numpy:
            return numpy.array(values).reshape((3,3))
        # 9 scalars -> three rows of three
        return [values[i:i+3] for i in xrange(0,9,3)]

    if numpy:
        return numpy.array(values)
    return values
+
+# Returns unicode object for Python 2, and str object for Python 3.
+def _convert_assimp_string(assimp_string):
+    if sys.version_info >= (3, 0):
+        return str(assimp_string.data, errors='ignore')
+    else:
+        return unicode(assimp_string.data, errors='ignore')
+
+# It is faster and more correct to have an init function for each assimp class
+def _init_face(aiFace):
+    aiFace.indices = [aiFace.mIndices[i] for i in range(aiFace.mNumIndices)]
# Registry mapping assimp struct types to their dedicated init function.
assimp_struct_inits =  { structs.Face : _init_face }
+
def call_init(obj, caller = None):
    """Run _init on obj, dereferencing it first when it is a ctypes pointer.

    :param caller: the object that owns obj; recorded as its parent.
    """
    if not helper.hasattr_silent(obj, 'contents'):
        _init(obj, parent=caller)
    else:
        # pointer: initialize the pointee, remembering the pointer itself
        _init(obj.contents, obj, caller)
+
def _is_init_type(obj):
    """Return True when obj is a struct that should be walked by _init
    (i.e. not a raw ctypes scalar/pointer type and not an int)."""
    if helper.hasattr_silent(obj, 'contents'):  # pointer: examine the pointee
        return _is_init_type(obj[0])
    # null-pointer case that arises when we reach a mesh attribute
    # like mBitangents which use mNumVertices rather than mNumBitangents
    # so it breaks the 'is iterable' check.
    # Basically:
    # FIXME!
    if not bool(obj):
        return False
    tname = obj.__class__.__name__
    if tname[:2] == 'c_' or tname in ('Structure', 'POINTER'):
        return False
    return not isinstance(obj, int)
+
def _init(self, target = None, parent = None):
    """
    Custom initialize() for C structs, adds safely accessible member functionality.

    Walks every public ctypes field of `self` (names starting with 'm')
    and mirrors it on `target` under a lowercased name without the 'm'
    prefix, converting arrays/structs to Python containers along the way.

    :param target: set the object which receive the added methods. Useful when manipulating
    pointers, to skip the intermediate 'contents' deferencing.
    :param parent: the object owning `self`; stored as target.parent.
    :returns: `self`, after mirroring its fields onto `target`.
    """
    if not target:
        target = self

    dirself = dir(self)
    for m in dirself:

        # skip private/dunder attributes
        if m.startswith("_"):
            continue

        # mNumX counters: only mirrored directly when there is no matching
        # mX array field (otherwise the array pass below consumes them).
        if m.startswith('mNum'):
            if 'm' + m[4:] in dirself:
                continue # will be processed later on
            else:
                name = m[1:].lower()

                obj = getattr(self, m)
                setattr(target, name, obj)
                continue

        # aiString name: decode it and give the class a readable repr/str.
        if m == 'mName':
            target.name = str(_convert_assimp_string(self.mName))
            target.__class__.__repr__ = lambda x: str(x.__class__) + "(" + getattr(x, 'name','') + ")"
            target.__class__.__str__ = lambda x: getattr(x, 'name', '')
            continue

        name = m[1:].lower()

        obj = getattr(self, m)

        # Create tuples
        if isinstance(obj, structs.assimp_structs_as_tuple):
            setattr(target, name, make_tuple(obj))
            logger.debug(str(self) + ": Added array " + str(getattr(target, name)) +  " as self." + name.lower())
            continue

        if m.startswith('m'):

            if name == "parent":
                setattr(target, name, parent)
                logger.debug("Added a parent as self." + name)
                continue

            # a matching mNumX counter means mX is a C array of length mNumX
            if helper.hasattr_silent(self, 'mNum' + m[1:]):

                length =  getattr(self, 'mNum' + m[1:])

                # -> special case: properties are
                # stored as a dict.
                if m == 'mProperties':
                    setattr(target, name, _get_properties(obj, length))
                    continue


                if not length: # empty!
                    setattr(target, name, [])
                    logger.debug(str(self) + ": " + name + " is an empty list.")
                    continue


                try:
                    if obj._type_ in structs.assimp_structs_as_tuple:
                        if numpy:
                            setattr(target, name, numpy.array([make_tuple(obj[i]) for i in range(length)], dtype=numpy.float32))

                            logger.debug(str(self) + ": Added an array of numpy arrays (type "+ str(type(obj)) + ") as self." + name)
                        else:
                            setattr(target, name, [make_tuple(obj[i]) for i in range(length)])

                            logger.debug(str(self) + ": Added a list of lists (type "+ str(type(obj)) + ") as self." + name)

                    else:
                        setattr(target, name, [obj[i] for i in range(length)]) #TODO: maybe not necessary to recreate an array?

                        logger.debug(str(self) + ": Added list of " + str(obj) + " " + name + " as self." + name + " (type: " + str(type(obj)) + ")")

                        # initialize array elements
                        try:
                            init = assimp_struct_inits[type(obj[0])]
                        except KeyError:
                            if _is_init_type(obj[0]):
                                for e in getattr(target, name):
                                    call_init(e, target)
                        else:
                            for e in getattr(target, name):
                                init(e)


                except IndexError:
                    logger.error("in " + str(self) +" : mismatch between mNum" + name + " and the actual amount of data in m" + name + ". This may be due to version mismatch between libassimp and pyassimp. Quitting now.")
                    sys.exit(1)

                except ValueError as e:

                    logger.error("In " + str(self) +  "->" + name + ": " + str(e) + ". Quitting now.")
                    if "setting an array element with a sequence" in str(e):
                        logger.error("Note that pyassimp does not currently "
                                     "support meshes with mixed triangles "
                                     "and quads. Try to load your mesh with"
                                     " a post-processing to triangulate your"
                                     " faces.")
                    raise e



            else: # starts with 'm' but not iterable
                setattr(target, name, obj)
                logger.debug("Added " + name + " as self." + name + " (type: " + str(type(obj)) + ")")

                if _is_init_type(obj):
                    call_init(obj, target)

    # post-passes for structs whose fields don't follow the mNumX convention
    if isinstance(self, structs.Mesh):
        _finalize_mesh(self, target)

    if isinstance(self, structs.Texture):
        _finalize_texture(self, target)

    if isinstance(self, structs.Metadata):
        _finalize_metadata(self, target)


    return self
+
+
def pythonize_assimp(type, obj, scene):
    """Post-process raw assimp data into friendlier Python structures.

    Supported operations:
     - "MESH": turn a list of mesh indices into the mesh objects themselves
     - "ADDTRANSFORMATION": attach the transformation of the object's
       associated scene node onto the object

    :param type: the operation to perform (see above)
    :param obj: the input object to modify
    :param scene: a reference to the whole scene
    """

    if type == "MESH":
        # indices -> actual mesh objects
        return [scene.meshes[i] for i in obj]

    if type == "ADDTRANSFORMATION":
        def getnode(node, name):
            # depth-first search of the node tree by name
            if node.name == name:
                return node
            for child in node.children:
                found = getnode(child, name)
                if found:
                    return found

        node = getnode(scene.rootnode, obj.name)
        if not node:
            raise AssimpError("Object " + str(obj) + " has no associated node!")
        setattr(obj, "transformation", node.transformation)
+
def recur_pythonize(node, scene):
    """Walk the node tree depth-first, applying pythonize_assimp
    post-processing to every node's data."""
    # replace mesh indices with mesh objects and link their materials
    node.meshes = pythonize_assimp("MESH", node.meshes, scene)
    for mesh in node.meshes:
        mesh.material = scene.materials[mesh.materialindex]
    # give every camera its node transformation
    for camera in scene.cameras:
        pythonize_assimp("ADDTRANSFORMATION", camera, scene)
    # recurse into the children
    for child in node.children:
        recur_pythonize(child, scene)
+
def load(filename,
         file_type  = None,
         processing = postprocess.aiProcess_Triangulate):
    '''
    Load a model into a scene. On failure throws AssimpError.

    Arguments
    ---------
    filename:   Either a filename or a file object to load model from.
                If a file object is passed, file_type MUST be specified
                Otherwise Assimp has no idea which importer to use.
                This is named 'filename' so as to not break legacy code.
    processing: assimp postprocessing parameters. Verbose keywords are imported
                from postprocessing, and the parameters can be combined bitwise to
                generate the final processing value. Note that the default value will
                triangulate quad faces. Example of generating other possible values:
                processing = (pyassimp.postprocess.aiProcess_Triangulate |
                              pyassimp.postprocess.aiProcess_OptimizeMeshes)
    file_type:  string of file extension, such as 'stl'

    Returns
    ---------
    Scene object with model data
    '''

    if hasattr(filename, 'read'):
        # file-like object: route through aiImportFileFromMemory, which
        # needs an explicit format hint since there is no extension.
        if file_type is None:
            raise AssimpError('File type must be specified when passing file objects!')
        data = filename.read()
        model = _assimp_lib.load_mem(data, len(data), processing, file_type)
    else:
        # plain path string: let assimp open the file itself
        model = _assimp_lib.load(filename.encode(sys.getfilesystemencoding()), processing)

    if not model:
        raise AssimpError('Could not import file!')

    # mirror the C structs into Python objects, then post-process the tree
    scene = _init(model.contents)
    recur_pythonize(scene.rootnode, scene)
    return scene
+
+def export(scene,
+           filename,
+           file_type  = None,
+           processing = postprocess.aiProcess_Triangulate):
+    '''
+    Export a scene. On failure throws AssimpError.
+
+    Arguments
+    ---------
+    scene: scene to export.
+    filename: Filename that the scene should be exported to.
+    file_type: string of file exporter to use. For example "collada".
+    processing: assimp postprocessing parameters. Verbose keywords are imported
+                from postprocessing, and the parameters can be combined bitwise to
+                generate the final processing value. Note that the default value will
+                triangulate quad faces. Example of generating other possible values:
+                processing = (pyassimp.postprocess.aiProcess_Triangulate |
+                              pyassimp.postprocess.aiProcess_OptimizeMeshes)
+
+    '''
+
+    exportStatus = _assimp_lib.export(ctypes.pointer(scene), file_type.encode("ascii"), filename.encode(sys.getfilesystemencoding()), processing)
+
+    if exportStatus != 0:
+        raise AssimpError('Could not export scene!')
+
+def export_blob(scene,
+                file_type = None,
+                processing = postprocess.aiProcess_Triangulate):
+    '''
+    Export a scene and return a blob in the correct format. On failure throws AssimpError.
+
+    Arguments
+    ---------
+    scene: scene to export.
+    file_type: string of file exporter to use. For example "collada".
+    processing: assimp postprocessing parameters. Verbose keywords are imported
+                from postprocessing, and the parameters can be combined bitwise to
+                generate the final processing value. Note that the default value will
+                triangulate quad faces. Example of generating other possible values:
+                processing = (pyassimp.postprocess.aiProcess_Triangulate |
+                              pyassimp.postprocess.aiProcess_OptimizeMeshes)
+    Returns
+    ---------
+    Pointer to structs.ExportDataBlob
+    '''
+    exportBlobPtr = _assimp_lib.export_blob(ctypes.pointer(scene), file_type.encode("ascii"), processing)
+
+    if exportBlobPtr == 0:
+        raise AssimpError('Could not export scene to blob!')
+    return exportBlobPtr
+
+def release(scene):
+    _assimp_lib.release(ctypes.pointer(scene))
+
+def _finalize_texture(tex, target):
+    setattr(target, "achformathint", tex.achFormatHint)
+    if numpy:
+        data = numpy.array([make_tuple(getattr(tex, "pcData")[i]) for i in range(tex.mWidth * tex.mHeight)])
+    else:
+        data = [make_tuple(getattr(tex, "pcData")[i]) for i in range(tex.mWidth * tex.mHeight)]
+    setattr(target, "data", data)
+
+def _finalize_mesh(mesh, target):
+    """ Building of meshes is a bit specific.
+
+    We override here the various datasets that can
+    not be process as regular fields.
+
+    For instance, the length of the normals array is
+    mNumVertices (no mNumNormals is available)
+    """
+    nb_vertices = getattr(mesh, "mNumVertices")
+
+    def fill(name):
+        mAttr = getattr(mesh, name)
+        if numpy:
+            if mAttr:
+                data = numpy.array([make_tuple(getattr(mesh, name)[i]) for i in range(nb_vertices)], dtype=numpy.float32)
+                setattr(target, name[1:].lower(), data)
+            else:
+                setattr(target, name[1:].lower(), numpy.array([], dtype="float32"))
+        else:
+            if mAttr:
+                data = [make_tuple(getattr(mesh, name)[i]) for i in range(nb_vertices)]
+                setattr(target, name[1:].lower(), data)
+            else:
+                setattr(target, name[1:].lower(), [])
+
+    def fillarray(name):
+        mAttr = getattr(mesh, name)
+
+        data = []
+        for index, mSubAttr in enumerate(mAttr):
+            if mSubAttr:
+                data.append([make_tuple(getattr(mesh, name)[index][i]) for i in range(nb_vertices)])
+
+        if numpy:
+            setattr(target, name[1:].lower(), numpy.array(data, dtype=numpy.float32))
+        else:
+            setattr(target, name[1:].lower(), data)
+
+    fill("mNormals")
+    fill("mTangents")
+    fill("mBitangents")
+
+    fillarray("mColors")
+    fillarray("mTextureCoords")
+
+    # prepare faces
+    if numpy:
+        faces = numpy.array([f.indices for f in target.faces], dtype=numpy.int32)
+    else:
+        faces = [f.indices for f in target.faces]
+    setattr(target, 'faces', faces)
+
+def _init_metadata_entry(entry):
+    entry.type = entry.mType
+    if entry.type == structs.MetadataEntry.AI_BOOL:
+        entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_bool)).contents.value
+    elif entry.type == structs.MetadataEntry.AI_INT32:
+        entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_int32)).contents.value
+    elif entry.type == structs.MetadataEntry.AI_UINT64:
+        entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_uint64)).contents.value
+    elif entry.type == structs.MetadataEntry.AI_FLOAT:
+        entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_float)).contents.value
+    elif entry.type == structs.MetadataEntry.AI_DOUBLE:
+        entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_double)).contents.value
+    elif entry.type == structs.MetadataEntry.AI_AISTRING:
+        assimp_string = ctypes.cast(entry.mData, ctypes.POINTER(structs.String)).contents
+        entry.data = _convert_assimp_string(assimp_string)
+    elif entry.type == structs.MetadataEntry.AI_AIVECTOR3D:
+        assimp_vector = ctypes.cast(entry.mData, ctypes.POINTER(structs.Vector3D)).contents
+        entry.data = make_tuple(assimp_vector)
+
+    return entry
+
+def _finalize_metadata(metadata, target):
+    """ Building the metadata object is a bit specific.
+
+    Firstly, there are two separate arrays: one with metadata keys and one
+    with metadata values, and there are no corresponding mNum* attributes,
+    so the C arrays are not converted to Python arrays using the generic
+    code in the _init function.
+
+    Secondly, a metadata entry value has to be cast according to declared
+    metadata entry type.
+    """
+    length = metadata.mNumProperties
+    setattr(target, 'keys', [str(_convert_assimp_string(metadata.mKeys[i])) for i in range(length)])
+    setattr(target, 'values', [_init_metadata_entry(metadata.mValues[i]) for i in range(length)])
+
+class PropertyGetter(dict):
+    def __getitem__(self, key):
+        semantic = 0
+        if isinstance(key, tuple):
+            key, semantic = key
+
+        return dict.__getitem__(self, (key, semantic))
+
+    def keys(self):
+        for k in dict.keys(self):
+            yield k[0]
+
+    def __iter__(self):
+        return self.keys()
+
+    def items(self):
+        for k, v in dict.items(self):
+            yield k[0], v
+
+
+def _get_properties(properties, length):
+    """
+    Convenience Function to get the material properties as a dict
+    and values in a python format.
+    """
+    result = {}
+    #read all properties
+    for p in [properties[i] for i in range(length)]:
+        #the name
+        p = p.contents
+        key = str(_convert_assimp_string(p.mKey))
+        key = (key.split('.')[1], p.mSemantic)
+
+        #the data
+        if p.mType == 1:
+            arr = ctypes.cast(p.mData,
+                              ctypes.POINTER(ctypes.c_float * int(p.mDataLength/ctypes.sizeof(ctypes.c_float)))
+                              ).contents
+            value = [x for x in arr]
+        elif p.mType == 3: #string can't be an array
+            value = _convert_assimp_string(ctypes.cast(p.mData, ctypes.POINTER(structs.MaterialPropertyString)).contents)
+
+        elif p.mType == 4:
+            arr = ctypes.cast(p.mData,
+                              ctypes.POINTER(ctypes.c_int * int(p.mDataLength/ctypes.sizeof(ctypes.c_int)))
+                              ).contents
+            value = [x for x in arr]
+        else:
+            value = p.mData[:p.mDataLength]
+
+        if len(value) == 1:
+            [value] = value
+
+        result[key] = value
+
+    return PropertyGetter(result)
+
+def decompose_matrix(matrix):
+    if not isinstance(matrix, structs.Matrix4x4):
+        raise AssimpError("pyassimp.decompose_matrix failed: Not a Matrix4x4!")
+
+    scaling = structs.Vector3D()
+    rotation = structs.Quaternion()
+    position = structs.Vector3D()
+
+    _assimp_lib.dll.aiDecomposeMatrix(ctypes.pointer(matrix),
+                                      ctypes.byref(scaling),
+                                      ctypes.byref(rotation),
+                                      ctypes.byref(position))
+    return scaling._init(), rotation._init(), position._init()
+    

+ 1 - 1
port/PyAssimp/pyassimp/formats.py

@@ -21,7 +21,7 @@ FORMATS = ["CSM",
             "STL", 
             "IRR", 
             "Q3O",
-            "Q3D"
+            "Q3D",
             "MS3D", 
             "Q3S", 
             "ZGL", 

+ 279 - 280
port/PyAssimp/pyassimp/helper.py

@@ -1,280 +1,279 @@
-#-*- coding: UTF-8 -*-
-
-"""
-Some fancy helper functions.
-"""
-
-import os
-import ctypes
-from ctypes import POINTER
-import operator
-
-from distutils.sysconfig import get_python_lib
-import re
-import sys
-
-try: import numpy
-except: numpy = None
-
-import logging;logger = logging.getLogger("pyassimp")
-
-from .errors import AssimpError
-
-additional_dirs, ext_whitelist = [],[]
-
-# populate search directories and lists of allowed file extensions
-# depending on the platform we're running on.
-if os.name=='posix':
-    additional_dirs.append('./')
-    additional_dirs.append('/usr/lib/')
-    additional_dirs.append('/usr/lib/x86_64-linux-gnu/')
-    additional_dirs.append('/usr/local/lib/')
-
-    if 'LD_LIBRARY_PATH' in os.environ:
-        additional_dirs.extend([item for item in os.environ['LD_LIBRARY_PATH'].split(':') if item])
-
-    # check if running from anaconda.
-    if "conda" or "continuum" in sys.version.lower():
-      cur_path = get_python_lib()
-      pattern = re.compile('.*\/lib\/')
-      conda_lib = pattern.match(cur_path).group()
-      logger.info("Adding Anaconda lib path:"+ conda_lib)
-      additional_dirs.append(conda_lib)
-
-    # note - this won't catch libassimp.so.N.n, but
-    # currently there's always a symlink called
-    # libassimp.so in /usr/local/lib.
-    ext_whitelist.append('.so')
-    # libassimp.dylib in /usr/local/lib
-    ext_whitelist.append('.dylib')
-
-elif os.name=='nt':
-    ext_whitelist.append('.dll')
-    path_dirs = os.environ['PATH'].split(';')
-    additional_dirs.extend(path_dirs)
-
-def vec2tuple(x):
-    """ Converts a VECTOR3D to a Tuple """
-    return (x.x, x.y, x.z)
-
-def transform(vector3, matrix4x4):
-    """ Apply a transformation matrix on a 3D vector.
-
-    :param vector3: array with 3 elements
-    :param matrix4x4: 4x4 matrix
-    """
-    if numpy:
-        return numpy.dot(matrix4x4, numpy.append(vector3, 1.))
-    else:
-        m0,m1,m2,m3 = matrix4x4; x,y,z = vector3
-        return [
-            m0[0]*x + m0[1]*y + m0[2]*z + m0[3],
-            m1[0]*x + m1[1]*y + m1[2]*z + m1[3],
-            m2[0]*x + m2[1]*y + m2[2]*z + m2[3],
-            m3[0]*x + m3[1]*y + m3[2]*z + m3[3]
-            ]
-
-def _inv(matrix4x4):
-    m0,m1,m2,m3 = matrix4x4
-
-    det  =  m0[3]*m1[2]*m2[1]*m3[0] - m0[2]*m1[3]*m2[1]*m3[0] - \
-            m0[3]*m1[1]*m2[2]*m3[0] + m0[1]*m1[3]*m2[2]*m3[0] + \
-            m0[2]*m1[1]*m2[3]*m3[0] - m0[1]*m1[2]*m2[3]*m3[0] - \
-            m0[3]*m1[2]*m2[0]*m3[1] + m0[2]*m1[3]*m2[0]*m3[1] + \
-            m0[3]*m1[0]*m2[2]*m3[1] - m0[0]*m1[3]*m2[2]*m3[1] - \
-            m0[2]*m1[0]*m2[3]*m3[1] + m0[0]*m1[2]*m2[3]*m3[1] + \
-            m0[3]*m1[1]*m2[0]*m3[2] - m0[1]*m1[3]*m2[0]*m3[2] - \
-            m0[3]*m1[0]*m2[1]*m3[2] + m0[0]*m1[3]*m2[1]*m3[2] + \
-            m0[1]*m1[0]*m2[3]*m3[2] - m0[0]*m1[1]*m2[3]*m3[2] - \
-            m0[2]*m1[1]*m2[0]*m3[3] + m0[1]*m1[2]*m2[0]*m3[3] + \
-            m0[2]*m1[0]*m2[1]*m3[3] - m0[0]*m1[2]*m2[1]*m3[3] - \
-            m0[1]*m1[0]*m2[2]*m3[3] + m0[0]*m1[1]*m2[2]*m3[3]
-
-    return[[( m1[2]*m2[3]*m3[1] - m1[3]*m2[2]*m3[1] + m1[3]*m2[1]*m3[2] - m1[1]*m2[3]*m3[2] - m1[2]*m2[1]*m3[3] + m1[1]*m2[2]*m3[3]) /det,
-            ( m0[3]*m2[2]*m3[1] - m0[2]*m2[3]*m3[1] - m0[3]*m2[1]*m3[2] + m0[1]*m2[3]*m3[2] + m0[2]*m2[1]*m3[3] - m0[1]*m2[2]*m3[3]) /det,
-            ( m0[2]*m1[3]*m3[1] - m0[3]*m1[2]*m3[1] + m0[3]*m1[1]*m3[2] - m0[1]*m1[3]*m3[2] - m0[2]*m1[1]*m3[3] + m0[1]*m1[2]*m3[3]) /det,
-            ( m0[3]*m1[2]*m2[1] - m0[2]*m1[3]*m2[1] - m0[3]*m1[1]*m2[2] + m0[1]*m1[3]*m2[2] + m0[2]*m1[1]*m2[3] - m0[1]*m1[2]*m2[3]) /det],
-           [( m1[3]*m2[2]*m3[0] - m1[2]*m2[3]*m3[0] - m1[3]*m2[0]*m3[2] + m1[0]*m2[3]*m3[2] + m1[2]*m2[0]*m3[3] - m1[0]*m2[2]*m3[3]) /det,
-            ( m0[2]*m2[3]*m3[0] - m0[3]*m2[2]*m3[0] + m0[3]*m2[0]*m3[2] - m0[0]*m2[3]*m3[2] - m0[2]*m2[0]*m3[3] + m0[0]*m2[2]*m3[3]) /det,
-            ( m0[3]*m1[2]*m3[0] - m0[2]*m1[3]*m3[0] - m0[3]*m1[0]*m3[2] + m0[0]*m1[3]*m3[2] + m0[2]*m1[0]*m3[3] - m0[0]*m1[2]*m3[3]) /det,
-            ( m0[2]*m1[3]*m2[0] - m0[3]*m1[2]*m2[0] + m0[3]*m1[0]*m2[2] - m0[0]*m1[3]*m2[2] - m0[2]*m1[0]*m2[3] + m0[0]*m1[2]*m2[3]) /det],
-           [( m1[1]*m2[3]*m3[0] - m1[3]*m2[1]*m3[0] + m1[3]*m2[0]*m3[1] - m1[0]*m2[3]*m3[1] - m1[1]*m2[0]*m3[3] + m1[0]*m2[1]*m3[3]) /det,
-            ( m0[3]*m2[1]*m3[0] - m0[1]*m2[3]*m3[0] - m0[3]*m2[0]*m3[1] + m0[0]*m2[3]*m3[1] + m0[1]*m2[0]*m3[3] - m0[0]*m2[1]*m3[3]) /det,
-            ( m0[1]*m1[3]*m3[0] - m0[3]*m1[1]*m3[0] + m0[3]*m1[0]*m3[1] - m0[0]*m1[3]*m3[1] - m0[1]*m1[0]*m3[3] + m0[0]*m1[1]*m3[3]) /det,
-            ( m0[3]*m1[1]*m2[0] - m0[1]*m1[3]*m2[0] - m0[3]*m1[0]*m2[1] + m0[0]*m1[3]*m2[1] + m0[1]*m1[0]*m2[3] - m0[0]*m1[1]*m2[3]) /det],
-           [( m1[2]*m2[1]*m3[0] - m1[1]*m2[2]*m3[0] - m1[2]*m2[0]*m3[1] + m1[0]*m2[2]*m3[1] + m1[1]*m2[0]*m3[2] - m1[0]*m2[1]*m3[2]) /det,
-            ( m0[1]*m2[2]*m3[0] - m0[2]*m2[1]*m3[0] + m0[2]*m2[0]*m3[1] - m0[0]*m2[2]*m3[1] - m0[1]*m2[0]*m3[2] + m0[0]*m2[1]*m3[2]) /det,
-            ( m0[2]*m1[1]*m3[0] - m0[1]*m1[2]*m3[0] - m0[2]*m1[0]*m3[1] + m0[0]*m1[2]*m3[1] + m0[1]*m1[0]*m3[2] - m0[0]*m1[1]*m3[2]) /det,
-            ( m0[1]*m1[2]*m2[0] - m0[2]*m1[1]*m2[0] + m0[2]*m1[0]*m2[1] - m0[0]*m1[2]*m2[1] - m0[1]*m1[0]*m2[2] + m0[0]*m1[1]*m2[2]) /det]]
-
-def get_bounding_box(scene):
-    bb_min = [1e10, 1e10, 1e10] # x,y,z
-    bb_max = [-1e10, -1e10, -1e10] # x,y,z
-    inv = numpy.linalg.inv if numpy else _inv
-    return get_bounding_box_for_node(scene.rootnode, bb_min, bb_max, inv(scene.rootnode.transformation))
-
-def get_bounding_box_for_node(node, bb_min, bb_max, transformation):
-
-    if numpy:
-        transformation = numpy.dot(transformation, node.transformation)
-    else:
-        t0,t1,t2,t3 = transformation
-        T0,T1,T2,T3 = node.transformation
-        transformation = [ [
-                t0[0]*T0[0] + t0[1]*T1[0] + t0[2]*T2[0] + t0[3]*T3[0],
-                t0[0]*T0[1] + t0[1]*T1[1] + t0[2]*T2[1] + t0[3]*T3[1],
-                t0[0]*T0[2] + t0[1]*T1[2] + t0[2]*T2[2] + t0[3]*T3[2],
-                t0[0]*T0[3] + t0[1]*T1[3] + t0[2]*T2[3] + t0[3]*T3[3]
-            ],[
-                t1[0]*T0[0] + t1[1]*T1[0] + t1[2]*T2[0] + t1[3]*T3[0],
-                t1[0]*T0[1] + t1[1]*T1[1] + t1[2]*T2[1] + t1[3]*T3[1],
-                t1[0]*T0[2] + t1[1]*T1[2] + t1[2]*T2[2] + t1[3]*T3[2],
-                t1[0]*T0[3] + t1[1]*T1[3] + t1[2]*T2[3] + t1[3]*T3[3]
-            ],[
-                t2[0]*T0[0] + t2[1]*T1[0] + t2[2]*T2[0] + t2[3]*T3[0],
-                t2[0]*T0[1] + t2[1]*T1[1] + t2[2]*T2[1] + t2[3]*T3[1],
-                t2[0]*T0[2] + t2[1]*T1[2] + t2[2]*T2[2] + t2[3]*T3[2],
-                t2[0]*T0[3] + t2[1]*T1[3] + t2[2]*T2[3] + t2[3]*T3[3]
-            ],[
-                t3[0]*T0[0] + t3[1]*T1[0] + t3[2]*T2[0] + t3[3]*T3[0],
-                t3[0]*T0[1] + t3[1]*T1[1] + t3[2]*T2[1] + t3[3]*T3[1],
-                t3[0]*T0[2] + t3[1]*T1[2] + t3[2]*T2[2] + t3[3]*T3[2],
-                t3[0]*T0[3] + t3[1]*T1[3] + t3[2]*T2[3] + t3[3]*T3[3]
-            ] ]
-
-    for mesh in node.meshes:
-        for v in mesh.vertices:
-            v = transform(v, transformation)
-            bb_min[0] = min(bb_min[0], v[0])
-            bb_min[1] = min(bb_min[1], v[1])
-            bb_min[2] = min(bb_min[2], v[2])
-            bb_max[0] = max(bb_max[0], v[0])
-            bb_max[1] = max(bb_max[1], v[1])
-            bb_max[2] = max(bb_max[2], v[2])
-
-
-    for child in node.children:
-        bb_min, bb_max = get_bounding_box_for_node(child, bb_min, bb_max, transformation)
-
-    return bb_min, bb_max
-
-def try_load_functions(library_path, dll):
-    '''
-    Try to bind to aiImportFile and aiReleaseImport
-
-    Arguments
-    ---------
-    library_path: path to current lib
-    dll:          ctypes handle to library
-
-    Returns
-    ---------
-    If unsuccessful:
-        None
-    If successful:
-        Tuple containing (library_path,
-                          load from filename function,
-                          load from memory function,
-                          export to filename function,
-                          export to blob function,
-                          release function,
-                          ctypes handle to assimp library)
-    '''
-
-    try:
-        load     = dll.aiImportFile
-        release  = dll.aiReleaseImport
-        load_mem = dll.aiImportFileFromMemory
-        export   = dll.aiExportScene
-        export2blob = dll.aiExportSceneToBlob
-    except AttributeError:
-        #OK, this is a library, but it doesn't have the functions we need
-        return None
-
-    # library found!
-    from .structs import Scene, ExportDataBlob
-    load.restype = POINTER(Scene)
-    load_mem.restype = POINTER(Scene)
-    export2blob.restype = POINTER(ExportDataBlob)
-    return (library_path, load, load_mem, export, export2blob, release, dll)
-
-def search_library():
-    '''
-    Loads the assimp library.
-    Throws exception AssimpError if no library_path is found
-
-    Returns: tuple, (load from filename function,
-                     load from memory function,
-                     export to filename function,
-                     export to blob function,
-                     release function,
-                     dll)
-    '''
-    #this path
-    folder = os.path.dirname(__file__)
-
-    # silence 'DLL not found' message boxes on win
-    try:
-        ctypes.windll.kernel32.SetErrorMode(0x8007)
-    except AttributeError:
-        pass
-
-    candidates = []
-    # test every file
-    for curfolder in [folder]+additional_dirs:
-        if os.path.isdir(curfolder):
-            for filename in os.listdir(curfolder):
-                # our minimum requirement for candidates is that
-                # they should contain 'assimp' somewhere in
-                # their name                                  
-                if filename.lower().find('assimp')==-1 : 
-                    continue
-                is_out=1
-                for et in ext_whitelist:
-                  if et in filename.lower():
-                    is_out=0
-                    break
-                if is_out:
-                  continue
-                
-                library_path = os.path.join(curfolder, filename)
-                logger.debug('Try ' + library_path)
-                try:
-                    dll = ctypes.cdll.LoadLibrary(library_path)
-                except Exception as e:
-                    logger.warning(str(e))
-                    # OK, this except is evil. But different OSs will throw different
-                    # errors. So just ignore any errors.
-                    continue
-                # see if the functions we need are in the dll
-                loaded = try_load_functions(library_path, dll)
-                if loaded: candidates.append(loaded)
-
-    if not candidates:
-        # no library found
-        raise AssimpError("assimp library not found")
-    else:
-        # get the newest library_path
-        candidates = map(lambda x: (os.lstat(x[0])[-2], x), candidates)
-        res = max(candidates, key=operator.itemgetter(0))[1]
-        logger.debug('Using assimp library located at ' + res[0])
-
-        # XXX: if there are 1000 dll/so files containing 'assimp'
-        # in their name, do we have all of them in our address
-        # space now until gc kicks in?
-
-        # XXX: take version postfix of the .so on linux?
-        return res[1:]
-
-def hasattr_silent(object, name):
-    """
-        Calls hasttr() with the given parameters and preserves the legacy (pre-Python 3.2)
-        functionality of silently catching exceptions.
-
-        Returns the result of hasatter() or False if an exception was raised.
-    """
-
-    try:
-        return hasattr(object, name)
-    except:
-        return False
+#-*- coding: UTF-8 -*-
+
+"""
+Some fancy helper functions.
+"""
+
+import os
+import ctypes
+import operator
+
+from distutils.sysconfig import get_python_lib
+import re
+import sys
+
+try: import numpy
+except ImportError: numpy = None
+
+import logging;logger = logging.getLogger("pyassimp")
+
+from .errors import AssimpError
+
+additional_dirs, ext_whitelist = [],[]
+
+# populate search directories and lists of allowed file extensions
+# depending on the platform we're running on.
+if os.name=='posix':
+    additional_dirs.append('./')
+    additional_dirs.append('/usr/lib/')
+    additional_dirs.append('/usr/lib/x86_64-linux-gnu/')
+    additional_dirs.append('/usr/local/lib/')
+
+    if 'LD_LIBRARY_PATH' in os.environ:
+        additional_dirs.extend([item for item in os.environ['LD_LIBRARY_PATH'].split(':') if item])
+
+    # check if running from anaconda.
+    if "conda" or "continuum" in sys.version.lower():
+      cur_path = get_python_lib()
+      pattern = re.compile('.*\/lib\/')
+      conda_lib = pattern.match(cur_path).group()
+      logger.info("Adding Anaconda lib path:"+ conda_lib)
+      additional_dirs.append(conda_lib)
+
+    # note - this won't catch libassimp.so.N.n, but
+    # currently there's always a symlink called
+    # libassimp.so in /usr/local/lib.
+    ext_whitelist.append('.so')
+    # libassimp.dylib in /usr/local/lib
+    ext_whitelist.append('.dylib')
+
+elif os.name=='nt':
+    ext_whitelist.append('.dll')
+    path_dirs = os.environ['PATH'].split(';')
+    additional_dirs.extend(path_dirs)
+
+def vec2tuple(x):
+    """ Converts a VECTOR3D to a Tuple """
+    return (x.x, x.y, x.z)
+
+def transform(vector3, matrix4x4):
+    """ Apply a transformation matrix on a 3D vector.
+
+    :param vector3: array with 3 elements
+    :param matrix4x4: 4x4 matrix
+    """
+    if numpy:
+        return numpy.dot(matrix4x4, numpy.append(vector3, 1.))
+    else:
+        m0,m1,m2,m3 = matrix4x4; x,y,z = vector3
+        return [
+            m0[0]*x + m0[1]*y + m0[2]*z + m0[3],
+            m1[0]*x + m1[1]*y + m1[2]*z + m1[3],
+            m2[0]*x + m2[1]*y + m2[2]*z + m2[3],
+            m3[0]*x + m3[1]*y + m3[2]*z + m3[3]
+            ]
+
+def _inv(matrix4x4):
+    m0,m1,m2,m3 = matrix4x4
+
+    det  =  m0[3]*m1[2]*m2[1]*m3[0] - m0[2]*m1[3]*m2[1]*m3[0] - \
+            m0[3]*m1[1]*m2[2]*m3[0] + m0[1]*m1[3]*m2[2]*m3[0] + \
+            m0[2]*m1[1]*m2[3]*m3[0] - m0[1]*m1[2]*m2[3]*m3[0] - \
+            m0[3]*m1[2]*m2[0]*m3[1] + m0[2]*m1[3]*m2[0]*m3[1] + \
+            m0[3]*m1[0]*m2[2]*m3[1] - m0[0]*m1[3]*m2[2]*m3[1] - \
+            m0[2]*m1[0]*m2[3]*m3[1] + m0[0]*m1[2]*m2[3]*m3[1] + \
+            m0[3]*m1[1]*m2[0]*m3[2] - m0[1]*m1[3]*m2[0]*m3[2] - \
+            m0[3]*m1[0]*m2[1]*m3[2] + m0[0]*m1[3]*m2[1]*m3[2] + \
+            m0[1]*m1[0]*m2[3]*m3[2] - m0[0]*m1[1]*m2[3]*m3[2] - \
+            m0[2]*m1[1]*m2[0]*m3[3] + m0[1]*m1[2]*m2[0]*m3[3] + \
+            m0[2]*m1[0]*m2[1]*m3[3] - m0[0]*m1[2]*m2[1]*m3[3] - \
+            m0[1]*m1[0]*m2[2]*m3[3] + m0[0]*m1[1]*m2[2]*m3[3]
+
+    return[[( m1[2]*m2[3]*m3[1] - m1[3]*m2[2]*m3[1] + m1[3]*m2[1]*m3[2] - m1[1]*m2[3]*m3[2] - m1[2]*m2[1]*m3[3] + m1[1]*m2[2]*m3[3]) /det,
+            ( m0[3]*m2[2]*m3[1] - m0[2]*m2[3]*m3[1] - m0[3]*m2[1]*m3[2] + m0[1]*m2[3]*m3[2] + m0[2]*m2[1]*m3[3] - m0[1]*m2[2]*m3[3]) /det,
+            ( m0[2]*m1[3]*m3[1] - m0[3]*m1[2]*m3[1] + m0[3]*m1[1]*m3[2] - m0[1]*m1[3]*m3[2] - m0[2]*m1[1]*m3[3] + m0[1]*m1[2]*m3[3]) /det,
+            ( m0[3]*m1[2]*m2[1] - m0[2]*m1[3]*m2[1] - m0[3]*m1[1]*m2[2] + m0[1]*m1[3]*m2[2] + m0[2]*m1[1]*m2[3] - m0[1]*m1[2]*m2[3]) /det],
+           [( m1[3]*m2[2]*m3[0] - m1[2]*m2[3]*m3[0] - m1[3]*m2[0]*m3[2] + m1[0]*m2[3]*m3[2] + m1[2]*m2[0]*m3[3] - m1[0]*m2[2]*m3[3]) /det,
+            ( m0[2]*m2[3]*m3[0] - m0[3]*m2[2]*m3[0] + m0[3]*m2[0]*m3[2] - m0[0]*m2[3]*m3[2] - m0[2]*m2[0]*m3[3] + m0[0]*m2[2]*m3[3]) /det,
+            ( m0[3]*m1[2]*m3[0] - m0[2]*m1[3]*m3[0] - m0[3]*m1[0]*m3[2] + m0[0]*m1[3]*m3[2] + m0[2]*m1[0]*m3[3] - m0[0]*m1[2]*m3[3]) /det,
+            ( m0[2]*m1[3]*m2[0] - m0[3]*m1[2]*m2[0] + m0[3]*m1[0]*m2[2] - m0[0]*m1[3]*m2[2] - m0[2]*m1[0]*m2[3] + m0[0]*m1[2]*m2[3]) /det],
+           [( m1[1]*m2[3]*m3[0] - m1[3]*m2[1]*m3[0] + m1[3]*m2[0]*m3[1] - m1[0]*m2[3]*m3[1] - m1[1]*m2[0]*m3[3] + m1[0]*m2[1]*m3[3]) /det,
+            ( m0[3]*m2[1]*m3[0] - m0[1]*m2[3]*m3[0] - m0[3]*m2[0]*m3[1] + m0[0]*m2[3]*m3[1] + m0[1]*m2[0]*m3[3] - m0[0]*m2[1]*m3[3]) /det,
+            ( m0[1]*m1[3]*m3[0] - m0[3]*m1[1]*m3[0] + m0[3]*m1[0]*m3[1] - m0[0]*m1[3]*m3[1] - m0[1]*m1[0]*m3[3] + m0[0]*m1[1]*m3[3]) /det,
+            ( m0[3]*m1[1]*m2[0] - m0[1]*m1[3]*m2[0] - m0[3]*m1[0]*m2[1] + m0[0]*m1[3]*m2[1] + m0[1]*m1[0]*m2[3] - m0[0]*m1[1]*m2[3]) /det],
+           [( m1[2]*m2[1]*m3[0] - m1[1]*m2[2]*m3[0] - m1[2]*m2[0]*m3[1] + m1[0]*m2[2]*m3[1] + m1[1]*m2[0]*m3[2] - m1[0]*m2[1]*m3[2]) /det,
+            ( m0[1]*m2[2]*m3[0] - m0[2]*m2[1]*m3[0] + m0[2]*m2[0]*m3[1] - m0[0]*m2[2]*m3[1] - m0[1]*m2[0]*m3[2] + m0[0]*m2[1]*m3[2]) /det,
+            ( m0[2]*m1[1]*m3[0] - m0[1]*m1[2]*m3[0] - m0[2]*m1[0]*m3[1] + m0[0]*m1[2]*m3[1] + m0[1]*m1[0]*m3[2] - m0[0]*m1[1]*m3[2]) /det,
+            ( m0[1]*m1[2]*m2[0] - m0[2]*m1[1]*m2[0] + m0[2]*m1[0]*m2[1] - m0[0]*m1[2]*m2[1] - m0[1]*m1[0]*m2[2] + m0[0]*m1[1]*m2[2]) /det]]
+
+def get_bounding_box(scene):
+    bb_min = [1e10, 1e10, 1e10] # x,y,z
+    bb_max = [-1e10, -1e10, -1e10] # x,y,z
+    inv = numpy.linalg.inv if numpy else _inv
+    return get_bounding_box_for_node(scene.rootnode, bb_min, bb_max, inv(scene.rootnode.transformation))
+
+def get_bounding_box_for_node(node, bb_min, bb_max, transformation):
+
+    if numpy:
+        transformation = numpy.dot(transformation, node.transformation)
+    else:
+        t0,t1,t2,t3 = transformation
+        T0,T1,T2,T3 = node.transformation
+        transformation = [ [
+                t0[0]*T0[0] + t0[1]*T1[0] + t0[2]*T2[0] + t0[3]*T3[0],
+                t0[0]*T0[1] + t0[1]*T1[1] + t0[2]*T2[1] + t0[3]*T3[1],
+                t0[0]*T0[2] + t0[1]*T1[2] + t0[2]*T2[2] + t0[3]*T3[2],
+                t0[0]*T0[3] + t0[1]*T1[3] + t0[2]*T2[3] + t0[3]*T3[3]
+            ],[
+                t1[0]*T0[0] + t1[1]*T1[0] + t1[2]*T2[0] + t1[3]*T3[0],
+                t1[0]*T0[1] + t1[1]*T1[1] + t1[2]*T2[1] + t1[3]*T3[1],
+                t1[0]*T0[2] + t1[1]*T1[2] + t1[2]*T2[2] + t1[3]*T3[2],
+                t1[0]*T0[3] + t1[1]*T1[3] + t1[2]*T2[3] + t1[3]*T3[3]
+            ],[
+                t2[0]*T0[0] + t2[1]*T1[0] + t2[2]*T2[0] + t2[3]*T3[0],
+                t2[0]*T0[1] + t2[1]*T1[1] + t2[2]*T2[1] + t2[3]*T3[1],
+                t2[0]*T0[2] + t2[1]*T1[2] + t2[2]*T2[2] + t2[3]*T3[2],
+                t2[0]*T0[3] + t2[1]*T1[3] + t2[2]*T2[3] + t2[3]*T3[3]
+            ],[
+                t3[0]*T0[0] + t3[1]*T1[0] + t3[2]*T2[0] + t3[3]*T3[0],
+                t3[0]*T0[1] + t3[1]*T1[1] + t3[2]*T2[1] + t3[3]*T3[1],
+                t3[0]*T0[2] + t3[1]*T1[2] + t3[2]*T2[2] + t3[3]*T3[2],
+                t3[0]*T0[3] + t3[1]*T1[3] + t3[2]*T2[3] + t3[3]*T3[3]
+            ] ]
+
+    for mesh in node.meshes:
+        for v in mesh.vertices:
+            v = transform(v, transformation)
+            bb_min[0] = min(bb_min[0], v[0])
+            bb_min[1] = min(bb_min[1], v[1])
+            bb_min[2] = min(bb_min[2], v[2])
+            bb_max[0] = max(bb_max[0], v[0])
+            bb_max[1] = max(bb_max[1], v[1])
+            bb_max[2] = max(bb_max[2], v[2])
+
+
+    for child in node.children:
+        bb_min, bb_max = get_bounding_box_for_node(child, bb_min, bb_max, transformation)
+
+    return bb_min, bb_max
+
+def try_load_functions(library_path, dll):
+    '''
+    Try to bind to aiImportFile and aiReleaseImport
+
+    Arguments
+    ---------
+    library_path: path to current lib
+    dll:          ctypes handle to library
+
+    Returns
+    ---------
+    If unsuccessful:
+        None
+    If successful:
+        Tuple containing (library_path,
+                          load from filename function,
+                          load from memory function,
+                          export to filename function,
+                          export to blob function,
+                          release function,
+                          ctypes handle to assimp library)
+    '''
+
+    try:
+        load     = dll.aiImportFile
+        release  = dll.aiReleaseImport
+        load_mem = dll.aiImportFileFromMemory
+        export   = dll.aiExportScene
+        export2blob = dll.aiExportSceneToBlob
+    except AttributeError:
+        #OK, this is a library, but it doesn't have the functions we need
+        return None
+
+    # library found!
+    from .structs import Scene, ExportDataBlob
+    load.restype = ctypes.POINTER(Scene)
+    load_mem.restype = ctypes.POINTER(Scene)
+    export2blob.restype = ctypes.POINTER(ExportDataBlob)
+    return (library_path, load, load_mem, export, export2blob, release, dll)
+
+def search_library():
+    '''
+    Loads the assimp library.
+    Throws exception AssimpError if no library_path is found
+
+    Returns: tuple, (load from filename function,
+                     load from memory function,
+                     export to filename function,
+                     export to blob function,
+                     release function,
+                     dll)
+    '''
+    #this path
+    folder = os.path.dirname(__file__)
+
+    # silence 'DLL not found' message boxes on win
+    try:
+        ctypes.windll.kernel32.SetErrorMode(0x8007)
+    except AttributeError:
+        pass
+
+    candidates = []
+    # test every file
+    for curfolder in [folder]+additional_dirs:
+        if os.path.isdir(curfolder):
+            for filename in os.listdir(curfolder):
+                # our minimum requirement for candidates is that
+                # they should contain 'assimp' somewhere in
+                # their name                                  
+                if filename.lower().find('assimp')==-1 : 
+                    continue
+                is_out=1
+                for et in ext_whitelist:
+                  if et in filename.lower():
+                    is_out=0
+                    break
+                if is_out:
+                  continue
+                
+                library_path = os.path.join(curfolder, filename)
+                logger.debug('Try ' + library_path)
+                try:
+                    dll = ctypes.cdll.LoadLibrary(library_path)
+                except Exception as e:
+                    logger.warning(str(e))
+                    # OK, this except is evil. But different OSs will throw different
+                    # errors. So just ignore any errors.
+                    continue
+                # see if the functions we need are in the dll
+                loaded = try_load_functions(library_path, dll)
+                if loaded: candidates.append(loaded)
+
+    if not candidates:
+        # no library found
+        raise AssimpError("assimp library not found")
+    else:
+        # get the newest library_path
+        candidates = map(lambda x: (os.lstat(x[0])[-2], x), candidates)
+        res = max(candidates, key=operator.itemgetter(0))[1]
+        logger.debug('Using assimp library located at ' + res[0])
+
+        # XXX: if there are 1000 dll/so files containing 'assimp'
+        # in their name, do we have all of them in our address
+        # space now until gc kicks in?
+
+        # XXX: take version postfix of the .so on linux?
+        return res[1:]
+
+def hasattr_silent(object, name):
+    """
+        Calls hasttr() with the given parameters and preserves the legacy (pre-Python 3.2)
+        functionality of silently catching exceptions.
+
+        Returns the result of hasatter() or False if an exception was raised.
+    """
+
+    try:
+        return hasattr(object, name)
+    except Exception:
+        return False

+ 1 - 0
port/PyAssimp/pyassimp/postprocess.py

@@ -435,6 +435,7 @@ aiProcess_Debone  = 0x4000000
 aiProcess_GenEntityMeshes = 0x100000
 aiProcess_OptimizeAnimations = 0x200000
 aiProcess_FixTexturePaths = 0x200000
+aiProcess_EmbedTextures  = 0x10000000
 
 ## @def aiProcess_ConvertToLeftHanded
  #  @brief Shortcut flag for Direct3D-based applications. 

+ 2 - 2
port/PyAssimp/pyassimp/structs.py

@@ -1,6 +1,6 @@
 #-*- coding: UTF-8 -*-
 
-from ctypes import POINTER, c_void_p, c_int, c_uint, c_char, c_float, Structure, c_char_p, c_double, c_ubyte, c_size_t, c_uint32
+from ctypes import POINTER, c_void_p, c_uint, c_char, c_float, Structure, c_char_p, c_double, c_ubyte, c_size_t, c_uint32
 
 
 class Vector2D(Structure):
@@ -70,7 +70,7 @@ class String(Structure):
     See 'types.h' for details.
     """ 
 
-    MAXLEN = 1024
+    MAXLEN = 1024
 
     _fields_ = [
             # Binary length of the string excluding the terminal 0. This is NOT the

+ 3 - 2
port/PyAssimp/scripts/fixed_pipeline_3d_viewer.py

@@ -24,12 +24,13 @@ This sample is based on several sources, including:
  - ASSIMP's C++ SimpleOpenGL viewer
 """
 
-import os, sys
+import sys
 from OpenGL.GLUT import *
 from OpenGL.GLU import *
 from OpenGL.GL import *
 
-import logging;logger = logging.getLogger("pyassimp_opengl")
+import logging
+logger = logging.getLogger("pyassimp_opengl")
 logging.basicConfig(level=logging.INFO)
 
 import math

+ 3 - 3
port/PyAssimp/scripts/sample.py

@@ -5,7 +5,7 @@
 This module demonstrates the functionality of PyAssimp.
 """
 
-import os, sys
+import sys
 import logging
 logging.basicConfig(level=logging.INFO)
 
@@ -50,8 +50,8 @@ def main(filename=None):
         print("    colors:" + str(len(mesh.colors)))
         tcs = mesh.texturecoords
         if tcs.any():
-            for index, tc in enumerate(tcs):
-                print("    texture-coords "+ str(index) + ":" + str(len(tcs[index])) + "first3:" + str(tcs[index][:3]))
+            for tc_index, tc in enumerate(tcs):
+                print("    texture-coords "+ str(tc_index) + ":" + str(len(tcs[tc_index])) + "first3:" + str(tcs[tc_index][:3]))
 
         else:
             print("    no texture coordinates")

+ 3 - 1
scripts/BlenderImporter/genblenddna.py

@@ -291,7 +291,9 @@ def main():
     #s += "#endif\n"
         
     output.write(templt.replace("<HERE>",s))
-        
+
+    # we got here, so no error
+    return 0
 
 if __name__ == "__main__":
     sys.exit(main())

+ 5 - 5
scripts/StepImporter/CppGenerator.py

@@ -151,11 +151,8 @@ def handle_unset_args(field,entity,schema,argnum):
     return n+template_allow_optional.format()
 
 def get_single_conversion(field,schema,argnum=0,classname='?'):
-    typen = field.type
     name = field.name
-    if field.collection:
-        typen = 'LIST'
-    return template_convert_single.format(type=typen,name=name,argnum=argnum,classname=classname,full_type=field.fullspec)
+    return template_convert_single.format(name=name,argnum=argnum,classname=classname,full_type=field.fullspec)
 
 def count_args_up(entity,schema):
     return len(entity.members) + (count_args_up(schema.entities[entity.parent],schema) if entity.parent else 0)
@@ -218,7 +215,7 @@ def get_derived(e,schema):
     return res
 
 def get_hierarchy(e,schema):
-    return get_derived(e.schema)+[e.name]+get_base_classes(e,schema)
+    return get_derived(e, schema)+[e.name]+get_base_classes(e,schema)
 
 def sort_entity_list(schema):
     deps = []
@@ -300,5 +297,8 @@ def work(filename):
         with open(output_file_cpp,'wt') as outp:
             outp.write(inp.read().replace('{schema-static-table}',schema_table).replace('{converter-impl}',converters))
 
+    # Finished without error, so return 0
+    return 0
+
 if __name__ == "__main__":
     sys.exit(work(sys.argv[1] if len(sys.argv)>1 else 'schema.exp'))

+ 2 - 1
scripts/StepImporter/ExpressReader.py

@@ -43,7 +43,8 @@
 """Parse an EXPRESS file and extract basic information on all
 entities and data types contained"""
 
-import sys, os, re
+import sys
+import re
 from collections import OrderedDict
 
 re_match_entity = re.compile(r"""