
Added handling of materials to CTMLoader. Added more complex CTM example.

The CTM pipeline produces very small and efficient models, but it's quite involved.

Models with multiple materials need to be split into separate files; the "split_obj.py" helper script does this.

Also, as the CTM format uses indexed shared vertices, a mesh with more complex UV unwrapping may need to be preprocessed to create extra vertices for faces that have different UVs on the same vertices (MeshLab can do this).
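A minimal usage sketch of the new loadParts() (not part of this commit; renderer, scene, useWorker and useBuffers are assumed to exist on the example page, and geometries/materials are assumed to line up by index as they do in camaro.js):

	var loader = new THREE.CTMLoader( renderer.context );

	// load the parts manifest; the callback receives parallel arrays
	// of geometries (one per CTM part) and materials (from camaro.js)
	loader.loadParts( "ctm/camaro/camaro.js", function ( geometries, materials ) {

		for ( var i = 0; i < geometries.length; i ++ ) {

			// pair each CTM part with its material and add it to the scene
			var mesh = new THREE.Mesh( geometries[ i ], materials[ i ] );
			scene.add( mesh );

		}

	}, useWorker, useBuffers );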
alteredq committed 13 years ago · commit b59e818f39

+ 129 - 0
examples/ctm/camaro/camaro.js

@@ -0,0 +1,129 @@
+{
+"geometries" :
+	[
+	"camaro_000.ctm",
+	"camaro_001.ctm",
+	"camaro_002.ctm",
+	"camaro_003.ctm",
+	"camaro_004.ctm",
+	"camaro_005.ctm",
+	"camaro_006.ctm",
+	"camaro_007.ctm"
+	],
+
+"materials" :
+	[
+	{
+	"DbgColor" : 15658734,
+	"DbgIndex" : 0,
+	"DbgName" : "Body_car-ao",
+	"colorAmbient" : [0.0, 0.0, 0.0],
+	"colorDiffuse" : [0.1816, 0.3264, 0.3704],
+	"colorSpecular" : [2.0, 2.0, 2.0],
+	"illumination" : 2,
+	"mapDiffuse" : "car-ao.png",
+	"opticalDensity" : 1.0,
+	"specularCoef" : 778.431373,
+	"transparency" : 1.0
+	},
+
+	{
+	"DbgColor" : 15597568,
+	"DbgIndex" : 1,
+	"DbgName" : "tire_car-ao",
+	"colorAmbient" : [0.0, 0.0, 0.0],
+	"colorDiffuse" : [0.2168, 0.2168, 0.2104],
+	"colorSpecular" : [0.1, 0.1, 0.1],
+	"illumination" : 2,
+	"mapDiffuse" : "car-ao.png",
+	"opticalDensity" : 1.0,
+	"specularCoef" : 15.686275,
+	"transparency" : 1.0
+	},
+
+	{
+	"DbgColor" : 60928,
+	"DbgIndex" : 2,
+	"DbgName" : "black2_car-ao",
+	"colorAmbient" : [0.0, 0.0, 0.0],
+	"colorDiffuse" : [0.0, 0.0, 0.0],
+	"colorSpecular" : [0.0, 0.0, 0.0],
+	"illumination" : 2,
+	"mapDiffuse" : "car-ao.png",
+	"opticalDensity" : 1.0,
+	"specularCoef" : 0.0,
+	"transparency" : 1.0
+	},
+
+	{
+	"DbgColor" : 238,
+	"DbgIndex" : 3,
+	"DbgName" : "tireling_car-ao",
+	"colorAmbient" : [0.0, 0.0, 0.0],
+	"colorDiffuse" : [0.4, 0.4, 0.4],
+	"colorSpecular" : [0.2, 0.2, 0.2],
+	"illumination" : 2,
+	"mapDiffuse" : "car-ao.png",
+	"opticalDensity" : 1.0,
+	"specularCoef" : 15.686275,
+	"transparency" : 1.0
+	},
+
+	{
+	"DbgColor" : 15658496,
+	"DbgIndex" : 4,
+	"DbgName" : "glass_car-ao",
+	"colorAmbient" : [0.0, 0.0, 0.0],
+	"colorDiffuse" : [0.16, 0.248, 0.2448],
+	"colorSpecular" : [2.0, 2.0, 2.0],
+	"illumination" : 2,
+	"mapDiffuse" : "car-ao.png",
+	"opticalDensity" : 1.0,
+	"specularCoef" : 778.431373,
+	"transparency" : 0.34
+	},
+
+	{
+	"DbgColor" : 61166,
+	"DbgIndex" : 5,
+	"DbgName" : "black_car-ao",
+	"colorAmbient" : [0.0, 0.0, 0.0],
+	"colorDiffuse" : [0.0816, 0.0816, 0.0816],
+	"colorSpecular" : [0.2, 0.2, 0.2],
+	"illumination" : 2,
+	"mapDiffuse" : "car-ao.png",
+	"opticalDensity" : 1.0,
+	"specularCoef" : 3.921569,
+	"transparency" : 1.0
+	},
+
+	{
+	"DbgColor" : 15597806,
+	"DbgIndex" : 6,
+	"DbgName" : "mirror_car-ao",
+	"colorAmbient" : [0.0, 0.0, 0.0],
+	"colorDiffuse" : [0.24, 0.24, 0.24],
+	"colorSpecular" : [2.0, 2.0, 2.0],
+	"illumination" : 2,
+	"mapDiffuse" : "car-ao.png",
+	"opticalDensity" : 1.0,
+	"specularCoef" : 778.431373,
+	"transparency" : 1.0
+	},
+
+
+	{
+	"DbgColor" : 3744854,
+	"DbgIndex" : 8,
+	"DbgName" : "Material.001_plane-ao-256",
+	"colorAmbient" : [0.0, 0.0, 0.0],
+	"colorDiffuse" : [0.798635, 0.776149, 0.8],
+	"colorSpecular" : [0.5, 0.5, 0.5],
+	"illumination" : 2,
+	"mapDiffuse" : "plane-ao-256.png",
+	"opticalDensity" : 1.0,
+	"specularCoef" : 96.078431,
+	"transparency" : 1.0
+	}
+	]
+}

BIN
examples/ctm/camaro/camaro_000.ctm


BIN
examples/ctm/camaro/camaro_001.ctm


BIN
examples/ctm/camaro/camaro_002.ctm


BIN
examples/ctm/camaro/camaro_003.ctm


BIN
examples/ctm/camaro/camaro_004.ctm


BIN
examples/ctm/camaro/camaro_005.ctm


BIN
examples/ctm/camaro/camaro_006.ctm


BIN
examples/ctm/camaro/camaro_007.ctm


BIN
examples/ctm/camaro/car-ao.png


BIN
examples/ctm/camaro/plane-ao-256.png


+ 78 - 5
examples/js/ctm/CTMLoader.js

@@ -8,16 +8,89 @@
  * @author alteredq / http://alteredqualia.com/
  */
 
-THREE.CTMLoader = function ( context ) {
+THREE.CTMLoader = function ( context, showStatus ) {
 
 	this.context = context;
 
+	THREE.Loader.call( this, showStatus );
+
 };
 
-THREE.CTMLoader.prototype = new THREE.CTMLoader();
+THREE.CTMLoader.prototype = new THREE.Loader();
 THREE.CTMLoader.prototype.constructor = THREE.CTMLoader;
 
 
+// Load multiple CTM parts defined in JSON
+
+THREE.CTMLoader.prototype.loadParts = function( url, callback, useWorker, useBuffers, basePath ) {
+
+	var scope = this;
+
+	var xhr = new XMLHttpRequest();
+
+	basePath = basePath ? basePath : this.extractUrlbase( url );
+
+	console.log( basePath );
+
+	xhr.onreadystatechange = function() {
+
+		if ( xhr.readyState == 4 ) {
+
+			if ( xhr.status == 200 || xhr.status == 0 ) {
+
+				var jsonObject = JSON.parse( xhr.responseText );
+
+				var geometries = [], materials = [];
+				var partCounter = 0;
+
+				function generateCallback( index ) {
+
+					return function ( geometry ) {
+
+						geometries[ index ] = geometry;
+
+						partCounter += 1;
+
+						if ( partCounter === jsonObject.geometries.length ) {
+
+							callback( geometries, materials );
+
+						}
+
+					}
+
+				}
+
+				// init materials
+
+				for ( var i = 0; i < jsonObject.materials.length; i ++ ) {
+
+					materials[ i ] = THREE.Loader.prototype.createMaterial( jsonObject.materials[ i ], basePath );
+
+				}
+
+				// load individual CTM files
+
+				for ( var i = 0; i < jsonObject.geometries.length; i ++ ) {
+
+					var partUrl = basePath + jsonObject.geometries[ i ];
+					scope.load( partUrl, generateCallback( i ), useWorker, useBuffers );
+
+				}
+
+			}
+
+		}
+
+	}
+
+	xhr.open( "GET", url, true );
+	xhr.overrideMimeType( "text/plain; charset=x-user-defined" );
+	xhr.setRequestHeader( "Content-Type", "text/plain" );
+	xhr.send( null );
+
+};
+
 // Load CTMLoader compressed models
 //  - parameters
 //		- url (required)
@@ -40,7 +113,7 @@ THREE.CTMLoader.prototype.load = function( url, callback, useWorker, useBuffers
 
 				var binaryData = xhr.responseText;
 
-				var s = Date.now();
+				//var s = Date.now();
 
 				if ( useWorker ) {
 
@@ -60,8 +133,8 @@ THREE.CTMLoader.prototype.load = function( url, callback, useWorker, useBuffers
 
 						}
 
-						var e = Date.now();
-						console.log( "CTM data parse time [worker]: " + (e-s) + " ms" );
+						//var e = Date.now();
+						//console.log( "CTM data parse time [worker]: " + (e-s) + " ms" );
 
 					};
 

+ 1 - 1
src/extras/loaders/JSONLoader.js

@@ -29,7 +29,7 @@ THREE.JSONLoader.prototype.load = function ( url, callback, texturePath ) {
 
 	}
 
-	texturePath = texturePath ? texturePath : this.extractUrlbase( url ),
+	texturePath = texturePath ? texturePath : this.extractUrlbase( url );
 
 	this.onLoadStart();
 	this.loadAjaxJSON( this, url, callback, texturePath );

+ 0 - 0
utils/exporters/convert_obj_three.py → utils/exporters/obj/convert_obj_three.py


+ 504 - 0
utils/exporters/obj/split_obj.py

@@ -0,0 +1,504 @@
+"""Split single OBJ model into mutliple OBJ files by materials
+
+-------------------------
+How to use this converter
+-------------------------
+
+python split_obj.py -i infile.obj -o outfile
+
+Will generate:
+
+outfile_000.obj
+outfile_001.obj
+
+...
+
+outfile_XXX.obj
+
+-------------------------------------
+Parser based on format description
+-------------------------------------
+
+    http://en.wikipedia.org/wiki/Obj
+
+------
+Author
+------
+AlteredQualia http://alteredqualia.com
+
+"""
+
+import fileinput
+import operator
+import random
+import os.path
+import getopt
+import sys
+import struct
+import math
+import glob
+
+# #####################################################
+# Configuration
+# #####################################################
+TRUNCATE = False
+SCALE = 1.0
+
+
+# #####################################################
+# Templates
+# #####################################################
+TEMPLATE_OBJ = u"""\
+################################
+# OBJ generated by split_obj.py
+################################
+# Faces:    %(nfaces)d
+# Vertices: %(nvertices)d
+# Normals:  %(nnormals)d
+# UVs:      %(nuvs)d
+################################
+
+# vertices
+
+%(vertices)s
+
+# normals
+
+%(normals)s
+
+# uvs
+
+%(uvs)s
+
+# faces
+
+%(faces)s
+"""
+
+TEMPLATE_VERTEX = "v %f %f %f"
+TEMPLATE_VERTEX_TRUNCATE = "v %d %d %d"
+
+TEMPLATE_NORMAL = "vn %.5g %.5g %.5g"
+TEMPLATE_UV = "vt %.5g %.5g"
+
+TEMPLATE_FACE3_V = "f %d %d %d"
+TEMPLATE_FACE4_V = "f %d %d %d %d"
+
+TEMPLATE_FACE3_VT = "f %d/%d %d/%d %d/%d"
+TEMPLATE_FACE4_VT = "f %d/%d %d/%d %d/%d %d/%d"
+
+TEMPLATE_FACE3_VN = "f %d//%d %d//%d %d//%d"
+TEMPLATE_FACE4_VN = "f %d//%d %d//%d %d//%d %d//%d"
+
+TEMPLATE_FACE3_VTN = "f %d/%d/%d %d/%d/%d %d/%d/%d"
+TEMPLATE_FACE4_VTN = "f %d/%d/%d %d/%d/%d %d/%d/%d %d/%d/%d"
+
+
+# #####################################################
+# Utils
+# #####################################################
+def file_exists(filename):
+    """Return true if file exists and is accessible for reading.
+
+    Should be safer than just testing for existence due to links and
+    permissions magic on Unix filesystems.
+
+    @rtype: boolean
+    """
+
+    try:
+        f = open(filename, 'r')
+        f.close()
+        return True
+    except IOError:
+        return False
+
+# #####################################################
+# OBJ parser
+# #####################################################
+def parse_vertex(text):
+    """Parse text chunk specifying single vertex.
+
+    Possible formats:
+        vertex index
+        vertex index / texture index
+        vertex index / texture index / normal index
+        vertex index / / normal index
+    """
+
+    v = 0
+    t = 0
+    n = 0
+
+    chunks = text.split("/")
+
+    v = int(chunks[0])
+    if len(chunks) > 1:
+        if chunks[1]:
+            t = int(chunks[1])
+    if len(chunks) > 2:
+        if chunks[2]:
+            n = int(chunks[2])
+
+    return { 'v': v, 't': t, 'n': n }
+
+def parse_obj(fname):
+    """Parse OBJ file.
+    """
+
+    vertices = []
+    normals = []
+    uvs = []
+
+    faces = []
+
+    materials = {}
+    mcounter = 0
+    mcurrent = 0
+
+    mtllib = ""
+
+    # current face state
+    group = 0
+    object = 0
+    smooth = 0
+
+    for line in fileinput.input(fname):
+        chunks = line.split()
+        if len(chunks) > 0:
+
+            # Vertices as (x,y,z) coordinates
+            # v 0.123 0.234 0.345
+            if chunks[0] == "v" and len(chunks) == 4:
+                x = float(chunks[1])
+                y = float(chunks[2])
+                z = float(chunks[3])
+                vertices.append([x,y,z])
+
+            # Normals in (x,y,z) form; normals might not be unit
+            # vn 0.707 0.000 0.707
+            if chunks[0] == "vn" and len(chunks) == 4:
+                x = float(chunks[1])
+                y = float(chunks[2])
+                z = float(chunks[3])
+                normals.append([x,y,z])
+
+            # Texture coordinates in (u,v[,w]) coordinates, w is optional
+            # vt 0.500 -1.352 [0.234]
+            if chunks[0] == "vt" and len(chunks) >= 3:
+                u = float(chunks[1])
+                v = float(chunks[2])
+                w = 0
+                if len(chunks)>3:
+                    w = float(chunks[3])
+                uvs.append([u,v,w])
+
+            # Face
+            if chunks[0] == "f" and len(chunks) >= 4:
+                vertex_index = []
+                uv_index = []
+                normal_index = []
+
+                for v in chunks[1:]:
+                    vertex = parse_vertex(v)
+                    if vertex['v']:
+                        vertex_index.append(vertex['v'])
+                    if vertex['t']:
+                        uv_index.append(vertex['t'])
+                    if vertex['n']:
+                        normal_index.append(vertex['n'])
+
+                faces.append({
+                    'vertex':vertex_index,
+                    'uv':uv_index,
+                    'normal':normal_index,
+
+                    'material':mcurrent,
+                    'group':group,
+                    'object':object,
+                    'smooth':smooth,
+                    })
+
+            # Group
+            if chunks[0] == "g" and len(chunks) == 2:
+                group = chunks[1]
+
+            # Object
+            if chunks[0] == "o" and len(chunks) == 2:
+                object = chunks[1]
+
+            # Materials definition
+            if chunks[0] == "mtllib" and len(chunks) == 2:
+                mtllib = chunks[1]
+
+            # Material
+            if chunks[0] == "usemtl" and len(chunks) == 2:
+                material = chunks[1]
+                if not material in materials:
+                    mcurrent = mcounter
+                    materials[material] = mcounter
+                    mcounter += 1
+                else:
+                    mcurrent = materials[material]
+
+            # Smooth shading
+            if chunks[0] == "s" and len(chunks) == 2:
+                smooth = chunks[1]
+
+    return faces, vertices, uvs, normals, materials, mtllib
+
+# #############################################################################
+# API - Breaker
+# #############################################################################
+def break_obj(infile, outfile):
+    """Break infile.obj to outfile.obj
+    """
+
+    if not file_exists(infile):
+        print "Couldn't find [%s]" % infile
+        return
+
+    faces, vertices, uvs, normals, materials, mtllib = parse_obj(infile)
+
+    # sort faces by materials
+
+    chunks = {}
+
+    for face in faces:
+        material = face["material"]
+        if not material in chunks:
+            chunks[material] = {"faces": [], "vertices": set(), "normals": set(), "uvs": set()}
+
+        chunks[material]["faces"].append(face)
+
+    # extract unique vertex / normal / uv indices used per chunk
+
+    for material in chunks:
+        chunk = chunks[material]
+        for face in chunk["faces"]:
+            for i in face["vertex"]:
+                chunk["vertices"].add(i)
+
+            for i in face["normal"]:
+                chunk["normals"].add(i)
+
+            for i in face["uv"]:
+                chunk["uvs"].add(i)
+
+    # generate new OBJs
+
+    for mi, material in enumerate(chunks):
+        chunk = chunks[material]
+
+        # generate separate vertex / normal / uv index lists for each chunk
+        # (including mapping from original to new indices)
+
+        # get well defined order
+
+        new_vertices = list(chunk["vertices"])
+        new_normals = list(chunk["normals"])
+        new_uvs = list(chunk["uvs"])
+
+        # map original => new indices
+
+        vmap = {}
+        for i, v in enumerate(new_vertices):
+            vmap[v] = i + 1
+
+        nmap = {}
+        for i, n in enumerate(new_normals):
+            nmap[n] = i + 1
+
+        tmap = {}
+        for i, t in enumerate(new_uvs):
+            tmap[t] = i + 1
+
+
+        # vertices
+
+        pieces = []
+        for i in new_vertices:
+            vertex = vertices[i-1]
+            txt = TEMPLATE_VERTEX % (vertex[0], vertex[1], vertex[2])
+            pieces.append(txt)
+
+        str_vertices = "\n".join(pieces)
+
+        # normals
+
+        pieces = []
+        for i in new_normals:
+            normal = normals[i-1]
+            txt = TEMPLATE_NORMAL % (normal[0], normal[1], normal[2])
+            pieces.append(txt)
+
+        str_normals = "\n".join(pieces)
+
+        # uvs
+
+        pieces = []
+        for i in new_uvs:
+            uv = uvs[i-1]
+            txt = TEMPLATE_UV % (uv[0], uv[1])
+            pieces.append(txt)
+
+        str_uvs = "\n".join(pieces)
+
+        # faces
+
+        pieces = []
+
+        for face in chunk["faces"]:
+
+            txt = ""
+
+            fv = face["vertex"]
+            fn = face["normal"]
+            ft = face["uv"]
+
+            if len(fv) == 3:
+
+                va = vmap[fv[0]]
+                vb = vmap[fv[1]]
+                vc = vmap[fv[2]]
+
+                if len(fn) == 3 and len(ft) == 3:
+                    na = nmap[fn[0]]
+                    nb = nmap[fn[1]]
+                    nc = nmap[fn[2]]
+
+                    ta = tmap[ft[0]]
+                    tb = tmap[ft[1]]
+                    tc = tmap[ft[2]]
+
+                    txt = TEMPLATE_FACE3_VTN % (va, ta, na, vb, tb, nb, vc, tc, nc)
+
+                elif len(fn) == 3:
+                    na = nmap[fn[0]]
+                    nb = nmap[fn[1]]
+                    nc = nmap[fn[2]]
+
+                    txt = TEMPLATE_FACE3_VN % (va, na, vb, nb, vc, nc)
+
+                elif len(ft) == 3:
+                    ta = tmap[ft[0]]
+                    tb = tmap[ft[1]]
+                    tc = tmap[ft[2]]
+
+                    txt = TEMPLATE_FACE3_VT % (va, ta, vb, tb, vc, tc)
+
+                else:
+                    txt = TEMPLATE_FACE3_V % (va, vb, vc)
+
+            elif len(fv) == 4:
+
+                va = vmap[fv[0]]
+                vb = vmap[fv[1]]
+                vc = vmap[fv[2]]
+                vd = vmap[fv[3]]
+
+                if len(fn) == 4 and len(ft) == 4:
+                    na = nmap[fn[0]]
+                    nb = nmap[fn[1]]
+                    nc = nmap[fn[2]]
+                    nd = nmap[fn[3]]
+
+                    ta = tmap[ft[0]]
+                    tb = tmap[ft[1]]
+                    tc = tmap[ft[2]]
+                    td = tmap[ft[3]]
+
+                    txt = TEMPLATE_FACE4_VTN % (va, ta, na, vb, tb, nb, vc, tc, nc, vd, td, nd)
+
+                elif len(fn) == 4:
+                    na = nmap[fn[0]]
+                    nb = nmap[fn[1]]
+                    nc = nmap[fn[2]]
+                    nd = nmap[fn[3]]
+
+                    txt = TEMPLATE_FACE4_VN % (va, na, vb, nb, vc, nc, vd, nd)
+
+                elif len(ft) == 4:
+                    ta = tmap[ft[0]]
+                    tb = tmap[ft[1]]
+                    tc = tmap[ft[2]]
+                    td = tmap[ft[3]]
+
+                    txt = TEMPLATE_FACE4_VT % (va, ta, vb, tb, vc, tc, vd, td)
+
+                else:
+                    txt = TEMPLATE_FACE4_V % (va, vb, vc, vd)
+
+            pieces.append(txt)
+
+
+        str_faces = "\n".join(pieces)
+
+        # generate OBJ string
+
+        content = TEMPLATE_OBJ % {
+        "nfaces"        : len(chunk["faces"]),
+        "nvertices"     : len(new_vertices),
+        "nnormals"      : len(new_normals),
+        "nuvs"          : len(new_uvs),
+
+        "vertices"      : str_vertices,
+        "normals"       : str_normals,
+        "uvs"           : str_uvs,
+        "faces"         : str_faces
+        }
+
+        # write OBJ file
+
+        outname = "%s_%03d.obj" % (outfile, mi)
+
+        f = open(outname, "w")
+        f.write(content)
+        f.close()
+
+
+# #############################################################################
+# Helpers
+# #############################################################################
+def usage():
+    print "Usage: %s -i filename.obj -o prefix" % os.path.basename(sys.argv[0])
+
+# #####################################################
+# Main
+# #####################################################
+if __name__ == "__main__":
+
+    # get parameters from the command line
+
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], "hi:o:x:", ["help", "input=", "output=", "truncatescale="])
+
+    except getopt.GetoptError:
+        usage()
+        sys.exit(2)
+
+    infile = outfile = ""
+
+    for o, a in opts:
+        if o in ("-h", "--help"):
+            usage()
+            sys.exit()
+
+        elif o in ("-i", "--input"):
+            infile = a
+
+        elif o in ("-o", "--output"):
+            outfile = a
+
+        elif o in ("-x", "--truncatescale"):
+            TRUNCATE = True
+            SCALE = float(a)
+
+    if infile == "" or outfile == "":
+        usage()
+        sys.exit(2)
+
+    print "Splitting [%s] into [%s_XXX.obj] ..." % (infile, outfile)
+
+    break_obj(infile, outfile)
+