
More GLTF

Panagiotis Christopoulos Charitos committed 7 years ago
commit f71ccb860e
3 changed files with 309 additions and 24 deletions
  1. tools/gltf_exporter/CMakeLists.txt  (+3 −1)
  2. tools/gltf_exporter/Exporter.cpp    (+302 −23)
  3. tools/gltf_exporter/Exporter.h      (+4 −0)

+ 3 - 1
tools/gltf_exporter/CMakeLists.txt

@@ -2,6 +2,8 @@ include_directories("../../src")
 
 add_definitions("-fexceptions")
 
-add_executable(gltf_exporter Main.cpp Exporter.cpp)
+file(GLOB_RECURSE SOURCES *.cpp)
+
+add_executable(gltf_exporter ${SOURCES})
 target_link_libraries(gltf_exporter anki)
 installExecutable(gltf_exporter)

+ 302 - 23
tools/gltf_exporter/Exporter.cpp

@@ -4,23 +4,19 @@
 // http://www.anki3d.org/LICENSE
 
 #include "Exporter.h"
-
-#if defined(__GNUC__)
-#	pragma GCC diagnostic push
-#	pragma GCC diagnostic ignored "-Wmisleading-indentation"
-#	pragma GCC diagnostic ignored "-Wshift-negative-value"
-#endif
-#define TINYGLTF_IMPLEMENTATION
-#define STB_IMAGE_IMPLEMENTATION
-#define STB_IMAGE_WRITE_IMPLEMENTATION
-#include <tinygltf/tiny_gltf.h>
-#if defined(__GNUC__)
-#	pragma GCC diagnostic pop
-#endif
+#include <anki/resource/MeshLoader.h>
+#include <anki/util/File.h>
 
 namespace anki
 {
 
+class WeightVertex
+{
+public:
+	U16 m_boneIndices[4] = {0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF};
+	U8 m_weights[4] = {0, 0, 0, 0};
+};
+
 Error Exporter::load()
 {
 	std::string err, warn;
@@ -237,7 +233,7 @@ Error Exporter::exportMesh(const tinygltf::Mesh& mesh)
 
 	// Get positions
 	DynamicArrayAuto<Vec3> positions(m_alloc);
-	Vec3 minPos, maxPos;
+	Vec3 aabbMin(MAX_F32), aabbMax(MIN_F32);
 	U32 vertCount;
 	{
 		EXPORT_ASSERT(primitive.attributes.find("POSITION") != primitive.attributes.end());
@@ -253,13 +249,18 @@ Error Exporter::exportMesh(const tinygltf::Mesh& mesh)
 		{
 			positions[v] = Vec3(reinterpret_cast<const F32*>(&posBuff[v * posStride]));
 
-			// TODO minPos, maxPos
+			aabbMin = aabbMin.min(positions[v]);
+			aabbMax = aabbMax.max(positions[v]);
 		}
+
+		aabbMax += EPSILON * 10.0f; // Bump it a bit
 	}
 
 	// Get normals and UVs
 	DynamicArrayAuto<Vec3> normals(m_alloc);
 	DynamicArrayAuto<Vec2> uvs(m_alloc);
+	F32 maxUvDistance = MIN_F32;
+	F32 minUvDistance = MAX_F32;
 	{
 		EXPORT_ASSERT(primitive.attributes.find("NORMAL") != primitive.attributes.end());
 		EXPORT_ASSERT(primitive.attributes.find("TEXCOORD_0") != primitive.attributes.end());
@@ -283,6 +284,9 @@ Error Exporter::exportMesh(const tinygltf::Mesh& mesh)
 		{
 			normals[v] = Vec3(reinterpret_cast<const F32*>(&normalBuff[v * normalStride]));
 			uvs[v] = Vec2(reinterpret_cast<const F32*>(&uvBuff[v * uvStride]));
+
+			maxUvDistance = max(maxUvDistance, max(uvs[v].x(), uvs[v].y()));
+			minUvDistance = min(minUvDistance, min(uvs[v].x(), uvs[v].y()));
 		}
 	}
 
@@ -373,8 +377,7 @@ Error Exporter::exportMesh(const tinygltf::Mesh& mesh)
 	}
 
 	// Load bone info
-	DynamicArrayAuto<UVec4> boneIds(m_alloc);
-	DynamicArrayAuto<Vec4> weights(m_alloc);
+	DynamicArrayAuto<WeightVertex> weights(m_alloc);
 
 	if(primitive.attributes.find("JOINTS_0") != primitive.attributes.end()
 		&& primitive.attributes.find("WEIGHTS_0") != primitive.attributes.end())
@@ -391,23 +394,289 @@ Error Exporter::exportMesh(const tinygltf::Mesh& mesh)
 		getAttributeInfo(primitive, "WEIGHTS_0", weightsBuff, weightsStride, count, fmt);
 		EXPORT_ASSERT(count == vertCount && fmt == Format::R32G32B32A32_SFLOAT);
 
-		boneIds.create(vertCount);
 		weights.create(vertCount);
 
 		for(U v = 0; v < vertCount; ++v)
 		{
-			boneIds[v] = UVec4(*reinterpret_cast<const U16*>(&bonesBuff[v * bonesStride]),
-				*reinterpret_cast<const U16*>(&bonesBuff[v * bonesStride + sizeof(U16)]),
-				*reinterpret_cast<const U16*>(&bonesBuff[v * bonesStride + sizeof(U16) * 2]),
-				*reinterpret_cast<const U16*>(&bonesBuff[v * bonesStride + sizeof(U16) * 3]));
+			WeightVertex w;
+
+			const U16* inIdxs = reinterpret_cast<const U16*>(&bonesBuff[v * bonesStride]);
+			w.m_boneIndices[0] = inIdxs[0];
+			w.m_boneIndices[1] = inIdxs[1];
+			w.m_boneIndices[2] = inIdxs[2];
+			w.m_boneIndices[3] = inIdxs[3];
+
+			const F32* inW = reinterpret_cast<const F32*>(&weightsBuff[v * weightsStride]);
+			w.m_weights[0] = inW[0] * 0xFF;
+			w.m_weights[1] = inW[1] * 0xFF;
+			w.m_weights[2] = inW[2] * 0xFF;
+			w.m_weights[3] = inW[3] * 0xFF;
+
+			weights[v] = w;
+		}
+	}
+
+	// Find if it's a convex shape
+	Bool convex = true;
+	for(U i = 0; i < indices.getSize(); i += 3)
+	{
+		const U i0 = indices[i + 0];
+		const U i1 = indices[i + 1];
+		const U i2 = indices[i + 2];
+
+		const Vec3& v0 = positions[i0];
+		const Vec3& v1 = positions[i1];
+		const Vec3& v2 = positions[i2];
+
+		// Check that all positions are behind the plane
+		Plane plane(v0.xyz0(), v1.xyz0(), v2.xyz0());
+
+		for(U j = 0; j < positions.getSize(); ++j)
+		{
+			const Vec3& pos = positions[j];
+
+			F32 test = plane.test(pos.xyz0());
+			if(test > EPSILON)
+			{
+				convex = false;
+				break;
+			}
+		}
+
+		if(convex == false)
+		{
+			break;
+		}
+	}
+
+	// Choose the formats of the attributes
+	MeshBinaryFile::Header header = {};
+	{
+		// Positions
+		Vec3 dist3d = aabbMin.getAbs().max(aabbMax.getAbs());
+		const F32 maxPositionDistance = max(max(dist3d.x(), dist3d.y()), dist3d.z());
+		auto& posa = header.m_vertexAttributes[VertexAttributeLocation::POSITION];
+		posa.m_bufferBinding = 0;
+		posa.m_format = (maxPositionDistance < 2.0) ? Format::R16G16B16A16_SFLOAT : Format::R32G32B32_SFLOAT;
+		posa.m_relativeOffset = 0;
+		posa.m_scale = 1.0;
+
+		// Normals
+		auto& na = header.m_vertexAttributes[VertexAttributeLocation::NORMAL];
+		na.m_bufferBinding = 1;
+		na.m_format = Format::A2B10G10R10_SNORM_PACK32;
+		na.m_relativeOffset = 0;
+		na.m_scale = 1.0;
+
+		// Tangents
+		auto& ta = header.m_vertexAttributes[VertexAttributeLocation::TANGENT];
+		ta.m_bufferBinding = 1;
+		ta.m_format = Format::A2B10G10R10_SNORM_PACK32;
+		ta.m_relativeOffset = sizeof(U32);
+		ta.m_scale = 1.0;
+
+		// UVs
+		auto& uva = header.m_vertexAttributes[VertexAttributeLocation::UV];
+		uva.m_bufferBinding = 1;
+		if(minUvDistance >= 0.0 && maxUvDistance <= 1.0)
+		{
+			uva.m_format = Format::R16G16_UNORM;
+		}
+		else
+		{
+			uva.m_format = Format::R16G16_SFLOAT;
+		}
+		uva.m_relativeOffset = sizeof(U32) * 2;
+		uva.m_scale = 1.0;
+
+		// Bone weight
+		if(weights.getSize())
+		{
+			auto& bidxa = header.m_vertexAttributes[VertexAttributeLocation::BONE_INDICES];
+			bidxa.m_bufferBinding = 2;
+			bidxa.m_format = Format::R16G16B16A16_UINT;
+			bidxa.m_relativeOffset = 0;
+			bidxa.m_scale = 1.0;
+
+			auto& wa = header.m_vertexAttributes[VertexAttributeLocation::BONE_WEIGHTS];
+			wa.m_bufferBinding = 2;
+			wa.m_format = Format::R8G8B8A8_UNORM;
+			wa.m_relativeOffset = sizeof(U16) * 4;
+			wa.m_scale = 1.0;
+		}
+	}
+
+	// Arrange the attributes into vert buffers
+	{
+		header.m_vertexBufferCount = 2;
+
+		// First buff has positions
+		const auto& posa = header.m_vertexAttributes[VertexAttributeLocation::POSITION];
+		if(posa.m_format == Format::R32G32B32_SFLOAT)
+		{
+			header.m_vertexBuffers[0].m_vertexStride = sizeof(F32) * 3;
+		}
+		else if(posa.m_format == Format::R16G16B16A16_SFLOAT)
+		{
+			header.m_vertexBuffers[0].m_vertexStride = sizeof(U16) * 4;
+		}
+		else
+		{
+			ANKI_ASSERT(0);
+		}
+
+		// 2nd buff has normal + tangent + texcoords
+		header.m_vertexBuffers[1].m_vertexStride = sizeof(U32) * 2 + sizeof(U16) * 2;
+
+		// 3rd has bone weights
+		if(weights.getSize())
+		{
+			header.m_vertexBuffers[2].m_vertexStride = sizeof(WeightVertex);
+			++header.m_vertexBufferCount;
+		}
+	}
+
+	// Write some other header stuff
+	{
+		memcpy(&header.m_magic[0], MeshBinaryFile::MAGIC, 8);
+		header.m_flags = MeshBinaryFile::Flag::NONE;
+		if(convex)
+		{
+			header.m_flags |= MeshBinaryFile::Flag::CONVEX;
+		}
+		header.m_indexType = IndexType::U16;
+		header.m_totalIndexCount = indices.getSize();
+		header.m_totalVertexCount = vertCount;
+		header.m_subMeshCount = 1;
+		header.m_aabbMin = aabbMin;
+		header.m_aabbMax = aabbMax;
+	}
+
+	// Open file
+	File file;
+	ANKI_CHECK(file.open(
+		StringAuto(m_alloc).sprintf("%s/%s.ankimesh", m_outputDirectory.cstr(), mesh.name.c_str()).toCString(),
+		FileOpenFlag::WRITE | FileOpenFlag::BINARY));
+
+	// Write header
+	ANKI_CHECK(file.write(&header, sizeof(header)));
+
+	// Write sub meshes
+	{
+		MeshBinaryFile::SubMesh smesh;
+		smesh.m_firstIndex = 0;
+		smesh.m_indexCount = header.m_totalIndexCount;
+		smesh.m_aabbMin = aabbMin;
+		smesh.m_aabbMax = aabbMax;
+
+		ANKI_CHECK(file.write(&smesh, sizeof(smesh)));
+	}
+
+	// Write indices
+	ANKI_CHECK(file.write(&indices[0], indices.getSizeInBytes()));
+
+	// Write first vert buffer
+	{
+		const auto& posa = header.m_vertexAttributes[VertexAttributeLocation::POSITION];
+		if(posa.m_format == Format::R32G32B32_SFLOAT)
+		{
+			ANKI_CHECK(file.write(&positions[0], positions.getSizeInBytes()));
+		}
+		else if(posa.m_format == Format::R16G16B16A16_SFLOAT)
+		{
+			DynamicArrayAuto<F16> pos16(m_alloc);
+			pos16.create(vertCount * 4);
+
+			const Vec3* p32 = &positions[0];
+			const Vec3* p32end = p32 + positions.getSize();
+			F16* p16 = &pos16[0];
+			while(p32 != p32end)
+			{
+				p16[0] = F16(p32->x());
+				p16[1] = F16(p32->y());
+				p16[2] = F16(p32->z());
+				p16[3] = F16(0.0f);
 
-			weights[v] = Vec4(reinterpret_cast<const F32*>(&weights[v * weightsStride]));
+				p32 += 1;
+				p16 += 4;
+			}
+
+			ANKI_CHECK(file.write(&pos16[0], pos16.getSizeInBytes()));
+		}
+		else
+		{
+			ANKI_ASSERT(0);
+		}
+	}
+
+	// Write 2nd vert buffer
+	{
+		struct Vert
+		{
+			U32 m_n;
+			U32 m_t;
+			U16 m_uv[2];
+		};
+
+		DynamicArrayAuto<Vert> verts(m_alloc);
+		verts.create(vertCount);
+
+		for(U i = 0; i < vertCount; ++i)
+		{
+			const Vec3& normal = normals[i];
+			const Vec4& tangent = tangents[i];
+			const Vec2& uv = uvs[i];
+
+			verts[i].m_n = packColorToR10G10B10A2SNorm(normal.x(), normal.y(), normal.z(), 0.0f);
+			verts[i].m_t = packColorToR10G10B10A2SNorm(tangent.x(), tangent.y(), tangent.z(), tangent.w());
+
+			const Format uvfmt = header.m_vertexAttributes[VertexAttributeLocation::UV].m_format;
+			if(uvfmt == Format::R16G16_UNORM)
+			{
+				ANKI_ASSERT(uv[0] <= 1.0 && uv[0] >= 0.0 && uv[1] <= 1.0 && uv[1] >= 0.0);
+				verts[i].m_uv[0] = uv[0] * 0xFFFF;
+				verts[i].m_uv[1] = uv[1] * 0xFFFF;
+			}
+			else if(uvfmt == Format::R16G16_SFLOAT)
+			{
+				verts[i].m_uv[0] = F16(uv[0]).toU16();
+				verts[i].m_uv[1] = F16(uv[1]).toU16();
+			}
+			else
+			{
+				ANKI_ASSERT(0);
+			}
 		}
+
+		ANKI_CHECK(file.write(&verts[0], verts.getSizeInBytes()));
+	}
+
+	// Write 3rd vert buffer
+	if(weights.getSize())
+	{
+		ANKI_CHECK(file.write(&weights[0], weights.getSizeInBytes()));
 	}
 
 	return Error::NONE;
 }
 
+void Exporter::getTexture(tinygltf::Material& mtl, CString texName, CString& fname) const
+{
+	if(mtl.values.find(texName.cstr()) != mtl.values.end())
+	{
+		const I textureIdx = mtl.values[texName.cstr()].TextureIndex();
+		const I imageIdx = m_model.textures[textureIdx].source;
+		fname = m_model.images[imageIdx].uri.c_str();
+	}
+	else
+	{
+		fname = CString();
+	}
+}
+
+Error Exporter::exportMaterial(const tinygltf::Material& mtl)
+{
+	return Error::NONE;
+}
+
 Error Exporter::exportAll()
 {
 	for(const tinygltf::Mesh& mesh : m_model.meshes)
@@ -420,6 +689,16 @@ Error Exporter::exportAll()
 		}
 	}
 
+	for(const tinygltf::Material& mtl : m_model.materials)
+	{
+		const Error err = exportMaterial(mtl);
+		if(err)
+		{
+			ANKI_LOGE("Failed to load material %s", mtl.name.c_str());
+			return err;
+		}
+	}
+
 	return Error::NONE;
 }
 

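Aside: the convexity pass in exportMesh() above builds a plane from every triangle and marks the mesh as non-convex as soon as any vertex lies in front of one of those planes. A minimal standalone sketch of the same O(triangles × vertices) test is shown below, using plain structs instead of AnKi's Vec3/Plane types; the epsilon value and the normalization handling here are assumptions, not the engine's exact math.

#include <array>
#include <cmath>
#include <cstddef>
#include <vector>

using Vec3f = std::array<float, 3>;

static Vec3f sub(const Vec3f& a, const Vec3f& b)
{
	return {a[0] - b[0], a[1] - b[1], a[2] - b[2]};
}

static Vec3f cross(const Vec3f& a, const Vec3f& b)
{
	return {a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0]};
}

static float dot(const Vec3f& a, const Vec3f& b)
{
	return a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
}

// A triangle mesh is convex if no vertex lies in front of the plane of any of its triangles.
bool isConvexMesh(const std::vector<Vec3f>& positions, const std::vector<unsigned>& indices, float epsilon = 1.0e-5f)
{
	for(std::size_t i = 0; i + 2 < indices.size(); i += 3)
	{
		const Vec3f& v0 = positions[indices[i + 0]];
		const Vec3f& v1 = positions[indices[i + 1]];
		const Vec3f& v2 = positions[indices[i + 2]];

		// Plane through the triangle: unit normal n and offset d, with dot(n, p) - d == 0 on the plane
		Vec3f n = cross(sub(v1, v0), sub(v2, v0));
		const float len = std::sqrt(dot(n, n));
		if(len < epsilon)
		{
			continue; // Degenerate triangle, skip it
		}
		n = {n[0] / len, n[1] / len, n[2] / len};
		const float d = dot(n, v0);

		for(const Vec3f& p : positions)
		{
			if(dot(n, p) - d > epsilon)
			{
				return false; // A vertex lies in front of this face, so the mesh is not convex
			}
		}
	}
	return true;
}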
+ 4 - 0
tools/gltf_exporter/Exporter.h

@@ -40,6 +40,10 @@ private:
 		U32& stride,
 		U32& count,
 		Format& fmt) const;
+
+	Error exportMaterial(const tinygltf::Material& mtl);
+
+	void getTexture(tinygltf::Material& mtl, CString texName, CString& fname) const;
 };
 
 #define EXPORT_ASSERT(expr) \
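
Aside: the 2nd vertex buffer written in exportMesh() packs each normal and tangent into a single 32-bit word via packColorToR10G10B10A2SNorm(). A hypothetical stand-in is sketched below; the bit layout (R in bits 0-9, G in 10-19, B in 20-29, A in 30-31, all signed-normalized) follows the Vulkan A2B10G10R10_SNORM_PACK32 convention and is an assumption about AnKi's helper, not its actual implementation.

#include <algorithm>
#include <cmath>
#include <cstdint>

// Hypothetical equivalent of packColorToR10G10B10A2SNorm(): clamp each component to [-1, 1],
// quantize to a signed-normalized integer of the given bit width and keep its two's-complement bits.
static std::uint32_t packSnorm(float v, unsigned bits)
{
	const float maxVal = float((1u << (bits - 1)) - 1u); // 511 for 10 bits, 1 for 2 bits
	const std::int32_t q = std::int32_t(std::lround(std::clamp(v, -1.0f, 1.0f) * maxVal));
	return std::uint32_t(q) & ((1u << bits) - 1u);
}

std::uint32_t packR10G10B10A2SNorm(float r, float g, float b, float a)
{
	return packSnorm(r, 10) | (packSnorm(g, 10) << 10) | (packSnorm(b, 10) << 20) | (packSnorm(a, 2) << 30);
}

// Example: a unit normal along +Z with a zero alpha component
// const std::uint32_t packed = packR10G10B10A2SNorm(0.0f, 0.0f, 1.0f, 0.0f);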