fbx_mesh_data.cpp 57 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461
  1. /*************************************************************************/
  2. /* fbx_mesh_data.cpp */
  3. /*************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /*************************************************************************/
  8. /* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
  9. /* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /*************************************************************************/
  30. #include "fbx_mesh_data.h"
  31. #include "core/templates/local_vector.h"
  32. #include "scene/resources/mesh.h"
  33. #include "scene/resources/surface_tool.h"
  34. #include "thirdparty/misc/triangulator.h"
  35. template <class T>
  36. T collect_first(const Vector<VertexData<T>> *p_data, T p_fall_back) {
  37. if (p_data->empty()) {
  38. return p_fall_back;
  39. }
  40. return (*p_data)[0].data;
  41. }
  42. template <class T>
  43. HashMap<int, T> collect_all(const Vector<VertexData<T>> *p_data, HashMap<int, T> p_fall_back) {
  44. if (p_data->empty()) {
  45. return p_fall_back;
  46. }
  47. HashMap<int, T> collection;
  48. for (int i = 0; i < p_data->size(); i += 1) {
  49. const VertexData<T> &vd = (*p_data)[i];
  50. collection[vd.polygon_index] = vd.data;
  51. }
  52. return collection;
  53. }
  54. template <class T>
  55. T collect_average(const Vector<VertexData<T>> *p_data, T p_fall_back) {
  56. if (p_data->empty()) {
  57. return p_fall_back;
  58. }
  59. T combined = (*p_data)[0].data; // Make sure the data is always correctly initialized.
  60. print_verbose("size of data: " + itos(p_data->size()));
  61. for (int i = 1; i < p_data->size(); i += 1) {
  62. combined += (*p_data)[i].data;
  63. }
  64. combined = combined / real_t(p_data->size());
  65. return combined.normalized();
  66. }
  67. HashMap<int, Vector3> collect_normal(const Vector<VertexData<Vector3>> *p_data, HashMap<int, Vector3> p_fall_back) {
  68. if (p_data->empty()) {
  69. return p_fall_back;
  70. }
  71. HashMap<int, Vector3> collection;
  72. for (int i = 0; i < p_data->size(); i += 1) {
  73. const VertexData<Vector3> &vd = (*p_data)[i];
  74. collection[vd.polygon_index] = vd.data;
  75. }
  76. return collection;
  77. }
  78. HashMap<int, Vector2> collect_uv(const Vector<VertexData<Vector2>> *p_data, HashMap<int, Vector2> p_fall_back) {
  79. if (p_data->empty()) {
  80. return p_fall_back;
  81. }
  82. HashMap<int, Vector2> collection;
  83. for (int i = 0; i < p_data->size(); i += 1) {
  84. const VertexData<Vector2> &vd = (*p_data)[i];
  85. collection[vd.polygon_index] = vd.data;
  86. }
  87. return collection;
  88. }
// Index aliases documenting what each raw `int` refers to.
typedef int Vertex;
typedef int SurfaceId;
typedef int PolygonId;
typedef int DataIndex;

// Per-surface (i.e. per-material) accumulation state used while building the
// final mesh in create_fbx_mesh().
struct SurfaceData {
	Ref<SurfaceTool> surface_tool;
	// Maps a vertex id to its position inside `vertices_map`, so membership
	// checks don't require a linear find() over `vertices_map`.
	OrderedHashMap<Vertex, int> lookup_table;
	// Vertices in insertion order; this must stay ordered the same as
	// insertion because `lookup_table` stores indices into it.
	LocalVector<Vertex> vertices_map;
	Ref<Material> material;
	// For each polygon of this surface, the indices (into `vertices_map`)
	// that compose it.
	HashMap<PolygonId, Vector<DataIndex>> surface_polygon_vertex;
	// One committed blend-shape array set per morph name.
	Array morphs;
};
// Builds an importer mesh node from the parsed FBX geometry of `model`.
// The work is split in phases: (1) extract per-vertex data from the FBX
// document, (2) create one SurfaceTool per material, (3) map vertices to
// surfaces, (4) fill each surface with vertices and triangulated indices,
// (5) build the morph (blend shape) arrays, (6) commit everything into the
// returned node.
// NOTE(review): `use_compression` is never read in this function — confirm
// whether it should be forwarded to surface creation.
EditorSceneImporterMeshNode *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDocParser::MeshGeometry *p_mesh_geometry, const FBXDocParser::Model *model, bool use_compression) {
	mesh_geometry = p_mesh_geometry;
	// todo: make this just use a uint64_t FBX ID this is a copy of our original materials unfortunately.
	const std::vector<const FBXDocParser::Material *> &material_lookup = model->GetMaterials();

	// TODO: perf hotspot on large files
	// this can be a very large copy
	std::vector<int> polygon_indices = mesh_geometry->get_polygon_indices();
	std::vector<Vector3> vertices = mesh_geometry->get_vertices();

	// Phase 1. Parse all FBX data.
	// The `*_raw` maps hold, per vertex, the value of every polygon touching
	// it (the key -1 meaning "default for all polygons"). The flat maps
	// (`normals`, `uvs_0`, ...) are filled later by reorganize_vertices().
	HashMap<int, Vector3> normals;
	HashMap<int, HashMap<int, Vector3>> normals_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_normals(),
			&collect_all,
			HashMap<int, Vector3>());

	// Historical debug code, kept for reference:
	// List<int> keys;
	// normals.get_key_list(&keys);
	//
	// const std::vector<Assimp::FBX::MeshGeometry::Edge>& edges = mesh_geometry->get_edge_map();
	// for (int index = 0; index < keys.size(); index++) {
	//	const int key = keys[index];
	//	const int v1 = edges[key].vertex_0;
	//	const int v2 = edges[key].vertex_1;
	//	const Vector3& n1 = normals.get(v1);
	//	const Vector3& n2 = normals.get(v2);
	//	print_verbose("[" + itos(v1) + "] n1: " + n1 + "\n[" + itos(v2) + "] n2: " + n2);
	//	//print_verbose("[" + itos(key) + "] n1: " + n1 + ", n2: " + n2) ;
	//	//print_verbose("vindex: " + itos(edges[key].vertex_0) + ", vindex2: " + itos(edges[key].vertex_1));
	//	//Vector3 ver1 = vertices[edges[key].vertex_0];
	//	//Vector3 ver2 = vertices[edges[key].vertex_1];
	//	/*real_t angle1 = Math::rad2deg(n1.angle_to(n2));
	//	real_t angle2 = Math::rad2deg(n2.angle_to(n1));
	//	print_verbose("angle of normals: " + rtos(angle1) + " angle 2" + rtos(angle2));*/
	// }

	HashMap<int, Vector2> uvs_0;
	HashMap<int, HashMap<int, Vector2>> uvs_0_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_uv_0(),
			&collect_all,
			HashMap<int, Vector2>());

	HashMap<int, Vector2> uvs_1;
	HashMap<int, HashMap<int, Vector2>> uvs_1_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_uv_1(),
			&collect_all,
			HashMap<int, Vector2>());

	HashMap<int, Color> colors;
	HashMap<int, HashMap<int, Color>> colors_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_colors(),
			&collect_all,
			HashMap<int, Color>());

	// TODO what about tangents?
	// TODO what about bi-nomials?
	// TODO there is other?

	// Which surface (material allocation id) each polygon belongs to.
	HashMap<int, SurfaceId> polygon_surfaces = extract_per_polygon(
			vertices.size(),
			polygon_indices,
			mesh_geometry->get_material_allocation_id(),
			-1);

	HashMap<String, MorphVertexData> morphs;
	extract_morphs(mesh_geometry, morphs);

	// TODO please add skinning.
	//mesh_id = mesh_geometry->ID();

	sanitize_vertex_weights(state);

	// Re organize polygon vertices to to correctly take into account strange
	// UVs.
	reorganize_vertices(
			polygon_indices,
			vertices,
			normals,
			uvs_0,
			uvs_1,
			colors,
			morphs,
			normals_raw,
			colors_raw,
			uvs_0_raw,
			uvs_1_raw);

	const int color_count = colors.size();
	print_verbose("Vertex color count: " + itos(color_count));

	// Make sure that from this moment on the mesh_geometry is no used anymore.
	// This is a safety step, because the mesh_geometry data are no more valid
	// at this point.

	const int vertex_count = vertices.size();
	print_verbose("Vertex count: " + itos(vertex_count));

	// The map key is the material allocator id that is also used as surface id.
	HashMap<SurfaceId, SurfaceData> surfaces;

	// Phase 2. For each material create a surface tool (So a different mesh).
	{
		if (polygon_surfaces.empty()) {
			// No material, just use the default one with index -1.
			// Set -1 to all polygons.
			const int polygon_count = count_polygons(polygon_indices);
			for (int p = 0; p < polygon_count; p += 1) {
				polygon_surfaces[p] = -1;
			}
		}

		// Create the surface now.
		for (const int *polygon_id = polygon_surfaces.next(nullptr); polygon_id != nullptr; polygon_id = polygon_surfaces.next(polygon_id)) {
			const int surface_id = polygon_surfaces[*polygon_id];
			if (surfaces.has(surface_id) == false) {
				SurfaceData sd;
				sd.surface_tool.instance();
				sd.surface_tool->begin(Mesh::PRIMITIVE_TRIANGLES);

				if (surface_id < 0) {
					// nothing to do
				} else if (surface_id < (int)material_lookup.size()) {
					// Resolve the surface's material from the import cache.
					const FBXDocParser::Material *mat_mapping = material_lookup.at(surface_id);
					const uint64_t mapping_id = mat_mapping->ID();
					if (state.cached_materials.has(mapping_id)) {
						sd.material = state.cached_materials[mapping_id];
					}
				} else {
					WARN_PRINT("out of bounds surface detected, FBX file has corrupt material data");
				}

				surfaces.set(surface_id, sd);
			}
		}
	}

	// Phase 3. Map the vertices relative to each surface, in this way we can
	// just insert the vertices that we need per each surface.
	{
		PolygonId polygon_index = -1;
		SurfaceId surface_id = -1;
		SurfaceData *surface_data = nullptr;

		for (size_t polygon_vertex = 0; polygon_vertex < polygon_indices.size(); polygon_vertex += 1) {
			if (is_start_of_polygon(polygon_indices, polygon_vertex)) {
				polygon_index += 1;
				ERR_FAIL_COND_V_MSG(polygon_surfaces.has(polygon_index) == false, nullptr, "The FBX file is corrupted, This surface_index is not expected.");
				surface_id = polygon_surfaces[polygon_index];
				surface_data = surfaces.getptr(surface_id);
				CRASH_COND(surface_data == nullptr); // Can't be null.
			}

			const int vertex = get_vertex_from_polygon_vertex(polygon_indices, polygon_vertex);

			// The vertex position in the surface
			// Uses a lookup table for speed with large scenes
			bool has_polygon_vertex_index = surface_data->lookup_table.has(vertex);
			int surface_polygon_vertex_index = -1;

			if (has_polygon_vertex_index) {
				surface_polygon_vertex_index = surface_data->lookup_table[vertex];
			} else {
				// First time this vertex is seen on this surface: register it.
				surface_polygon_vertex_index = surface_data->vertices_map.size();
				surface_data->lookup_table[vertex] = surface_polygon_vertex_index;
				surface_data->vertices_map.push_back(vertex);
			}

			surface_data->surface_polygon_vertex[polygon_index].push_back(surface_polygon_vertex_index);
		}
	}

	//print_verbose("[debug UV 1] UV1: " + itos(uvs_0.size()));
	//print_verbose("[debug UV 2] UV2: " + itos(uvs_1.size()));

	// Phase 4. Per each surface just insert the vertices and add the indices.
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);

		// Just add the vertices data.
		for (unsigned int i = 0; i < surface->vertices_map.size(); i += 1) {
			const Vertex vertex = surface->vertices_map[i];

			// This must be done before add_vertex because the surface tool is
			// expecting this before the st->add_vertex() call
			add_vertex(state,
					surface->surface_tool,
					state.scale,
					vertex,
					vertices,
					normals,
					uvs_0,
					uvs_1,
					colors);
		}

		// Triangulate the various polygons and add the indices.
		for (const PolygonId *polygon_id = surface->surface_polygon_vertex.next(nullptr); polygon_id != nullptr; polygon_id = surface->surface_polygon_vertex.next(polygon_id)) {
			const Vector<DataIndex> *indices = surface->surface_polygon_vertex.getptr(*polygon_id);
			triangulate_polygon(
					surface->surface_tool,
					*indices,
					surface->vertices_map,
					vertices);
		}
	}

	// Phase 5. Compose the morphs if any.
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);

		for (const String *morph_name = morphs.next(nullptr); morph_name != nullptr; morph_name = morphs.next(morph_name)) {
			MorphVertexData *morph_data = morphs.getptr(*morph_name);

			// As said by the docs, this is not supposed to be different than
			// vertex_count.
			CRASH_COND(morph_data->vertices.size() != vertex_count);
			CRASH_COND(morph_data->normals.size() != vertex_count);

			Vector3 *vertices_ptr = morph_data->vertices.ptrw();
			Vector3 *normals_ptr = morph_data->normals.ptrw();

			// Build a parallel surface carrying the morphed positions/normals.
			Ref<SurfaceTool> morph_st;
			morph_st.instance();
			morph_st->begin(Mesh::PRIMITIVE_TRIANGLES);

			for (unsigned int vi = 0; vi < surface->vertices_map.size(); vi += 1) {
				const Vertex vertex = surface->vertices_map[vi];
				add_vertex(
						state,
						morph_st,
						state.scale,
						vertex,
						vertices,
						normals,
						uvs_0,
						uvs_1,
						colors,
						vertices_ptr[vertex],
						normals_ptr[vertex]);
			}

			morph_st->generate_tangents();
			surface->morphs.push_back(morph_st->commit_to_arrays());
		}
	}

	// Phase 6. Compose the mesh and return it.
	Ref<EditorSceneImporterMesh> mesh;
	mesh.instance();

	// Add blend shape info.
	for (const String *morph_name = morphs.next(nullptr); morph_name != nullptr; morph_name = morphs.next(morph_name)) {
		mesh->add_blend_shape(*morph_name);
	}

	// TODO always normalized, Why?
	mesh->set_blend_shape_mode(Mesh::BLEND_SHAPE_MODE_NORMALIZED);

	// Add surfaces.
	int in_mesh_surface_id = 0;
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);

		// you can't generate them without a valid uv map.
		if (uvs_0_raw.size() > 0) {
			surface->surface_tool->generate_tangents();
		}

		Array mesh_array = surface->surface_tool->commit_to_arrays();
		Array blend_shapes = surface->morphs;

		if (surface->material.is_valid()) {
			mesh->add_surface(Mesh::PRIMITIVE_TRIANGLES, mesh_array, blend_shapes, Dictionary(), surface->material, surface->material->get_name());
		} else {
			mesh->add_surface(Mesh::PRIMITIVE_TRIANGLES, mesh_array, blend_shapes);
		}

		// NOTE(review): `in_mesh_surface_id` is incremented but never read —
		// confirm whether it was meant to be passed to add_surface.
		in_mesh_surface_id += 1;
	}

	EditorSceneImporterMeshNode *godot_mesh = memnew(EditorSceneImporterMeshNode);
	godot_mesh->set_mesh(mesh);
	return godot_mesh;
}
// Converts each vertex's bone references into skin bind ids, then clamps the
// influences to at most RS::ARRAY_WEIGHTS_SIZE per vertex: weights are sorted
// in descending order (so truncation keeps the strongest influences), padded
// with zeroes when fewer, and finally normalized so they sum to 1.
// Does nothing when the geometry has no skin deformer.
void FBXMeshData::sanitize_vertex_weights(const ImportState &state) {
	const int max_vertex_influence_count = RS::ARRAY_WEIGHTS_SIZE;
	Map<int, int> skeleton_to_skin_bind_id;
	// TODO: error's need added
	const FBXDocParser::Skin *fbx_skin = mesh_geometry->DeformerSkin();
	if (fbx_skin == nullptr || fbx_skin->Clusters().size() == 0) {
		return; // do nothing
	}

	//
	// Precalculate the skin cluster mapping
	//

	// The bind id is simply the cluster's position in the skin's cluster list.
	int bind_id = 0;
	for (const FBXDocParser::Cluster *cluster : fbx_skin->Clusters()) {
		Ref<FBXBone> bone = state.fbx_bone_map[cluster->TargetNode()->ID()];
		skeleton_to_skin_bind_id.insert(bone->godot_bone_id, bind_id);
		bind_id++;
	}

	for (const Vertex *v = vertex_weights.next(nullptr); v != nullptr; v = vertex_weights.next(v)) {
		VertexWeightMapping *vm = vertex_weights.getptr(*v);
		ERR_CONTINUE(vm->bones.size() != vm->weights.size()); // No message, already checked.
		ERR_CONTINUE(vm->bones_ref.size() != vm->weights.size()); // No message, already checked.

		const int initial_size = vm->weights.size();
		{
			// Init bone id
			int *bones_ptr = vm->bones.ptrw();
			Ref<FBXBone> *bones_ref_ptr = vm->bones_ref.ptrw();

			for (int i = 0; i < vm->weights.size(); i += 1) {
				// At this point this is not possible because the skeleton is already initialized.
				CRASH_COND(bones_ref_ptr[i]->godot_bone_id == -2);
				bones_ptr[i] = skeleton_to_skin_bind_id[bones_ref_ptr[i]->godot_bone_id];
			}

			// From this point on the data is no more valid.
			vm->bones_ref.clear();
		}

		{
			// Sort weights (and the paired bone ids) in descending order, so
			// the resize below drops the weakest influences first.
			real_t *weights_ptr = vm->weights.ptrw();
			int *bones_ptr = vm->bones.ptrw();
			for (int i = 0; i < vm->weights.size(); i += 1) {
				for (int x = i + 1; x < vm->weights.size(); x += 1) {
					if (weights_ptr[i] < weights_ptr[x]) {
						SWAP(weights_ptr[i], weights_ptr[x]);
						SWAP(bones_ptr[i], bones_ptr[x]);
					}
				}
			}
		}

		{
			// Resize
			vm->weights.resize(max_vertex_influence_count);
			vm->bones.resize(max_vertex_influence_count);
			real_t *weights_ptr = vm->weights.ptrw();
			int *bones_ptr = vm->bones.ptrw();

			// Pad with zero influences when the vertex had fewer than the max.
			for (int i = initial_size; i < max_vertex_influence_count; i += 1) {
				weights_ptr[i] = 0.0;
				bones_ptr[i] = 0;
			}

			// Normalize
			real_t sum = 0.0;
			for (int i = 0; i < max_vertex_influence_count; i += 1) {
				sum += weights_ptr[i];
			}
			if (sum > 0.0) {
				for (int i = 0; i < vm->weights.size(); i += 1) {
					weights_ptr[i] = weights_ptr[i] / sum;
				}
			}
		}
	}
}
  421. void FBXMeshData::reorganize_vertices(
  422. // TODO: perf hotspot on insane files
  423. std::vector<int> &r_polygon_indices,
  424. std::vector<Vector3> &r_vertices,
  425. HashMap<int, Vector3> &r_normals,
  426. HashMap<int, Vector2> &r_uv_1,
  427. HashMap<int, Vector2> &r_uv_2,
  428. HashMap<int, Color> &r_color,
  429. HashMap<String, MorphVertexData> &r_morphs,
  430. HashMap<int, HashMap<int, Vector3>> &r_normals_raw,
  431. HashMap<int, HashMap<int, Color>> &r_colors_raw,
  432. HashMap<int, HashMap<int, Vector2>> &r_uv_1_raw,
  433. HashMap<int, HashMap<int, Vector2>> &r_uv_2_raw) {
  434. // Key: OldVertex; Value: [New vertices];
  435. HashMap<int, Vector<int>> duplicated_vertices;
  436. PolygonId polygon_index = -1;
  437. for (int pv = 0; pv < (int)r_polygon_indices.size(); pv += 1) {
  438. if (is_start_of_polygon(r_polygon_indices, pv)) {
  439. polygon_index += 1;
  440. }
  441. const Vertex index = get_vertex_from_polygon_vertex(r_polygon_indices, pv);
  442. bool need_duplication = false;
  443. Vector2 this_vert_poly_uv1 = Vector2();
  444. Vector2 this_vert_poly_uv2 = Vector2();
  445. Vector3 this_vert_poly_normal = Vector3();
  446. Color this_vert_poly_color = Color();
  447. // Take the normal and see if we need to duplicate this polygon.
  448. if (r_normals_raw.has(index)) {
  449. const HashMap<PolygonId, Vector3> *nrml_arr = r_normals_raw.getptr(index);
  450. if (nrml_arr->has(polygon_index)) {
  451. this_vert_poly_normal = nrml_arr->get(polygon_index);
  452. } else if (nrml_arr->has(-1)) {
  453. this_vert_poly_normal = nrml_arr->get(-1);
  454. } else {
  455. print_error("invalid normal detected: " + itos(index) + " polygon index: " + itos(polygon_index));
  456. for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
  457. print_verbose("debug contents key: " + itos(*pid));
  458. if (nrml_arr->has(*pid)) {
  459. print_verbose("contents valid: " + nrml_arr->get(*pid));
  460. }
  461. }
  462. }
  463. // Now, check if we need to duplicate it.
  464. for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
  465. if (*pid == polygon_index) {
  466. continue;
  467. }
  468. const Vector3 vert_poly_normal = *nrml_arr->getptr(*pid);
  469. if ((this_vert_poly_normal - vert_poly_normal).length_squared() > CMP_EPSILON) {
  470. // Yes this polygon need duplication.
  471. need_duplication = true;
  472. break;
  473. }
  474. }
  475. }
  476. // TODO: make me vertex color
  477. // Take the normal and see if we need to duplicate this polygon.
  478. if (r_colors_raw.has(index)) {
  479. const HashMap<PolygonId, Color> *color_arr = r_colors_raw.getptr(index);
  480. if (color_arr->has(polygon_index)) {
  481. this_vert_poly_color = color_arr->get(polygon_index);
  482. } else if (color_arr->has(-1)) {
  483. this_vert_poly_color = color_arr->get(-1);
  484. } else {
  485. print_error("invalid color detected: " + itos(index) + " polygon index: " + itos(polygon_index));
  486. for (const PolygonId *pid = color_arr->next(nullptr); pid != nullptr; pid = color_arr->next(pid)) {
  487. print_verbose("debug contents key: " + itos(*pid));
  488. if (color_arr->has(*pid)) {
  489. print_verbose("contents valid: " + color_arr->get(*pid));
  490. }
  491. }
  492. }
  493. // Now, check if we need to duplicate it.
  494. for (const PolygonId *pid = color_arr->next(nullptr); pid != nullptr; pid = color_arr->next(pid)) {
  495. if (*pid == polygon_index) {
  496. continue;
  497. }
  498. const Color vert_poly_color = *color_arr->getptr(*pid);
  499. if (!this_vert_poly_color.is_equal_approx(vert_poly_color)) {
  500. // Yes this polygon need duplication.
  501. need_duplication = true;
  502. break;
  503. }
  504. }
  505. }
  506. // Take the UV1 and UV2 and see if we need to duplicate this polygon.
  507. {
  508. HashMap<int, HashMap<int, Vector2>> *uv_raw = &r_uv_1_raw;
  509. Vector2 *this_vert_poly_uv = &this_vert_poly_uv1;
  510. for (int kk = 0; kk < 2; kk++) {
  511. if (uv_raw->has(index)) {
  512. const HashMap<PolygonId, Vector2> *uvs = uv_raw->getptr(index);
  513. if (uvs->has(polygon_index)) {
  514. // This Polygon has its own uv.
  515. (*this_vert_poly_uv) = *uvs->getptr(polygon_index);
  516. // Check if we need to duplicate it.
  517. for (const PolygonId *pid = uvs->next(nullptr); pid != nullptr; pid = uvs->next(pid)) {
  518. if (*pid == polygon_index) {
  519. continue;
  520. }
  521. const Vector2 vert_poly_uv = *uvs->getptr(*pid);
  522. if (((*this_vert_poly_uv) - vert_poly_uv).length_squared() > CMP_EPSILON) {
  523. // Yes this polygon need duplication.
  524. need_duplication = true;
  525. break;
  526. }
  527. }
  528. } else if (uvs->has(-1)) {
  529. // It has the default UV.
  530. (*this_vert_poly_uv) = *uvs->getptr(-1);
  531. } else if (uvs->size() > 0) {
  532. // No uv, this is strange, just take the first and duplicate.
  533. (*this_vert_poly_uv) = *uvs->getptr(*uvs->next(nullptr));
  534. WARN_PRINT("No UVs for this polygon, while there is no default and some other polygons have it. This FBX file may be corrupted.");
  535. }
  536. }
  537. uv_raw = &r_uv_2_raw;
  538. this_vert_poly_uv = &this_vert_poly_uv2;
  539. }
  540. }
  541. // If we want to duplicate it, Let's see if we already duplicated this
  542. // vertex.
  543. if (need_duplication) {
  544. if (duplicated_vertices.has(index)) {
  545. Vertex similar_vertex = -1;
  546. // Let's see if one of the new vertices has the same data of this.
  547. const Vector<int> *new_vertices = duplicated_vertices.getptr(index);
  548. for (int j = 0; j < new_vertices->size(); j += 1) {
  549. const Vertex new_vertex = (*new_vertices)[j];
  550. bool same_uv1 = false;
  551. bool same_uv2 = false;
  552. bool same_normal = false;
  553. bool same_color = false;
  554. if (r_uv_1.has(new_vertex)) {
  555. if ((this_vert_poly_uv1 - (*r_uv_1.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  556. same_uv1 = true;
  557. }
  558. }
  559. if (r_uv_2.has(new_vertex)) {
  560. if ((this_vert_poly_uv2 - (*r_uv_2.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  561. same_uv2 = true;
  562. }
  563. }
  564. if (r_color.has(new_vertex)) {
  565. if (this_vert_poly_color.is_equal_approx((*r_color.getptr(new_vertex)))) {
  566. same_color = true;
  567. }
  568. }
  569. if (r_normals.has(new_vertex)) {
  570. if ((this_vert_poly_normal - (*r_normals.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  571. same_uv2 = true;
  572. }
  573. }
  574. if (same_uv1 && same_uv2 && same_normal && same_color) {
  575. similar_vertex = new_vertex;
  576. break;
  577. }
  578. }
  579. if (similar_vertex != -1) {
  580. // Update polygon.
  581. if (is_end_of_polygon(r_polygon_indices, pv)) {
  582. r_polygon_indices[pv] = ~similar_vertex;
  583. } else {
  584. r_polygon_indices[pv] = similar_vertex;
  585. }
  586. need_duplication = false;
  587. }
  588. }
  589. }
  590. if (need_duplication) {
  591. const Vertex old_index = index;
  592. const Vertex new_index = r_vertices.size();
  593. // Polygon index.
  594. if (is_end_of_polygon(r_polygon_indices, pv)) {
  595. r_polygon_indices[pv] = ~new_index;
  596. } else {
  597. r_polygon_indices[pv] = new_index;
  598. }
  599. // Vertex position.
  600. r_vertices.push_back(r_vertices[old_index]);
  601. // Normals
  602. if (r_normals_raw.has(old_index)) {
  603. r_normals.set(new_index, this_vert_poly_normal);
  604. r_normals_raw.getptr(old_index)->erase(polygon_index);
  605. r_normals_raw[new_index][polygon_index] = this_vert_poly_normal;
  606. }
  607. // Vertex Color
  608. if (r_colors_raw.has(old_index)) {
  609. r_color.set(new_index, this_vert_poly_color);
  610. r_colors_raw.getptr(old_index)->erase(polygon_index);
  611. r_colors_raw[new_index][polygon_index] = this_vert_poly_color;
  612. }
  613. // UV 0
  614. if (r_uv_1_raw.has(old_index)) {
  615. r_uv_1.set(new_index, this_vert_poly_uv1);
  616. r_uv_1_raw.getptr(old_index)->erase(polygon_index);
  617. r_uv_1_raw[new_index][polygon_index] = this_vert_poly_uv1;
  618. }
  619. // UV 1
  620. if (r_uv_2_raw.has(old_index)) {
  621. r_uv_2.set(new_index, this_vert_poly_uv2);
  622. r_uv_2_raw.getptr(old_index)->erase(polygon_index);
  623. r_uv_2_raw[new_index][polygon_index] = this_vert_poly_uv2;
  624. }
  625. // Morphs
  626. for (const String *mname = r_morphs.next(nullptr); mname != nullptr; mname = r_morphs.next(mname)) {
  627. MorphVertexData *d = r_morphs.getptr(*mname);
  628. // This can't never happen.
  629. CRASH_COND(d == nullptr);
  630. if (d->vertices.size() > old_index) {
  631. d->vertices.push_back(d->vertices[old_index]);
  632. }
  633. if (d->normals.size() > old_index) {
  634. d->normals.push_back(d->normals[old_index]);
  635. }
  636. }
  637. if (vertex_weights.has(old_index)) {
  638. vertex_weights.set(new_index, vertex_weights[old_index]);
  639. }
  640. duplicated_vertices[old_index].push_back(new_index);
  641. } else {
  642. if (r_normals_raw.has(index) &&
  643. r_normals.has(index) == false) {
  644. r_normals.set(index, this_vert_poly_normal);
  645. }
  646. if (r_colors_raw.has(index) && r_color.has(index) == false) {
  647. r_color.set(index, this_vert_poly_color);
  648. }
  649. if (r_uv_1_raw.has(index) &&
  650. r_uv_1.has(index) == false) {
  651. r_uv_1.set(index, this_vert_poly_uv1);
  652. }
  653. if (r_uv_2_raw.has(index) &&
  654. r_uv_2.has(index) == false) {
  655. r_uv_2.set(index, this_vert_poly_uv2);
  656. }
  657. }
  658. }
  659. }
// Pushes one vertex, with all of its attributes, into the SurfaceTool.
// `p_vertex` indexes into `p_vertices_position`; the per-vertex attribute maps
// (`p_normals`, `p_uvs_0`, `p_uvs_1`, `p_colors`) may lack an entry for it, in
// which case that attribute is simply not set on the surface tool.
// `p_morph_value` / `p_morph_normal` are added on top of the base position /
// normal, so passing zero vectors emits the un-morphed vertex.
// Fails (via ERR_FAIL_INDEX_MSG) if `p_vertex` is out of range.
void FBXMeshData::add_vertex(
		const ImportState &state,
		Ref<SurfaceTool> p_surface_tool,
		real_t p_scale,
		Vertex p_vertex,
		const std::vector<Vector3> &p_vertices_position,
		const HashMap<int, Vector3> &p_normals,
		const HashMap<int, Vector2> &p_uvs_0,
		const HashMap<int, Vector2> &p_uvs_1,
		const HashMap<int, Color> &p_colors,
		const Vector3 &p_morph_value,
		const Vector3 &p_morph_normal) {
	ERR_FAIL_INDEX_MSG(p_vertex, (Vertex)p_vertices_position.size(), "FBX file is corrupted, the position of the vertex can't be retrieved.");

	if (p_normals.has(p_vertex)) {
		p_surface_tool->set_normal(p_normals[p_vertex] + p_morph_normal);
	}

	if (p_uvs_0.has(p_vertex)) {
		//print_verbose("uv1: [" + itos(p_vertex) + "] " + p_uvs_0[p_vertex]);
		// Inverts Y UV (FBX V axis is flipped with respect to Godot's).
		p_surface_tool->set_uv(Vector2(p_uvs_0[p_vertex].x, 1 - p_uvs_0[p_vertex].y));
	}

	if (p_uvs_1.has(p_vertex)) {
		//print_verbose("uv2: [" + itos(p_vertex) + "] " + p_uvs_1[p_vertex]);
		// Inverts Y UV.
		p_surface_tool->set_uv2(Vector2(p_uvs_1[p_vertex].x, 1 - p_uvs_1[p_vertex].y));
	}

	if (p_colors.has(p_vertex)) {
		p_surface_tool->set_color(p_colors[p_vertex]);
	}

	// TODO what about binormals?
	// TODO there is other?

	if (vertex_weights.has(p_vertex)) {
		// Let's extract the weight info.
		const VertexWeightMapping *vm = vertex_weights.getptr(p_vertex);
		const Vector<int> &bones = vm->bones;

		// the bug is that the bone idx is wrong because it is not ref'ing the skin.
		if (bones.size() > RS::ARRAY_WEIGHTS_SIZE) {
			print_error("[weight overflow detected]");
		}

		p_surface_tool->set_weights(vm->weights);
		// 0 1 2 3 4 5 6 7 < local skeleton / skin for mesh
		// 0 1 2 3 4 5 6 7 8 9 10 < actual skeleton with all joints
		p_surface_tool->set_bones(bones);
	}

	// The surface tool wants the vertex position set last: add_vertex() commits
	// the previously-set attributes together with this position.
	p_surface_tool->add_vertex((p_vertices_position[p_vertex] + p_morph_value) * p_scale);
}
  707. void FBXMeshData::triangulate_polygon(Ref<SurfaceTool> st, Vector<int> p_polygon_vertex, const Vector<Vertex> p_surface_vertex_map, const std::vector<Vector3> &p_vertices) const {
  708. const int polygon_vertex_count = p_polygon_vertex.size();
  709. if (polygon_vertex_count == 1) {
  710. // point to triangle
  711. st->add_index(p_polygon_vertex[0]);
  712. st->add_index(p_polygon_vertex[0]);
  713. st->add_index(p_polygon_vertex[0]);
  714. return;
  715. } else if (polygon_vertex_count == 2) {
  716. // line to triangle
  717. st->add_index(p_polygon_vertex[1]);
  718. st->add_index(p_polygon_vertex[1]);
  719. st->add_index(p_polygon_vertex[0]);
  720. return;
  721. } else if (polygon_vertex_count == 3) {
  722. // triangle to triangle
  723. st->add_index(p_polygon_vertex[0]);
  724. st->add_index(p_polygon_vertex[2]);
  725. st->add_index(p_polygon_vertex[1]);
  726. return;
  727. } else if (polygon_vertex_count == 4) {
  728. // quad to triangle - this code is awesome for import times
  729. // it prevents triangles being generated slowly
  730. st->add_index(p_polygon_vertex[0]);
  731. st->add_index(p_polygon_vertex[2]);
  732. st->add_index(p_polygon_vertex[1]);
  733. st->add_index(p_polygon_vertex[2]);
  734. st->add_index(p_polygon_vertex[0]);
  735. st->add_index(p_polygon_vertex[3]);
  736. return;
  737. } else {
  738. // non triangulated - we must run the triangulation algorithm
  739. bool is_simple_convex = false;
  740. // this code is 'slow' but required it triangulates all the unsupported geometry.
  741. // Doesn't allow for bigger polygons because those are unlikely be convex
  742. if (polygon_vertex_count <= 6) {
  743. // Start from true, check if it's false.
  744. is_simple_convex = true;
  745. Vector3 first_vec;
  746. for (int i = 0; i < polygon_vertex_count; i += 1) {
  747. const Vector3 p1 = p_vertices[p_surface_vertex_map[p_polygon_vertex[i]]];
  748. const Vector3 p2 = p_vertices[p_surface_vertex_map[p_polygon_vertex[(i + 1) % polygon_vertex_count]]];
  749. const Vector3 p3 = p_vertices[p_surface_vertex_map[p_polygon_vertex[(i + 2) % polygon_vertex_count]]];
  750. const Vector3 edge1 = p1 - p2;
  751. const Vector3 edge2 = p3 - p2;
  752. const Vector3 res = edge1.normalized().cross(edge2.normalized()).normalized();
  753. if (i == 0) {
  754. first_vec = res;
  755. } else {
  756. if (first_vec.dot(res) < 0.0) {
  757. // Ok we found an angle that is not the same dir of the
  758. // others.
  759. is_simple_convex = false;
  760. break;
  761. }
  762. }
  763. }
  764. }
  765. if (is_simple_convex) {
  766. // This is a convex polygon, so just triangulate it.
  767. for (int i = 0; i < (polygon_vertex_count - 2); i += 1) {
  768. st->add_index(p_polygon_vertex[2 + i]);
  769. st->add_index(p_polygon_vertex[1 + i]);
  770. st->add_index(p_polygon_vertex[0]);
  771. }
  772. return;
  773. }
  774. }
  775. {
  776. // This is a concave polygon.
  777. std::vector<Vector3> poly_vertices(polygon_vertex_count);
  778. for (int i = 0; i < polygon_vertex_count; i += 1) {
  779. poly_vertices[i] = p_vertices[p_surface_vertex_map[p_polygon_vertex[i]]];
  780. }
  781. const Vector3 poly_norm = get_poly_normal(poly_vertices);
  782. if (poly_norm.length_squared() <= CMP_EPSILON) {
  783. ERR_FAIL_COND_MSG(poly_norm.length_squared() <= CMP_EPSILON, "The normal of this poly was not computed. Is this FBX file corrupted.");
  784. }
  785. // Select the plan coordinate.
  786. int axis_1_coord = 0;
  787. int axis_2_coord = 1;
  788. {
  789. real_t inv = poly_norm.z;
  790. const real_t axis_x = ABS(poly_norm.x);
  791. const real_t axis_y = ABS(poly_norm.y);
  792. const real_t axis_z = ABS(poly_norm.z);
  793. if (axis_x > axis_y) {
  794. if (axis_x > axis_z) {
  795. // For the most part the normal point toward X.
  796. axis_1_coord = 1;
  797. axis_2_coord = 2;
  798. inv = poly_norm.x;
  799. }
  800. } else if (axis_y > axis_z) {
  801. // For the most part the normal point toward Y.
  802. axis_1_coord = 2;
  803. axis_2_coord = 0;
  804. inv = poly_norm.y;
  805. }
  806. // Swap projection axes to take the negated projection vector into account
  807. if (inv < 0.0f) {
  808. SWAP(axis_1_coord, axis_2_coord);
  809. }
  810. }
  811. TriangulatorPoly triangulator_poly;
  812. triangulator_poly.Init(polygon_vertex_count);
  813. std::vector<Vector2> projected_vertices(polygon_vertex_count);
  814. for (int i = 0; i < polygon_vertex_count; i += 1) {
  815. const Vector2 pv(poly_vertices[i][axis_1_coord], poly_vertices[i][axis_2_coord]);
  816. projected_vertices[i] = pv;
  817. triangulator_poly.GetPoint(i) = pv;
  818. }
  819. triangulator_poly.SetOrientation(TRIANGULATOR_CCW);
  820. List<TriangulatorPoly> out_poly;
  821. TriangulatorPartition triangulator_partition;
  822. if (triangulator_partition.Triangulate_OPT(&triangulator_poly, &out_poly) == 0) { // Good result.
  823. if (triangulator_partition.Triangulate_EC(&triangulator_poly, &out_poly) == 0) { // Medium result.
  824. if (triangulator_partition.Triangulate_MONO(&triangulator_poly, &out_poly) == 0) { // Really poor result.
  825. ERR_FAIL_MSG("The triangulation of this polygon failed, please try to triangulate your mesh or check if it has broken polygons.");
  826. }
  827. }
  828. }
  829. std::vector<Vector2> tris(out_poly.size());
  830. for (List<TriangulatorPoly>::Element *I = out_poly.front(); I; I = I->next()) {
  831. TriangulatorPoly &tp = I->get();
  832. ERR_FAIL_COND_MSG(tp.GetNumPoints() != 3, "The triangulator retuned more points, how this is possible?");
  833. // Find Index
  834. for (int i = 2; i >= 0; i -= 1) {
  835. const Vector2 vertex = tp.GetPoint(i);
  836. bool done = false;
  837. // Find Index
  838. for (int y = 0; y < polygon_vertex_count; y += 1) {
  839. if ((projected_vertices[y] - vertex).length_squared() <= CMP_EPSILON) {
  840. // This seems the right vertex
  841. st->add_index(p_polygon_vertex[y]);
  842. done = true;
  843. break;
  844. }
  845. }
  846. ERR_FAIL_COND(done == false);
  847. }
  848. }
  849. }
  850. }
  851. void FBXMeshData::gen_weight_info(Ref<SurfaceTool> st, Vertex vertex_id) const {
  852. if (vertex_weights.empty()) {
  853. return;
  854. }
  855. if (vertex_weights.has(vertex_id)) {
  856. // Let's extract the weight info.
  857. const VertexWeightMapping *vm = vertex_weights.getptr(vertex_id);
  858. st->set_weights(vm->weights);
  859. st->set_bones(vm->bones);
  860. }
  861. }
  862. int FBXMeshData::get_vertex_from_polygon_vertex(const std::vector<int> &p_polygon_indices, int p_index) const {
  863. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  864. return -1;
  865. }
  866. const int vertex = p_polygon_indices[p_index];
  867. if (vertex >= 0) {
  868. return vertex;
  869. } else {
  870. // Negative numbers are the end of the face, reversing the bits is
  871. // possible to obtain the positive correct vertex number.
  872. return ~vertex;
  873. }
  874. }
  875. bool FBXMeshData::is_end_of_polygon(const std::vector<int> &p_polygon_indices, int p_index) const {
  876. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  877. return false;
  878. }
  879. const int vertex = p_polygon_indices[p_index];
  880. // If the index is negative this is the end of the Polygon.
  881. return vertex < 0;
  882. }
  883. bool FBXMeshData::is_start_of_polygon(const std::vector<int> &p_polygon_indices, int p_index) const {
  884. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  885. return false;
  886. }
  887. if (p_index == 0) {
  888. return true;
  889. }
  890. // If the previous indices is negative this is the begin of a new Polygon.
  891. return p_polygon_indices[p_index - 1] < 0;
  892. }
  893. int FBXMeshData::count_polygons(const std::vector<int> &p_polygon_indices) const {
  894. // The negative numbers define the end of the polygon. Counting the amount of
  895. // negatives the numbers of polygons are obtained.
  896. int count = 0;
  897. for (size_t i = 0; i < p_polygon_indices.size(); i += 1) {
  898. if (p_polygon_indices[i] < 0) {
  899. count += 1;
  900. }
  901. }
  902. return count;
  903. }
  904. template <class R, class T>
  905. HashMap<int, R> FBXMeshData::extract_per_vertex_data(
  906. int p_vertex_count,
  907. const std::vector<FBXDocParser::MeshGeometry::Edge> &p_edge_map,
  908. const std::vector<int> &p_mesh_indices,
  909. const FBXDocParser::MeshGeometry::MappingData<T> &p_mapping_data,
  910. R (*collector_function)(const Vector<VertexData<T>> *p_vertex_data, R p_fall_back),
  911. R p_fall_back) const {
  912. /* When index_to_direct is set
  913. * index size is 184 ( contains index for the data array [values 0, 96] )
  914. * data size is 96 (contains uv coordinates)
  915. * this means index is simple data reduction basically
  916. */
  917. ////
  918. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_mapping_data.index.size() == 0) {
  919. print_verbose("debug count: index size: " + itos(p_mapping_data.index.size()) + ", data size: " + itos(p_mapping_data.data.size()));
  920. print_verbose("vertex indices count: " + itos(p_mesh_indices.size()));
  921. print_verbose("Edge map size: " + itos(p_edge_map.size()));
  922. }
  923. ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_mapping_data.index.size() == 0, (HashMap<int, R>()), "FBX importer needs to map correctly to this field, please specify the override index name to fix this problem!");
  924. ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index && p_mapping_data.index.size() == 0, (HashMap<int, R>()), "The FBX seems corrupted");
  925. // Aggregate vertex data.
  926. HashMap<Vertex, Vector<VertexData<T>>> aggregate_vertex_data;
  927. switch (p_mapping_data.map_type) {
  928. case FBXDocParser::MeshGeometry::MapType::none: {
  929. // No data nothing to do.
  930. return (HashMap<int, R>());
  931. }
  932. case FBXDocParser::MeshGeometry::MapType::vertex: {
  933. ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct, (HashMap<int, R>()), "We will support in future");
  934. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  935. // The data is mapped per vertex directly.
  936. ERR_FAIL_COND_V_MSG((int)p_mapping_data.data.size() != p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR01");
  937. for (size_t vertex_index = 0; vertex_index < p_mapping_data.data.size(); vertex_index += 1) {
  938. aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[vertex_index] });
  939. }
  940. } else {
  941. // The data is mapped per vertex using a reference.
  942. // The indices array, contains a *reference_id for each vertex.
  943. // * Note that the reference_id is the id of data into the data array.
  944. //
  945. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  946. ERR_FAIL_COND_V_MSG((int)p_mapping_data.index.size() != p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR02");
  947. for (size_t vertex_index = 0; vertex_index < p_mapping_data.index.size(); vertex_index += 1) {
  948. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[vertex_index], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR03.");
  949. aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[p_mapping_data.index[vertex_index]] });
  950. }
  951. }
  952. } break;
  953. case FBXDocParser::MeshGeometry::MapType::polygon_vertex: {
  954. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct) {
  955. // The data is mapped using each index from the indexes array then direct to the data (data reduction algorithm)
  956. ERR_FAIL_COND_V_MSG((int)p_mesh_indices.size() != (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR04");
  957. int polygon_id = -1;
  958. for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.index.size(); polygon_vertex_index += 1) {
  959. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  960. polygon_id += 1;
  961. }
  962. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  963. ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR05");
  964. ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR06");
  965. const int index_to_direct = p_mapping_data.index[polygon_vertex_index];
  966. T value = p_mapping_data.data[index_to_direct];
  967. aggregate_vertex_data[vertex_index].push_back({ polygon_id, value });
  968. }
  969. } else if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  970. // The data are mapped per polygon vertex directly.
  971. ERR_FAIL_COND_V_MSG((int)p_mesh_indices.size() != (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR04");
  972. int polygon_id = -1;
  973. for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.data.size(); polygon_vertex_index += 1) {
  974. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  975. polygon_id += 1;
  976. }
  977. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  978. ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR05");
  979. ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR06");
  980. aggregate_vertex_data[vertex_index].push_back({ polygon_id, p_mapping_data.data[polygon_vertex_index] });
  981. }
  982. } else {
  983. // The data is mapped per polygon_vertex using a reference.
  984. // The indices array, contains a *reference_id for each polygon_vertex.
  985. // * Note that the reference_id is the id of data into the data array.
  986. //
  987. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  988. ERR_FAIL_COND_V_MSG(p_mesh_indices.size() != p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR7");
  989. int polygon_id = -1;
  990. for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.index.size(); polygon_vertex_index += 1) {
  991. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  992. polygon_id += 1;
  993. }
  994. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  995. ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR8");
  996. ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR9.");
  997. ERR_FAIL_COND_V_MSG(p_mapping_data.index[polygon_vertex_index] < 0, (HashMap<int, R>()), "FBX file seems corrupted: #ERR10.");
  998. ERR_FAIL_COND_V_MSG(p_mapping_data.index[polygon_vertex_index] >= (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR11.");
  999. aggregate_vertex_data[vertex_index].push_back({ polygon_id, p_mapping_data.data[p_mapping_data.index[polygon_vertex_index]] });
  1000. }
  1001. }
  1002. } break;
  1003. case FBXDocParser::MeshGeometry::MapType::polygon: {
  1004. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  1005. // The data are mapped per polygon directly.
  1006. const int polygon_count = count_polygons(p_mesh_indices);
  1007. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR12");
  1008. // Advance each polygon vertex, each new polygon advance the polygon index.
  1009. int polygon_index = -1;
  1010. for (size_t polygon_vertex_index = 0;
  1011. polygon_vertex_index < p_mesh_indices.size();
  1012. polygon_vertex_index += 1) {
  1013. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  1014. polygon_index += 1;
  1015. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR13");
  1016. }
  1017. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  1018. ERR_FAIL_INDEX_V_MSG(vertex_index, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR14");
  1019. aggregate_vertex_data[vertex_index].push_back({ polygon_index, p_mapping_data.data[polygon_index] });
  1020. }
  1021. ERR_FAIL_COND_V_MSG((polygon_index + 1) != polygon_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR16. Not all Polygons are present in the file.");
  1022. } else {
  1023. // The data is mapped per polygon using a reference.
  1024. // The indices array, contains a *reference_id for each polygon.
  1025. // * Note that the reference_id is the id of data into the data array.
  1026. //
  1027. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  1028. const int polygon_count = count_polygons(p_mesh_indices);
  1029. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR17");
  1030. // Advance each polygon vertex, each new polygon advance the polygon index.
  1031. int polygon_index = -1;
  1032. for (size_t polygon_vertex_index = 0;
  1033. polygon_vertex_index < p_mesh_indices.size();
  1034. polygon_vertex_index += 1) {
  1035. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  1036. polygon_index += 1;
  1037. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR18");
  1038. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[polygon_index], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR19");
  1039. }
  1040. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  1041. ERR_FAIL_INDEX_V_MSG(vertex_index, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR20");
  1042. aggregate_vertex_data[vertex_index].push_back({ polygon_index, p_mapping_data.data[p_mapping_data.index[polygon_index]] });
  1043. }
  1044. ERR_FAIL_COND_V_MSG((polygon_index + 1) != polygon_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR22. Not all Polygons are present in the file.");
  1045. }
  1046. } break;
  1047. case FBXDocParser::MeshGeometry::MapType::edge: {
  1048. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  1049. // The data are mapped per edge directly.
  1050. ERR_FAIL_COND_V_MSG(p_edge_map.size() != p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR23");
  1051. for (size_t edge_index = 0; edge_index < p_mapping_data.data.size(); edge_index += 1) {
  1052. const FBXDocParser::MeshGeometry::Edge edge = FBXDocParser::MeshGeometry::get_edge(p_edge_map, edge_index);
  1053. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR24");
  1054. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR25");
  1055. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR26");
  1056. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR27");
  1057. aggregate_vertex_data[edge.vertex_0].push_back({ -1, p_mapping_data.data[edge_index] });
  1058. aggregate_vertex_data[edge.vertex_1].push_back({ -1, p_mapping_data.data[edge_index] });
  1059. }
  1060. } else {
  1061. // The data is mapped per edge using a reference.
  1062. // The indices array, contains a *reference_id for each polygon.
  1063. // * Note that the reference_id is the id of data into the data array.
  1064. //
  1065. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  1066. ERR_FAIL_COND_V_MSG(p_edge_map.size() != p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR28");
  1067. for (size_t edge_index = 0; edge_index < p_mapping_data.data.size(); edge_index += 1) {
  1068. const FBXDocParser::MeshGeometry::Edge edge = FBXDocParser::MeshGeometry::get_edge(p_edge_map, edge_index);
  1069. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR29");
  1070. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR30");
  1071. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR31");
  1072. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR32");
  1073. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[edge.vertex_0], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR33");
  1074. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[edge.vertex_1], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR34");
  1075. aggregate_vertex_data[edge.vertex_0].push_back({ -1, p_mapping_data.data[p_mapping_data.index[edge_index]] });
  1076. aggregate_vertex_data[edge.vertex_1].push_back({ -1, p_mapping_data.data[p_mapping_data.index[edge_index]] });
  1077. }
  1078. }
  1079. } break;
  1080. case FBXDocParser::MeshGeometry::MapType::all_the_same: {
  1081. // No matter the mode, no matter the data size; The first always win
  1082. // and is set to all the vertices.
  1083. ERR_FAIL_COND_V_MSG(p_mapping_data.data.size() <= 0, (HashMap<int, R>()), "FBX file seems corrupted: #ERR35");
  1084. if (p_mapping_data.data.size() > 0) {
  1085. for (int vertex_index = 0; vertex_index < p_vertex_count; vertex_index += 1) {
  1086. aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[0] });
  1087. }
  1088. }
  1089. } break;
  1090. }
  1091. if (aggregate_vertex_data.size() == 0) {
  1092. return (HashMap<int, R>());
  1093. }
  1094. // A map is used because turns out that the some FBX file are not well organized
  1095. // with vertices well compacted. Using a map allows avoid those issues.
  1096. HashMap<Vertex, R> result;
  1097. // Aggregate the collected data.
  1098. for (const Vertex *index = aggregate_vertex_data.next(nullptr); index != nullptr; index = aggregate_vertex_data.next(index)) {
  1099. Vector<VertexData<T>> *aggregated_vertex = aggregate_vertex_data.getptr(*index);
  1100. // This can't be null because we are just iterating.
  1101. CRASH_COND(aggregated_vertex == nullptr);
  1102. ERR_FAIL_INDEX_V_MSG(0, aggregated_vertex->size(), (HashMap<int, R>()), "The FBX file is corrupted, No valid data for this vertex index.");
  1103. result[*index] = collector_function(aggregated_vertex, p_fall_back);
  1104. }
  1105. // Sanitize the data now, if the file is broken we can try import it anyway.
  1106. bool problem_found = false;
  1107. for (size_t i = 0; i < p_mesh_indices.size(); i += 1) {
  1108. const Vertex vertex = get_vertex_from_polygon_vertex(p_mesh_indices, i);
  1109. if (result.has(vertex) == false) {
  1110. result[vertex] = p_fall_back;
  1111. problem_found = true;
  1112. }
  1113. }
  1114. if (problem_found) {
  1115. WARN_PRINT("Some data is missing, this FBX file may be corrupted: #WARN0.");
  1116. }
  1117. return result;
  1118. }
// Extracts FBX layer data and re-organizes it so it is addressable per
// polygon. Only the `polygon` and `all_the_same` map types can be expressed
// per polygon; the others fail with an error. Polygons that end up with no
// data receive `p_fallback_value` (with a warning, so broken files still
// import). Returns an empty map on unrecoverable corruption.
// NOTE(review): `p_vertex_count` is not used anywhere in this body; it appears
// to be kept only for signature symmetry with extract_per_vertex_data.
template <class T>
HashMap<int, T> FBXMeshData::extract_per_polygon(
		int p_vertex_count,
		const std::vector<int> &p_polygon_indices,
		const FBXDocParser::MeshGeometry::MappingData<T> &p_fbx_data,
		T p_fallback_value) const {
	ERR_FAIL_COND_V_MSG(p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_fbx_data.data.size() == 0, (HashMap<int, T>()), "invalid index to direct array");
	ERR_FAIL_COND_V_MSG(p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index && p_fbx_data.index.size() == 0, (HashMap<int, T>()), "The FBX seems corrupted");

	const int polygon_count = count_polygons(p_polygon_indices);

	// Collect the (possibly multiple) values gathered for each polygon; only
	// the first value per polygon is kept at the end.
	HashMap<int, Vector<T>> aggregate_polygon_data;

	switch (p_fbx_data.map_type) {
		case FBXDocParser::MeshGeometry::MapType::none: {
			// No data nothing to do.
			return (HashMap<int, T>());
		}
		case FBXDocParser::MeshGeometry::MapType::vertex: {
			ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per vertex. This should not happen.");
		} break;
		case FBXDocParser::MeshGeometry::MapType::polygon_vertex: {
			ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per polygon vertex. This should not happen.");
		} break;
		case FBXDocParser::MeshGeometry::MapType::polygon: {
			if (p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct) {
				// The data is stored efficiently index_to_direct allows less data in the FBX file.
				for (int polygon_index = 0;
						polygon_index < polygon_count;
						polygon_index += 1) {
					if (p_fbx_data.index.size() == 0) {
						// No index array: fall back to direct per-polygon data.
						ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR62");
						aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[polygon_index]);
					} else {
						ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR62");

						const int index_to_direct = p_fbx_data.index[polygon_index];
						// NOTE(review): `index_to_direct` is not bounds-checked
						// against data.size() here — a corrupted file could read
						// out of range; confirm upstream validation.
						T value = p_fbx_data.data[index_to_direct];
						aggregate_polygon_data[polygon_index].push_back(value);
					}
				}
			} else if (p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
				// The data are mapped per polygon directly.
				ERR_FAIL_COND_V_MSG(polygon_count != (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR51");

				// Advance each polygon vertex, each new polygon advance the polygon index.
				for (int polygon_index = 0;
						polygon_index < polygon_count;
						polygon_index += 1) {
					ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR52");
					aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[polygon_index]);
				}
			} else {
				// The data is mapped per polygon using a reference.
				// The indices array, contains a *reference_id for each polygon.
				// * Note that the reference_id is the id of data into the data array.
				//
				// https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
				ERR_FAIL_COND_V_MSG(polygon_count != (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file seems corrupted: #ERR52");

				// Advance each polygon vertex, each new polygon advance the polygon index.
				for (int polygon_index = 0;
						polygon_index < polygon_count;
						polygon_index += 1) {
					ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR53");
					ERR_FAIL_INDEX_V_MSG(p_fbx_data.index[polygon_index], (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR54");
					aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[p_fbx_data.index[polygon_index]]);
				}
			}
		} break;
		case FBXDocParser::MeshGeometry::MapType::edge: {
			ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per edge. This should not happen.");
		} break;
		case FBXDocParser::MeshGeometry::MapType::all_the_same: {
			// No matter the mode, no matter the data size; The first always win
			// and is set to all the vertices.
			ERR_FAIL_COND_V_MSG(p_fbx_data.data.size() <= 0, (HashMap<int, T>()), "FBX file seems corrupted: #ERR55");
			// Note: this `if` is always true — the macro above already returned
			// when data is empty — kept as a defensive guard.
			if (p_fbx_data.data.size() > 0) {
				for (int polygon_index = 0; polygon_index < polygon_count; polygon_index += 1) {
					aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[0]);
				}
			}
		} break;
	}

	if (aggregate_polygon_data.size() == 0) {
		return (HashMap<int, T>());
	}

	// A map is used because turns out that the some FBX file are not well organized
	// with vertices well compacted. Using a map allows avoid those issues.
	HashMap<int, T> polygons;

	// Take the first value for each vertex.
	for (const Vertex *index = aggregate_polygon_data.next(nullptr); index != nullptr; index = aggregate_polygon_data.next(index)) {
		Vector<T> *aggregated_polygon = aggregate_polygon_data.getptr(*index);
		// This can't be null because we are just iterating.
		CRASH_COND(aggregated_polygon == nullptr);

		ERR_FAIL_INDEX_V_MSG(0, (int)aggregated_polygon->size(), (HashMap<int, T>()), "The FBX file is corrupted, No valid data for this polygon index.");

		// Validate the final value.
		polygons[*index] = (*aggregated_polygon)[0];
	}

	// Sanitize the data now, if the file is broken we can try import it anyway.
	bool problem_found = false;
	for (int polygon_i = 0; polygon_i < polygon_count; polygon_i += 1) {
		if (polygons.has(polygon_i) == false) {
			polygons[polygon_i] = p_fallback_value;
			problem_found = true;
		}
	}
	if (problem_found) {
		WARN_PRINT("Some data is missing, this FBX file may be corrupted: #WARN1.");
	}

	return polygons;
}
  1226. void FBXMeshData::extract_morphs(const FBXDocParser::MeshGeometry *mesh_geometry, HashMap<String, MorphVertexData> &r_data) {
  1227. r_data.clear();
  1228. const int vertex_count = mesh_geometry->get_vertices().size();
  1229. for (const FBXDocParser::BlendShape *blend_shape : mesh_geometry->get_blend_shapes()) {
  1230. for (const FBXDocParser::BlendShapeChannel *blend_shape_channel : blend_shape->BlendShapeChannels()) {
  1231. const std::vector<const FBXDocParser::ShapeGeometry *> &shape_geometries = blend_shape_channel->GetShapeGeometries();
  1232. for (const FBXDocParser::ShapeGeometry *shape_geometry : shape_geometries) {
  1233. String morph_name = ImportUtils::FBXAnimMeshName(shape_geometry->Name()).c_str();
  1234. if (morph_name.empty()) {
  1235. morph_name = "morph";
  1236. }
  1237. // TODO we have only these??
  1238. const std::vector<unsigned int> &morphs_vertex_indices = shape_geometry->GetIndices();
  1239. const std::vector<Vector3> &morphs_vertices = shape_geometry->GetVertices();
  1240. const std::vector<Vector3> &morphs_normals = shape_geometry->GetNormals();
  1241. ERR_FAIL_COND_MSG((int)morphs_vertex_indices.size() > vertex_count, "The FBX file is corrupted: #ERR103");
  1242. ERR_FAIL_COND_MSG(morphs_vertex_indices.size() != morphs_vertices.size(), "The FBX file is corrupted: #ERR104");
  1243. ERR_FAIL_COND_MSG((int)morphs_vertices.size() > vertex_count, "The FBX file is corrupted: #ERR105");
  1244. ERR_FAIL_COND_MSG(morphs_normals.size() != 0 && morphs_normals.size() != morphs_vertices.size(), "The FBX file is corrupted: #ERR106");
  1245. if (r_data.has(morph_name) == false) {
  1246. // This morph doesn't exist yet.
  1247. // Create it.
  1248. MorphVertexData md;
  1249. md.vertices.resize(vertex_count);
  1250. md.normals.resize(vertex_count);
  1251. r_data.set(morph_name, md);
  1252. }
  1253. MorphVertexData *data = r_data.getptr(morph_name);
  1254. Vector3 *data_vertices_ptr = data->vertices.ptrw();
  1255. Vector3 *data_normals_ptr = data->normals.ptrw();
  1256. for (int i = 0; i < (int)morphs_vertex_indices.size(); i += 1) {
  1257. const Vertex vertex = morphs_vertex_indices[i];
  1258. ERR_FAIL_INDEX_MSG(vertex, vertex_count, "The blend shapes of this FBX file are corrupted. It has a not valid vertex.");
  1259. data_vertices_ptr[vertex] = morphs_vertices[i];
  1260. if (morphs_normals.size() != 0) {
  1261. data_normals_ptr[vertex] = morphs_normals[i];
  1262. }
  1263. }
  1264. }
  1265. }
  1266. }
  1267. }