fbx_mesh_data.cpp 56 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447
  1. /*************************************************************************/
  2. /* fbx_mesh_data.cpp */
  3. /*************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /*************************************************************************/
  8. /* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur. */
  9. /* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /*************************************************************************/
  30. #include "fbx_mesh_data.h"
  31. #include "core/templates/local_vector.h"
  32. #include "scene/resources/mesh.h"
  33. #include "scene/resources/surface_tool.h"
  34. #include "thirdparty/misc/polypartition.h"
  35. template <class T>
  36. T collect_first(const Vector<VertexData<T>> *p_data, T p_fall_back) {
  37. if (p_data->is_empty()) {
  38. return p_fall_back;
  39. }
  40. return (*p_data)[0].data;
  41. }
  42. template <class T>
  43. HashMap<int, T> collect_all(const Vector<VertexData<T>> *p_data, HashMap<int, T> p_fall_back) {
  44. if (p_data->is_empty()) {
  45. return p_fall_back;
  46. }
  47. HashMap<int, T> collection;
  48. for (int i = 0; i < p_data->size(); i += 1) {
  49. const VertexData<T> &vd = (*p_data)[i];
  50. collection[vd.polygon_index] = vd.data;
  51. }
  52. return collection;
  53. }
  54. template <class T>
  55. T collect_average(const Vector<VertexData<T>> *p_data, T p_fall_back) {
  56. if (p_data->is_empty()) {
  57. return p_fall_back;
  58. }
  59. T combined = (*p_data)[0].data; // Make sure the data is always correctly initialized.
  60. print_verbose("size of data: " + itos(p_data->size()));
  61. for (int i = 1; i < p_data->size(); i += 1) {
  62. combined += (*p_data)[i].data;
  63. }
  64. combined = combined / real_t(p_data->size());
  65. return combined.normalized();
  66. }
  67. HashMap<int, Vector3> collect_normal(const Vector<VertexData<Vector3>> *p_data, HashMap<int, Vector3> p_fall_back) {
  68. if (p_data->is_empty()) {
  69. return p_fall_back;
  70. }
  71. HashMap<int, Vector3> collection;
  72. for (int i = 0; i < p_data->size(); i += 1) {
  73. const VertexData<Vector3> &vd = (*p_data)[i];
  74. collection[vd.polygon_index] = vd.data;
  75. }
  76. return collection;
  77. }
  78. HashMap<int, Vector2> collect_uv(const Vector<VertexData<Vector2>> *p_data, HashMap<int, Vector2> p_fall_back) {
  79. if (p_data->is_empty()) {
  80. return p_fall_back;
  81. }
  82. HashMap<int, Vector2> collection;
  83. for (int i = 0; i < p_data->size(); i += 1) {
  84. const VertexData<Vector2> &vd = (*p_data)[i];
  85. collection[vd.polygon_index] = vd.data;
  86. }
  87. return collection;
  88. }
// Semantic aliases that make the mesh-building code below self-documenting:
// all four are plain FBX/array indices.
typedef int Vertex;
typedef int SurfaceId;
typedef int PolygonId;
typedef int DataIndex;
// Per-surface (i.e. per-material) build state accumulated while the FBX mesh
// is split into Godot surfaces.
struct SurfaceData {
	Ref<SurfaceTool> surface_tool;
	// Maps an FBX vertex id to its index inside `vertices_map`, so membership
	// checks stay O(1) instead of a linear find() on the vector.
	OrderedHashMap<Vertex, int> lookup_table; // proposed fix is to replace lookup_table[vertex_id] to give the position of the vertices_map[int] index.
	LocalVector<Vertex> vertices_map; // this must be ordered the same as insertion <-- slow to do find() operation.
	Ref<Material> material;
	// For each polygon, the corner indices pointing into `vertices_map`.
	HashMap<PolygonId, Vector<DataIndex>> surface_polygon_vertex;
	// One committed vertex-array per morph (blend shape) target.
	Array morphs;
};
// Converts one FBX mesh geometry into a Godot importer mesh node. The work
// runs in six ordered phases (see the numbered comments): parse the FBX data
// layers, create one SurfaceTool per material, map vertices per surface, fill
// vertices and triangulated indices, compose the morphs, and assemble the
// final EditorSceneImporterMesh.
EditorSceneImporterMeshNode3D *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDocParser::MeshGeometry *p_mesh_geometry, const FBXDocParser::Model *model, bool use_compression) {
	mesh_geometry = p_mesh_geometry;
	// todo: make this just use a uint64_t FBX ID this is a copy of our original materials unfortunately.
	const std::vector<const FBXDocParser::Material *> &material_lookup = model->GetMaterials();
	// TODO: perf hotspot on large files
	// this can be a very large copy
	std::vector<int> polygon_indices = mesh_geometry->get_polygon_indices();
	std::vector<Vector3> vertices = mesh_geometry->get_vertices();
	// Phase 1. Parse all FBX data.
	// The plain maps (normals, uvs_0, ...) end up holding one value per
	// vertex; the `*_raw` maps hold, per vertex, the value seen on each
	// polygon touching it. reorganize_vertices() uses the raw maps to detect
	// vertices that need to be split.
	HashMap<int, Vector3> normals;
	HashMap<int, HashMap<int, Vector3>> normals_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_normals(),
			&collect_all,
			HashMap<int, Vector3>());
	HashMap<int, Vector2> uvs_0;
	HashMap<int, HashMap<int, Vector2>> uvs_0_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_uv_0(),
			&collect_all,
			HashMap<int, Vector2>());
	HashMap<int, Vector2> uvs_1;
	HashMap<int, HashMap<int, Vector2>> uvs_1_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_uv_1(),
			&collect_all,
			HashMap<int, Vector2>());
	HashMap<int, Color> colors;
	HashMap<int, HashMap<int, Color>> colors_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_colors(),
			&collect_all,
			HashMap<int, Color>());
	// TODO what about tangents?
	// TODO what about bi-nomials?
	// TODO there is other?
	// Per-polygon material allocation id; -1 is the fallback surface id.
	HashMap<int, SurfaceId> polygon_surfaces = extract_per_polygon(
			vertices.size(),
			polygon_indices,
			mesh_geometry->get_material_allocation_id(),
			-1);
	HashMap<String, MorphVertexData> morphs;
	extract_morphs(mesh_geometry, morphs);
	// TODO please add skinning.
	//mesh_id = mesh_geometry->ID();
	sanitize_vertex_weights(state);
	// Re organize polygon vertices to to correctly take into account strange
	// UVs.
	reorganize_vertices(
			polygon_indices,
			vertices,
			normals,
			uvs_0,
			uvs_1,
			colors,
			morphs,
			normals_raw,
			colors_raw,
			uvs_0_raw,
			uvs_1_raw);
	const int color_count = colors.size();
	print_verbose("Vertex color count: " + itos(color_count));
	// Make sure that from this moment on the mesh_geometry is no used anymore.
	// This is a safety step, because the mesh_geometry data are no more valid
	// at this point.
	const int vertex_count = vertices.size();
	print_verbose("Vertex count: " + itos(vertex_count));
	// The map key is the material allocator id that is also used as surface id.
	HashMap<SurfaceId, SurfaceData> surfaces;
	// Phase 2. For each material create a surface tool (So a different mesh).
	{
		if (polygon_surfaces.is_empty()) {
			// No material, just use the default one with index -1.
			// Set -1 to all polygons.
			const int polygon_count = count_polygons(polygon_indices);
			for (int p = 0; p < polygon_count; p += 1) {
				polygon_surfaces[p] = -1;
			}
		}
		// Create the surface now.
		for (const int *polygon_id = polygon_surfaces.next(nullptr); polygon_id != nullptr; polygon_id = polygon_surfaces.next(polygon_id)) {
			const int surface_id = polygon_surfaces[*polygon_id];
			if (surfaces.has(surface_id) == false) {
				SurfaceData sd;
				sd.surface_tool.instance();
				sd.surface_tool->begin(Mesh::PRIMITIVE_TRIANGLES);
				if (surface_id < 0) {
					// nothing to do
				} else if (surface_id < (int)material_lookup.size()) {
					// Resolve the cached Godot material through the FBX
					// material's unique ID.
					const FBXDocParser::Material *mat_mapping = material_lookup.at(surface_id);
					const uint64_t mapping_id = mat_mapping->ID();
					if (state.cached_materials.has(mapping_id)) {
						sd.material = state.cached_materials[mapping_id];
					}
				} else {
					WARN_PRINT("out of bounds surface detected, FBX file has corrupt material data");
				}
				surfaces.set(surface_id, sd);
			}
		}
	}
	// Phase 3. Map the vertices relative to each surface, in this way we can
	// just insert the vertices that we need per each surface.
	{
		PolygonId polygon_index = -1;
		SurfaceId surface_id = -1;
		SurfaceData *surface_data = nullptr;
		for (size_t polygon_vertex = 0; polygon_vertex < polygon_indices.size(); polygon_vertex += 1) {
			if (is_start_of_polygon(polygon_indices, polygon_vertex)) {
				polygon_index += 1;
				ERR_FAIL_COND_V_MSG(polygon_surfaces.has(polygon_index) == false, nullptr, "The FBX file is corrupted, This surface_index is not expected.");
				surface_id = polygon_surfaces[polygon_index];
				surface_data = surfaces.getptr(surface_id);
				CRASH_COND(surface_data == nullptr); // Can't be null.
			}
			const int vertex = get_vertex_from_polygon_vertex(polygon_indices, polygon_vertex);
			// The vertex position in the surface
			// Uses a lookup table for speed with large scenes
			bool has_polygon_vertex_index = surface_data->lookup_table.has(vertex);
			int surface_polygon_vertex_index = -1;
			if (has_polygon_vertex_index) {
				surface_polygon_vertex_index = surface_data->lookup_table[vertex];
			} else {
				// First time this vertex appears in this surface: append it
				// and remember its position.
				surface_polygon_vertex_index = surface_data->vertices_map.size();
				surface_data->lookup_table[vertex] = surface_polygon_vertex_index;
				surface_data->vertices_map.push_back(vertex);
			}
			surface_data->surface_polygon_vertex[polygon_index].push_back(surface_polygon_vertex_index);
		}
	}
	//print_verbose("[debug UV 1] UV1: " + itos(uvs_0.size()));
	//print_verbose("[debug UV 2] UV2: " + itos(uvs_1.size()));
	// Phase 4. Per each surface just insert the vertices and add the indices.
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);
		// Just add the vertices data.
		for (unsigned int i = 0; i < surface->vertices_map.size(); i += 1) {
			const Vertex vertex = surface->vertices_map[i];
			// This must be done before add_vertex because the surface tool is
			// expecting this before the st->add_vertex() call
			add_vertex(state,
					surface->surface_tool,
					state.scale,
					vertex,
					vertices,
					normals,
					uvs_0,
					uvs_1,
					colors);
		}
		// Triangulate the various polygons and add the indices.
		for (const PolygonId *polygon_id = surface->surface_polygon_vertex.next(nullptr); polygon_id != nullptr; polygon_id = surface->surface_polygon_vertex.next(polygon_id)) {
			const Vector<DataIndex> *indices = surface->surface_polygon_vertex.getptr(*polygon_id);
			triangulate_polygon(
					surface->surface_tool,
					*indices,
					surface->vertices_map,
					vertices);
		}
	}
	// Phase 5. Compose the morphs if any.
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);
		for (const String *morph_name = morphs.next(nullptr); morph_name != nullptr; morph_name = morphs.next(morph_name)) {
			MorphVertexData *morph_data = morphs.getptr(*morph_name);
			// As said by the docs, this is not supposed to be different than
			// vertex_count.
			CRASH_COND(morph_data->vertices.size() != vertex_count);
			CRASH_COND(morph_data->normals.size() != vertex_count);
			Vector3 *vertices_ptr = morph_data->vertices.ptrw();
			Vector3 *normals_ptr = morph_data->normals.ptrw();
			// Each morph is committed as its own vertex array, mirroring the
			// vertex order of the surface it belongs to.
			Ref<SurfaceTool> morph_st;
			morph_st.instance();
			morph_st->begin(Mesh::PRIMITIVE_TRIANGLES);
			for (unsigned int vi = 0; vi < surface->vertices_map.size(); vi += 1) {
				const Vertex vertex = surface->vertices_map[vi];
				add_vertex(
						state,
						morph_st,
						state.scale,
						vertex,
						vertices,
						normals,
						uvs_0,
						uvs_1,
						colors,
						vertices_ptr[vertex],
						normals_ptr[vertex]);
			}
			if (state.is_blender_fbx) {
				morph_st->generate_normals();
			}
			morph_st->generate_tangents();
			surface->morphs.push_back(morph_st->commit_to_arrays());
		}
	}
	// Phase 6. Compose the mesh and return it.
	Ref<EditorSceneImporterMesh> mesh;
	mesh.instance();
	// Add blend shape info.
	for (const String *morph_name = morphs.next(nullptr); morph_name != nullptr; morph_name = morphs.next(morph_name)) {
		mesh->add_blend_shape(*morph_name);
	}
	// TODO always normalized, Why?
	mesh->set_blend_shape_mode(Mesh::BLEND_SHAPE_MODE_NORMALIZED);
	// Add surfaces.
	int in_mesh_surface_id = 0;
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);
		if (state.is_blender_fbx) {
			surface->surface_tool->generate_normals();
		}
		// you can't generate them without a valid uv map.
		if (uvs_0_raw.size() > 0) {
			surface->surface_tool->generate_tangents();
		}
		Array mesh_array = surface->surface_tool->commit_to_arrays();
		Array blend_shapes = surface->morphs;
		if (surface->material.is_valid()) {
			mesh->add_surface(Mesh::PRIMITIVE_TRIANGLES, mesh_array, blend_shapes, Dictionary(), surface->material, surface->material->get_name());
		} else {
			mesh->add_surface(Mesh::PRIMITIVE_TRIANGLES, mesh_array, blend_shapes);
		}
		in_mesh_surface_id += 1;
	}
	EditorSceneImporterMeshNode3D *godot_mesh = memnew(EditorSceneImporterMeshNode3D);
	godot_mesh->set_mesh(mesh);
	return godot_mesh;
}
// Normalizes the skinning data of every vertex: remaps Godot bone ids to
// skin-bind ids, sorts each vertex's influences by descending weight, trims
// or pads them to exactly RS::ARRAY_WEIGHTS_SIZE entries, and renormalizes
// the weights so they sum to 1. Does nothing when the mesh has no skin.
void FBXMeshData::sanitize_vertex_weights(const ImportState &state) {
	const int max_vertex_influence_count = RS::ARRAY_WEIGHTS_SIZE;
	Map<int, int> skeleton_to_skin_bind_id;
	// TODO: error's need added
	const FBXDocParser::Skin *fbx_skin = mesh_geometry->DeformerSkin();
	if (fbx_skin == nullptr || fbx_skin->Clusters().size() == 0) {
		return; // do nothing
	}
	//
	// Precalculate the skin cluster mapping
	//
	// Clusters are enumerated in order; a cluster's position is its bind id.
	int bind_id = 0;
	for (const FBXDocParser::Cluster *cluster : fbx_skin->Clusters()) {
		Ref<FBXBone> bone = state.fbx_bone_map[cluster->TargetNode()->ID()];
		skeleton_to_skin_bind_id.insert(bone->godot_bone_id, bind_id);
		bind_id++;
	}
	for (const Vertex *v = vertex_weights.next(nullptr); v != nullptr; v = vertex_weights.next(v)) {
		VertexWeightMapping *vm = vertex_weights.getptr(*v);
		ERR_CONTINUE(vm->bones.size() != vm->weights.size()); // No message, already checked.
		ERR_CONTINUE(vm->bones_ref.size() != vm->weights.size()); // No message, already checked.
		const int initial_size = vm->weights.size();
		{
			// Init bone id
			int *bones_ptr = vm->bones.ptrw();
			Ref<FBXBone> *bones_ref_ptr = vm->bones_ref.ptrw();
			for (int i = 0; i < vm->weights.size(); i += 1) {
				// At this point this is not possible because the skeleton is already initialized.
				CRASH_COND(bones_ref_ptr[i]->godot_bone_id == -2);
				bones_ptr[i] = skeleton_to_skin_bind_id[bones_ref_ptr[i]->godot_bone_id];
			}
			// From this point on the data is no more valid.
			vm->bones_ref.clear();
		}
		{
			// Sort
			// In-place quadratic sort (descending by weight); the influence
			// count per vertex is tiny, so the O(n^2) cost is irrelevant.
			// Weights and bones are swapped in lock-step to keep pairs aligned.
			real_t *weights_ptr = vm->weights.ptrw();
			int *bones_ptr = vm->bones.ptrw();
			for (int i = 0; i < vm->weights.size(); i += 1) {
				for (int x = i + 1; x < vm->weights.size(); x += 1) {
					if (weights_ptr[i] < weights_ptr[x]) {
						SWAP(weights_ptr[i], weights_ptr[x]);
						SWAP(bones_ptr[i], bones_ptr[x]);
					}
				}
			}
		}
		{
			// Resize
			// Because the arrays are sorted, truncation keeps the strongest
			// influences; when there were fewer, pad the tail with zeros.
			vm->weights.resize(max_vertex_influence_count);
			vm->bones.resize(max_vertex_influence_count);
			real_t *weights_ptr = vm->weights.ptrw();
			int *bones_ptr = vm->bones.ptrw();
			for (int i = initial_size; i < max_vertex_influence_count; i += 1) {
				weights_ptr[i] = 0.0;
				bones_ptr[i] = 0;
			}
			// Normalize
			real_t sum = 0.0;
			for (int i = 0; i < max_vertex_influence_count; i += 1) {
				sum += weights_ptr[i];
			}
			// Guard against division by zero for all-zero weights.
			if (sum > 0.0) {
				for (int i = 0; i < vm->weights.size(); i += 1) {
					weights_ptr[i] = weights_ptr[i] / sum;
				}
			}
		}
	}
}
  408. void FBXMeshData::reorganize_vertices(
  409. // TODO: perf hotspot on insane files
  410. std::vector<int> &r_polygon_indices,
  411. std::vector<Vector3> &r_vertices,
  412. HashMap<int, Vector3> &r_normals,
  413. HashMap<int, Vector2> &r_uv_1,
  414. HashMap<int, Vector2> &r_uv_2,
  415. HashMap<int, Color> &r_color,
  416. HashMap<String, MorphVertexData> &r_morphs,
  417. HashMap<int, HashMap<int, Vector3>> &r_normals_raw,
  418. HashMap<int, HashMap<int, Color>> &r_colors_raw,
  419. HashMap<int, HashMap<int, Vector2>> &r_uv_1_raw,
  420. HashMap<int, HashMap<int, Vector2>> &r_uv_2_raw) {
  421. // Key: OldVertex; Value: [New vertices];
  422. HashMap<int, Vector<int>> duplicated_vertices;
  423. PolygonId polygon_index = -1;
  424. for (int pv = 0; pv < (int)r_polygon_indices.size(); pv += 1) {
  425. if (is_start_of_polygon(r_polygon_indices, pv)) {
  426. polygon_index += 1;
  427. }
  428. const Vertex index = get_vertex_from_polygon_vertex(r_polygon_indices, pv);
  429. bool need_duplication = false;
  430. Vector2 this_vert_poly_uv1 = Vector2();
  431. Vector2 this_vert_poly_uv2 = Vector2();
  432. Vector3 this_vert_poly_normal = Vector3();
  433. Color this_vert_poly_color = Color();
  434. // Take the normal and see if we need to duplicate this polygon.
  435. if (r_normals_raw.has(index)) {
  436. const HashMap<PolygonId, Vector3> *nrml_arr = r_normals_raw.getptr(index);
  437. if (nrml_arr->has(polygon_index)) {
  438. this_vert_poly_normal = nrml_arr->get(polygon_index);
  439. } else if (nrml_arr->has(-1)) {
  440. this_vert_poly_normal = nrml_arr->get(-1);
  441. } else {
  442. print_error("invalid normal detected: " + itos(index) + " polygon index: " + itos(polygon_index));
  443. for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
  444. print_verbose("debug contents key: " + itos(*pid));
  445. if (nrml_arr->has(*pid)) {
  446. print_verbose("contents valid: " + nrml_arr->get(*pid));
  447. }
  448. }
  449. }
  450. // Now, check if we need to duplicate it.
  451. for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
  452. if (*pid == polygon_index) {
  453. continue;
  454. }
  455. const Vector3 vert_poly_normal = *nrml_arr->getptr(*pid);
  456. if ((this_vert_poly_normal - vert_poly_normal).length_squared() > CMP_EPSILON) {
  457. // Yes this polygon need duplication.
  458. need_duplication = true;
  459. break;
  460. }
  461. }
  462. }
  463. // TODO: make me vertex color
  464. // Take the normal and see if we need to duplicate this polygon.
  465. if (r_colors_raw.has(index)) {
  466. const HashMap<PolygonId, Color> *color_arr = r_colors_raw.getptr(index);
  467. if (color_arr->has(polygon_index)) {
  468. this_vert_poly_color = color_arr->get(polygon_index);
  469. } else if (color_arr->has(-1)) {
  470. this_vert_poly_color = color_arr->get(-1);
  471. } else {
  472. print_error("invalid color detected: " + itos(index) + " polygon index: " + itos(polygon_index));
  473. for (const PolygonId *pid = color_arr->next(nullptr); pid != nullptr; pid = color_arr->next(pid)) {
  474. print_verbose("debug contents key: " + itos(*pid));
  475. if (color_arr->has(*pid)) {
  476. print_verbose("contents valid: " + color_arr->get(*pid));
  477. }
  478. }
  479. }
  480. // Now, check if we need to duplicate it.
  481. for (const PolygonId *pid = color_arr->next(nullptr); pid != nullptr; pid = color_arr->next(pid)) {
  482. if (*pid == polygon_index) {
  483. continue;
  484. }
  485. const Color vert_poly_color = *color_arr->getptr(*pid);
  486. if (!this_vert_poly_color.is_equal_approx(vert_poly_color)) {
  487. // Yes this polygon need duplication.
  488. need_duplication = true;
  489. break;
  490. }
  491. }
  492. }
  493. // Take the UV1 and UV2 and see if we need to duplicate this polygon.
  494. {
  495. HashMap<int, HashMap<int, Vector2>> *uv_raw = &r_uv_1_raw;
  496. Vector2 *this_vert_poly_uv = &this_vert_poly_uv1;
  497. for (int kk = 0; kk < 2; kk++) {
  498. if (uv_raw->has(index)) {
  499. const HashMap<PolygonId, Vector2> *uvs = uv_raw->getptr(index);
  500. if (uvs->has(polygon_index)) {
  501. // This Polygon has its own uv.
  502. (*this_vert_poly_uv) = *uvs->getptr(polygon_index);
  503. // Check if we need to duplicate it.
  504. for (const PolygonId *pid = uvs->next(nullptr); pid != nullptr; pid = uvs->next(pid)) {
  505. if (*pid == polygon_index) {
  506. continue;
  507. }
  508. const Vector2 vert_poly_uv = *uvs->getptr(*pid);
  509. if (((*this_vert_poly_uv) - vert_poly_uv).length_squared() > CMP_EPSILON) {
  510. // Yes this polygon need duplication.
  511. need_duplication = true;
  512. break;
  513. }
  514. }
  515. } else if (uvs->has(-1)) {
  516. // It has the default UV.
  517. (*this_vert_poly_uv) = *uvs->getptr(-1);
  518. } else if (uvs->size() > 0) {
  519. // No uv, this is strange, just take the first and duplicate.
  520. (*this_vert_poly_uv) = *uvs->getptr(*uvs->next(nullptr));
  521. WARN_PRINT("No UVs for this polygon, while there is no default and some other polygons have it. This FBX file may be corrupted.");
  522. }
  523. }
  524. uv_raw = &r_uv_2_raw;
  525. this_vert_poly_uv = &this_vert_poly_uv2;
  526. }
  527. }
  528. // If we want to duplicate it, Let's see if we already duplicated this
  529. // vertex.
  530. if (need_duplication) {
  531. if (duplicated_vertices.has(index)) {
  532. Vertex similar_vertex = -1;
  533. // Let's see if one of the new vertices has the same data of this.
  534. const Vector<int> *new_vertices = duplicated_vertices.getptr(index);
  535. for (int j = 0; j < new_vertices->size(); j += 1) {
  536. const Vertex new_vertex = (*new_vertices)[j];
  537. bool same_uv1 = false;
  538. bool same_uv2 = false;
  539. bool same_normal = false;
  540. bool same_color = false;
  541. if (r_uv_1.has(new_vertex)) {
  542. if ((this_vert_poly_uv1 - (*r_uv_1.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  543. same_uv1 = true;
  544. }
  545. }
  546. if (r_uv_2.has(new_vertex)) {
  547. if ((this_vert_poly_uv2 - (*r_uv_2.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  548. same_uv2 = true;
  549. }
  550. }
  551. if (r_color.has(new_vertex)) {
  552. if (this_vert_poly_color.is_equal_approx((*r_color.getptr(new_vertex)))) {
  553. same_color = true;
  554. }
  555. }
  556. if (r_normals.has(new_vertex)) {
  557. if ((this_vert_poly_normal - (*r_normals.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  558. same_uv2 = true;
  559. }
  560. }
  561. if (same_uv1 && same_uv2 && same_normal && same_color) {
  562. similar_vertex = new_vertex;
  563. break;
  564. }
  565. }
  566. if (similar_vertex != -1) {
  567. // Update polygon.
  568. if (is_end_of_polygon(r_polygon_indices, pv)) {
  569. r_polygon_indices[pv] = ~similar_vertex;
  570. } else {
  571. r_polygon_indices[pv] = similar_vertex;
  572. }
  573. need_duplication = false;
  574. }
  575. }
  576. }
  577. if (need_duplication) {
  578. const Vertex old_index = index;
  579. const Vertex new_index = r_vertices.size();
  580. // Polygon index.
  581. if (is_end_of_polygon(r_polygon_indices, pv)) {
  582. r_polygon_indices[pv] = ~new_index;
  583. } else {
  584. r_polygon_indices[pv] = new_index;
  585. }
  586. // Vertex position.
  587. r_vertices.push_back(r_vertices[old_index]);
  588. // Normals
  589. if (r_normals_raw.has(old_index)) {
  590. r_normals.set(new_index, this_vert_poly_normal);
  591. r_normals_raw.getptr(old_index)->erase(polygon_index);
  592. r_normals_raw[new_index][polygon_index] = this_vert_poly_normal;
  593. }
  594. // Vertex Color
  595. if (r_colors_raw.has(old_index)) {
  596. r_color.set(new_index, this_vert_poly_color);
  597. r_colors_raw.getptr(old_index)->erase(polygon_index);
  598. r_colors_raw[new_index][polygon_index] = this_vert_poly_color;
  599. }
  600. // UV 0
  601. if (r_uv_1_raw.has(old_index)) {
  602. r_uv_1.set(new_index, this_vert_poly_uv1);
  603. r_uv_1_raw.getptr(old_index)->erase(polygon_index);
  604. r_uv_1_raw[new_index][polygon_index] = this_vert_poly_uv1;
  605. }
  606. // UV 1
  607. if (r_uv_2_raw.has(old_index)) {
  608. r_uv_2.set(new_index, this_vert_poly_uv2);
  609. r_uv_2_raw.getptr(old_index)->erase(polygon_index);
  610. r_uv_2_raw[new_index][polygon_index] = this_vert_poly_uv2;
  611. }
  612. // Morphs
  613. for (const String *mname = r_morphs.next(nullptr); mname != nullptr; mname = r_morphs.next(mname)) {
  614. MorphVertexData *d = r_morphs.getptr(*mname);
  615. // This can't never happen.
  616. CRASH_COND(d == nullptr);
  617. if (d->vertices.size() > old_index) {
  618. d->vertices.push_back(d->vertices[old_index]);
  619. }
  620. if (d->normals.size() > old_index) {
  621. d->normals.push_back(d->normals[old_index]);
  622. }
  623. }
  624. if (vertex_weights.has(old_index)) {
  625. vertex_weights.set(new_index, vertex_weights[old_index]);
  626. }
  627. duplicated_vertices[old_index].push_back(new_index);
  628. } else {
  629. if (r_normals_raw.has(index) &&
  630. r_normals.has(index) == false) {
  631. r_normals.set(index, this_vert_poly_normal);
  632. }
  633. if (r_colors_raw.has(index) && r_color.has(index) == false) {
  634. r_color.set(index, this_vert_poly_color);
  635. }
  636. if (r_uv_1_raw.has(index) &&
  637. r_uv_1.has(index) == false) {
  638. r_uv_1.set(index, this_vert_poly_uv1);
  639. }
  640. if (r_uv_2_raw.has(index) &&
  641. r_uv_2.has(index) == false) {
  642. r_uv_2.set(index, this_vert_poly_uv2);
  643. }
  644. }
  645. }
  646. }
  647. void FBXMeshData::add_vertex(
  648. const ImportState &state,
  649. Ref<SurfaceTool> p_surface_tool,
  650. real_t p_scale,
  651. Vertex p_vertex,
  652. const std::vector<Vector3> &p_vertices_position,
  653. const HashMap<int, Vector3> &p_normals,
  654. const HashMap<int, Vector2> &p_uvs_0,
  655. const HashMap<int, Vector2> &p_uvs_1,
  656. const HashMap<int, Color> &p_colors,
  657. const Vector3 &p_morph_value,
  658. const Vector3 &p_morph_normal) {
  659. ERR_FAIL_INDEX_MSG(p_vertex, (Vertex)p_vertices_position.size(), "FBX file is corrupted, the position of the vertex can't be retrieved.");
  660. if (p_normals.has(p_vertex) && !state.is_blender_fbx) {
  661. p_surface_tool->set_normal(p_normals[p_vertex] + p_morph_normal);
  662. }
  663. if (p_uvs_0.has(p_vertex)) {
  664. //print_verbose("uv1: [" + itos(p_vertex) + "] " + p_uvs_0[p_vertex]);
  665. // Inverts Y UV.
  666. p_surface_tool->set_uv(Vector2(p_uvs_0[p_vertex].x, 1 - p_uvs_0[p_vertex].y));
  667. }
  668. if (p_uvs_1.has(p_vertex)) {
  669. //print_verbose("uv2: [" + itos(p_vertex) + "] " + p_uvs_1[p_vertex]);
  670. // Inverts Y UV.
  671. p_surface_tool->set_uv2(Vector2(p_uvs_1[p_vertex].x, 1 - p_uvs_1[p_vertex].y));
  672. }
  673. if (p_colors.has(p_vertex)) {
  674. p_surface_tool->set_color(p_colors[p_vertex]);
  675. }
  676. // TODO what about binormals?
  677. // TODO there is other?
  678. if (vertex_weights.has(p_vertex)) {
  679. // Let's extract the weight info.
  680. const VertexWeightMapping *vm = vertex_weights.getptr(p_vertex);
  681. const Vector<int> &bones = vm->bones;
  682. // the bug is that the bone idx is wrong because it is not ref'ing the skin.
  683. if (bones.size() > RS::ARRAY_WEIGHTS_SIZE) {
  684. print_error("[weight overflow detected]");
  685. }
  686. p_surface_tool->set_weights(vm->weights);
  687. // 0 1 2 3 4 5 6 7 < local skeleton / skin for mesh
  688. // 0 1 2 3 4 5 6 7 8 9 10 < actual skeleton with all joints
  689. p_surface_tool->set_bones(bones);
  690. }
  691. // The surface tool want the vertex position as last thing.
  692. p_surface_tool->add_vertex((p_vertices_position[p_vertex] + p_morph_value) * p_scale);
  693. }
  694. void FBXMeshData::triangulate_polygon(Ref<SurfaceTool> st, Vector<int> p_polygon_vertex, const Vector<Vertex> p_surface_vertex_map, const std::vector<Vector3> &p_vertices) const {
  695. const int polygon_vertex_count = p_polygon_vertex.size();
  696. if (polygon_vertex_count == 1) {
  697. // point to triangle
  698. st->add_index(p_polygon_vertex[0]);
  699. st->add_index(p_polygon_vertex[0]);
  700. st->add_index(p_polygon_vertex[0]);
  701. return;
  702. } else if (polygon_vertex_count == 2) {
  703. // line to triangle
  704. st->add_index(p_polygon_vertex[1]);
  705. st->add_index(p_polygon_vertex[1]);
  706. st->add_index(p_polygon_vertex[0]);
  707. return;
  708. } else if (polygon_vertex_count == 3) {
  709. // triangle to triangle
  710. st->add_index(p_polygon_vertex[0]);
  711. st->add_index(p_polygon_vertex[2]);
  712. st->add_index(p_polygon_vertex[1]);
  713. return;
  714. } else if (polygon_vertex_count == 4) {
  715. // quad to triangle - this code is awesome for import times
  716. // it prevents triangles being generated slowly
  717. st->add_index(p_polygon_vertex[0]);
  718. st->add_index(p_polygon_vertex[2]);
  719. st->add_index(p_polygon_vertex[1]);
  720. st->add_index(p_polygon_vertex[2]);
  721. st->add_index(p_polygon_vertex[0]);
  722. st->add_index(p_polygon_vertex[3]);
  723. return;
  724. } else {
  725. // non triangulated - we must run the triangulation algorithm
  726. bool is_simple_convex = false;
  727. // this code is 'slow' but required it triangulates all the unsupported geometry.
  728. // Doesn't allow for bigger polygons because those are unlikely be convex
  729. if (polygon_vertex_count <= 6) {
  730. // Start from true, check if it's false.
  731. is_simple_convex = true;
  732. Vector3 first_vec;
  733. for (int i = 0; i < polygon_vertex_count; i += 1) {
  734. const Vector3 p1 = p_vertices[p_surface_vertex_map[p_polygon_vertex[i]]];
  735. const Vector3 p2 = p_vertices[p_surface_vertex_map[p_polygon_vertex[(i + 1) % polygon_vertex_count]]];
  736. const Vector3 p3 = p_vertices[p_surface_vertex_map[p_polygon_vertex[(i + 2) % polygon_vertex_count]]];
  737. const Vector3 edge1 = p1 - p2;
  738. const Vector3 edge2 = p3 - p2;
  739. const Vector3 res = edge1.normalized().cross(edge2.normalized()).normalized();
  740. if (i == 0) {
  741. first_vec = res;
  742. } else {
  743. if (first_vec.dot(res) < 0.0) {
  744. // Ok we found an angle that is not the same dir of the
  745. // others.
  746. is_simple_convex = false;
  747. break;
  748. }
  749. }
  750. }
  751. }
  752. if (is_simple_convex) {
  753. // This is a convex polygon, so just triangulate it.
  754. for (int i = 0; i < (polygon_vertex_count - 2); i += 1) {
  755. st->add_index(p_polygon_vertex[2 + i]);
  756. st->add_index(p_polygon_vertex[1 + i]);
  757. st->add_index(p_polygon_vertex[0]);
  758. }
  759. return;
  760. }
  761. }
  762. {
  763. // This is a concave polygon.
  764. std::vector<Vector3> poly_vertices(polygon_vertex_count);
  765. for (int i = 0; i < polygon_vertex_count; i += 1) {
  766. poly_vertices[i] = p_vertices[p_surface_vertex_map[p_polygon_vertex[i]]];
  767. }
  768. const Vector3 poly_norm = get_poly_normal(poly_vertices);
  769. if (poly_norm.length_squared() <= CMP_EPSILON) {
  770. ERR_FAIL_COND_MSG(poly_norm.length_squared() <= CMP_EPSILON, "The normal of this poly was not computed. Is this FBX file corrupted.");
  771. }
  772. // Select the plan coordinate.
  773. int axis_1_coord = 0;
  774. int axis_2_coord = 1;
  775. {
  776. real_t inv = poly_norm.z;
  777. const real_t axis_x = ABS(poly_norm.x);
  778. const real_t axis_y = ABS(poly_norm.y);
  779. const real_t axis_z = ABS(poly_norm.z);
  780. if (axis_x > axis_y) {
  781. if (axis_x > axis_z) {
  782. // For the most part the normal point toward X.
  783. axis_1_coord = 1;
  784. axis_2_coord = 2;
  785. inv = poly_norm.x;
  786. }
  787. } else if (axis_y > axis_z) {
  788. // For the most part the normal point toward Y.
  789. axis_1_coord = 2;
  790. axis_2_coord = 0;
  791. inv = poly_norm.y;
  792. }
  793. // Swap projection axes to take the negated projection vector into account
  794. if (inv < 0.0f) {
  795. SWAP(axis_1_coord, axis_2_coord);
  796. }
  797. }
  798. TPPLPoly tppl_poly;
  799. tppl_poly.Init(polygon_vertex_count);
  800. std::vector<Vector2> projected_vertices(polygon_vertex_count);
  801. for (int i = 0; i < polygon_vertex_count; i += 1) {
  802. const Vector2 pv(poly_vertices[i][axis_1_coord], poly_vertices[i][axis_2_coord]);
  803. projected_vertices[i] = pv;
  804. tppl_poly.GetPoint(i) = pv;
  805. }
  806. tppl_poly.SetOrientation(TPPL_ORIENTATION_CCW);
  807. List<TPPLPoly> out_poly;
  808. TPPLPartition tppl_partition;
  809. if (tppl_partition.Triangulate_OPT(&tppl_poly, &out_poly) == 0) { // Good result.
  810. if (tppl_partition.Triangulate_EC(&tppl_poly, &out_poly) == 0) { // Medium result.
  811. if (tppl_partition.Triangulate_MONO(&tppl_poly, &out_poly) == 0) { // Really poor result.
  812. ERR_FAIL_MSG("The triangulation of this polygon failed, please try to triangulate your mesh or check if it has broken polygons.");
  813. }
  814. }
  815. }
  816. std::vector<Vector2> tris(out_poly.size());
  817. for (List<TPPLPoly>::Element *I = out_poly.front(); I; I = I->next()) {
  818. TPPLPoly &tp = I->get();
  819. ERR_FAIL_COND_MSG(tp.GetNumPoints() != 3, "The triangulator retuned more points, how this is possible?");
  820. // Find Index
  821. for (int i = 2; i >= 0; i -= 1) {
  822. const Vector2 vertex = tp.GetPoint(i);
  823. bool done = false;
  824. // Find Index
  825. for (int y = 0; y < polygon_vertex_count; y += 1) {
  826. if ((projected_vertices[y] - vertex).length_squared() <= CMP_EPSILON) {
  827. // This seems the right vertex
  828. st->add_index(p_polygon_vertex[y]);
  829. done = true;
  830. break;
  831. }
  832. }
  833. ERR_FAIL_COND(done == false);
  834. }
  835. }
  836. }
  837. }
  838. void FBXMeshData::gen_weight_info(Ref<SurfaceTool> st, Vertex vertex_id) const {
  839. if (vertex_weights.is_empty()) {
  840. return;
  841. }
  842. if (vertex_weights.has(vertex_id)) {
  843. // Let's extract the weight info.
  844. const VertexWeightMapping *vm = vertex_weights.getptr(vertex_id);
  845. st->set_weights(vm->weights);
  846. st->set_bones(vm->bones);
  847. }
  848. }
  849. int FBXMeshData::get_vertex_from_polygon_vertex(const std::vector<int> &p_polygon_indices, int p_index) const {
  850. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  851. return -1;
  852. }
  853. const int vertex = p_polygon_indices[p_index];
  854. if (vertex >= 0) {
  855. return vertex;
  856. } else {
  857. // Negative numbers are the end of the face, reversing the bits is
  858. // possible to obtain the positive correct vertex number.
  859. return ~vertex;
  860. }
  861. }
  862. bool FBXMeshData::is_end_of_polygon(const std::vector<int> &p_polygon_indices, int p_index) const {
  863. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  864. return false;
  865. }
  866. const int vertex = p_polygon_indices[p_index];
  867. // If the index is negative this is the end of the Polygon.
  868. return vertex < 0;
  869. }
  870. bool FBXMeshData::is_start_of_polygon(const std::vector<int> &p_polygon_indices, int p_index) const {
  871. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  872. return false;
  873. }
  874. if (p_index == 0) {
  875. return true;
  876. }
  877. // If the previous indices is negative this is the begin of a new Polygon.
  878. return p_polygon_indices[p_index - 1] < 0;
  879. }
  880. int FBXMeshData::count_polygons(const std::vector<int> &p_polygon_indices) const {
  881. // The negative numbers define the end of the polygon. Counting the amount of
  882. // negatives the numbers of polygons are obtained.
  883. int count = 0;
  884. for (size_t i = 0; i < p_polygon_indices.size(); i += 1) {
  885. if (p_polygon_indices[i] < 0) {
  886. count += 1;
  887. }
  888. }
  889. return count;
  890. }
  891. template <class R, class T>
  892. HashMap<int, R> FBXMeshData::extract_per_vertex_data(
  893. int p_vertex_count,
  894. const std::vector<FBXDocParser::MeshGeometry::Edge> &p_edge_map,
  895. const std::vector<int> &p_mesh_indices,
  896. const FBXDocParser::MeshGeometry::MappingData<T> &p_mapping_data,
  897. R (*collector_function)(const Vector<VertexData<T>> *p_vertex_data, R p_fall_back),
  898. R p_fall_back) const {
  899. /* When index_to_direct is set
  900. * index size is 184 ( contains index for the data array [values 0, 96] )
  901. * data size is 96 (contains uv coordinates)
  902. * this means index is simple data reduction basically
  903. */
  904. ////
  905. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_mapping_data.index.size() == 0) {
  906. print_verbose("debug count: index size: " + itos(p_mapping_data.index.size()) + ", data size: " + itos(p_mapping_data.data.size()));
  907. print_verbose("vertex indices count: " + itos(p_mesh_indices.size()));
  908. print_verbose("Edge map size: " + itos(p_edge_map.size()));
  909. }
  910. ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_mapping_data.index.size() == 0, (HashMap<int, R>()), "FBX importer needs to map correctly to this field, please specify the override index name to fix this problem!");
  911. ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index && p_mapping_data.index.size() == 0, (HashMap<int, R>()), "The FBX seems corrupted");
  912. // Aggregate vertex data.
  913. HashMap<Vertex, Vector<VertexData<T>>> aggregate_vertex_data;
  914. switch (p_mapping_data.map_type) {
  915. case FBXDocParser::MeshGeometry::MapType::none: {
  916. // No data nothing to do.
  917. return (HashMap<int, R>());
  918. }
  919. case FBXDocParser::MeshGeometry::MapType::vertex: {
  920. ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct, (HashMap<int, R>()), "We will support in future");
  921. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  922. // The data is mapped per vertex directly.
  923. ERR_FAIL_COND_V_MSG((int)p_mapping_data.data.size() != p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR01");
  924. for (size_t vertex_index = 0; vertex_index < p_mapping_data.data.size(); vertex_index += 1) {
  925. aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[vertex_index] });
  926. }
  927. } else {
  928. // The data is mapped per vertex using a reference.
  929. // The indices array, contains a *reference_id for each vertex.
  930. // * Note that the reference_id is the id of data into the data array.
  931. //
  932. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  933. ERR_FAIL_COND_V_MSG((int)p_mapping_data.index.size() != p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR02");
  934. for (size_t vertex_index = 0; vertex_index < p_mapping_data.index.size(); vertex_index += 1) {
  935. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[vertex_index], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR03.");
  936. aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[p_mapping_data.index[vertex_index]] });
  937. }
  938. }
  939. } break;
  940. case FBXDocParser::MeshGeometry::MapType::polygon_vertex: {
  941. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct) {
  942. // The data is mapped using each index from the indexes array then direct to the data (data reduction algorithm)
  943. ERR_FAIL_COND_V_MSG((int)p_mesh_indices.size() != (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR04");
  944. int polygon_id = -1;
  945. for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.index.size(); polygon_vertex_index += 1) {
  946. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  947. polygon_id += 1;
  948. }
  949. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  950. ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR05");
  951. ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR06");
  952. const int index_to_direct = p_mapping_data.index[polygon_vertex_index];
  953. T value = p_mapping_data.data[index_to_direct];
  954. aggregate_vertex_data[vertex_index].push_back({ polygon_id, value });
  955. }
  956. } else if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  957. // The data are mapped per polygon vertex directly.
  958. ERR_FAIL_COND_V_MSG((int)p_mesh_indices.size() != (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR04");
  959. int polygon_id = -1;
  960. for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.data.size(); polygon_vertex_index += 1) {
  961. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  962. polygon_id += 1;
  963. }
  964. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  965. ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR05");
  966. ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR06");
  967. aggregate_vertex_data[vertex_index].push_back({ polygon_id, p_mapping_data.data[polygon_vertex_index] });
  968. }
  969. } else {
  970. // The data is mapped per polygon_vertex using a reference.
  971. // The indices array, contains a *reference_id for each polygon_vertex.
  972. // * Note that the reference_id is the id of data into the data array.
  973. //
  974. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  975. ERR_FAIL_COND_V_MSG(p_mesh_indices.size() != p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR7");
  976. int polygon_id = -1;
  977. for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.index.size(); polygon_vertex_index += 1) {
  978. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  979. polygon_id += 1;
  980. }
  981. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  982. ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR8");
  983. ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR9.");
  984. ERR_FAIL_COND_V_MSG(p_mapping_data.index[polygon_vertex_index] < 0, (HashMap<int, R>()), "FBX file seems corrupted: #ERR10.");
  985. ERR_FAIL_COND_V_MSG(p_mapping_data.index[polygon_vertex_index] >= (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR11.");
  986. aggregate_vertex_data[vertex_index].push_back({ polygon_id, p_mapping_data.data[p_mapping_data.index[polygon_vertex_index]] });
  987. }
  988. }
  989. } break;
  990. case FBXDocParser::MeshGeometry::MapType::polygon: {
  991. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  992. // The data are mapped per polygon directly.
  993. const int polygon_count = count_polygons(p_mesh_indices);
  994. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR12");
  995. // Advance each polygon vertex, each new polygon advance the polygon index.
  996. int polygon_index = -1;
  997. for (size_t polygon_vertex_index = 0;
  998. polygon_vertex_index < p_mesh_indices.size();
  999. polygon_vertex_index += 1) {
  1000. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  1001. polygon_index += 1;
  1002. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR13");
  1003. }
  1004. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  1005. ERR_FAIL_INDEX_V_MSG(vertex_index, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR14");
  1006. aggregate_vertex_data[vertex_index].push_back({ polygon_index, p_mapping_data.data[polygon_index] });
  1007. }
  1008. ERR_FAIL_COND_V_MSG((polygon_index + 1) != polygon_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR16. Not all Polygons are present in the file.");
  1009. } else {
  1010. // The data is mapped per polygon using a reference.
  1011. // The indices array, contains a *reference_id for each polygon.
  1012. // * Note that the reference_id is the id of data into the data array.
  1013. //
  1014. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  1015. const int polygon_count = count_polygons(p_mesh_indices);
  1016. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR17");
  1017. // Advance each polygon vertex, each new polygon advance the polygon index.
  1018. int polygon_index = -1;
  1019. for (size_t polygon_vertex_index = 0;
  1020. polygon_vertex_index < p_mesh_indices.size();
  1021. polygon_vertex_index += 1) {
  1022. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  1023. polygon_index += 1;
  1024. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR18");
  1025. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[polygon_index], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR19");
  1026. }
  1027. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  1028. ERR_FAIL_INDEX_V_MSG(vertex_index, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR20");
  1029. aggregate_vertex_data[vertex_index].push_back({ polygon_index, p_mapping_data.data[p_mapping_data.index[polygon_index]] });
  1030. }
  1031. ERR_FAIL_COND_V_MSG((polygon_index + 1) != polygon_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR22. Not all Polygons are present in the file.");
  1032. }
  1033. } break;
  1034. case FBXDocParser::MeshGeometry::MapType::edge: {
  1035. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  1036. // The data are mapped per edge directly.
  1037. ERR_FAIL_COND_V_MSG(p_edge_map.size() != p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR23");
  1038. for (size_t edge_index = 0; edge_index < p_mapping_data.data.size(); edge_index += 1) {
  1039. const FBXDocParser::MeshGeometry::Edge edge = FBXDocParser::MeshGeometry::get_edge(p_edge_map, edge_index);
  1040. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR24");
  1041. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR25");
  1042. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR26");
  1043. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR27");
  1044. aggregate_vertex_data[edge.vertex_0].push_back({ -1, p_mapping_data.data[edge_index] });
  1045. aggregate_vertex_data[edge.vertex_1].push_back({ -1, p_mapping_data.data[edge_index] });
  1046. }
  1047. } else {
  1048. // The data is mapped per edge using a reference.
  1049. // The indices array, contains a *reference_id for each polygon.
  1050. // * Note that the reference_id is the id of data into the data array.
  1051. //
  1052. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  1053. ERR_FAIL_COND_V_MSG(p_edge_map.size() != p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR28");
  1054. for (size_t edge_index = 0; edge_index < p_mapping_data.data.size(); edge_index += 1) {
  1055. const FBXDocParser::MeshGeometry::Edge edge = FBXDocParser::MeshGeometry::get_edge(p_edge_map, edge_index);
  1056. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR29");
  1057. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR30");
  1058. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR31");
  1059. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR32");
  1060. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[edge.vertex_0], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR33");
  1061. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[edge.vertex_1], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR34");
  1062. aggregate_vertex_data[edge.vertex_0].push_back({ -1, p_mapping_data.data[p_mapping_data.index[edge_index]] });
  1063. aggregate_vertex_data[edge.vertex_1].push_back({ -1, p_mapping_data.data[p_mapping_data.index[edge_index]] });
  1064. }
  1065. }
  1066. } break;
  1067. case FBXDocParser::MeshGeometry::MapType::all_the_same: {
  1068. // No matter the mode, no matter the data size; The first always win
  1069. // and is set to all the vertices.
  1070. ERR_FAIL_COND_V_MSG(p_mapping_data.data.size() <= 0, (HashMap<int, R>()), "FBX file seems corrupted: #ERR35");
  1071. if (p_mapping_data.data.size() > 0) {
  1072. for (int vertex_index = 0; vertex_index < p_vertex_count; vertex_index += 1) {
  1073. aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[0] });
  1074. }
  1075. }
  1076. } break;
  1077. }
  1078. if (aggregate_vertex_data.size() == 0) {
  1079. return (HashMap<int, R>());
  1080. }
  1081. // A map is used because turns out that the some FBX file are not well organized
  1082. // with vertices well compacted. Using a map allows avoid those issues.
  1083. HashMap<Vertex, R> result;
  1084. // Aggregate the collected data.
  1085. for (const Vertex *index = aggregate_vertex_data.next(nullptr); index != nullptr; index = aggregate_vertex_data.next(index)) {
  1086. Vector<VertexData<T>> *aggregated_vertex = aggregate_vertex_data.getptr(*index);
  1087. // This can't be null because we are just iterating.
  1088. CRASH_COND(aggregated_vertex == nullptr);
  1089. ERR_FAIL_INDEX_V_MSG(0, aggregated_vertex->size(), (HashMap<int, R>()), "The FBX file is corrupted, No valid data for this vertex index.");
  1090. result[*index] = collector_function(aggregated_vertex, p_fall_back);
  1091. }
  1092. // Sanitize the data now, if the file is broken we can try import it anyway.
  1093. bool problem_found = false;
  1094. for (size_t i = 0; i < p_mesh_indices.size(); i += 1) {
  1095. const Vertex vertex = get_vertex_from_polygon_vertex(p_mesh_indices, i);
  1096. if (result.has(vertex) == false) {
  1097. result[vertex] = p_fall_back;
  1098. problem_found = true;
  1099. }
  1100. }
  1101. if (problem_found) {
  1102. WARN_PRINT("Some data is missing, this FBX file may be corrupted: #WARN0.");
  1103. }
  1104. return result;
  1105. }
  1106. template <class T>
  1107. HashMap<int, T> FBXMeshData::extract_per_polygon(
  1108. int p_vertex_count,
  1109. const std::vector<int> &p_polygon_indices,
  1110. const FBXDocParser::MeshGeometry::MappingData<T> &p_fbx_data,
  1111. T p_fallback_value) const {
  1112. ERR_FAIL_COND_V_MSG(p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_fbx_data.data.size() == 0, (HashMap<int, T>()), "invalid index to direct array");
  1113. ERR_FAIL_COND_V_MSG(p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index && p_fbx_data.index.size() == 0, (HashMap<int, T>()), "The FBX seems corrupted");
  1114. const int polygon_count = count_polygons(p_polygon_indices);
  1115. // Aggregate vertex data.
  1116. HashMap<int, Vector<T>> aggregate_polygon_data;
  1117. switch (p_fbx_data.map_type) {
  1118. case FBXDocParser::MeshGeometry::MapType::none: {
  1119. // No data nothing to do.
  1120. return (HashMap<int, T>());
  1121. }
  1122. case FBXDocParser::MeshGeometry::MapType::vertex: {
  1123. ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per vertex. This should not happen.");
  1124. } break;
  1125. case FBXDocParser::MeshGeometry::MapType::polygon_vertex: {
  1126. ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per polygon vertex. This should not happen.");
  1127. } break;
  1128. case FBXDocParser::MeshGeometry::MapType::polygon: {
  1129. if (p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct) {
  1130. // The data is stored efficiently index_to_direct allows less data in the FBX file.
  1131. for (int polygon_index = 0;
  1132. polygon_index < polygon_count;
  1133. polygon_index += 1) {
  1134. if (p_fbx_data.index.size() == 0) {
  1135. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR62");
  1136. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[polygon_index]);
  1137. } else {
  1138. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR62");
  1139. const int index_to_direct = p_fbx_data.index[polygon_index];
  1140. T value = p_fbx_data.data[index_to_direct];
  1141. aggregate_polygon_data[polygon_index].push_back(value);
  1142. }
  1143. }
  1144. } else if (p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  1145. // The data are mapped per polygon directly.
  1146. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR51");
  1147. // Advance each polygon vertex, each new polygon advance the polygon index.
  1148. for (int polygon_index = 0;
  1149. polygon_index < polygon_count;
  1150. polygon_index += 1) {
  1151. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR52");
  1152. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[polygon_index]);
  1153. }
  1154. } else {
  1155. // The data is mapped per polygon using a reference.
  1156. // The indices array, contains a *reference_id for each polygon.
  1157. // * Note that the reference_id is the id of data into the data array.
  1158. //
  1159. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  1160. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file seems corrupted: #ERR52");
  1161. // Advance each polygon vertex, each new polygon advance the polygon index.
  1162. for (int polygon_index = 0;
  1163. polygon_index < polygon_count;
  1164. polygon_index += 1) {
  1165. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR53");
  1166. ERR_FAIL_INDEX_V_MSG(p_fbx_data.index[polygon_index], (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR54");
  1167. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[p_fbx_data.index[polygon_index]]);
  1168. }
  1169. }
  1170. } break;
  1171. case FBXDocParser::MeshGeometry::MapType::edge: {
  1172. ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per edge. This should not happen.");
  1173. } break;
  1174. case FBXDocParser::MeshGeometry::MapType::all_the_same: {
  1175. // No matter the mode, no matter the data size; The first always win
  1176. // and is set to all the vertices.
  1177. ERR_FAIL_COND_V_MSG(p_fbx_data.data.size() <= 0, (HashMap<int, T>()), "FBX file seems corrupted: #ERR55");
  1178. if (p_fbx_data.data.size() > 0) {
  1179. for (int polygon_index = 0; polygon_index < polygon_count; polygon_index += 1) {
  1180. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[0]);
  1181. }
  1182. }
  1183. } break;
  1184. }
  1185. if (aggregate_polygon_data.size() == 0) {
  1186. return (HashMap<int, T>());
  1187. }
  1188. // A map is used because turns out that the some FBX file are not well organized
  1189. // with vertices well compacted. Using a map allows avoid those issues.
  1190. HashMap<int, T> polygons;
  1191. // Take the first value for each vertex.
  1192. for (const Vertex *index = aggregate_polygon_data.next(nullptr); index != nullptr; index = aggregate_polygon_data.next(index)) {
  1193. Vector<T> *aggregated_polygon = aggregate_polygon_data.getptr(*index);
  1194. // This can't be null because we are just iterating.
  1195. CRASH_COND(aggregated_polygon == nullptr);
  1196. ERR_FAIL_INDEX_V_MSG(0, (int)aggregated_polygon->size(), (HashMap<int, T>()), "The FBX file is corrupted, No valid data for this polygon index.");
  1197. // Validate the final value.
  1198. polygons[*index] = (*aggregated_polygon)[0];
  1199. }
  1200. // Sanitize the data now, if the file is broken we can try import it anyway.
  1201. bool problem_found = false;
  1202. for (int polygon_i = 0; polygon_i < polygon_count; polygon_i += 1) {
  1203. if (polygons.has(polygon_i) == false) {
  1204. polygons[polygon_i] = p_fallback_value;
  1205. problem_found = true;
  1206. }
  1207. }
  1208. if (problem_found) {
  1209. WARN_PRINT("Some data is missing, this FBX file may be corrupted: #WARN1.");
  1210. }
  1211. return polygons;
  1212. }
  1213. void FBXMeshData::extract_morphs(const FBXDocParser::MeshGeometry *mesh_geometry, HashMap<String, MorphVertexData> &r_data) {
  1214. r_data.clear();
  1215. const int vertex_count = mesh_geometry->get_vertices().size();
  1216. for (const FBXDocParser::BlendShape *blend_shape : mesh_geometry->get_blend_shapes()) {
  1217. for (const FBXDocParser::BlendShapeChannel *blend_shape_channel : blend_shape->BlendShapeChannels()) {
  1218. const std::vector<const FBXDocParser::ShapeGeometry *> &shape_geometries = blend_shape_channel->GetShapeGeometries();
  1219. for (const FBXDocParser::ShapeGeometry *shape_geometry : shape_geometries) {
  1220. String morph_name = ImportUtils::FBXAnimMeshName(shape_geometry->Name()).c_str();
  1221. if (morph_name.is_empty()) {
  1222. morph_name = "morph";
  1223. }
  1224. // TODO we have only these??
  1225. const std::vector<unsigned int> &morphs_vertex_indices = shape_geometry->GetIndices();
  1226. const std::vector<Vector3> &morphs_vertices = shape_geometry->GetVertices();
  1227. const std::vector<Vector3> &morphs_normals = shape_geometry->GetNormals();
  1228. ERR_FAIL_COND_MSG((int)morphs_vertex_indices.size() > vertex_count, "The FBX file is corrupted: #ERR103");
  1229. ERR_FAIL_COND_MSG(morphs_vertex_indices.size() != morphs_vertices.size(), "The FBX file is corrupted: #ERR104");
  1230. ERR_FAIL_COND_MSG((int)morphs_vertices.size() > vertex_count, "The FBX file is corrupted: #ERR105");
  1231. ERR_FAIL_COND_MSG(morphs_normals.size() != 0 && morphs_normals.size() != morphs_vertices.size(), "The FBX file is corrupted: #ERR106");
  1232. if (r_data.has(morph_name) == false) {
  1233. // This morph doesn't exist yet.
  1234. // Create it.
  1235. MorphVertexData md;
  1236. md.vertices.resize(vertex_count);
  1237. md.normals.resize(vertex_count);
  1238. r_data.set(morph_name, md);
  1239. }
  1240. MorphVertexData *data = r_data.getptr(morph_name);
  1241. Vector3 *data_vertices_ptr = data->vertices.ptrw();
  1242. Vector3 *data_normals_ptr = data->normals.ptrw();
  1243. for (int i = 0; i < (int)morphs_vertex_indices.size(); i += 1) {
  1244. const Vertex vertex = morphs_vertex_indices[i];
  1245. ERR_FAIL_INDEX_MSG(vertex, vertex_count, "The blend shapes of this FBX file are corrupted. It has a not valid vertex.");
  1246. data_vertices_ptr[vertex] = morphs_vertices[i];
  1247. if (morphs_normals.size() != 0) {
  1248. data_normals_ptr[vertex] = morphs_normals[i];
  1249. }
  1250. }
  1251. }
  1252. }
  1253. }
  1254. }