fbx_mesh_data.cpp 56 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435
  1. /*************************************************************************/
  2. /* fbx_mesh_data.cpp */
  3. /*************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /*************************************************************************/
  8. /* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur. */
  9. /* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /*************************************************************************/
  30. #include "fbx_mesh_data.h"
  31. #include "core/templates/local_vector.h"
  32. #include "scene/resources/importer_mesh.h"
  33. #include "scene/resources/mesh.h"
  34. #include "scene/resources/surface_tool.h"
  35. #include "thirdparty/misc/polypartition.h"
  36. template <class T>
  37. T collect_first(const Vector<VertexData<T>> *p_data, T p_fall_back) {
  38. if (p_data->is_empty()) {
  39. return p_fall_back;
  40. }
  41. return (*p_data)[0].data;
  42. }
  43. template <class T>
  44. HashMap<int, T> collect_all(const Vector<VertexData<T>> *p_data, HashMap<int, T> p_fall_back) {
  45. if (p_data->is_empty()) {
  46. return p_fall_back;
  47. }
  48. HashMap<int, T> collection;
  49. for (int i = 0; i < p_data->size(); i += 1) {
  50. const VertexData<T> &vd = (*p_data)[i];
  51. collection[vd.polygon_index] = vd.data;
  52. }
  53. return collection;
  54. }
  55. template <class T>
  56. T collect_average(const Vector<VertexData<T>> *p_data, T p_fall_back) {
  57. if (p_data->is_empty()) {
  58. return p_fall_back;
  59. }
  60. T combined = (*p_data)[0].data; // Make sure the data is always correctly initialized.
  61. print_verbose("size of data: " + itos(p_data->size()));
  62. for (int i = 1; i < p_data->size(); i += 1) {
  63. combined += (*p_data)[i].data;
  64. }
  65. combined = combined / real_t(p_data->size());
  66. return combined.normalized();
  67. }
  68. HashMap<int, Vector3> collect_normal(const Vector<VertexData<Vector3>> *p_data, HashMap<int, Vector3> p_fall_back) {
  69. if (p_data->is_empty()) {
  70. return p_fall_back;
  71. }
  72. HashMap<int, Vector3> collection;
  73. for (int i = 0; i < p_data->size(); i += 1) {
  74. const VertexData<Vector3> &vd = (*p_data)[i];
  75. collection[vd.polygon_index] = vd.data;
  76. }
  77. return collection;
  78. }
  79. HashMap<int, Vector2> collect_uv(const Vector<VertexData<Vector2>> *p_data, HashMap<int, Vector2> p_fall_back) {
  80. if (p_data->is_empty()) {
  81. return p_fall_back;
  82. }
  83. HashMap<int, Vector2> collection;
  84. for (int i = 0; i < p_data->size(); i += 1) {
  85. const VertexData<Vector2> &vd = (*p_data)[i];
  86. collection[vd.polygon_index] = vd.data;
  87. }
  88. return collection;
  89. }
// Converts the parsed FBX geometry into a Godot ImporterMeshInstance3D.
// Works in six phases: (1) extract per-vertex/per-polygon attributes from
// the parser, (2) create one SurfaceTool per material id, (3) map each
// vertex into its surface, (4) fill vertices and triangulate, (5) build
// morph (blend-shape) arrays, (6) assemble the final ImporterMesh.
// NOTE(review): `use_compression` is not referenced anywhere in this body.
ImporterMeshInstance3D *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDocParser::MeshGeometry *p_mesh_geometry, const FBXDocParser::Model *model, bool use_compression) {
	mesh_geometry = p_mesh_geometry;
	// todo: make this just use a uint64_t FBX ID this is a copy of our original materials unfortunately.
	const std::vector<const FBXDocParser::Material *> &material_lookup = model->GetMaterials();

	// TODO: perf hotspot on large files
	// this can be a very large copy
	std::vector<int> polygon_indices = mesh_geometry->get_polygon_indices();
	std::vector<Vector3> vertices = mesh_geometry->get_vertices();

	// Phase 1. Parse all FBX data.
	// Each `*_raw` map is vertex -> (polygon -> value); the non-raw maps are
	// filled later by reorganize_vertices() once conflicts are resolved.
	HashMap<int, Vector3> normals;
	HashMap<int, HashMap<int, Vector3>> normals_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_normals(),
			&collect_all,
			HashMap<int, Vector3>());

	HashMap<int, Vector2> uvs_0;
	HashMap<int, HashMap<int, Vector2>> uvs_0_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_uv_0(),
			&collect_all,
			HashMap<int, Vector2>());

	HashMap<int, Vector2> uvs_1;
	HashMap<int, HashMap<int, Vector2>> uvs_1_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_uv_1(),
			&collect_all,
			HashMap<int, Vector2>());

	HashMap<int, Color> colors;
	HashMap<int, HashMap<int, Color>> colors_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_colors(),
			&collect_all,
			HashMap<int, Color>());

	// TODO what about tangents?
	// TODO what about bi-nomials?
	// TODO there is other?

	// Per-polygon material allocation id; -1 is the "no material" marker.
	HashMap<int, SurfaceId> polygon_surfaces = extract_per_polygon(
			vertices.size(),
			polygon_indices,
			mesh_geometry->get_material_allocation_id(),
			-1);

	HashMap<String, MorphVertexData> morphs;
	extract_morphs(mesh_geometry, morphs);

	// TODO please add skinning.
	//mesh_id = mesh_geometry->ID();

	sanitize_vertex_weights(state);

	// Reorganize the polygon vertices to correctly take into account strange
	// UVs (duplicating vertices whose per-polygon attributes conflict).
	reorganize_vertices(
			polygon_indices,
			vertices,
			normals,
			uvs_0,
			uvs_1,
			colors,
			morphs,
			normals_raw,
			colors_raw,
			uvs_0_raw,
			uvs_1_raw);

	const int color_count = colors.size();
	print_verbose("Vertex color count: " + itos(color_count));

	// Make sure that from this moment on the mesh_geometry is no used anymore.
	// This is a safety step, because the mesh_geometry data are no more valid
	// at this point.
	const int vertex_count = vertices.size();

	print_verbose("Vertex count: " + itos(vertex_count));

	// The map key is the material allocator id that is also used as surface id.
	HashMap<SurfaceId, SurfaceData> surfaces;

	// Phase 2. For each material create a surface tool (So a different mesh).
	{
		if (polygon_surfaces.is_empty()) {
			// No material, just use the default one with index -1.
			// Set -1 to all polygons.
			const int polygon_count = count_polygons(polygon_indices);
			for (int p = 0; p < polygon_count; p += 1) {
				polygon_surfaces[p] = -1;
			}
		}

		// Create the surface now.
		for (const int *polygon_id = polygon_surfaces.next(nullptr); polygon_id != nullptr; polygon_id = polygon_surfaces.next(polygon_id)) {
			const int surface_id = polygon_surfaces[*polygon_id];
			if (surfaces.has(surface_id) == false) {
				SurfaceData sd;
				sd.surface_tool.instantiate();
				sd.surface_tool->begin(Mesh::PRIMITIVE_TRIANGLES);

				if (surface_id < 0) {
					// nothing to do: surface without a material.
				} else if (surface_id < (int)material_lookup.size()) {
					// Resolve the cached Godot material for this FBX material.
					const FBXDocParser::Material *mat_mapping = material_lookup.at(surface_id);
					const uint64_t mapping_id = mat_mapping->ID();
					if (state.cached_materials.has(mapping_id)) {
						sd.material = state.cached_materials[mapping_id];
					}
				} else {
					WARN_PRINT("out of bounds surface detected, FBX file has corrupt material data");
				}

				surfaces.set(surface_id, sd);
			}
		}
	}

	// Phase 3. Map the vertices relative to each surface, in this way we can
	// just insert the vertices that we need per each surface.
	{
		PolygonId polygon_index = -1;
		SurfaceId surface_id = -1;
		SurfaceData *surface_data = nullptr;

		for (size_t polygon_vertex = 0; polygon_vertex < polygon_indices.size(); polygon_vertex += 1) {
			if (is_start_of_polygon(polygon_indices, polygon_vertex)) {
				polygon_index += 1;
				ERR_FAIL_COND_V_MSG(polygon_surfaces.has(polygon_index) == false, nullptr, "The FBX file is corrupted, This surface_index is not expected.");
				surface_id = polygon_surfaces[polygon_index];
				surface_data = surfaces.getptr(surface_id);
				CRASH_COND(surface_data == nullptr); // Can't be null.
			}

			const int vertex = get_vertex_from_polygon_vertex(polygon_indices, polygon_vertex);

			// The vertex position in the surface
			// Uses a lookup table for speed with large scenes
			bool has_polygon_vertex_index = surface_data->lookup_table.has(vertex);
			int surface_polygon_vertex_index = -1;

			if (has_polygon_vertex_index) {
				surface_polygon_vertex_index = surface_data->lookup_table[vertex];
			} else {
				// First time this vertex is seen in this surface: register it.
				surface_polygon_vertex_index = surface_data->vertices_map.size();
				surface_data->lookup_table[vertex] = surface_polygon_vertex_index;
				surface_data->vertices_map.push_back(vertex);
			}

			surface_data->surface_polygon_vertex[polygon_index].push_back(surface_polygon_vertex_index);
		}
	}

	//print_verbose("[debug UV 1] UV1: " + itos(uvs_0.size()));
	//print_verbose("[debug UV 2] UV2: " + itos(uvs_1.size()));

	// Phase 4. Per each surface just insert the vertices and add the indices.
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);

		// Just add the vertices data.
		for (unsigned int i = 0; i < surface->vertices_map.size(); i += 1) {
			const Vertex vertex = surface->vertices_map[i];

			// This must be done before add_vertex because the surface tool is
			// expecting this before the st->add_vertex() call
			add_vertex(state,
					surface->surface_tool,
					state.scale,
					vertex,
					vertices,
					normals,
					uvs_0,
					uvs_1,
					colors);
		}

		// Triangulate the various polygons and add the indices.
		for (const PolygonId *polygon_id = surface->surface_polygon_vertex.next(nullptr); polygon_id != nullptr; polygon_id = surface->surface_polygon_vertex.next(polygon_id)) {
			const Vector<DataIndex> *indices = surface->surface_polygon_vertex.getptr(*polygon_id);
			triangulate_polygon(
					surface,
					*indices,
					vertices);
		}
	}

	// Phase 5. Compose the morphs if any.
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);

		for (const String *morph_name = morphs.next(nullptr); morph_name != nullptr; morph_name = morphs.next(morph_name)) {
			MorphVertexData *morph_data = morphs.getptr(*morph_name);

			// As said by the docs, this is not supposed to be different than
			// vertex_count.
			CRASH_COND(morph_data->vertices.size() != vertex_count);
			CRASH_COND(morph_data->normals.size() != vertex_count);

			Vector3 *vertices_ptr = morph_data->vertices.ptrw();
			Vector3 *normals_ptr = morph_data->normals.ptrw();

			// Build a parallel SurfaceTool holding the morphed positions and
			// normals for this surface's vertices.
			Ref<SurfaceTool> morph_st;
			morph_st.instantiate();
			morph_st->begin(Mesh::PRIMITIVE_TRIANGLES);

			for (unsigned int vi = 0; vi < surface->vertices_map.size(); vi += 1) {
				const Vertex &vertex = surface->vertices_map[vi];
				add_vertex(
						state,
						morph_st,
						state.scale,
						vertex,
						vertices,
						normals,
						uvs_0,
						uvs_1,
						colors,
						vertices_ptr[vertex],
						normals_ptr[vertex]);
			}

			if (state.is_blender_fbx) {
				morph_st->generate_normals();
			}
			morph_st->generate_tangents();
			surface->morphs.push_back(morph_st->commit_to_arrays());
		}
	}

	// Phase 6. Compose the mesh and return it.
	Ref<ImporterMesh> mesh;
	mesh.instantiate();

	// Add blend shape info.
	for (const String *morph_name = morphs.next(nullptr); morph_name != nullptr; morph_name = morphs.next(morph_name)) {
		mesh->add_blend_shape(*morph_name);
	}

	// TODO always normalized, Why?
	mesh->set_blend_shape_mode(Mesh::BLEND_SHAPE_MODE_NORMALIZED);

	// Add surfaces.
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);

		if (state.is_blender_fbx) {
			surface->surface_tool->generate_normals();
		}
		// you can't generate them without a valid uv map.
		if (uvs_0_raw.size() > 0) {
			surface->surface_tool->generate_tangents();
		}

		Array mesh_array = surface->surface_tool->commit_to_arrays();
		Array blend_shapes = surface->morphs;

		if (surface->material.is_valid()) {
			mesh->add_surface(Mesh::PRIMITIVE_TRIANGLES, mesh_array, blend_shapes, Dictionary(), surface->material, surface->material->get_name());
		} else {
			mesh->add_surface(Mesh::PRIMITIVE_TRIANGLES, mesh_array, blend_shapes);
		}
	}

	ImporterMeshInstance3D *godot_mesh = memnew(ImporterMeshInstance3D);
	godot_mesh->set_mesh(mesh);
	const String name = ImportUtils::FBXNodeToName(model->Name());
	godot_mesh->set_name(name); // hurry up compiling >.<
	mesh->set_name("mesh3d-" + name);
	return godot_mesh;
}
// Prepares the per-vertex skin weights for Godot consumption:
// - remaps each influence's Godot bone id to its skin-cluster bind id,
// - sorts the influences by descending weight,
// - truncates/pads the influence list to RS::ARRAY_WEIGHTS_SIZE,
// - renormalizes the kept weights so they sum to 1.
// Does nothing when the geometry has no skin deformer or no clusters.
void FBXMeshData::sanitize_vertex_weights(const ImportState &state) {
	const int max_vertex_influence_count = RS::ARRAY_WEIGHTS_SIZE;
	// Maps godot_bone_id -> bind id (the cluster's position in the skin).
	Map<int, int> skeleton_to_skin_bind_id;
	// TODO: proper error reporting needs to be added here.
	const FBXDocParser::Skin *fbx_skin = mesh_geometry->DeformerSkin();
	if (fbx_skin == nullptr || fbx_skin->Clusters().size() == 0) {
		return; // do nothing: unskinned mesh.
	}

	//
	// Precalculate the skin cluster mapping
	//

	int bind_id = 0;
	for (const FBXDocParser::Cluster *cluster : fbx_skin->Clusters()) {
		// Every cluster must target a bone already present in the bone map.
		ERR_CONTINUE_MSG(!state.fbx_bone_map.has(cluster->TargetNode()->ID()), "Missing bone map for cluster target node with id " + uitos(cluster->TargetNode()->ID()) + ".");
		Ref<FBXBone> bone = state.fbx_bone_map[cluster->TargetNode()->ID()];
		skeleton_to_skin_bind_id.insert(bone->godot_bone_id, bind_id);
		bind_id++;
	}

	for (const Vertex *v = vertex_weights.next(nullptr); v != nullptr; v = vertex_weights.next(v)) {
		VertexWeightMapping *vm = vertex_weights.getptr(*v);
		ERR_CONTINUE(vm->bones.size() != vm->weights.size()); // No message, already checked.
		ERR_CONTINUE(vm->bones_ref.size() != vm->weights.size()); // No message, already checked.

		const int initial_size = vm->weights.size();
		{
			// Init bone id: rewrite each influence's bone index from the
			// skeleton's bone id to the skin bind id computed above.
			int *bones_ptr = vm->bones.ptrw();
			Ref<FBXBone> *bones_ref_ptr = vm->bones_ref.ptrw();

			for (int i = 0; i < vm->weights.size(); i += 1) {
				// At this point this is not possible because the skeleton is already initialized.
				CRASH_COND(bones_ref_ptr[i]->godot_bone_id == -2);
				bones_ptr[i] = skeleton_to_skin_bind_id[bones_ref_ptr[i]->godot_bone_id];
			}

			// From this point on the data is no more valid.
			vm->bones_ref.clear();
		}

		{
			// Sort influences by descending weight. Simple O(n^2) swap sort;
			// influence counts per vertex are small so this is acceptable.
			float *weights_ptr = vm->weights.ptrw();
			int *bones_ptr = vm->bones.ptrw();
			for (int i = 0; i < vm->weights.size(); i += 1) {
				for (int x = i + 1; x < vm->weights.size(); x += 1) {
					if (weights_ptr[i] < weights_ptr[x]) {
						SWAP(weights_ptr[i], weights_ptr[x]);
						SWAP(bones_ptr[i], bones_ptr[x]);
					}
				}
			}
		}

		{
			// Resize: keep only the strongest influences; pad with zero
			// weight / bone 0 when there are fewer than the maximum.
			vm->weights.resize(max_vertex_influence_count);
			vm->bones.resize(max_vertex_influence_count);
			float *weights_ptr = vm->weights.ptrw();
			int *bones_ptr = vm->bones.ptrw();
			for (int i = initial_size; i < max_vertex_influence_count; i += 1) {
				weights_ptr[i] = 0.0;
				bones_ptr[i] = 0;
			}

			// Normalize so the kept weights sum to 1 (skipped if all zero).
			real_t sum = 0.0;
			for (int i = 0; i < max_vertex_influence_count; i += 1) {
				sum += weights_ptr[i];
			}
			if (sum > 0.0) {
				for (int i = 0; i < vm->weights.size(); i += 1) {
					weights_ptr[i] = weights_ptr[i] / sum;
				}
			}
		}
	}
}
  398. void FBXMeshData::reorganize_vertices(
  399. // TODO: perf hotspot on insane files
  400. std::vector<int> &r_polygon_indices,
  401. std::vector<Vector3> &r_vertices,
  402. HashMap<int, Vector3> &r_normals,
  403. HashMap<int, Vector2> &r_uv_1,
  404. HashMap<int, Vector2> &r_uv_2,
  405. HashMap<int, Color> &r_color,
  406. HashMap<String, MorphVertexData> &r_morphs,
  407. HashMap<int, HashMap<int, Vector3>> &r_normals_raw,
  408. HashMap<int, HashMap<int, Color>> &r_colors_raw,
  409. HashMap<int, HashMap<int, Vector2>> &r_uv_1_raw,
  410. HashMap<int, HashMap<int, Vector2>> &r_uv_2_raw) {
  411. // Key: OldVertex; Value: [New vertices];
  412. HashMap<int, Vector<int>> duplicated_vertices;
  413. PolygonId polygon_index = -1;
  414. for (int pv = 0; pv < (int)r_polygon_indices.size(); pv += 1) {
  415. if (is_start_of_polygon(r_polygon_indices, pv)) {
  416. polygon_index += 1;
  417. }
  418. const Vertex index = get_vertex_from_polygon_vertex(r_polygon_indices, pv);
  419. bool need_duplication = false;
  420. Vector2 this_vert_poly_uv1 = Vector2();
  421. Vector2 this_vert_poly_uv2 = Vector2();
  422. Vector3 this_vert_poly_normal = Vector3();
  423. Color this_vert_poly_color = Color();
  424. // Take the normal and see if we need to duplicate this polygon.
  425. if (r_normals_raw.has(index)) {
  426. const HashMap<PolygonId, Vector3> *nrml_arr = r_normals_raw.getptr(index);
  427. if (nrml_arr->has(polygon_index)) {
  428. this_vert_poly_normal = nrml_arr->get(polygon_index);
  429. } else if (nrml_arr->has(-1)) {
  430. this_vert_poly_normal = nrml_arr->get(-1);
  431. } else {
  432. print_error("invalid normal detected: " + itos(index) + " polygon index: " + itos(polygon_index));
  433. for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
  434. print_verbose("debug contents key: " + itos(*pid));
  435. if (nrml_arr->has(*pid)) {
  436. print_verbose("contents valid: " + nrml_arr->get(*pid));
  437. }
  438. }
  439. }
  440. // Now, check if we need to duplicate it.
  441. for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
  442. if (*pid == polygon_index) {
  443. continue;
  444. }
  445. const Vector3 vert_poly_normal = *nrml_arr->getptr(*pid);
  446. if (!vert_poly_normal.is_equal_approx(this_vert_poly_normal)) {
  447. // Yes this polygon need duplication.
  448. need_duplication = true;
  449. break;
  450. }
  451. }
  452. }
  453. // TODO: make me vertex color
  454. // Take the normal and see if we need to duplicate this polygon.
  455. if (r_colors_raw.has(index)) {
  456. const HashMap<PolygonId, Color> *color_arr = r_colors_raw.getptr(index);
  457. if (color_arr->has(polygon_index)) {
  458. this_vert_poly_color = color_arr->get(polygon_index);
  459. } else if (color_arr->has(-1)) {
  460. this_vert_poly_color = color_arr->get(-1);
  461. } else {
  462. print_error("invalid color detected: " + itos(index) + " polygon index: " + itos(polygon_index));
  463. for (const PolygonId *pid = color_arr->next(nullptr); pid != nullptr; pid = color_arr->next(pid)) {
  464. print_verbose("debug contents key: " + itos(*pid));
  465. if (color_arr->has(*pid)) {
  466. print_verbose("contents valid: " + color_arr->get(*pid));
  467. }
  468. }
  469. }
  470. // Now, check if we need to duplicate it.
  471. for (const PolygonId *pid = color_arr->next(nullptr); pid != nullptr; pid = color_arr->next(pid)) {
  472. if (*pid == polygon_index) {
  473. continue;
  474. }
  475. const Color vert_poly_color = *color_arr->getptr(*pid);
  476. if (!this_vert_poly_color.is_equal_approx(vert_poly_color)) {
  477. // Yes this polygon need duplication.
  478. need_duplication = true;
  479. break;
  480. }
  481. }
  482. }
  483. // Take the UV1 and UV2 and see if we need to duplicate this polygon.
  484. {
  485. HashMap<int, HashMap<int, Vector2>> *uv_raw = &r_uv_1_raw;
  486. Vector2 *this_vert_poly_uv = &this_vert_poly_uv1;
  487. for (int kk = 0; kk < 2; kk++) {
  488. if (uv_raw->has(index)) {
  489. const HashMap<PolygonId, Vector2> *uvs = uv_raw->getptr(index);
  490. if (uvs->has(polygon_index)) {
  491. // This Polygon has its own uv.
  492. (*this_vert_poly_uv) = *uvs->getptr(polygon_index);
  493. // Check if we need to duplicate it.
  494. for (const PolygonId *pid = uvs->next(nullptr); pid != nullptr; pid = uvs->next(pid)) {
  495. if (*pid == polygon_index) {
  496. continue;
  497. }
  498. const Vector2 vert_poly_uv = *uvs->getptr(*pid);
  499. if (!vert_poly_uv.is_equal_approx(*this_vert_poly_uv)) {
  500. // Yes this polygon need duplication.
  501. need_duplication = true;
  502. break;
  503. }
  504. }
  505. } else if (uvs->has(-1)) {
  506. // It has the default UV.
  507. (*this_vert_poly_uv) = *uvs->getptr(-1);
  508. } else if (uvs->size() > 0) {
  509. // No uv, this is strange, just take the first and duplicate.
  510. (*this_vert_poly_uv) = *uvs->getptr(*uvs->next(nullptr));
  511. WARN_PRINT("No UVs for this polygon, while there is no default and some other polygons have it. This FBX file may be corrupted.");
  512. }
  513. }
  514. uv_raw = &r_uv_2_raw;
  515. this_vert_poly_uv = &this_vert_poly_uv2;
  516. }
  517. }
  518. // If we want to duplicate it, Let's see if we already duplicated this
  519. // vertex.
  520. if (need_duplication) {
  521. if (duplicated_vertices.has(index)) {
  522. Vertex similar_vertex = -1;
  523. // Let's see if one of the new vertices has the same data of this.
  524. const Vector<int> *new_vertices = duplicated_vertices.getptr(index);
  525. for (int j = 0; j < new_vertices->size(); j += 1) {
  526. const Vertex new_vertex = (*new_vertices)[j];
  527. bool same_uv1 = false;
  528. bool same_uv2 = false;
  529. bool same_normal = false;
  530. bool same_color = false;
  531. if (r_uv_1.has(new_vertex)) {
  532. if ((this_vert_poly_uv1 - (*r_uv_1.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  533. same_uv1 = true;
  534. }
  535. }
  536. if (r_uv_2.has(new_vertex)) {
  537. if ((this_vert_poly_uv2 - (*r_uv_2.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  538. same_uv2 = true;
  539. }
  540. }
  541. if (r_color.has(new_vertex)) {
  542. if (this_vert_poly_color.is_equal_approx((*r_color.getptr(new_vertex)))) {
  543. same_color = true;
  544. }
  545. }
  546. if (r_normals.has(new_vertex)) {
  547. if ((this_vert_poly_normal - (*r_normals.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  548. same_uv2 = true;
  549. }
  550. }
  551. if (same_uv1 && same_uv2 && same_normal && same_color) {
  552. similar_vertex = new_vertex;
  553. break;
  554. }
  555. }
  556. if (similar_vertex != -1) {
  557. // Update polygon.
  558. if (is_end_of_polygon(r_polygon_indices, pv)) {
  559. r_polygon_indices[pv] = ~similar_vertex;
  560. } else {
  561. r_polygon_indices[pv] = similar_vertex;
  562. }
  563. need_duplication = false;
  564. }
  565. }
  566. }
  567. if (need_duplication) {
  568. const Vertex old_index = index;
  569. const Vertex new_index = r_vertices.size();
  570. // Polygon index.
  571. if (is_end_of_polygon(r_polygon_indices, pv)) {
  572. r_polygon_indices[pv] = ~new_index;
  573. } else {
  574. r_polygon_indices[pv] = new_index;
  575. }
  576. // Vertex position.
  577. r_vertices.push_back(r_vertices[old_index]);
  578. // Normals
  579. if (r_normals_raw.has(old_index)) {
  580. r_normals.set(new_index, this_vert_poly_normal);
  581. r_normals_raw.getptr(old_index)->erase(polygon_index);
  582. r_normals_raw[new_index][polygon_index] = this_vert_poly_normal;
  583. }
  584. // Vertex Color
  585. if (r_colors_raw.has(old_index)) {
  586. r_color.set(new_index, this_vert_poly_color);
  587. r_colors_raw.getptr(old_index)->erase(polygon_index);
  588. r_colors_raw[new_index][polygon_index] = this_vert_poly_color;
  589. }
  590. // UV 0
  591. if (r_uv_1_raw.has(old_index)) {
  592. r_uv_1.set(new_index, this_vert_poly_uv1);
  593. r_uv_1_raw.getptr(old_index)->erase(polygon_index);
  594. r_uv_1_raw[new_index][polygon_index] = this_vert_poly_uv1;
  595. }
  596. // UV 1
  597. if (r_uv_2_raw.has(old_index)) {
  598. r_uv_2.set(new_index, this_vert_poly_uv2);
  599. r_uv_2_raw.getptr(old_index)->erase(polygon_index);
  600. r_uv_2_raw[new_index][polygon_index] = this_vert_poly_uv2;
  601. }
  602. // Morphs
  603. for (const String *mname = r_morphs.next(nullptr); mname != nullptr; mname = r_morphs.next(mname)) {
  604. MorphVertexData *d = r_morphs.getptr(*mname);
  605. // This can't never happen.
  606. CRASH_COND(d == nullptr);
  607. if (d->vertices.size() > old_index) {
  608. d->vertices.push_back(d->vertices[old_index]);
  609. }
  610. if (d->normals.size() > old_index) {
  611. d->normals.push_back(d->normals[old_index]);
  612. }
  613. }
  614. if (vertex_weights.has(old_index)) {
  615. vertex_weights.set(new_index, vertex_weights[old_index]);
  616. }
  617. duplicated_vertices[old_index].push_back(new_index);
  618. } else {
  619. if (r_normals_raw.has(index) &&
  620. r_normals.has(index) == false) {
  621. r_normals.set(index, this_vert_poly_normal);
  622. }
  623. if (r_colors_raw.has(index) && r_color.has(index) == false) {
  624. r_color.set(index, this_vert_poly_color);
  625. }
  626. if (r_uv_1_raw.has(index) &&
  627. r_uv_1.has(index) == false) {
  628. r_uv_1.set(index, this_vert_poly_uv1);
  629. }
  630. if (r_uv_2_raw.has(index) &&
  631. r_uv_2.has(index) == false) {
  632. r_uv_2.set(index, this_vert_poly_uv2);
  633. }
  634. }
  635. }
  636. }
// Pushes one vertex - position plus whatever per-vertex attributes exist for
// it - into the given SurfaceTool.
// `p_vertex` indexes into `p_vertices_position`; the attribute HashMaps are
// sparse, so each attribute is set only when present for this vertex.
// `p_morph_value` / `p_morph_normal` are added on top of the base position /
// normal (pass zero vectors when not building a morph target).
// Note: SurfaceTool requires all attributes to be set BEFORE add_vertex(),
// which is why the position is pushed last.
void FBXMeshData::add_vertex(
		const ImportState &state,
		Ref<SurfaceTool> p_surface_tool,
		real_t p_scale,
		Vertex p_vertex,
		const std::vector<Vector3> &p_vertices_position,
		const HashMap<int, Vector3> &p_normals,
		const HashMap<int, Vector2> &p_uvs_0,
		const HashMap<int, Vector2> &p_uvs_1,
		const HashMap<int, Color> &p_colors,
		const Vector3 &p_morph_value,
		const Vector3 &p_morph_normal) {
	ERR_FAIL_INDEX_MSG(p_vertex, (Vertex)p_vertices_position.size(), "FBX file is corrupted, the position of the vertex can't be retrieved.");

	// Normals from Blender-exported FBX files are skipped here; presumably they
	// are regenerated elsewhere -- TODO confirm against the Blender import path.
	if (p_normals.has(p_vertex) && !state.is_blender_fbx) {
		p_surface_tool->set_normal(p_normals[p_vertex] + p_morph_normal);
	}

	if (p_uvs_0.has(p_vertex)) {
		//print_verbose("uv1: [" + itos(p_vertex) + "] " + p_uvs_0[p_vertex]);
		// Inverts Y UV.
		p_surface_tool->set_uv(Vector2(p_uvs_0[p_vertex].x, 1 - p_uvs_0[p_vertex].y));
	}

	if (p_uvs_1.has(p_vertex)) {
		//print_verbose("uv2: [" + itos(p_vertex) + "] " + p_uvs_1[p_vertex]);
		// Inverts Y UV.
		p_surface_tool->set_uv2(Vector2(p_uvs_1[p_vertex].x, 1 - p_uvs_1[p_vertex].y));
	}

	if (p_colors.has(p_vertex)) {
		p_surface_tool->set_color(p_colors[p_vertex]);
	}

	// TODO what about binormals?
	// TODO there is other?

	if (vertex_weights.has(p_vertex)) {
		// Let's extract the weight info.
		const VertexWeightMapping *vm = vertex_weights.getptr(p_vertex);
		const Vector<int> &bones = vm->bones;

		// the bug is that the bone idx is wrong because it is not ref'ing the skin.
		if (bones.size() > RS::ARRAY_WEIGHTS_SIZE) {
			print_error("[weight overflow detected]");
		}

		p_surface_tool->set_weights(vm->weights);
		// 0 1 2 3 4 5 6 7 < local skeleton / skin for mesh
		// 0 1 2 3 4 5 6 7 8 9 10 < actual skeleton with all joints
		p_surface_tool->set_bones(bones);
	}

	// The surface tool want the vertex position as last thing.
	p_surface_tool->add_vertex((p_vertices_position[p_vertex] + p_morph_value) * p_scale);
}
  684. void FBXMeshData::triangulate_polygon(SurfaceData *surface, const Vector<int> &p_polygon_vertex, const std::vector<Vector3> &p_vertices) const {
  685. Ref<SurfaceTool> st(surface->surface_tool);
  686. const int polygon_vertex_count = p_polygon_vertex.size();
  687. //const Vector<Vertex>& p_surface_vertex_map
  688. if (polygon_vertex_count == 1) {
  689. // point to triangle
  690. st->add_index(p_polygon_vertex[0]);
  691. st->add_index(p_polygon_vertex[0]);
  692. st->add_index(p_polygon_vertex[0]);
  693. return;
  694. } else if (polygon_vertex_count == 2) {
  695. // line to triangle
  696. st->add_index(p_polygon_vertex[1]);
  697. st->add_index(p_polygon_vertex[1]);
  698. st->add_index(p_polygon_vertex[0]);
  699. return;
  700. } else if (polygon_vertex_count == 3) {
  701. // triangle to triangle
  702. st->add_index(p_polygon_vertex[0]);
  703. st->add_index(p_polygon_vertex[2]);
  704. st->add_index(p_polygon_vertex[1]);
  705. return;
  706. } else if (polygon_vertex_count == 4) {
  707. // quad to triangle - this code is awesome for import times
  708. // it prevents triangles being generated slowly
  709. st->add_index(p_polygon_vertex[0]);
  710. st->add_index(p_polygon_vertex[2]);
  711. st->add_index(p_polygon_vertex[1]);
  712. st->add_index(p_polygon_vertex[2]);
  713. st->add_index(p_polygon_vertex[0]);
  714. st->add_index(p_polygon_vertex[3]);
  715. return;
  716. } else {
  717. // non triangulated - we must run the triangulation algorithm
  718. bool is_simple_convex = false;
  719. // this code is 'slow' but required it triangulates all the unsupported geometry.
  720. // Doesn't allow for bigger polygons because those are unlikely be convex
  721. if (polygon_vertex_count <= 6) {
  722. // Start from true, check if it's false.
  723. is_simple_convex = true;
  724. Vector3 first_vec;
  725. for (int i = 0; i < polygon_vertex_count; i += 1) {
  726. const Vector3 p1 = p_vertices[surface->vertices_map[p_polygon_vertex[i]]];
  727. const Vector3 p2 = p_vertices[surface->vertices_map[p_polygon_vertex[(i + 1) % polygon_vertex_count]]];
  728. const Vector3 p3 = p_vertices[surface->vertices_map[p_polygon_vertex[(i + 2) % polygon_vertex_count]]];
  729. const Vector3 edge1 = p1 - p2;
  730. const Vector3 edge2 = p3 - p2;
  731. const Vector3 res = edge1.normalized().cross(edge2.normalized()).normalized();
  732. if (i == 0) {
  733. first_vec = res;
  734. } else {
  735. if (first_vec.dot(res) < 0.0) {
  736. // Ok we found an angle that is not the same dir of the
  737. // others.
  738. is_simple_convex = false;
  739. break;
  740. }
  741. }
  742. }
  743. }
  744. if (is_simple_convex) {
  745. // This is a convex polygon, so just triangulate it.
  746. for (int i = 0; i < (polygon_vertex_count - 2); i += 1) {
  747. st->add_index(p_polygon_vertex[2 + i]);
  748. st->add_index(p_polygon_vertex[1 + i]);
  749. st->add_index(p_polygon_vertex[0]);
  750. }
  751. return;
  752. }
  753. }
  754. {
  755. // This is a concave polygon.
  756. std::vector<Vector3> poly_vertices(polygon_vertex_count);
  757. for (int i = 0; i < polygon_vertex_count; i += 1) {
  758. poly_vertices[i] = p_vertices[surface->vertices_map[p_polygon_vertex[i]]];
  759. }
  760. const Vector3 poly_norm = get_poly_normal(poly_vertices);
  761. if (poly_norm.length_squared() <= CMP_EPSILON) {
  762. ERR_FAIL_COND_MSG(poly_norm.length_squared() <= CMP_EPSILON, "The normal of this poly was not computed. Is this FBX file corrupted.");
  763. }
  764. // Select the plan coordinate.
  765. int axis_1_coord = 0;
  766. int axis_2_coord = 1;
  767. {
  768. real_t inv = poly_norm.z;
  769. const real_t axis_x = ABS(poly_norm.x);
  770. const real_t axis_y = ABS(poly_norm.y);
  771. const real_t axis_z = ABS(poly_norm.z);
  772. if (axis_x > axis_y) {
  773. if (axis_x > axis_z) {
  774. // For the most part the normal point toward X.
  775. axis_1_coord = 1;
  776. axis_2_coord = 2;
  777. inv = poly_norm.x;
  778. }
  779. } else if (axis_y > axis_z) {
  780. // For the most part the normal point toward Y.
  781. axis_1_coord = 2;
  782. axis_2_coord = 0;
  783. inv = poly_norm.y;
  784. }
  785. // Swap projection axes to take the negated projection vector into account
  786. if (inv < 0.0f) {
  787. SWAP(axis_1_coord, axis_2_coord);
  788. }
  789. }
  790. TPPLPoly tppl_poly;
  791. tppl_poly.Init(polygon_vertex_count);
  792. std::vector<Vector2> projected_vertices(polygon_vertex_count);
  793. for (int i = 0; i < polygon_vertex_count; i += 1) {
  794. const Vector2 pv(poly_vertices[i][axis_1_coord], poly_vertices[i][axis_2_coord]);
  795. projected_vertices[i] = pv;
  796. tppl_poly.GetPoint(i) = pv;
  797. }
  798. tppl_poly.SetOrientation(TPPL_ORIENTATION_CCW);
  799. List<TPPLPoly> out_poly;
  800. TPPLPartition tppl_partition;
  801. if (tppl_partition.Triangulate_OPT(&tppl_poly, &out_poly) == 0) { // Good result.
  802. if (tppl_partition.Triangulate_EC(&tppl_poly, &out_poly) == 0) { // Medium result.
  803. if (tppl_partition.Triangulate_MONO(&tppl_poly, &out_poly) == 0) { // Really poor result.
  804. ERR_FAIL_MSG("The triangulation of this polygon failed, please try to triangulate your mesh or check if it has broken polygons.");
  805. }
  806. }
  807. }
  808. std::vector<Vector2> tris(out_poly.size());
  809. for (List<TPPLPoly>::Element *I = out_poly.front(); I; I = I->next()) {
  810. TPPLPoly &tp = I->get();
  811. ERR_FAIL_COND_MSG(tp.GetNumPoints() != 3, "The triangulator returned more points, how this is possible?");
  812. // Find Index
  813. for (int i = 2; i >= 0; i -= 1) {
  814. const Vector2 vertex = tp.GetPoint(i);
  815. bool done = false;
  816. // Find Index
  817. for (int y = 0; y < polygon_vertex_count; y += 1) {
  818. if ((projected_vertices[y] - vertex).length_squared() <= CMP_EPSILON) {
  819. // This seems the right vertex
  820. st->add_index(p_polygon_vertex[y]);
  821. done = true;
  822. break;
  823. }
  824. }
  825. ERR_FAIL_COND(done == false);
  826. }
  827. }
  828. }
  829. }
  830. void FBXMeshData::gen_weight_info(Ref<SurfaceTool> st, Vertex vertex_id) const {
  831. if (vertex_weights.is_empty()) {
  832. return;
  833. }
  834. if (vertex_weights.has(vertex_id)) {
  835. // Let's extract the weight info.
  836. const VertexWeightMapping *vm = vertex_weights.getptr(vertex_id);
  837. st->set_weights(vm->weights);
  838. st->set_bones(vm->bones);
  839. }
  840. }
  841. int FBXMeshData::get_vertex_from_polygon_vertex(const std::vector<int> &p_polygon_indices, int p_index) const {
  842. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  843. return -1;
  844. }
  845. const int vertex = p_polygon_indices[p_index];
  846. if (vertex >= 0) {
  847. return vertex;
  848. } else {
  849. // Negative numbers are the end of the face, reversing the bits is
  850. // possible to obtain the positive correct vertex number.
  851. return ~vertex;
  852. }
  853. }
  854. bool FBXMeshData::is_end_of_polygon(const std::vector<int> &p_polygon_indices, int p_index) const {
  855. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  856. return false;
  857. }
  858. const int vertex = p_polygon_indices[p_index];
  859. // If the index is negative this is the end of the Polygon.
  860. return vertex < 0;
  861. }
  862. bool FBXMeshData::is_start_of_polygon(const std::vector<int> &p_polygon_indices, int p_index) const {
  863. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  864. return false;
  865. }
  866. if (p_index == 0) {
  867. return true;
  868. }
  869. // If the previous indices is negative this is the begin of a new Polygon.
  870. return p_polygon_indices[p_index - 1] < 0;
  871. }
  872. int FBXMeshData::count_polygons(const std::vector<int> &p_polygon_indices) const {
  873. // The negative numbers define the end of the polygon. Counting the amount of
  874. // negatives the numbers of polygons are obtained.
  875. int count = 0;
  876. for (size_t i = 0; i < p_polygon_indices.size(); i += 1) {
  877. if (p_polygon_indices[i] < 0) {
  878. count += 1;
  879. }
  880. }
  881. return count;
  882. }
// Extracts one data channel (normals, UVs, colors, ...) stored with any FBX
// MapType / ReferenceType combination and re-organizes it *per vertex*.
// When several values land on the same vertex (e.g. one value per polygon
// corner), `collector_function` merges them; vertices that end up with no
// value at all are filled with `p_fall_back` and a warning is printed.
// Returns an empty HashMap when the channel is absent or the file is corrupted
// (see the #ERRnn codes in the messages).
template <class R, class T>
HashMap<int, R> FBXMeshData::extract_per_vertex_data(
		int p_vertex_count,
		const std::vector<FBXDocParser::MeshGeometry::Edge> &p_edge_map,
		const std::vector<int> &p_mesh_indices,
		const FBXDocParser::MeshGeometry::MappingData<T> &p_mapping_data,
		R (*collector_function)(const Vector<VertexData<T>> *p_vertex_data, R p_fall_back),
		R p_fall_back) const {
	/* When index_to_direct is set
	 * index size is 184 ( contains index for the data array [values 0, 96] )
	 * data size is 96 (contains uv coordinates)
	 * this means index is simple data reduction basically
	 */
	////
	// Dump diagnostics right before failing on the malformed index_to_direct
	// case, checked by the first ERR_FAIL below with the same condition.
	if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_mapping_data.index.size() == 0) {
		print_verbose("debug count: index size: " + itos(p_mapping_data.index.size()) + ", data size: " + itos(p_mapping_data.data.size()));
		print_verbose("vertex indices count: " + itos(p_mesh_indices.size()));
		print_verbose("Edge map size: " + itos(p_edge_map.size()));
	}

	ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_mapping_data.index.size() == 0, (HashMap<int, R>()), "FBX importer needs to map correctly to this field, please specify the override index name to fix this problem!");
	ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index && p_mapping_data.index.size() == 0, (HashMap<int, R>()), "The FBX seems corrupted");

	// Aggregate vertex data: collect for each vertex every (polygon_id, value)
	// pair the channel provides. polygon_id is -1 when the value is not tied to
	// a specific polygon.
	HashMap<Vertex, Vector<VertexData<T>>> aggregate_vertex_data;

	switch (p_mapping_data.map_type) {
		case FBXDocParser::MeshGeometry::MapType::none: {
			// No data nothing to do.
			return (HashMap<int, R>());
		}
		case FBXDocParser::MeshGeometry::MapType::vertex: {
			ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct, (HashMap<int, R>()), "We will support in future");

			if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
				// The data is mapped per vertex directly.
				ERR_FAIL_COND_V_MSG((int)p_mapping_data.data.size() != p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR01");
				for (size_t vertex_index = 0; vertex_index < p_mapping_data.data.size(); vertex_index += 1) {
					aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[vertex_index] });
				}
			} else {
				// The data is mapped per vertex using a reference.
				// The indices array, contains a *reference_id for each vertex.
				// * Note that the reference_id is the id of data into the data array.
				//
				// https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
				ERR_FAIL_COND_V_MSG((int)p_mapping_data.index.size() != p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR02");
				for (size_t vertex_index = 0; vertex_index < p_mapping_data.index.size(); vertex_index += 1) {
					ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[vertex_index], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR03.");
					aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[p_mapping_data.index[vertex_index]] });
				}
			}
		} break;
		case FBXDocParser::MeshGeometry::MapType::polygon_vertex: {
			if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct) {
				// The data is mapped using each index from the indexes array then direct to the data (data reduction algorithm)
				ERR_FAIL_COND_V_MSG((int)p_mesh_indices.size() != (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR04");
				int polygon_id = -1;
				for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.index.size(); polygon_vertex_index += 1) {
					if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
						polygon_id += 1;
					}
					const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
					ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR05");
					ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR06");
					// NOTE(review): `index_to_direct` is not bounds-checked
					// against data.size() before the read below -- confirm the
					// parser guarantees it, or add a check like #ERR11.
					const int index_to_direct = p_mapping_data.index[polygon_vertex_index];
					T value = p_mapping_data.data[index_to_direct];
					aggregate_vertex_data[vertex_index].push_back({ polygon_id, value });
				}
			} else if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
				// The data are mapped per polygon vertex directly.
				ERR_FAIL_COND_V_MSG((int)p_mesh_indices.size() != (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR04");
				int polygon_id = -1;
				for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.data.size(); polygon_vertex_index += 1) {
					if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
						polygon_id += 1;
					}
					const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
					ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR05");
					ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR06");
					aggregate_vertex_data[vertex_index].push_back({ polygon_id, p_mapping_data.data[polygon_vertex_index] });
				}
			} else {
				// The data is mapped per polygon_vertex using a reference.
				// The indices array, contains a *reference_id for each polygon_vertex.
				// * Note that the reference_id is the id of data into the data array.
				//
				// https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
				ERR_FAIL_COND_V_MSG(p_mesh_indices.size() != p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR7");
				int polygon_id = -1;
				for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.index.size(); polygon_vertex_index += 1) {
					if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
						polygon_id += 1;
					}
					const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
					ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR8");
					ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR9.");
					ERR_FAIL_COND_V_MSG(p_mapping_data.index[polygon_vertex_index] < 0, (HashMap<int, R>()), "FBX file seems corrupted: #ERR10.");
					ERR_FAIL_COND_V_MSG(p_mapping_data.index[polygon_vertex_index] >= (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR11.");
					aggregate_vertex_data[vertex_index].push_back({ polygon_id, p_mapping_data.data[p_mapping_data.index[polygon_vertex_index]] });
				}
			}
		} break;
		case FBXDocParser::MeshGeometry::MapType::polygon: {
			if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
				// The data are mapped per polygon directly.
				const int polygon_count = count_polygons(p_mesh_indices);
				ERR_FAIL_COND_V_MSG(polygon_count != (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR12");

				// Advance each polygon vertex, each new polygon advance the polygon index.
				int polygon_index = -1;
				for (size_t polygon_vertex_index = 0;
						polygon_vertex_index < p_mesh_indices.size();
						polygon_vertex_index += 1) {
					if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
						polygon_index += 1;
						ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR13");
					}

					const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
					ERR_FAIL_INDEX_V_MSG(vertex_index, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR14");

					aggregate_vertex_data[vertex_index].push_back({ polygon_index, p_mapping_data.data[polygon_index] });
				}
				ERR_FAIL_COND_V_MSG((polygon_index + 1) != polygon_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR16. Not all Polygons are present in the file.");
			} else {
				// The data is mapped per polygon using a reference.
				// The indices array, contains a *reference_id for each polygon.
				// * Note that the reference_id is the id of data into the data array.
				//
				// https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
				const int polygon_count = count_polygons(p_mesh_indices);
				ERR_FAIL_COND_V_MSG(polygon_count != (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR17");

				// Advance each polygon vertex, each new polygon advance the polygon index.
				int polygon_index = -1;
				for (size_t polygon_vertex_index = 0;
						polygon_vertex_index < p_mesh_indices.size();
						polygon_vertex_index += 1) {
					if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
						polygon_index += 1;
						ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR18");
						ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[polygon_index], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR19");
					}

					const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
					ERR_FAIL_INDEX_V_MSG(vertex_index, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR20");

					aggregate_vertex_data[vertex_index].push_back({ polygon_index, p_mapping_data.data[p_mapping_data.index[polygon_index]] });
				}
				ERR_FAIL_COND_V_MSG((polygon_index + 1) != polygon_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR22. Not all Polygons are present in the file.");
			}
		} break;
		case FBXDocParser::MeshGeometry::MapType::edge: {
			if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
				// The data are mapped per edge directly.
				ERR_FAIL_COND_V_MSG(p_edge_map.size() != p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR23");
				for (size_t edge_index = 0; edge_index < p_mapping_data.data.size(); edge_index += 1) {
					const FBXDocParser::MeshGeometry::Edge edge = FBXDocParser::MeshGeometry::get_edge(p_edge_map, edge_index);
					ERR_FAIL_INDEX_V_MSG(edge.vertex_0, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR24");
					ERR_FAIL_INDEX_V_MSG(edge.vertex_1, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR25");
					// NOTE(review): #ERR26/#ERR27 bound-check the *vertex* ids
					// against the data array, while the data is actually read
					// with `edge_index` -- these checks look misplaced; verify.
					ERR_FAIL_INDEX_V_MSG(edge.vertex_0, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR26");
					ERR_FAIL_INDEX_V_MSG(edge.vertex_1, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR27");
					aggregate_vertex_data[edge.vertex_0].push_back({ -1, p_mapping_data.data[edge_index] });
					aggregate_vertex_data[edge.vertex_1].push_back({ -1, p_mapping_data.data[edge_index] });
				}
			} else {
				// The data is mapped per edge using a reference.
				// The indices array, contains a *reference_id for each polygon.
				// * Note that the reference_id is the id of data into the data array.
				//
				// https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
				ERR_FAIL_COND_V_MSG(p_edge_map.size() != p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR28");
				// NOTE(review): this loop is bounded by data.size() although the
				// check above compares the edge map against index.size(); also
				// #ERR31..#ERR34 validate index[edge.vertex_*] while the value
				// actually read below is index[edge_index]. This looks
				// inconsistent -- confirm against the FBX layer-element spec
				// before touching.
				for (size_t edge_index = 0; edge_index < p_mapping_data.data.size(); edge_index += 1) {
					const FBXDocParser::MeshGeometry::Edge edge = FBXDocParser::MeshGeometry::get_edge(p_edge_map, edge_index);
					ERR_FAIL_INDEX_V_MSG(edge.vertex_0, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR29");
					ERR_FAIL_INDEX_V_MSG(edge.vertex_1, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR30");
					ERR_FAIL_INDEX_V_MSG(edge.vertex_0, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR31");
					ERR_FAIL_INDEX_V_MSG(edge.vertex_1, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR32");
					ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[edge.vertex_0], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR33");
					ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[edge.vertex_1], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR34");
					aggregate_vertex_data[edge.vertex_0].push_back({ -1, p_mapping_data.data[p_mapping_data.index[edge_index]] });
					aggregate_vertex_data[edge.vertex_1].push_back({ -1, p_mapping_data.data[p_mapping_data.index[edge_index]] });
				}
			}
		} break;
		case FBXDocParser::MeshGeometry::MapType::all_the_same: {
			// No matter the mode, no matter the data size; The first always win
			// and is set to all the vertices.
			ERR_FAIL_COND_V_MSG(p_mapping_data.data.size() <= 0, (HashMap<int, R>()), "FBX file seems corrupted: #ERR35");
			if (p_mapping_data.data.size() > 0) {
				for (int vertex_index = 0; vertex_index < p_vertex_count; vertex_index += 1) {
					aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[0] });
				}
			}
		} break;
	}

	if (aggregate_vertex_data.size() == 0) {
		return (HashMap<int, R>());
	}

	// A map is used because turns out that the some FBX file are not well organized
	// with vertices well compacted. Using a map allows avoid those issues.
	HashMap<Vertex, R> result;

	// Aggregate the collected data: let the collector pick/merge the final
	// value for each vertex.
	for (const Vertex *index = aggregate_vertex_data.next(nullptr); index != nullptr; index = aggregate_vertex_data.next(index)) {
		Vector<VertexData<T>> *aggregated_vertex = aggregate_vertex_data.getptr(*index);
		// This can't be null because we are just iterating.
		CRASH_COND(aggregated_vertex == nullptr);

		ERR_FAIL_INDEX_V_MSG(0, aggregated_vertex->size(), (HashMap<int, R>()), "The FBX file is corrupted, No valid data for this vertex index.");
		result[*index] = collector_function(aggregated_vertex, p_fall_back);
	}

	// Sanitize the data now, if the file is broken we can try import it anyway.
	bool problem_found = false;
	for (size_t i = 0; i < p_mesh_indices.size(); i += 1) {
		const Vertex vertex = get_vertex_from_polygon_vertex(p_mesh_indices, i);
		if (result.has(vertex) == false) {
			result[vertex] = p_fall_back;
			problem_found = true;
		}
	}
	if (problem_found) {
		WARN_PRINT("Some data is missing, this FBX file may be corrupted: #WARN0.");
	}

	return result;
}
  1098. template <class T>
  1099. HashMap<int, T> FBXMeshData::extract_per_polygon(
  1100. int p_vertex_count,
  1101. const std::vector<int> &p_polygon_indices,
  1102. const FBXDocParser::MeshGeometry::MappingData<T> &p_fbx_data,
  1103. T p_fallback_value) const {
  1104. ERR_FAIL_COND_V_MSG(p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_fbx_data.data.size() == 0, (HashMap<int, T>()), "invalid index to direct array");
  1105. ERR_FAIL_COND_V_MSG(p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index && p_fbx_data.index.size() == 0, (HashMap<int, T>()), "The FBX seems corrupted");
  1106. const int polygon_count = count_polygons(p_polygon_indices);
  1107. // Aggregate vertex data.
  1108. HashMap<int, Vector<T>> aggregate_polygon_data;
  1109. switch (p_fbx_data.map_type) {
  1110. case FBXDocParser::MeshGeometry::MapType::none: {
  1111. // No data nothing to do.
  1112. return (HashMap<int, T>());
  1113. }
  1114. case FBXDocParser::MeshGeometry::MapType::vertex: {
  1115. ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per vertex. This should not happen.");
  1116. } break;
  1117. case FBXDocParser::MeshGeometry::MapType::polygon_vertex: {
  1118. ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per polygon vertex. This should not happen.");
  1119. } break;
  1120. case FBXDocParser::MeshGeometry::MapType::polygon: {
  1121. if (p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct) {
  1122. // The data is stored efficiently index_to_direct allows less data in the FBX file.
  1123. for (int polygon_index = 0;
  1124. polygon_index < polygon_count;
  1125. polygon_index += 1) {
  1126. if (p_fbx_data.index.size() == 0) {
  1127. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR62");
  1128. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[polygon_index]);
  1129. } else {
  1130. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR62");
  1131. const int index_to_direct = p_fbx_data.index[polygon_index];
  1132. T value = p_fbx_data.data[index_to_direct];
  1133. aggregate_polygon_data[polygon_index].push_back(value);
  1134. }
  1135. }
  1136. } else if (p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  1137. // The data are mapped per polygon directly.
  1138. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR51");
  1139. // Advance each polygon vertex, each new polygon advance the polygon index.
  1140. for (int polygon_index = 0;
  1141. polygon_index < polygon_count;
  1142. polygon_index += 1) {
  1143. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR52");
  1144. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[polygon_index]);
  1145. }
  1146. } else {
  1147. // The data is mapped per polygon using a reference.
  1148. // The indices array, contains a *reference_id for each polygon.
  1149. // * Note that the reference_id is the id of data into the data array.
  1150. //
  1151. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  1152. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file seems corrupted: #ERR52");
  1153. // Advance each polygon vertex, each new polygon advance the polygon index.
  1154. for (int polygon_index = 0;
  1155. polygon_index < polygon_count;
  1156. polygon_index += 1) {
  1157. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR53");
  1158. ERR_FAIL_INDEX_V_MSG(p_fbx_data.index[polygon_index], (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR54");
  1159. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[p_fbx_data.index[polygon_index]]);
  1160. }
  1161. }
  1162. } break;
  1163. case FBXDocParser::MeshGeometry::MapType::edge: {
  1164. ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per edge. This should not happen.");
  1165. } break;
  1166. case FBXDocParser::MeshGeometry::MapType::all_the_same: {
  1167. // No matter the mode, no matter the data size; The first always win
  1168. // and is set to all the vertices.
  1169. ERR_FAIL_COND_V_MSG(p_fbx_data.data.size() <= 0, (HashMap<int, T>()), "FBX file seems corrupted: #ERR55");
  1170. if (p_fbx_data.data.size() > 0) {
  1171. for (int polygon_index = 0; polygon_index < polygon_count; polygon_index += 1) {
  1172. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[0]);
  1173. }
  1174. }
  1175. } break;
  1176. }
  1177. if (aggregate_polygon_data.size() == 0) {
  1178. return (HashMap<int, T>());
  1179. }
  1180. // A map is used because turns out that the some FBX file are not well organized
  1181. // with vertices well compacted. Using a map allows avoid those issues.
  1182. HashMap<int, T> polygons;
  1183. // Take the first value for each vertex.
  1184. for (const Vertex *index = aggregate_polygon_data.next(nullptr); index != nullptr; index = aggregate_polygon_data.next(index)) {
  1185. Vector<T> *aggregated_polygon = aggregate_polygon_data.getptr(*index);
  1186. // This can't be null because we are just iterating.
  1187. CRASH_COND(aggregated_polygon == nullptr);
  1188. ERR_FAIL_INDEX_V_MSG(0, (int)aggregated_polygon->size(), (HashMap<int, T>()), "The FBX file is corrupted, No valid data for this polygon index.");
  1189. // Validate the final value.
  1190. polygons[*index] = (*aggregated_polygon)[0];
  1191. }
  1192. // Sanitize the data now, if the file is broken we can try import it anyway.
  1193. bool problem_found = false;
  1194. for (int polygon_i = 0; polygon_i < polygon_count; polygon_i += 1) {
  1195. if (polygons.has(polygon_i) == false) {
  1196. polygons[polygon_i] = p_fallback_value;
  1197. problem_found = true;
  1198. }
  1199. }
  1200. if (problem_found) {
  1201. WARN_PRINT("Some data is missing, this FBX file may be corrupted: #WARN1.");
  1202. }
  1203. return polygons;
  1204. }
  1205. void FBXMeshData::extract_morphs(const FBXDocParser::MeshGeometry *mesh_geometry, HashMap<String, MorphVertexData> &r_data) {
  1206. r_data.clear();
  1207. const int vertex_count = mesh_geometry->get_vertices().size();
  1208. for (const FBXDocParser::BlendShape *blend_shape : mesh_geometry->get_blend_shapes()) {
  1209. for (const FBXDocParser::BlendShapeChannel *blend_shape_channel : blend_shape->BlendShapeChannels()) {
  1210. const std::vector<const FBXDocParser::ShapeGeometry *> &shape_geometries = blend_shape_channel->GetShapeGeometries();
  1211. for (const FBXDocParser::ShapeGeometry *shape_geometry : shape_geometries) {
  1212. String morph_name = ImportUtils::FBXAnimMeshName(shape_geometry->Name()).c_str();
  1213. if (morph_name.is_empty()) {
  1214. morph_name = "morph";
  1215. }
  1216. // TODO we have only these??
  1217. const std::vector<unsigned int> &morphs_vertex_indices = shape_geometry->GetIndices();
  1218. const std::vector<Vector3> &morphs_vertices = shape_geometry->GetVertices();
  1219. const std::vector<Vector3> &morphs_normals = shape_geometry->GetNormals();
  1220. ERR_FAIL_COND_MSG((int)morphs_vertex_indices.size() > vertex_count, "The FBX file is corrupted: #ERR103");
  1221. ERR_FAIL_COND_MSG(morphs_vertex_indices.size() != morphs_vertices.size(), "The FBX file is corrupted: #ERR104");
  1222. ERR_FAIL_COND_MSG((int)morphs_vertices.size() > vertex_count, "The FBX file is corrupted: #ERR105");
  1223. ERR_FAIL_COND_MSG(morphs_normals.size() != 0 && morphs_normals.size() != morphs_vertices.size(), "The FBX file is corrupted: #ERR106");
  1224. if (r_data.has(morph_name) == false) {
  1225. // This morph doesn't exist yet.
  1226. // Create it.
  1227. MorphVertexData md;
  1228. md.vertices.resize(vertex_count);
  1229. md.normals.resize(vertex_count);
  1230. r_data.set(morph_name, md);
  1231. }
  1232. MorphVertexData *data = r_data.getptr(morph_name);
  1233. Vector3 *data_vertices_ptr = data->vertices.ptrw();
  1234. Vector3 *data_normals_ptr = data->normals.ptrw();
  1235. for (int i = 0; i < (int)morphs_vertex_indices.size(); i += 1) {
  1236. const Vertex vertex = morphs_vertex_indices[i];
  1237. ERR_FAIL_INDEX_MSG(vertex, vertex_count, "The blend shapes of this FBX file are corrupted. It has a not valid vertex.");
  1238. data_vertices_ptr[vertex] = morphs_vertices[i];
  1239. if (morphs_normals.size() != 0) {
  1240. data_normals_ptr[vertex] = morphs_normals[i];
  1241. }
  1242. }
  1243. }
  1244. }
  1245. }
  1246. }