// fbx_mesh_data.cpp
  1. /*************************************************************************/
  2. /* fbx_mesh_data.cpp */
  3. /*************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /*************************************************************************/
  8. /* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur. */
  9. /* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /*************************************************************************/
  30. #include "fbx_mesh_data.h"
  31. #include "core/templates/local_vector.h"
  32. #include "scene/resources/mesh.h"
  33. #include "scene/resources/surface_tool.h"
  34. #include "thirdparty/misc/polypartition.h"
  35. template <class T>
  36. T collect_first(const Vector<VertexData<T>> *p_data, T p_fall_back) {
  37. if (p_data->is_empty()) {
  38. return p_fall_back;
  39. }
  40. return (*p_data)[0].data;
  41. }
  42. template <class T>
  43. HashMap<int, T> collect_all(const Vector<VertexData<T>> *p_data, HashMap<int, T> p_fall_back) {
  44. if (p_data->is_empty()) {
  45. return p_fall_back;
  46. }
  47. HashMap<int, T> collection;
  48. for (int i = 0; i < p_data->size(); i += 1) {
  49. const VertexData<T> &vd = (*p_data)[i];
  50. collection[vd.polygon_index] = vd.data;
  51. }
  52. return collection;
  53. }
  54. template <class T>
  55. T collect_average(const Vector<VertexData<T>> *p_data, T p_fall_back) {
  56. if (p_data->is_empty()) {
  57. return p_fall_back;
  58. }
  59. T combined = (*p_data)[0].data; // Make sure the data is always correctly initialized.
  60. print_verbose("size of data: " + itos(p_data->size()));
  61. for (int i = 1; i < p_data->size(); i += 1) {
  62. combined += (*p_data)[i].data;
  63. }
  64. combined = combined / real_t(p_data->size());
  65. return combined.normalized();
  66. }
  67. HashMap<int, Vector3> collect_normal(const Vector<VertexData<Vector3>> *p_data, HashMap<int, Vector3> p_fall_back) {
  68. if (p_data->is_empty()) {
  69. return p_fall_back;
  70. }
  71. HashMap<int, Vector3> collection;
  72. for (int i = 0; i < p_data->size(); i += 1) {
  73. const VertexData<Vector3> &vd = (*p_data)[i];
  74. collection[vd.polygon_index] = vd.data;
  75. }
  76. return collection;
  77. }
  78. HashMap<int, Vector2> collect_uv(const Vector<VertexData<Vector2>> *p_data, HashMap<int, Vector2> p_fall_back) {
  79. if (p_data->is_empty()) {
  80. return p_fall_back;
  81. }
  82. HashMap<int, Vector2> collection;
  83. for (int i = 0; i < p_data->size(); i += 1) {
  84. const VertexData<Vector2> &vd = (*p_data)[i];
  85. collection[vd.polygon_index] = vd.data;
  86. }
  87. return collection;
  88. }
// Converts one FBX mesh geometry into a Godot EditorSceneImporterMeshNode3D.
// The conversion runs in six sequential phases (labelled below):
//   1. parse the raw per-vertex/per-polygon FBX layers,
//   2. create one SurfaceTool per material allocation id,
//   3. map each referenced vertex into its surface,
//   4. fill each surface with vertices and triangulated indices,
//   5. build the morph (blend shape) arrays per surface,
//   6. compose and return the final importer mesh node.
// NOTE(review): `use_compression` is accepted but not read in this body —
// presumably consumed elsewhere or dead; confirm with callers.
EditorSceneImporterMeshNode3D *FBXMeshData::create_fbx_mesh(const ImportState &state, const FBXDocParser::MeshGeometry *p_mesh_geometry, const FBXDocParser::Model *model, bool use_compression) {
	mesh_geometry = p_mesh_geometry;
	// todo: make this just use a uint64_t FBX ID this is a copy of our original materials unfortunately.
	const std::vector<const FBXDocParser::Material *> &material_lookup = model->GetMaterials();

	// TODO: perf hotspot on large files
	// this can be a very large copy
	std::vector<int> polygon_indices = mesh_geometry->get_polygon_indices();
	std::vector<Vector3> vertices = mesh_geometry->get_vertices();

	// Phase 1. Parse all FBX data.
	// The `*_raw` maps are keyed vertex -> (polygon -> datum); the flat maps
	// (normals, uvs_0, ...) are filled later by reorganize_vertices().
	HashMap<int, Vector3> normals;
	HashMap<int, HashMap<int, Vector3>> normals_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_normals(),
			&collect_all,
			HashMap<int, Vector3>());

	HashMap<int, Vector2> uvs_0;
	HashMap<int, HashMap<int, Vector2>> uvs_0_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_uv_0(),
			&collect_all,
			HashMap<int, Vector2>());

	HashMap<int, Vector2> uvs_1;
	HashMap<int, HashMap<int, Vector2>> uvs_1_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_uv_1(),
			&collect_all,
			HashMap<int, Vector2>());

	HashMap<int, Color> colors;
	HashMap<int, HashMap<int, Color>> colors_raw = extract_per_vertex_data(
			vertices.size(),
			mesh_geometry->get_edge_map(),
			polygon_indices,
			mesh_geometry->get_colors(),
			&collect_all,
			HashMap<int, Color>());

	// TODO what about tangents?
	// TODO what about bi-nomials?
	// TODO there is other?

	// Per-polygon material allocation id; -1 is used as the "no material" id.
	HashMap<int, SurfaceId> polygon_surfaces = extract_per_polygon(
			vertices.size(),
			polygon_indices,
			mesh_geometry->get_material_allocation_id(),
			-1);

	HashMap<String, MorphVertexData> morphs;
	extract_morphs(mesh_geometry, morphs);

	// TODO please add skinning.
	//mesh_id = mesh_geometry->ID();

	sanitize_vertex_weights(state);

	// Reorganize polygon vertices to correctly take into account strange
	// UVs. May duplicate vertices (and grow `vertices`) when one vertex
	// needs different normals/UVs/colors on different polygons.
	reorganize_vertices(
			polygon_indices,
			vertices,
			normals,
			uvs_0,
			uvs_1,
			colors,
			morphs,
			normals_raw,
			colors_raw,
			uvs_0_raw,
			uvs_1_raw);

	const int color_count = colors.size();
	print_verbose("Vertex color count: " + itos(color_count));

	// Make sure that from this moment on the mesh_geometry is no used anymore.
	// This is a safety step, because the mesh_geometry data are no more valid
	// at this point.
	const int vertex_count = vertices.size();
	print_verbose("Vertex count: " + itos(vertex_count));

	// The map key is the material allocator id that is also used as surface id.
	HashMap<SurfaceId, SurfaceData> surfaces;

	// Phase 2. For each material create a surface tool (So a different mesh).
	{
		if (polygon_surfaces.is_empty()) {
			// No material, just use the default one with index -1.
			// Set -1 to all polygons.
			const int polygon_count = count_polygons(polygon_indices);
			for (int p = 0; p < polygon_count; p += 1) {
				polygon_surfaces[p] = -1;
			}
		}

		// Create the surface now.
		for (const int *polygon_id = polygon_surfaces.next(nullptr); polygon_id != nullptr; polygon_id = polygon_surfaces.next(polygon_id)) {
			const int surface_id = polygon_surfaces[*polygon_id];
			if (surfaces.has(surface_id) == false) {
				SurfaceData sd;
				sd.surface_tool.instance();
				sd.surface_tool->begin(Mesh::PRIMITIVE_TRIANGLES);

				if (surface_id < 0) {
					// nothing to do
				} else if (surface_id < (int)material_lookup.size()) {
					// Resolve the Godot material previously cached for this
					// FBX material id, when one exists.
					const FBXDocParser::Material *mat_mapping = material_lookup.at(surface_id);
					const uint64_t mapping_id = mat_mapping->ID();
					if (state.cached_materials.has(mapping_id)) {
						sd.material = state.cached_materials[mapping_id];
					}
				} else {
					WARN_PRINT("out of bounds surface detected, FBX file has corrupt material data");
				}

				surfaces.set(surface_id, sd);
			}
		}
	}

	// Phase 3. Map the vertices relative to each surface, in this way we can
	// just insert the vertices that we need per each surface.
	{
		PolygonId polygon_index = -1;
		SurfaceId surface_id = -1;
		SurfaceData *surface_data = nullptr;

		for (size_t polygon_vertex = 0; polygon_vertex < polygon_indices.size(); polygon_vertex += 1) {
			if (is_start_of_polygon(polygon_indices, polygon_vertex)) {
				polygon_index += 1;
				ERR_FAIL_COND_V_MSG(polygon_surfaces.has(polygon_index) == false, nullptr, "The FBX file is corrupted, This surface_index is not expected.");
				surface_id = polygon_surfaces[polygon_index];
				surface_data = surfaces.getptr(surface_id);
				CRASH_COND(surface_data == nullptr); // Can't be null.
			}

			const int vertex = get_vertex_from_polygon_vertex(polygon_indices, polygon_vertex);

			// The vertex position in the surface
			// Uses a lookup table for speed with large scenes
			bool has_polygon_vertex_index = surface_data->lookup_table.has(vertex);
			int surface_polygon_vertex_index = -1;

			if (has_polygon_vertex_index) {
				surface_polygon_vertex_index = surface_data->lookup_table[vertex];
			} else {
				// First time this vertex is seen on this surface: register it.
				surface_polygon_vertex_index = surface_data->vertices_map.size();
				surface_data->lookup_table[vertex] = surface_polygon_vertex_index;
				surface_data->vertices_map.push_back(vertex);
			}

			surface_data->surface_polygon_vertex[polygon_index].push_back(surface_polygon_vertex_index);
		}
	}

	//print_verbose("[debug UV 1] UV1: " + itos(uvs_0.size()));
	//print_verbose("[debug UV 2] UV2: " + itos(uvs_1.size()));

	// Phase 4. Per each surface just insert the vertices and add the indices.
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);

		// Just add the vertices data.
		for (unsigned int i = 0; i < surface->vertices_map.size(); i += 1) {
			const Vertex vertex = surface->vertices_map[i];

			// This must be done before add_vertex because the surface tool is
			// expecting this before the st->add_vertex() call
			add_vertex(state,
					surface->surface_tool,
					state.scale,
					vertex,
					vertices,
					normals,
					uvs_0,
					uvs_1,
					colors);
		}

		// Triangulate the various polygons and add the indices.
		for (const PolygonId *polygon_id = surface->surface_polygon_vertex.next(nullptr); polygon_id != nullptr; polygon_id = surface->surface_polygon_vertex.next(polygon_id)) {
			const Vector<DataIndex> *indices = surface->surface_polygon_vertex.getptr(*polygon_id);
			triangulate_polygon(
					surface,
					*indices,
					vertices);
		}
	}

	// Phase 5. Compose the morphs if any.
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);

		for (const String *morph_name = morphs.next(nullptr); morph_name != nullptr; morph_name = morphs.next(morph_name)) {
			MorphVertexData *morph_data = morphs.getptr(*morph_name);

			// As said by the docs, this is not supposed to be different than
			// vertex_count.
			CRASH_COND(morph_data->vertices.size() != vertex_count);
			CRASH_COND(morph_data->normals.size() != vertex_count);

			Vector3 *vertices_ptr = morph_data->vertices.ptrw();
			Vector3 *normals_ptr = morph_data->normals.ptrw();

			// Each morph gets its own SurfaceTool mirroring the surface's
			// vertex order, offset by the morph deltas.
			Ref<SurfaceTool> morph_st;
			morph_st.instance();
			morph_st->begin(Mesh::PRIMITIVE_TRIANGLES);

			for (unsigned int vi = 0; vi < surface->vertices_map.size(); vi += 1) {
				const Vertex &vertex = surface->vertices_map[vi];
				add_vertex(
						state,
						morph_st,
						state.scale,
						vertex,
						vertices,
						normals,
						uvs_0,
						uvs_1,
						colors,
						vertices_ptr[vertex],
						normals_ptr[vertex]);
			}

			if (state.is_blender_fbx) {
				morph_st->generate_normals();
			}
			morph_st->generate_tangents();
			surface->morphs.push_back(morph_st->commit_to_arrays());
		}
	}

	// Phase 6. Compose the mesh and return it.
	Ref<EditorSceneImporterMesh> mesh;
	mesh.instance();

	// Add blend shape info.
	for (const String *morph_name = morphs.next(nullptr); morph_name != nullptr; morph_name = morphs.next(morph_name)) {
		mesh->add_blend_shape(*morph_name);
	}

	// TODO always normalized, Why?
	mesh->set_blend_shape_mode(Mesh::BLEND_SHAPE_MODE_NORMALIZED);

	// Add surfaces.
	int in_mesh_surface_id = 0;
	for (const SurfaceId *surface_id = surfaces.next(nullptr); surface_id != nullptr; surface_id = surfaces.next(surface_id)) {
		SurfaceData *surface = surfaces.getptr(*surface_id);

		if (state.is_blender_fbx) {
			surface->surface_tool->generate_normals();
		}
		// you can't generate them without a valid uv map.
		if (uvs_0_raw.size() > 0) {
			surface->surface_tool->generate_tangents();
		}

		Array mesh_array = surface->surface_tool->commit_to_arrays();
		Array blend_shapes = surface->morphs;

		if (surface->material.is_valid()) {
			mesh->add_surface(Mesh::PRIMITIVE_TRIANGLES, mesh_array, blend_shapes, Dictionary(), surface->material, surface->material->get_name());
		} else {
			mesh->add_surface(Mesh::PRIMITIVE_TRIANGLES, mesh_array, blend_shapes);
		}

		in_mesh_surface_id += 1;
	}

	EditorSceneImporterMeshNode3D *godot_mesh = memnew(EditorSceneImporterMeshNode3D);
	godot_mesh->set_mesh(mesh);
	const String name = ImportUtils::FBXNodeToName(model->Name());
	godot_mesh->set_name(name); // hurry up compiling >.<
	mesh->set_name("mesh3d-" + name);
	return godot_mesh;
}
// Normalizes the per-vertex skin weights so each vertex carries exactly
// RS::ARRAY_WEIGHTS_SIZE influences, sorted by descending weight, with the
// weights rescaled to sum to 1. Also rewrites each bone reference from a
// skeleton bone id to the skin-local bind id derived from the FBX skin
// clusters. No-op when the geometry has no skin or no clusters.
void FBXMeshData::sanitize_vertex_weights(const ImportState &state) {
	const int max_vertex_influence_count = RS::ARRAY_WEIGHTS_SIZE;
	Map<int, int> skeleton_to_skin_bind_id;
	// TODO: error's need added
	const FBXDocParser::Skin *fbx_skin = mesh_geometry->DeformerSkin();

	if (fbx_skin == nullptr || fbx_skin->Clusters().size() == 0) {
		return; // do nothing
	}

	//
	// Precalculate the skin cluster mapping
	//
	// Bind ids are assigned in cluster order; each cluster's target node maps
	// a Godot bone id to its position in the skin.

	int bind_id = 0;
	for (const FBXDocParser::Cluster *cluster : fbx_skin->Clusters()) {
		ERR_CONTINUE_MSG(!state.fbx_bone_map.has(cluster->TargetNode()->ID()), "Missing bone map for cluster target node with id " + uitos(cluster->TargetNode()->ID()) + ".");
		Ref<FBXBone> bone = state.fbx_bone_map[cluster->TargetNode()->ID()];
		skeleton_to_skin_bind_id.insert(bone->godot_bone_id, bind_id);
		bind_id++;
	}

	for (const Vertex *v = vertex_weights.next(nullptr); v != nullptr; v = vertex_weights.next(v)) {
		VertexWeightMapping *vm = vertex_weights.getptr(*v);
		ERR_CONTINUE(vm->bones.size() != vm->weights.size()); // No message, already checked.
		ERR_CONTINUE(vm->bones_ref.size() != vm->weights.size()); // No message, already checked.

		const int initial_size = vm->weights.size();
		{
			// Init bone id: replace each bone reference with its skin bind id.
			int *bones_ptr = vm->bones.ptrw();
			Ref<FBXBone> *bones_ref_ptr = vm->bones_ref.ptrw();

			for (int i = 0; i < vm->weights.size(); i += 1) {
				// At this point this is not possible because the skeleton is already initialized.
				CRASH_COND(bones_ref_ptr[i]->godot_bone_id == -2);
				bones_ptr[i] = skeleton_to_skin_bind_id[bones_ref_ptr[i]->godot_bone_id];
			}

			// From this point on the data is no more valid.
			vm->bones_ref.clear();
		}

		{
			// Sort (descending by weight, selection-style swap sort) so a
			// later truncation keeps the strongest influences.
			real_t *weights_ptr = vm->weights.ptrw();
			int *bones_ptr = vm->bones.ptrw();
			for (int i = 0; i < vm->weights.size(); i += 1) {
				for (int x = i + 1; x < vm->weights.size(); x += 1) {
					if (weights_ptr[i] < weights_ptr[x]) {
						SWAP(weights_ptr[i], weights_ptr[x]);
						SWAP(bones_ptr[i], bones_ptr[x]);
					}
				}
			}
		}

		{
			// Resize to exactly the influence count the renderer expects;
			// zero-fill any slots beyond the original influence count.
			vm->weights.resize(max_vertex_influence_count);
			vm->bones.resize(max_vertex_influence_count);
			real_t *weights_ptr = vm->weights.ptrw();
			int *bones_ptr = vm->bones.ptrw();
			for (int i = initial_size; i < max_vertex_influence_count; i += 1) {
				weights_ptr[i] = 0.0;
				bones_ptr[i] = 0;
			}

			// Normalize the surviving weights so they sum to 1 (skipped when
			// the sum is zero to avoid a division by zero).
			real_t sum = 0.0;
			for (int i = 0; i < max_vertex_influence_count; i += 1) {
				sum += weights_ptr[i];
			}
			if (sum > 0.0) {
				for (int i = 0; i < vm->weights.size(); i += 1) {
					weights_ptr[i] = weights_ptr[i] / sum;
				}
			}
		}
	}
}
  399. void FBXMeshData::reorganize_vertices(
  400. // TODO: perf hotspot on insane files
  401. std::vector<int> &r_polygon_indices,
  402. std::vector<Vector3> &r_vertices,
  403. HashMap<int, Vector3> &r_normals,
  404. HashMap<int, Vector2> &r_uv_1,
  405. HashMap<int, Vector2> &r_uv_2,
  406. HashMap<int, Color> &r_color,
  407. HashMap<String, MorphVertexData> &r_morphs,
  408. HashMap<int, HashMap<int, Vector3>> &r_normals_raw,
  409. HashMap<int, HashMap<int, Color>> &r_colors_raw,
  410. HashMap<int, HashMap<int, Vector2>> &r_uv_1_raw,
  411. HashMap<int, HashMap<int, Vector2>> &r_uv_2_raw) {
  412. // Key: OldVertex; Value: [New vertices];
  413. HashMap<int, Vector<int>> duplicated_vertices;
  414. PolygonId polygon_index = -1;
  415. for (int pv = 0; pv < (int)r_polygon_indices.size(); pv += 1) {
  416. if (is_start_of_polygon(r_polygon_indices, pv)) {
  417. polygon_index += 1;
  418. }
  419. const Vertex index = get_vertex_from_polygon_vertex(r_polygon_indices, pv);
  420. bool need_duplication = false;
  421. Vector2 this_vert_poly_uv1 = Vector2();
  422. Vector2 this_vert_poly_uv2 = Vector2();
  423. Vector3 this_vert_poly_normal = Vector3();
  424. Color this_vert_poly_color = Color();
  425. // Take the normal and see if we need to duplicate this polygon.
  426. if (r_normals_raw.has(index)) {
  427. const HashMap<PolygonId, Vector3> *nrml_arr = r_normals_raw.getptr(index);
  428. if (nrml_arr->has(polygon_index)) {
  429. this_vert_poly_normal = nrml_arr->get(polygon_index);
  430. } else if (nrml_arr->has(-1)) {
  431. this_vert_poly_normal = nrml_arr->get(-1);
  432. } else {
  433. print_error("invalid normal detected: " + itos(index) + " polygon index: " + itos(polygon_index));
  434. for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
  435. print_verbose("debug contents key: " + itos(*pid));
  436. if (nrml_arr->has(*pid)) {
  437. print_verbose("contents valid: " + nrml_arr->get(*pid));
  438. }
  439. }
  440. }
  441. // Now, check if we need to duplicate it.
  442. for (const PolygonId *pid = nrml_arr->next(nullptr); pid != nullptr; pid = nrml_arr->next(pid)) {
  443. if (*pid == polygon_index) {
  444. continue;
  445. }
  446. const Vector3 vert_poly_normal = *nrml_arr->getptr(*pid);
  447. if ((this_vert_poly_normal - vert_poly_normal).length_squared() > CMP_EPSILON) {
  448. // Yes this polygon need duplication.
  449. need_duplication = true;
  450. break;
  451. }
  452. }
  453. }
  454. // TODO: make me vertex color
  455. // Take the normal and see if we need to duplicate this polygon.
  456. if (r_colors_raw.has(index)) {
  457. const HashMap<PolygonId, Color> *color_arr = r_colors_raw.getptr(index);
  458. if (color_arr->has(polygon_index)) {
  459. this_vert_poly_color = color_arr->get(polygon_index);
  460. } else if (color_arr->has(-1)) {
  461. this_vert_poly_color = color_arr->get(-1);
  462. } else {
  463. print_error("invalid color detected: " + itos(index) + " polygon index: " + itos(polygon_index));
  464. for (const PolygonId *pid = color_arr->next(nullptr); pid != nullptr; pid = color_arr->next(pid)) {
  465. print_verbose("debug contents key: " + itos(*pid));
  466. if (color_arr->has(*pid)) {
  467. print_verbose("contents valid: " + color_arr->get(*pid));
  468. }
  469. }
  470. }
  471. // Now, check if we need to duplicate it.
  472. for (const PolygonId *pid = color_arr->next(nullptr); pid != nullptr; pid = color_arr->next(pid)) {
  473. if (*pid == polygon_index) {
  474. continue;
  475. }
  476. const Color vert_poly_color = *color_arr->getptr(*pid);
  477. if (!this_vert_poly_color.is_equal_approx(vert_poly_color)) {
  478. // Yes this polygon need duplication.
  479. need_duplication = true;
  480. break;
  481. }
  482. }
  483. }
  484. // Take the UV1 and UV2 and see if we need to duplicate this polygon.
  485. {
  486. HashMap<int, HashMap<int, Vector2>> *uv_raw = &r_uv_1_raw;
  487. Vector2 *this_vert_poly_uv = &this_vert_poly_uv1;
  488. for (int kk = 0; kk < 2; kk++) {
  489. if (uv_raw->has(index)) {
  490. const HashMap<PolygonId, Vector2> *uvs = uv_raw->getptr(index);
  491. if (uvs->has(polygon_index)) {
  492. // This Polygon has its own uv.
  493. (*this_vert_poly_uv) = *uvs->getptr(polygon_index);
  494. // Check if we need to duplicate it.
  495. for (const PolygonId *pid = uvs->next(nullptr); pid != nullptr; pid = uvs->next(pid)) {
  496. if (*pid == polygon_index) {
  497. continue;
  498. }
  499. const Vector2 vert_poly_uv = *uvs->getptr(*pid);
  500. if (((*this_vert_poly_uv) - vert_poly_uv).length_squared() > CMP_EPSILON) {
  501. // Yes this polygon need duplication.
  502. need_duplication = true;
  503. break;
  504. }
  505. }
  506. } else if (uvs->has(-1)) {
  507. // It has the default UV.
  508. (*this_vert_poly_uv) = *uvs->getptr(-1);
  509. } else if (uvs->size() > 0) {
  510. // No uv, this is strange, just take the first and duplicate.
  511. (*this_vert_poly_uv) = *uvs->getptr(*uvs->next(nullptr));
  512. WARN_PRINT("No UVs for this polygon, while there is no default and some other polygons have it. This FBX file may be corrupted.");
  513. }
  514. }
  515. uv_raw = &r_uv_2_raw;
  516. this_vert_poly_uv = &this_vert_poly_uv2;
  517. }
  518. }
  519. // If we want to duplicate it, Let's see if we already duplicated this
  520. // vertex.
  521. if (need_duplication) {
  522. if (duplicated_vertices.has(index)) {
  523. Vertex similar_vertex = -1;
  524. // Let's see if one of the new vertices has the same data of this.
  525. const Vector<int> *new_vertices = duplicated_vertices.getptr(index);
  526. for (int j = 0; j < new_vertices->size(); j += 1) {
  527. const Vertex new_vertex = (*new_vertices)[j];
  528. bool same_uv1 = false;
  529. bool same_uv2 = false;
  530. bool same_normal = false;
  531. bool same_color = false;
  532. if (r_uv_1.has(new_vertex)) {
  533. if ((this_vert_poly_uv1 - (*r_uv_1.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  534. same_uv1 = true;
  535. }
  536. }
  537. if (r_uv_2.has(new_vertex)) {
  538. if ((this_vert_poly_uv2 - (*r_uv_2.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  539. same_uv2 = true;
  540. }
  541. }
  542. if (r_color.has(new_vertex)) {
  543. if (this_vert_poly_color.is_equal_approx((*r_color.getptr(new_vertex)))) {
  544. same_color = true;
  545. }
  546. }
  547. if (r_normals.has(new_vertex)) {
  548. if ((this_vert_poly_normal - (*r_normals.getptr(new_vertex))).length_squared() <= CMP_EPSILON) {
  549. same_uv2 = true;
  550. }
  551. }
  552. if (same_uv1 && same_uv2 && same_normal && same_color) {
  553. similar_vertex = new_vertex;
  554. break;
  555. }
  556. }
  557. if (similar_vertex != -1) {
  558. // Update polygon.
  559. if (is_end_of_polygon(r_polygon_indices, pv)) {
  560. r_polygon_indices[pv] = ~similar_vertex;
  561. } else {
  562. r_polygon_indices[pv] = similar_vertex;
  563. }
  564. need_duplication = false;
  565. }
  566. }
  567. }
  568. if (need_duplication) {
  569. const Vertex old_index = index;
  570. const Vertex new_index = r_vertices.size();
  571. // Polygon index.
  572. if (is_end_of_polygon(r_polygon_indices, pv)) {
  573. r_polygon_indices[pv] = ~new_index;
  574. } else {
  575. r_polygon_indices[pv] = new_index;
  576. }
  577. // Vertex position.
  578. r_vertices.push_back(r_vertices[old_index]);
  579. // Normals
  580. if (r_normals_raw.has(old_index)) {
  581. r_normals.set(new_index, this_vert_poly_normal);
  582. r_normals_raw.getptr(old_index)->erase(polygon_index);
  583. r_normals_raw[new_index][polygon_index] = this_vert_poly_normal;
  584. }
  585. // Vertex Color
  586. if (r_colors_raw.has(old_index)) {
  587. r_color.set(new_index, this_vert_poly_color);
  588. r_colors_raw.getptr(old_index)->erase(polygon_index);
  589. r_colors_raw[new_index][polygon_index] = this_vert_poly_color;
  590. }
  591. // UV 0
  592. if (r_uv_1_raw.has(old_index)) {
  593. r_uv_1.set(new_index, this_vert_poly_uv1);
  594. r_uv_1_raw.getptr(old_index)->erase(polygon_index);
  595. r_uv_1_raw[new_index][polygon_index] = this_vert_poly_uv1;
  596. }
  597. // UV 1
  598. if (r_uv_2_raw.has(old_index)) {
  599. r_uv_2.set(new_index, this_vert_poly_uv2);
  600. r_uv_2_raw.getptr(old_index)->erase(polygon_index);
  601. r_uv_2_raw[new_index][polygon_index] = this_vert_poly_uv2;
  602. }
  603. // Morphs
  604. for (const String *mname = r_morphs.next(nullptr); mname != nullptr; mname = r_morphs.next(mname)) {
  605. MorphVertexData *d = r_morphs.getptr(*mname);
  606. // This can't never happen.
  607. CRASH_COND(d == nullptr);
  608. if (d->vertices.size() > old_index) {
  609. d->vertices.push_back(d->vertices[old_index]);
  610. }
  611. if (d->normals.size() > old_index) {
  612. d->normals.push_back(d->normals[old_index]);
  613. }
  614. }
  615. if (vertex_weights.has(old_index)) {
  616. vertex_weights.set(new_index, vertex_weights[old_index]);
  617. }
  618. duplicated_vertices[old_index].push_back(new_index);
  619. } else {
  620. if (r_normals_raw.has(index) &&
  621. r_normals.has(index) == false) {
  622. r_normals.set(index, this_vert_poly_normal);
  623. }
  624. if (r_colors_raw.has(index) && r_color.has(index) == false) {
  625. r_color.set(index, this_vert_poly_color);
  626. }
  627. if (r_uv_1_raw.has(index) &&
  628. r_uv_1.has(index) == false) {
  629. r_uv_1.set(index, this_vert_poly_uv1);
  630. }
  631. if (r_uv_2_raw.has(index) &&
  632. r_uv_2.has(index) == false) {
  633. r_uv_2.set(index, this_vert_poly_uv2);
  634. }
  635. }
  636. }
  637. }
  638. void FBXMeshData::add_vertex(
  639. const ImportState &state,
  640. Ref<SurfaceTool> p_surface_tool,
  641. real_t p_scale,
  642. Vertex p_vertex,
  643. const std::vector<Vector3> &p_vertices_position,
  644. const HashMap<int, Vector3> &p_normals,
  645. const HashMap<int, Vector2> &p_uvs_0,
  646. const HashMap<int, Vector2> &p_uvs_1,
  647. const HashMap<int, Color> &p_colors,
  648. const Vector3 &p_morph_value,
  649. const Vector3 &p_morph_normal) {
  650. ERR_FAIL_INDEX_MSG(p_vertex, (Vertex)p_vertices_position.size(), "FBX file is corrupted, the position of the vertex can't be retrieved.");
  651. if (p_normals.has(p_vertex) && !state.is_blender_fbx) {
  652. p_surface_tool->set_normal(p_normals[p_vertex] + p_morph_normal);
  653. }
  654. if (p_uvs_0.has(p_vertex)) {
  655. //print_verbose("uv1: [" + itos(p_vertex) + "] " + p_uvs_0[p_vertex]);
  656. // Inverts Y UV.
  657. p_surface_tool->set_uv(Vector2(p_uvs_0[p_vertex].x, 1 - p_uvs_0[p_vertex].y));
  658. }
  659. if (p_uvs_1.has(p_vertex)) {
  660. //print_verbose("uv2: [" + itos(p_vertex) + "] " + p_uvs_1[p_vertex]);
  661. // Inverts Y UV.
  662. p_surface_tool->set_uv2(Vector2(p_uvs_1[p_vertex].x, 1 - p_uvs_1[p_vertex].y));
  663. }
  664. if (p_colors.has(p_vertex)) {
  665. p_surface_tool->set_color(p_colors[p_vertex]);
  666. }
  667. // TODO what about binormals?
  668. // TODO there is other?
  669. if (vertex_weights.has(p_vertex)) {
  670. // Let's extract the weight info.
  671. const VertexWeightMapping *vm = vertex_weights.getptr(p_vertex);
  672. const Vector<int> &bones = vm->bones;
  673. // the bug is that the bone idx is wrong because it is not ref'ing the skin.
  674. if (bones.size() > RS::ARRAY_WEIGHTS_SIZE) {
  675. print_error("[weight overflow detected]");
  676. }
  677. p_surface_tool->set_weights(vm->weights);
  678. // 0 1 2 3 4 5 6 7 < local skeleton / skin for mesh
  679. // 0 1 2 3 4 5 6 7 8 9 10 < actual skeleton with all joints
  680. p_surface_tool->set_bones(bones);
  681. }
  682. // The surface tool want the vertex position as last thing.
  683. p_surface_tool->add_vertex((p_vertices_position[p_vertex] + p_morph_value) * p_scale);
  684. }
  685. void FBXMeshData::triangulate_polygon(SurfaceData *surface, const Vector<int> &p_polygon_vertex, const std::vector<Vector3> &p_vertices) const {
  686. Ref<SurfaceTool> st(surface->surface_tool);
  687. const int polygon_vertex_count = p_polygon_vertex.size();
  688. //const Vector<Vertex>& p_surface_vertex_map
  689. if (polygon_vertex_count == 1) {
  690. // point to triangle
  691. st->add_index(p_polygon_vertex[0]);
  692. st->add_index(p_polygon_vertex[0]);
  693. st->add_index(p_polygon_vertex[0]);
  694. return;
  695. } else if (polygon_vertex_count == 2) {
  696. // line to triangle
  697. st->add_index(p_polygon_vertex[1]);
  698. st->add_index(p_polygon_vertex[1]);
  699. st->add_index(p_polygon_vertex[0]);
  700. return;
  701. } else if (polygon_vertex_count == 3) {
  702. // triangle to triangle
  703. st->add_index(p_polygon_vertex[0]);
  704. st->add_index(p_polygon_vertex[2]);
  705. st->add_index(p_polygon_vertex[1]);
  706. return;
  707. } else if (polygon_vertex_count == 4) {
  708. // quad to triangle - this code is awesome for import times
  709. // it prevents triangles being generated slowly
  710. st->add_index(p_polygon_vertex[0]);
  711. st->add_index(p_polygon_vertex[2]);
  712. st->add_index(p_polygon_vertex[1]);
  713. st->add_index(p_polygon_vertex[2]);
  714. st->add_index(p_polygon_vertex[0]);
  715. st->add_index(p_polygon_vertex[3]);
  716. return;
  717. } else {
  718. // non triangulated - we must run the triangulation algorithm
  719. bool is_simple_convex = false;
  720. // this code is 'slow' but required it triangulates all the unsupported geometry.
  721. // Doesn't allow for bigger polygons because those are unlikely be convex
  722. if (polygon_vertex_count <= 6) {
  723. // Start from true, check if it's false.
  724. is_simple_convex = true;
  725. Vector3 first_vec;
  726. for (int i = 0; i < polygon_vertex_count; i += 1) {
  727. const Vector3 p1 = p_vertices[surface->vertices_map[p_polygon_vertex[i]]];
  728. const Vector3 p2 = p_vertices[surface->vertices_map[p_polygon_vertex[(i + 1) % polygon_vertex_count]]];
  729. const Vector3 p3 = p_vertices[surface->vertices_map[p_polygon_vertex[(i + 2) % polygon_vertex_count]]];
  730. const Vector3 edge1 = p1 - p2;
  731. const Vector3 edge2 = p3 - p2;
  732. const Vector3 res = edge1.normalized().cross(edge2.normalized()).normalized();
  733. if (i == 0) {
  734. first_vec = res;
  735. } else {
  736. if (first_vec.dot(res) < 0.0) {
  737. // Ok we found an angle that is not the same dir of the
  738. // others.
  739. is_simple_convex = false;
  740. break;
  741. }
  742. }
  743. }
  744. }
  745. if (is_simple_convex) {
  746. // This is a convex polygon, so just triangulate it.
  747. for (int i = 0; i < (polygon_vertex_count - 2); i += 1) {
  748. st->add_index(p_polygon_vertex[2 + i]);
  749. st->add_index(p_polygon_vertex[1 + i]);
  750. st->add_index(p_polygon_vertex[0]);
  751. }
  752. return;
  753. }
  754. }
  755. {
  756. // This is a concave polygon.
  757. std::vector<Vector3> poly_vertices(polygon_vertex_count);
  758. for (int i = 0; i < polygon_vertex_count; i += 1) {
  759. poly_vertices[i] = p_vertices[surface->vertices_map[p_polygon_vertex[i]]];
  760. }
  761. const Vector3 poly_norm = get_poly_normal(poly_vertices);
  762. if (poly_norm.length_squared() <= CMP_EPSILON) {
  763. ERR_FAIL_COND_MSG(poly_norm.length_squared() <= CMP_EPSILON, "The normal of this poly was not computed. Is this FBX file corrupted.");
  764. }
  765. // Select the plan coordinate.
  766. int axis_1_coord = 0;
  767. int axis_2_coord = 1;
  768. {
  769. real_t inv = poly_norm.z;
  770. const real_t axis_x = ABS(poly_norm.x);
  771. const real_t axis_y = ABS(poly_norm.y);
  772. const real_t axis_z = ABS(poly_norm.z);
  773. if (axis_x > axis_y) {
  774. if (axis_x > axis_z) {
  775. // For the most part the normal point toward X.
  776. axis_1_coord = 1;
  777. axis_2_coord = 2;
  778. inv = poly_norm.x;
  779. }
  780. } else if (axis_y > axis_z) {
  781. // For the most part the normal point toward Y.
  782. axis_1_coord = 2;
  783. axis_2_coord = 0;
  784. inv = poly_norm.y;
  785. }
  786. // Swap projection axes to take the negated projection vector into account
  787. if (inv < 0.0f) {
  788. SWAP(axis_1_coord, axis_2_coord);
  789. }
  790. }
  791. TPPLPoly tppl_poly;
  792. tppl_poly.Init(polygon_vertex_count);
  793. std::vector<Vector2> projected_vertices(polygon_vertex_count);
  794. for (int i = 0; i < polygon_vertex_count; i += 1) {
  795. const Vector2 pv(poly_vertices[i][axis_1_coord], poly_vertices[i][axis_2_coord]);
  796. projected_vertices[i] = pv;
  797. tppl_poly.GetPoint(i) = pv;
  798. }
  799. tppl_poly.SetOrientation(TPPL_ORIENTATION_CCW);
  800. List<TPPLPoly> out_poly;
  801. TPPLPartition tppl_partition;
  802. if (tppl_partition.Triangulate_OPT(&tppl_poly, &out_poly) == 0) { // Good result.
  803. if (tppl_partition.Triangulate_EC(&tppl_poly, &out_poly) == 0) { // Medium result.
  804. if (tppl_partition.Triangulate_MONO(&tppl_poly, &out_poly) == 0) { // Really poor result.
  805. ERR_FAIL_MSG("The triangulation of this polygon failed, please try to triangulate your mesh or check if it has broken polygons.");
  806. }
  807. }
  808. }
  809. std::vector<Vector2> tris(out_poly.size());
  810. for (List<TPPLPoly>::Element *I = out_poly.front(); I; I = I->next()) {
  811. TPPLPoly &tp = I->get();
  812. ERR_FAIL_COND_MSG(tp.GetNumPoints() != 3, "The triangulator retuned more points, how this is possible?");
  813. // Find Index
  814. for (int i = 2; i >= 0; i -= 1) {
  815. const Vector2 vertex = tp.GetPoint(i);
  816. bool done = false;
  817. // Find Index
  818. for (int y = 0; y < polygon_vertex_count; y += 1) {
  819. if ((projected_vertices[y] - vertex).length_squared() <= CMP_EPSILON) {
  820. // This seems the right vertex
  821. st->add_index(p_polygon_vertex[y]);
  822. done = true;
  823. break;
  824. }
  825. }
  826. ERR_FAIL_COND(done == false);
  827. }
  828. }
  829. }
  830. }
  831. void FBXMeshData::gen_weight_info(Ref<SurfaceTool> st, Vertex vertex_id) const {
  832. if (vertex_weights.is_empty()) {
  833. return;
  834. }
  835. if (vertex_weights.has(vertex_id)) {
  836. // Let's extract the weight info.
  837. const VertexWeightMapping *vm = vertex_weights.getptr(vertex_id);
  838. st->set_weights(vm->weights);
  839. st->set_bones(vm->bones);
  840. }
  841. }
  842. int FBXMeshData::get_vertex_from_polygon_vertex(const std::vector<int> &p_polygon_indices, int p_index) const {
  843. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  844. return -1;
  845. }
  846. const int vertex = p_polygon_indices[p_index];
  847. if (vertex >= 0) {
  848. return vertex;
  849. } else {
  850. // Negative numbers are the end of the face, reversing the bits is
  851. // possible to obtain the positive correct vertex number.
  852. return ~vertex;
  853. }
  854. }
  855. bool FBXMeshData::is_end_of_polygon(const std::vector<int> &p_polygon_indices, int p_index) const {
  856. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  857. return false;
  858. }
  859. const int vertex = p_polygon_indices[p_index];
  860. // If the index is negative this is the end of the Polygon.
  861. return vertex < 0;
  862. }
  863. bool FBXMeshData::is_start_of_polygon(const std::vector<int> &p_polygon_indices, int p_index) const {
  864. if (p_index < 0 || p_index >= (int)p_polygon_indices.size()) {
  865. return false;
  866. }
  867. if (p_index == 0) {
  868. return true;
  869. }
  870. // If the previous indices is negative this is the begin of a new Polygon.
  871. return p_polygon_indices[p_index - 1] < 0;
  872. }
  873. int FBXMeshData::count_polygons(const std::vector<int> &p_polygon_indices) const {
  874. // The negative numbers define the end of the polygon. Counting the amount of
  875. // negatives the numbers of polygons are obtained.
  876. int count = 0;
  877. for (size_t i = 0; i < p_polygon_indices.size(); i += 1) {
  878. if (p_polygon_indices[i] < 0) {
  879. count += 1;
  880. }
  881. }
  882. return count;
  883. }
  884. template <class R, class T>
  885. HashMap<int, R> FBXMeshData::extract_per_vertex_data(
  886. int p_vertex_count,
  887. const std::vector<FBXDocParser::MeshGeometry::Edge> &p_edge_map,
  888. const std::vector<int> &p_mesh_indices,
  889. const FBXDocParser::MeshGeometry::MappingData<T> &p_mapping_data,
  890. R (*collector_function)(const Vector<VertexData<T>> *p_vertex_data, R p_fall_back),
  891. R p_fall_back) const {
  892. /* When index_to_direct is set
  893. * index size is 184 ( contains index for the data array [values 0, 96] )
  894. * data size is 96 (contains uv coordinates)
  895. * this means index is simple data reduction basically
  896. */
  897. ////
  898. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_mapping_data.index.size() == 0) {
  899. print_verbose("debug count: index size: " + itos(p_mapping_data.index.size()) + ", data size: " + itos(p_mapping_data.data.size()));
  900. print_verbose("vertex indices count: " + itos(p_mesh_indices.size()));
  901. print_verbose("Edge map size: " + itos(p_edge_map.size()));
  902. }
  903. ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_mapping_data.index.size() == 0, (HashMap<int, R>()), "FBX importer needs to map correctly to this field, please specify the override index name to fix this problem!");
  904. ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index && p_mapping_data.index.size() == 0, (HashMap<int, R>()), "The FBX seems corrupted");
  905. // Aggregate vertex data.
  906. HashMap<Vertex, Vector<VertexData<T>>> aggregate_vertex_data;
  907. switch (p_mapping_data.map_type) {
  908. case FBXDocParser::MeshGeometry::MapType::none: {
  909. // No data nothing to do.
  910. return (HashMap<int, R>());
  911. }
  912. case FBXDocParser::MeshGeometry::MapType::vertex: {
  913. ERR_FAIL_COND_V_MSG(p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct, (HashMap<int, R>()), "We will support in future");
  914. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  915. // The data is mapped per vertex directly.
  916. ERR_FAIL_COND_V_MSG((int)p_mapping_data.data.size() != p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR01");
  917. for (size_t vertex_index = 0; vertex_index < p_mapping_data.data.size(); vertex_index += 1) {
  918. aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[vertex_index] });
  919. }
  920. } else {
  921. // The data is mapped per vertex using a reference.
  922. // The indices array, contains a *reference_id for each vertex.
  923. // * Note that the reference_id is the id of data into the data array.
  924. //
  925. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  926. ERR_FAIL_COND_V_MSG((int)p_mapping_data.index.size() != p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR02");
  927. for (size_t vertex_index = 0; vertex_index < p_mapping_data.index.size(); vertex_index += 1) {
  928. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[vertex_index], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR03.");
  929. aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[p_mapping_data.index[vertex_index]] });
  930. }
  931. }
  932. } break;
  933. case FBXDocParser::MeshGeometry::MapType::polygon_vertex: {
  934. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct) {
  935. // The data is mapped using each index from the indexes array then direct to the data (data reduction algorithm)
  936. ERR_FAIL_COND_V_MSG((int)p_mesh_indices.size() != (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR04");
  937. int polygon_id = -1;
  938. for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.index.size(); polygon_vertex_index += 1) {
  939. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  940. polygon_id += 1;
  941. }
  942. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  943. ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR05");
  944. ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR06");
  945. const int index_to_direct = p_mapping_data.index[polygon_vertex_index];
  946. T value = p_mapping_data.data[index_to_direct];
  947. aggregate_vertex_data[vertex_index].push_back({ polygon_id, value });
  948. }
  949. } else if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  950. // The data are mapped per polygon vertex directly.
  951. ERR_FAIL_COND_V_MSG((int)p_mesh_indices.size() != (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR04");
  952. int polygon_id = -1;
  953. for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.data.size(); polygon_vertex_index += 1) {
  954. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  955. polygon_id += 1;
  956. }
  957. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  958. ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR05");
  959. ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR06");
  960. aggregate_vertex_data[vertex_index].push_back({ polygon_id, p_mapping_data.data[polygon_vertex_index] });
  961. }
  962. } else {
  963. // The data is mapped per polygon_vertex using a reference.
  964. // The indices array, contains a *reference_id for each polygon_vertex.
  965. // * Note that the reference_id is the id of data into the data array.
  966. //
  967. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  968. ERR_FAIL_COND_V_MSG(p_mesh_indices.size() != p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR7");
  969. int polygon_id = -1;
  970. for (size_t polygon_vertex_index = 0; polygon_vertex_index < p_mapping_data.index.size(); polygon_vertex_index += 1) {
  971. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  972. polygon_id += 1;
  973. }
  974. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  975. ERR_FAIL_COND_V_MSG(vertex_index < 0, (HashMap<int, R>()), "FBX file corrupted: #ERR8");
  976. ERR_FAIL_COND_V_MSG(vertex_index >= p_vertex_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR9.");
  977. ERR_FAIL_COND_V_MSG(p_mapping_data.index[polygon_vertex_index] < 0, (HashMap<int, R>()), "FBX file seems corrupted: #ERR10.");
  978. ERR_FAIL_COND_V_MSG(p_mapping_data.index[polygon_vertex_index] >= (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR11.");
  979. aggregate_vertex_data[vertex_index].push_back({ polygon_id, p_mapping_data.data[p_mapping_data.index[polygon_vertex_index]] });
  980. }
  981. }
  982. } break;
  983. case FBXDocParser::MeshGeometry::MapType::polygon: {
  984. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  985. // The data are mapped per polygon directly.
  986. const int polygon_count = count_polygons(p_mesh_indices);
  987. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR12");
  988. // Advance each polygon vertex, each new polygon advance the polygon index.
  989. int polygon_index = -1;
  990. for (size_t polygon_vertex_index = 0;
  991. polygon_vertex_index < p_mesh_indices.size();
  992. polygon_vertex_index += 1) {
  993. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  994. polygon_index += 1;
  995. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR13");
  996. }
  997. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  998. ERR_FAIL_INDEX_V_MSG(vertex_index, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR14");
  999. aggregate_vertex_data[vertex_index].push_back({ polygon_index, p_mapping_data.data[polygon_index] });
  1000. }
  1001. ERR_FAIL_COND_V_MSG((polygon_index + 1) != polygon_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR16. Not all Polygons are present in the file.");
  1002. } else {
  1003. // The data is mapped per polygon using a reference.
  1004. // The indices array, contains a *reference_id for each polygon.
  1005. // * Note that the reference_id is the id of data into the data array.
  1006. //
  1007. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  1008. const int polygon_count = count_polygons(p_mesh_indices);
  1009. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR17");
  1010. // Advance each polygon vertex, each new polygon advance the polygon index.
  1011. int polygon_index = -1;
  1012. for (size_t polygon_vertex_index = 0;
  1013. polygon_vertex_index < p_mesh_indices.size();
  1014. polygon_vertex_index += 1) {
  1015. if (is_start_of_polygon(p_mesh_indices, polygon_vertex_index)) {
  1016. polygon_index += 1;
  1017. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR18");
  1018. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[polygon_index], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR19");
  1019. }
  1020. const int vertex_index = get_vertex_from_polygon_vertex(p_mesh_indices, polygon_vertex_index);
  1021. ERR_FAIL_INDEX_V_MSG(vertex_index, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR20");
  1022. aggregate_vertex_data[vertex_index].push_back({ polygon_index, p_mapping_data.data[p_mapping_data.index[polygon_index]] });
  1023. }
  1024. ERR_FAIL_COND_V_MSG((polygon_index + 1) != polygon_count, (HashMap<int, R>()), "FBX file seems corrupted: #ERR22. Not all Polygons are present in the file.");
  1025. }
  1026. } break;
  1027. case FBXDocParser::MeshGeometry::MapType::edge: {
  1028. if (p_mapping_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  1029. // The data are mapped per edge directly.
  1030. ERR_FAIL_COND_V_MSG(p_edge_map.size() != p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR23");
  1031. for (size_t edge_index = 0; edge_index < p_mapping_data.data.size(); edge_index += 1) {
  1032. const FBXDocParser::MeshGeometry::Edge edge = FBXDocParser::MeshGeometry::get_edge(p_edge_map, edge_index);
  1033. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR24");
  1034. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR25");
  1035. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR26");
  1036. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR27");
  1037. aggregate_vertex_data[edge.vertex_0].push_back({ -1, p_mapping_data.data[edge_index] });
  1038. aggregate_vertex_data[edge.vertex_1].push_back({ -1, p_mapping_data.data[edge_index] });
  1039. }
  1040. } else {
  1041. // The data is mapped per edge using a reference.
  1042. // The indices array, contains a *reference_id for each polygon.
  1043. // * Note that the reference_id is the id of data into the data array.
  1044. //
  1045. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  1046. ERR_FAIL_COND_V_MSG(p_edge_map.size() != p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file seems corrupted: #ERR28");
  1047. for (size_t edge_index = 0; edge_index < p_mapping_data.data.size(); edge_index += 1) {
  1048. const FBXDocParser::MeshGeometry::Edge edge = FBXDocParser::MeshGeometry::get_edge(p_edge_map, edge_index);
  1049. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR29");
  1050. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, p_vertex_count, (HashMap<int, R>()), "FBX file corrupted: #ERR30");
  1051. ERR_FAIL_INDEX_V_MSG(edge.vertex_0, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR31");
  1052. ERR_FAIL_INDEX_V_MSG(edge.vertex_1, (int)p_mapping_data.index.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR32");
  1053. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[edge.vertex_0], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR33");
  1054. ERR_FAIL_INDEX_V_MSG(p_mapping_data.index[edge.vertex_1], (int)p_mapping_data.data.size(), (HashMap<int, R>()), "FBX file corrupted: #ERR34");
  1055. aggregate_vertex_data[edge.vertex_0].push_back({ -1, p_mapping_data.data[p_mapping_data.index[edge_index]] });
  1056. aggregate_vertex_data[edge.vertex_1].push_back({ -1, p_mapping_data.data[p_mapping_data.index[edge_index]] });
  1057. }
  1058. }
  1059. } break;
  1060. case FBXDocParser::MeshGeometry::MapType::all_the_same: {
  1061. // No matter the mode, no matter the data size; The first always win
  1062. // and is set to all the vertices.
  1063. ERR_FAIL_COND_V_MSG(p_mapping_data.data.size() <= 0, (HashMap<int, R>()), "FBX file seems corrupted: #ERR35");
  1064. if (p_mapping_data.data.size() > 0) {
  1065. for (int vertex_index = 0; vertex_index < p_vertex_count; vertex_index += 1) {
  1066. aggregate_vertex_data[vertex_index].push_back({ -1, p_mapping_data.data[0] });
  1067. }
  1068. }
  1069. } break;
  1070. }
  1071. if (aggregate_vertex_data.size() == 0) {
  1072. return (HashMap<int, R>());
  1073. }
  1074. // A map is used because turns out that the some FBX file are not well organized
  1075. // with vertices well compacted. Using a map allows avoid those issues.
  1076. HashMap<Vertex, R> result;
  1077. // Aggregate the collected data.
  1078. for (const Vertex *index = aggregate_vertex_data.next(nullptr); index != nullptr; index = aggregate_vertex_data.next(index)) {
  1079. Vector<VertexData<T>> *aggregated_vertex = aggregate_vertex_data.getptr(*index);
  1080. // This can't be null because we are just iterating.
  1081. CRASH_COND(aggregated_vertex == nullptr);
  1082. ERR_FAIL_INDEX_V_MSG(0, aggregated_vertex->size(), (HashMap<int, R>()), "The FBX file is corrupted, No valid data for this vertex index.");
  1083. result[*index] = collector_function(aggregated_vertex, p_fall_back);
  1084. }
  1085. // Sanitize the data now, if the file is broken we can try import it anyway.
  1086. bool problem_found = false;
  1087. for (size_t i = 0; i < p_mesh_indices.size(); i += 1) {
  1088. const Vertex vertex = get_vertex_from_polygon_vertex(p_mesh_indices, i);
  1089. if (result.has(vertex) == false) {
  1090. result[vertex] = p_fall_back;
  1091. problem_found = true;
  1092. }
  1093. }
  1094. if (problem_found) {
  1095. WARN_PRINT("Some data is missing, this FBX file may be corrupted: #WARN0.");
  1096. }
  1097. return result;
  1098. }
  1099. template <class T>
  1100. HashMap<int, T> FBXMeshData::extract_per_polygon(
  1101. int p_vertex_count,
  1102. const std::vector<int> &p_polygon_indices,
  1103. const FBXDocParser::MeshGeometry::MappingData<T> &p_fbx_data,
  1104. T p_fallback_value) const {
  1105. ERR_FAIL_COND_V_MSG(p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct && p_fbx_data.data.size() == 0, (HashMap<int, T>()), "invalid index to direct array");
  1106. ERR_FAIL_COND_V_MSG(p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index && p_fbx_data.index.size() == 0, (HashMap<int, T>()), "The FBX seems corrupted");
  1107. const int polygon_count = count_polygons(p_polygon_indices);
  1108. // Aggregate vertex data.
  1109. HashMap<int, Vector<T>> aggregate_polygon_data;
  1110. switch (p_fbx_data.map_type) {
  1111. case FBXDocParser::MeshGeometry::MapType::none: {
  1112. // No data nothing to do.
  1113. return (HashMap<int, T>());
  1114. }
  1115. case FBXDocParser::MeshGeometry::MapType::vertex: {
  1116. ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per vertex. This should not happen.");
  1117. } break;
  1118. case FBXDocParser::MeshGeometry::MapType::polygon_vertex: {
  1119. ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per polygon vertex. This should not happen.");
  1120. } break;
  1121. case FBXDocParser::MeshGeometry::MapType::polygon: {
  1122. if (p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::index_to_direct) {
  1123. // The data is stored efficiently index_to_direct allows less data in the FBX file.
  1124. for (int polygon_index = 0;
  1125. polygon_index < polygon_count;
  1126. polygon_index += 1) {
  1127. if (p_fbx_data.index.size() == 0) {
  1128. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR62");
  1129. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[polygon_index]);
  1130. } else {
  1131. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR62");
  1132. const int index_to_direct = p_fbx_data.index[polygon_index];
  1133. T value = p_fbx_data.data[index_to_direct];
  1134. aggregate_polygon_data[polygon_index].push_back(value);
  1135. }
  1136. }
  1137. } else if (p_fbx_data.ref_type == FBXDocParser::MeshGeometry::ReferenceType::direct) {
  1138. // The data are mapped per polygon directly.
  1139. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR51");
  1140. // Advance each polygon vertex, each new polygon advance the polygon index.
  1141. for (int polygon_index = 0;
  1142. polygon_index < polygon_count;
  1143. polygon_index += 1) {
  1144. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR52");
  1145. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[polygon_index]);
  1146. }
  1147. } else {
  1148. // The data is mapped per polygon using a reference.
  1149. // The indices array, contains a *reference_id for each polygon.
  1150. // * Note that the reference_id is the id of data into the data array.
  1151. //
  1152. // https://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_layer_element_html
  1153. ERR_FAIL_COND_V_MSG(polygon_count != (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file seems corrupted: #ERR52");
  1154. // Advance each polygon vertex, each new polygon advance the polygon index.
  1155. for (int polygon_index = 0;
  1156. polygon_index < polygon_count;
  1157. polygon_index += 1) {
  1158. ERR_FAIL_INDEX_V_MSG(polygon_index, (int)p_fbx_data.index.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR53");
  1159. ERR_FAIL_INDEX_V_MSG(p_fbx_data.index[polygon_index], (int)p_fbx_data.data.size(), (HashMap<int, T>()), "FBX file is corrupted: #ERR54");
  1160. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[p_fbx_data.index[polygon_index]]);
  1161. }
  1162. }
  1163. } break;
  1164. case FBXDocParser::MeshGeometry::MapType::edge: {
  1165. ERR_FAIL_V_MSG((HashMap<int, T>()), "This data can't be extracted and organized per polygon, since into the FBX is mapped per edge. This should not happen.");
  1166. } break;
  1167. case FBXDocParser::MeshGeometry::MapType::all_the_same: {
  1168. // No matter the mode, no matter the data size; The first always win
  1169. // and is set to all the vertices.
  1170. ERR_FAIL_COND_V_MSG(p_fbx_data.data.size() <= 0, (HashMap<int, T>()), "FBX file seems corrupted: #ERR55");
  1171. if (p_fbx_data.data.size() > 0) {
  1172. for (int polygon_index = 0; polygon_index < polygon_count; polygon_index += 1) {
  1173. aggregate_polygon_data[polygon_index].push_back(p_fbx_data.data[0]);
  1174. }
  1175. }
  1176. } break;
  1177. }
  1178. if (aggregate_polygon_data.size() == 0) {
  1179. return (HashMap<int, T>());
  1180. }
  1181. // A map is used because turns out that the some FBX file are not well organized
  1182. // with vertices well compacted. Using a map allows avoid those issues.
  1183. HashMap<int, T> polygons;
  1184. // Take the first value for each vertex.
  1185. for (const Vertex *index = aggregate_polygon_data.next(nullptr); index != nullptr; index = aggregate_polygon_data.next(index)) {
  1186. Vector<T> *aggregated_polygon = aggregate_polygon_data.getptr(*index);
  1187. // This can't be null because we are just iterating.
  1188. CRASH_COND(aggregated_polygon == nullptr);
  1189. ERR_FAIL_INDEX_V_MSG(0, (int)aggregated_polygon->size(), (HashMap<int, T>()), "The FBX file is corrupted, No valid data for this polygon index.");
  1190. // Validate the final value.
  1191. polygons[*index] = (*aggregated_polygon)[0];
  1192. }
  1193. // Sanitize the data now, if the file is broken we can try import it anyway.
  1194. bool problem_found = false;
  1195. for (int polygon_i = 0; polygon_i < polygon_count; polygon_i += 1) {
  1196. if (polygons.has(polygon_i) == false) {
  1197. polygons[polygon_i] = p_fallback_value;
  1198. problem_found = true;
  1199. }
  1200. }
  1201. if (problem_found) {
  1202. WARN_PRINT("Some data is missing, this FBX file may be corrupted: #WARN1.");
  1203. }
  1204. return polygons;
  1205. }
  1206. void FBXMeshData::extract_morphs(const FBXDocParser::MeshGeometry *mesh_geometry, HashMap<String, MorphVertexData> &r_data) {
  1207. r_data.clear();
  1208. const int vertex_count = mesh_geometry->get_vertices().size();
  1209. for (const FBXDocParser::BlendShape *blend_shape : mesh_geometry->get_blend_shapes()) {
  1210. for (const FBXDocParser::BlendShapeChannel *blend_shape_channel : blend_shape->BlendShapeChannels()) {
  1211. const std::vector<const FBXDocParser::ShapeGeometry *> &shape_geometries = blend_shape_channel->GetShapeGeometries();
  1212. for (const FBXDocParser::ShapeGeometry *shape_geometry : shape_geometries) {
  1213. String morph_name = ImportUtils::FBXAnimMeshName(shape_geometry->Name()).c_str();
  1214. if (morph_name.is_empty()) {
  1215. morph_name = "morph";
  1216. }
  1217. // TODO we have only these??
  1218. const std::vector<unsigned int> &morphs_vertex_indices = shape_geometry->GetIndices();
  1219. const std::vector<Vector3> &morphs_vertices = shape_geometry->GetVertices();
  1220. const std::vector<Vector3> &morphs_normals = shape_geometry->GetNormals();
  1221. ERR_FAIL_COND_MSG((int)morphs_vertex_indices.size() > vertex_count, "The FBX file is corrupted: #ERR103");
  1222. ERR_FAIL_COND_MSG(morphs_vertex_indices.size() != morphs_vertices.size(), "The FBX file is corrupted: #ERR104");
  1223. ERR_FAIL_COND_MSG((int)morphs_vertices.size() > vertex_count, "The FBX file is corrupted: #ERR105");
  1224. ERR_FAIL_COND_MSG(morphs_normals.size() != 0 && morphs_normals.size() != morphs_vertices.size(), "The FBX file is corrupted: #ERR106");
  1225. if (r_data.has(morph_name) == false) {
  1226. // This morph doesn't exist yet.
  1227. // Create it.
  1228. MorphVertexData md;
  1229. md.vertices.resize(vertex_count);
  1230. md.normals.resize(vertex_count);
  1231. r_data.set(morph_name, md);
  1232. }
  1233. MorphVertexData *data = r_data.getptr(morph_name);
  1234. Vector3 *data_vertices_ptr = data->vertices.ptrw();
  1235. Vector3 *data_normals_ptr = data->normals.ptrw();
  1236. for (int i = 0; i < (int)morphs_vertex_indices.size(); i += 1) {
  1237. const Vertex vertex = morphs_vertex_indices[i];
  1238. ERR_FAIL_INDEX_MSG(vertex, vertex_count, "The blend shapes of this FBX file are corrupted. It has a not valid vertex.");
  1239. data_vertices_ptr[vertex] = morphs_vertices[i];
  1240. if (morphs_normals.size() != 0) {
  1241. data_normals_ptr[vertex] = morphs_normals[i];
  1242. }
  1243. }
  1244. }
  1245. }
  1246. }
  1247. }