vulkan_context.cpp 61 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668
  1. /*************************************************************************/
  2. /* vulkan_context.cpp */
  3. /*************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /*************************************************************************/
  8. /* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur. */
  9. /* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /*************************************************************************/
  30. #include "vulkan_context.h"
  31. #include "core/config/engine.h"
  32. #include "core/config/project_settings.h"
  33. #include "core/string/ustring.h"
  34. #include "core/version.h"
  35. #include "vk_enum_string_helper.h"
  36. #include <stdio.h>
  37. #include <stdlib.h>
  38. #include <string.h>
  39. #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
  40. #define APP_SHORT_NAME "GodotEngine"
  41. VKAPI_ATTR VkBool32 VKAPI_CALL VulkanContext::_debug_messenger_callback(
  42. VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
  43. VkDebugUtilsMessageTypeFlagsEXT messageType,
  44. const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
  45. void *pUserData) {
  46. // This error needs to be ignored because the AMD allocator will mix up memory types on IGP processors.
  47. if (strstr(pCallbackData->pMessage, "Mapping an image with layout") != nullptr &&
  48. strstr(pCallbackData->pMessage, "can result in undefined behavior if this memory is used by the device") != nullptr) {
  49. return VK_FALSE;
  50. }
  51. // This needs to be ignored because Validator is wrong here.
  52. if (strstr(pCallbackData->pMessage, "Invalid SPIR-V binary version 1.3") != nullptr) {
  53. return VK_FALSE;
  54. }
  55. // This needs to be ignored because Validator is wrong here.
  56. if (strstr(pCallbackData->pMessage, "Shader requires flag") != nullptr) {
  57. return VK_FALSE;
  58. }
  59. // This needs to be ignored because Validator is wrong here.
  60. if (strstr(pCallbackData->pMessage, "SPIR-V module not valid: Pointer operand") != nullptr &&
  61. strstr(pCallbackData->pMessage, "must be a memory object") != nullptr) {
  62. return VK_FALSE;
  63. }
  64. /*
  65. // This is a valid warning because its illegal in Vulkan, but in practice it should work according to VK_KHR_maintenance2
  66. if (strstr(pCallbackData->pMessage, "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes VK_IMAGE_USAGE_STORAGE_BIT") != nullptr) {
  67. return VK_FALSE;
  68. }
  69. if (strstr(pCallbackData->pMessage, "VK_FORMAT_R4G4B4A4_UNORM_PACK16 with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes VK_IMAGE_USAGE_STORAGE_BIT") != nullptr) {
  70. return VK_FALSE;
  71. }
  72. */
  73. // Workaround for Vulkan-Loader usability bug: https://github.com/KhronosGroup/Vulkan-Loader/issues/262.
  74. if (strstr(pCallbackData->pMessage, "wrong ELF class: ELFCLASS32") != nullptr) {
  75. return VK_FALSE;
  76. }
  77. if (pCallbackData->pMessageIdName && strstr(pCallbackData->pMessageIdName, "UNASSIGNED-CoreValidation-DrawState-ClearCmdBeforeDraw") != nullptr) {
  78. return VK_FALSE;
  79. }
  80. String type_string;
  81. switch (messageType) {
  82. case (VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT):
  83. type_string = "GENERAL";
  84. break;
  85. case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT):
  86. type_string = "VALIDATION";
  87. break;
  88. case (VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
  89. type_string = "PERFORMANCE";
  90. break;
  91. case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
  92. type_string = "VALIDATION|PERFORMANCE";
  93. break;
  94. }
  95. String objects_string;
  96. if (pCallbackData->objectCount > 0) {
  97. objects_string = "\n\tObjects - " + String::num_int64(pCallbackData->objectCount);
  98. for (uint32_t object = 0; object < pCallbackData->objectCount; ++object) {
  99. objects_string +=
  100. "\n\t\tObject[" + String::num_int64(object) + "]" +
  101. " - " + string_VkObjectType(pCallbackData->pObjects[object].objectType) +
  102. ", Handle " + String::num_int64(pCallbackData->pObjects[object].objectHandle);
  103. if (nullptr != pCallbackData->pObjects[object].pObjectName && strlen(pCallbackData->pObjects[object].pObjectName) > 0) {
  104. objects_string += ", Name \"" + String(pCallbackData->pObjects[object].pObjectName) + "\"";
  105. }
  106. }
  107. }
  108. String labels_string;
  109. if (pCallbackData->cmdBufLabelCount > 0) {
  110. labels_string = "\n\tCommand Buffer Labels - " + String::num_int64(pCallbackData->cmdBufLabelCount);
  111. for (uint32_t cmd_buf_label = 0; cmd_buf_label < pCallbackData->cmdBufLabelCount; ++cmd_buf_label) {
  112. labels_string +=
  113. "\n\t\tLabel[" + String::num_int64(cmd_buf_label) + "]" +
  114. " - " + pCallbackData->pCmdBufLabels[cmd_buf_label].pLabelName +
  115. "{ ";
  116. for (int color_idx = 0; color_idx < 4; ++color_idx) {
  117. labels_string += String::num(pCallbackData->pCmdBufLabels[cmd_buf_label].color[color_idx]);
  118. if (color_idx < 3) {
  119. labels_string += ", ";
  120. }
  121. }
  122. labels_string += " }";
  123. }
  124. }
  125. String error_message(type_string +
  126. " - Message Id Number: " + String::num_int64(pCallbackData->messageIdNumber) +
  127. " | Message Id Name: " + pCallbackData->pMessageIdName +
  128. "\n\t" + pCallbackData->pMessage +
  129. objects_string + labels_string);
  130. // Convert VK severity to our own log macros.
  131. switch (messageSeverity) {
  132. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
  133. print_verbose(error_message);
  134. break;
  135. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
  136. print_line(error_message);
  137. break;
  138. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
  139. WARN_PRINT(error_message);
  140. break;
  141. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
  142. ERR_PRINT(error_message);
  143. CRASH_COND_MSG(Engine::get_singleton()->is_abort_on_gpu_errors_enabled(),
  144. "Crashing, because abort on GPU errors is enabled.");
  145. break;
  146. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT:
  147. break; // Shouldn't happen, only handling to make compilers happy.
  148. }
  149. return VK_FALSE;
  150. }
  151. VkBool32 VulkanContext::_check_layers(uint32_t check_count, const char **check_names, uint32_t layer_count, VkLayerProperties *layers) {
  152. for (uint32_t i = 0; i < check_count; i++) {
  153. VkBool32 found = 0;
  154. for (uint32_t j = 0; j < layer_count; j++) {
  155. if (!strcmp(check_names[i], layers[j].layerName)) {
  156. found = 1;
  157. break;
  158. }
  159. }
  160. if (!found) {
  161. WARN_PRINT("Can't find layer: " + String(check_names[i]));
  162. return 0;
  163. }
  164. }
  165. return 1;
  166. }
  167. Error VulkanContext::_create_validation_layers() {
  168. VkResult err;
  169. const char *instance_validation_layers_alt1[] = { "VK_LAYER_KHRONOS_validation" };
  170. const char *instance_validation_layers_alt2[] = { "VK_LAYER_LUNARG_standard_validation" };
  171. const char *instance_validation_layers_alt3[] = { "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation", "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation", "VK_LAYER_GOOGLE_unique_objects" };
  172. uint32_t instance_layer_count = 0;
  173. err = vkEnumerateInstanceLayerProperties(&instance_layer_count, nullptr);
  174. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  175. VkBool32 validation_found = 0;
  176. uint32_t validation_layer_count = 0;
  177. const char **instance_validation_layers = nullptr;
  178. if (instance_layer_count > 0) {
  179. VkLayerProperties *instance_layers = (VkLayerProperties *)malloc(sizeof(VkLayerProperties) * instance_layer_count);
  180. err = vkEnumerateInstanceLayerProperties(&instance_layer_count, instance_layers);
  181. if (err) {
  182. free(instance_layers);
  183. ERR_FAIL_V(ERR_CANT_CREATE);
  184. }
  185. validation_layer_count = ARRAY_SIZE(instance_validation_layers_alt1);
  186. instance_validation_layers = instance_validation_layers_alt1;
  187. validation_found = _check_layers(validation_layer_count, instance_validation_layers, instance_layer_count, instance_layers);
  188. // use alternative (deprecated, removed in SDK 1.1.126.0) set of validation layers
  189. if (!validation_found) {
  190. validation_layer_count = ARRAY_SIZE(instance_validation_layers_alt2);
  191. instance_validation_layers = instance_validation_layers_alt2;
  192. validation_found = _check_layers(validation_layer_count, instance_validation_layers, instance_layer_count, instance_layers);
  193. }
  194. // use alternative (deprecated, removed in SDK 1.1.121.1) set of validation layers
  195. if (!validation_found) {
  196. validation_layer_count = ARRAY_SIZE(instance_validation_layers_alt3);
  197. instance_validation_layers = instance_validation_layers_alt3;
  198. validation_found = _check_layers(validation_layer_count, instance_validation_layers, instance_layer_count, instance_layers);
  199. }
  200. free(instance_layers);
  201. }
  202. if (validation_found) {
  203. enabled_layer_count = validation_layer_count;
  204. for (uint32_t i = 0; i < validation_layer_count; i++) {
  205. enabled_layers[i] = instance_validation_layers[i];
  206. }
  207. } else {
  208. return ERR_CANT_CREATE;
  209. }
  210. return OK;
  211. }
// Enumerates the available instance-level extensions and records the ones the
// engine needs (generic + platform surface, plus the debug extensions when
// present) into extension_names / enabled_extension_count.
// Fails with ERR_CANT_CREATE when either mandatory surface extension is missing.
Error VulkanContext::_initialize_extensions() {
	uint32_t instance_extension_count = 0;

	// Reset extension/layer bookkeeping before detection.
	// NOTE(review): this also clears enabled_layer_count, so validation layers
	// must be registered *after* this call — verify against the call order in
	// _create_physical_device().
	enabled_extension_count = 0;
	enabled_layer_count = 0;
	enabled_debug_utils = false;
	/* Look for instance extensions */
	VkBool32 surfaceExtFound = 0;
	VkBool32 platformSurfaceExtFound = 0;
	memset(extension_names, 0, sizeof(extension_names));

	// First call with a null array only retrieves the count. VK_INCOMPLETE is
	// tolerated here and below: it just means the returned list was truncated.
	VkResult err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, nullptr);
	ERR_FAIL_COND_V(err != VK_SUCCESS && err != VK_INCOMPLETE, ERR_CANT_CREATE);

	if (instance_extension_count > 0) {
		// Second call fills the actual extension properties.
		VkExtensionProperties *instance_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * instance_extension_count);
		err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions);
		if (err != VK_SUCCESS && err != VK_INCOMPLETE) {
			free(instance_extensions);
			ERR_FAIL_V(ERR_CANT_CREATE);
		}
		for (uint32_t i = 0; i < instance_extension_count; i++) {
			// Generic surface extension (mandatory).
			if (!strcmp(VK_KHR_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
				surfaceExtFound = 1;
				extension_names[enabled_extension_count++] = VK_KHR_SURFACE_EXTENSION_NAME;
			}
			// OS-specific surface extension (mandatory), supplied by the platform subclass.
			if (!strcmp(_get_platform_surface_extension(), instance_extensions[i].extensionName)) {
				platformSurfaceExtFound = 1;
				extension_names[enabled_extension_count++] = _get_platform_surface_extension();
			}
			// Legacy debug-report extension, only requested alongside validation layers.
			if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instance_extensions[i].extensionName)) {
				if (use_validation_layers) {
					extension_names[enabled_extension_count++] = VK_EXT_DEBUG_REPORT_EXTENSION_NAME;
				}
			}
			// Debug-utils is enabled whenever present (also used for labels/object names).
			if (!strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instance_extensions[i].extensionName)) {
				extension_names[enabled_extension_count++] = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
				enabled_debug_utils = true;
			}
			// At most one name is appended per iteration, so checking after the
			// appends keeps enabled_extension_count within bounds.
			if (enabled_extension_count >= MAX_EXTENSIONS) {
				free(instance_extensions);
				ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
			}
		}

		free(instance_extensions);
	}

	ERR_FAIL_COND_V_MSG(!surfaceExtFound, ERR_CANT_CREATE, "No surface extension found, is a driver installed?");
	ERR_FAIL_COND_V_MSG(!platformSurfaceExtFound, ERR_CANT_CREATE, "No platform surface extension found, is a driver installed?");

	return OK;
}
  259. Error VulkanContext::_create_physical_device() {
  260. /* Look for validation layers */
  261. if (use_validation_layers) {
  262. _create_validation_layers();
  263. }
  264. {
  265. Error err = _initialize_extensions();
  266. if (err != OK) {
  267. return err;
  268. }
  269. }
  270. CharString cs = ProjectSettings::get_singleton()->get("application/config/name").operator String().utf8();
  271. String name = "GodotEngine " + String(VERSION_FULL_NAME);
  272. CharString namecs = name.utf8();
  273. const VkApplicationInfo app = {
  274. /*sType*/ VK_STRUCTURE_TYPE_APPLICATION_INFO,
  275. /*pNext*/ nullptr,
  276. /*pApplicationName*/ cs.get_data(),
  277. /*applicationVersion*/ 0,
  278. /*pEngineName*/ namecs.get_data(),
  279. /*engineVersion*/ 0,
  280. /*apiVersion*/ VK_API_VERSION_1_0,
  281. };
  282. VkInstanceCreateInfo inst_info = {
  283. /*sType*/ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
  284. /*pNext*/ nullptr,
  285. /*flags*/ 0,
  286. /*pApplicationInfo*/ &app,
  287. /*enabledLayerCount*/ enabled_layer_count,
  288. /*ppEnabledLayerNames*/ (const char *const *)enabled_layers,
  289. /*enabledExtensionCount*/ enabled_extension_count,
  290. /*ppEnabledExtensionNames*/ (const char *const *)extension_names,
  291. };
  292. /*
  293. * This is info for a temp callback to use during CreateInstance.
  294. * After the instance is created, we use the instance-based
  295. * function to register the final callback.
  296. */
  297. VkDebugUtilsMessengerCreateInfoEXT dbg_messenger_create_info;
  298. if (use_validation_layers) {
  299. // VK_EXT_debug_utils style
  300. dbg_messenger_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
  301. dbg_messenger_create_info.pNext = nullptr;
  302. dbg_messenger_create_info.flags = 0;
  303. dbg_messenger_create_info.messageSeverity =
  304. VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
  305. dbg_messenger_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
  306. VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
  307. VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
  308. dbg_messenger_create_info.pfnUserCallback = _debug_messenger_callback;
  309. dbg_messenger_create_info.pUserData = this;
  310. inst_info.pNext = &dbg_messenger_create_info;
  311. }
  312. uint32_t gpu_count;
  313. VkResult err = vkCreateInstance(&inst_info, nullptr, &inst);
  314. ERR_FAIL_COND_V_MSG(err == VK_ERROR_INCOMPATIBLE_DRIVER, ERR_CANT_CREATE,
  315. "Cannot find a compatible Vulkan installable client driver (ICD).\n\n"
  316. "vkCreateInstance Failure");
  317. ERR_FAIL_COND_V_MSG(err == VK_ERROR_EXTENSION_NOT_PRESENT, ERR_CANT_CREATE,
  318. "Cannot find a specified extension library.\n"
  319. "Make sure your layers path is set appropriately.\n"
  320. "vkCreateInstance Failure");
  321. ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE,
  322. "vkCreateInstance failed.\n\n"
  323. "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
  324. "Please look at the Getting Started guide for additional information.\n"
  325. "vkCreateInstance Failure");
  326. inst_initialized = true;
  327. /* Make initial call to query gpu_count, then second call for gpu info*/
  328. err = vkEnumeratePhysicalDevices(inst, &gpu_count, nullptr);
  329. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  330. ERR_FAIL_COND_V_MSG(gpu_count == 0, ERR_CANT_CREATE,
  331. "vkEnumeratePhysicalDevices reported zero accessible devices.\n\n"
  332. "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
  333. "vkEnumeratePhysicalDevices Failure");
  334. VkPhysicalDevice *physical_devices = (VkPhysicalDevice *)malloc(sizeof(VkPhysicalDevice) * gpu_count);
  335. err = vkEnumeratePhysicalDevices(inst, &gpu_count, physical_devices);
  336. if (err) {
  337. free(physical_devices);
  338. ERR_FAIL_V(ERR_CANT_CREATE);
  339. }
  340. /* for now, just grab the first physical device */
  341. gpu = physical_devices[0];
  342. free(physical_devices);
  343. /* Look for device extensions */
  344. uint32_t device_extension_count = 0;
  345. VkBool32 swapchainExtFound = 0;
  346. enabled_extension_count = 0;
  347. memset(extension_names, 0, sizeof(extension_names));
  348. err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, nullptr);
  349. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  350. if (device_extension_count > 0) {
  351. VkExtensionProperties *device_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * device_extension_count);
  352. err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, device_extensions);
  353. if (err) {
  354. free(device_extensions);
  355. ERR_FAIL_V(ERR_CANT_CREATE);
  356. }
  357. for (uint32_t i = 0; i < device_extension_count; i++) {
  358. if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME, device_extensions[i].extensionName)) {
  359. swapchainExtFound = 1;
  360. extension_names[enabled_extension_count++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
  361. }
  362. if (enabled_extension_count >= MAX_EXTENSIONS) {
  363. free(device_extensions);
  364. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  365. }
  366. }
  367. if (VK_KHR_incremental_present_enabled) {
  368. // Even though the user "enabled" the extension via the command
  369. // line, we must make sure that it's enumerated for use with the
  370. // device. Therefore, disable it here, and re-enable it again if
  371. // enumerated.
  372. VK_KHR_incremental_present_enabled = false;
  373. for (uint32_t i = 0; i < device_extension_count; i++) {
  374. if (!strcmp(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, device_extensions[i].extensionName)) {
  375. extension_names[enabled_extension_count++] = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME;
  376. VK_KHR_incremental_present_enabled = true;
  377. }
  378. if (enabled_extension_count >= MAX_EXTENSIONS) {
  379. free(device_extensions);
  380. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  381. }
  382. }
  383. }
  384. if (VK_GOOGLE_display_timing_enabled) {
  385. // Even though the user "enabled" the extension via the command
  386. // line, we must make sure that it's enumerated for use with the
  387. // device. Therefore, disable it here, and re-enable it again if
  388. // enumerated.
  389. VK_GOOGLE_display_timing_enabled = false;
  390. for (uint32_t i = 0; i < device_extension_count; i++) {
  391. if (!strcmp(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, device_extensions[i].extensionName)) {
  392. extension_names[enabled_extension_count++] = VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME;
  393. VK_GOOGLE_display_timing_enabled = true;
  394. }
  395. if (enabled_extension_count >= MAX_EXTENSIONS) {
  396. free(device_extensions);
  397. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  398. }
  399. }
  400. }
  401. free(device_extensions);
  402. }
  403. ERR_FAIL_COND_V_MSG(!swapchainExtFound, ERR_CANT_CREATE,
  404. "vkEnumerateDeviceExtensionProperties failed to find the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
  405. " extension.\n\nDo you have a compatible Vulkan installable client driver (ICD) installed?\n"
  406. "vkCreateInstance Failure");
  407. if (enabled_debug_utils) {
  408. // Setup VK_EXT_debug_utils function pointers always (we use them for
  409. // debug labels and names).
  410. CreateDebugUtilsMessengerEXT =
  411. (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkCreateDebugUtilsMessengerEXT");
  412. DestroyDebugUtilsMessengerEXT =
  413. (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkDestroyDebugUtilsMessengerEXT");
  414. SubmitDebugUtilsMessageEXT =
  415. (PFN_vkSubmitDebugUtilsMessageEXT)vkGetInstanceProcAddr(inst, "vkSubmitDebugUtilsMessageEXT");
  416. CmdBeginDebugUtilsLabelEXT =
  417. (PFN_vkCmdBeginDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdBeginDebugUtilsLabelEXT");
  418. CmdEndDebugUtilsLabelEXT =
  419. (PFN_vkCmdEndDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdEndDebugUtilsLabelEXT");
  420. CmdInsertDebugUtilsLabelEXT =
  421. (PFN_vkCmdInsertDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdInsertDebugUtilsLabelEXT");
  422. SetDebugUtilsObjectNameEXT =
  423. (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(inst, "vkSetDebugUtilsObjectNameEXT");
  424. if (nullptr == CreateDebugUtilsMessengerEXT || nullptr == DestroyDebugUtilsMessengerEXT ||
  425. nullptr == SubmitDebugUtilsMessageEXT || nullptr == CmdBeginDebugUtilsLabelEXT ||
  426. nullptr == CmdEndDebugUtilsLabelEXT || nullptr == CmdInsertDebugUtilsLabelEXT ||
  427. nullptr == SetDebugUtilsObjectNameEXT) {
  428. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  429. "GetProcAddr: Failed to init VK_EXT_debug_utils\n"
  430. "GetProcAddr: Failure");
  431. }
  432. err = CreateDebugUtilsMessengerEXT(inst, &dbg_messenger_create_info, nullptr, &dbg_messenger);
  433. switch (err) {
  434. case VK_SUCCESS:
  435. break;
  436. case VK_ERROR_OUT_OF_HOST_MEMORY:
  437. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  438. "CreateDebugUtilsMessengerEXT: out of host memory\n"
  439. "CreateDebugUtilsMessengerEXT Failure");
  440. break;
  441. default:
  442. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  443. "CreateDebugUtilsMessengerEXT: unknown failure\n"
  444. "CreateDebugUtilsMessengerEXT Failure");
  445. ERR_FAIL_V(ERR_CANT_CREATE);
  446. break;
  447. }
  448. }
  449. vkGetPhysicalDeviceProperties(gpu, &gpu_props);
  450. /* Call with NULL data to get count */
  451. vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_family_count, nullptr);
  452. ERR_FAIL_COND_V(queue_family_count == 0, ERR_CANT_CREATE);
  453. queue_props = (VkQueueFamilyProperties *)malloc(queue_family_count * sizeof(VkQueueFamilyProperties));
  454. vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_family_count, queue_props);
  455. // Query fine-grained feature support for this device.
  456. // If app has specific feature requirements it should check supported
  457. // features based on this query
  458. vkGetPhysicalDeviceFeatures(gpu, &physical_device_features);
  459. physical_device_features.robustBufferAccess = false; //turn off robust buffer access, which can hamper performance on some hardware
  460. #define GET_INSTANCE_PROC_ADDR(inst, entrypoint) \
  461. { \
  462. fp##entrypoint = (PFN_vk##entrypoint)vkGetInstanceProcAddr(inst, "vk" #entrypoint); \
  463. ERR_FAIL_COND_V_MSG(fp##entrypoint == nullptr, ERR_CANT_CREATE, \
  464. "vkGetInstanceProcAddr failed to find vk" #entrypoint); \
  465. }
  466. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceSupportKHR);
  467. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceCapabilitiesKHR);
  468. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceFormatsKHR);
  469. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfacePresentModesKHR);
  470. GET_INSTANCE_PROC_ADDR(inst, GetSwapchainImagesKHR);
  471. return OK;
  472. }
  473. Error VulkanContext::_create_device() {
  474. VkResult err;
  475. float queue_priorities[1] = { 0.0 };
  476. VkDeviceQueueCreateInfo queues[2];
  477. queues[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  478. queues[0].pNext = nullptr;
  479. queues[0].queueFamilyIndex = graphics_queue_family_index;
  480. queues[0].queueCount = 1;
  481. queues[0].pQueuePriorities = queue_priorities;
  482. queues[0].flags = 0;
  483. VkDeviceCreateInfo sdevice = {
  484. /*sType*/ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
  485. /*pNext*/ nullptr,
  486. /*flags*/ 0,
  487. /*queueCreateInfoCount*/ 1,
  488. /*pQueueCreateInfos*/ queues,
  489. /*enabledLayerCount*/ 0,
  490. /*ppEnabledLayerNames*/ nullptr,
  491. /*enabledExtensionCount*/ enabled_extension_count,
  492. /*ppEnabledExtensionNames*/ (const char *const *)extension_names,
  493. /*pEnabledFeatures*/ &physical_device_features, // If specific features are required, pass them in here
  494. };
  495. if (separate_present_queue) {
  496. queues[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  497. queues[1].pNext = nullptr;
  498. queues[1].queueFamilyIndex = present_queue_family_index;
  499. queues[1].queueCount = 1;
  500. queues[1].pQueuePriorities = queue_priorities;
  501. queues[1].flags = 0;
  502. sdevice.queueCreateInfoCount = 2;
  503. }
  504. err = vkCreateDevice(gpu, &sdevice, nullptr, &device);
  505. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  506. return OK;
  507. }
  508. Error VulkanContext::_initialize_queues(VkSurfaceKHR surface) {
  509. // Iterate over each queue to learn whether it supports presenting:
  510. VkBool32 *supportsPresent = (VkBool32 *)malloc(queue_family_count * sizeof(VkBool32));
  511. for (uint32_t i = 0; i < queue_family_count; i++) {
  512. fpGetPhysicalDeviceSurfaceSupportKHR(gpu, i, surface, &supportsPresent[i]);
  513. }
  514. // Search for a graphics and a present queue in the array of queue
  515. // families, try to find one that supports both
  516. uint32_t graphicsQueueFamilyIndex = UINT32_MAX;
  517. uint32_t presentQueueFamilyIndex = UINT32_MAX;
  518. for (uint32_t i = 0; i < queue_family_count; i++) {
  519. if ((queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
  520. if (graphicsQueueFamilyIndex == UINT32_MAX) {
  521. graphicsQueueFamilyIndex = i;
  522. }
  523. if (supportsPresent[i] == VK_TRUE) {
  524. graphicsQueueFamilyIndex = i;
  525. presentQueueFamilyIndex = i;
  526. break;
  527. }
  528. }
  529. }
  530. if (presentQueueFamilyIndex == UINT32_MAX) {
  531. // If didn't find a queue that supports both graphics and present, then
  532. // find a separate present queue.
  533. for (uint32_t i = 0; i < queue_family_count; ++i) {
  534. if (supportsPresent[i] == VK_TRUE) {
  535. presentQueueFamilyIndex = i;
  536. break;
  537. }
  538. }
  539. }
  540. free(supportsPresent);
  541. // Generate error if could not find both a graphics and a present queue
  542. ERR_FAIL_COND_V_MSG(graphicsQueueFamilyIndex == UINT32_MAX || presentQueueFamilyIndex == UINT32_MAX, ERR_CANT_CREATE,
  543. "Could not find both graphics and present queues\n");
  544. graphics_queue_family_index = graphicsQueueFamilyIndex;
  545. present_queue_family_index = presentQueueFamilyIndex;
  546. separate_present_queue = (graphics_queue_family_index != present_queue_family_index);
  547. _create_device();
  548. static PFN_vkGetDeviceProcAddr g_gdpa = nullptr;
  549. #define GET_DEVICE_PROC_ADDR(dev, entrypoint) \
  550. { \
  551. if (!g_gdpa) \
  552. g_gdpa = (PFN_vkGetDeviceProcAddr)vkGetInstanceProcAddr(inst, "vkGetDeviceProcAddr"); \
  553. fp##entrypoint = (PFN_vk##entrypoint)g_gdpa(dev, "vk" #entrypoint); \
  554. ERR_FAIL_COND_V_MSG(fp##entrypoint == nullptr, ERR_CANT_CREATE, \
  555. "vkGetDeviceProcAddr failed to find vk" #entrypoint); \
  556. }
  557. GET_DEVICE_PROC_ADDR(device, CreateSwapchainKHR);
  558. GET_DEVICE_PROC_ADDR(device, DestroySwapchainKHR);
  559. GET_DEVICE_PROC_ADDR(device, GetSwapchainImagesKHR);
  560. GET_DEVICE_PROC_ADDR(device, AcquireNextImageKHR);
  561. GET_DEVICE_PROC_ADDR(device, QueuePresentKHR);
  562. if (VK_GOOGLE_display_timing_enabled) {
  563. GET_DEVICE_PROC_ADDR(device, GetRefreshCycleDurationGOOGLE);
  564. GET_DEVICE_PROC_ADDR(device, GetPastPresentationTimingGOOGLE);
  565. }
  566. vkGetDeviceQueue(device, graphics_queue_family_index, 0, &graphics_queue);
  567. if (!separate_present_queue) {
  568. present_queue = graphics_queue;
  569. } else {
  570. vkGetDeviceQueue(device, present_queue_family_index, 0, &present_queue);
  571. }
  572. // Get the list of VkFormat's that are supported:
  573. uint32_t formatCount;
  574. VkResult err = fpGetPhysicalDeviceSurfaceFormatsKHR(gpu, surface, &formatCount, nullptr);
  575. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  576. VkSurfaceFormatKHR *surfFormats = (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
  577. err = fpGetPhysicalDeviceSurfaceFormatsKHR(gpu, surface, &formatCount, surfFormats);
  578. if (err) {
  579. free(surfFormats);
  580. ERR_FAIL_V(ERR_CANT_CREATE);
  581. }
  582. // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
  583. // the surface has no preferred format. Otherwise, at least one
  584. // supported format will be returned.
  585. if (true || (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED)) {
  586. format = VK_FORMAT_B8G8R8A8_UNORM;
  587. } else {
  588. if (formatCount < 1) {
  589. free(surfFormats);
  590. ERR_FAIL_V_MSG(ERR_CANT_CREATE, "formatCount less than 1");
  591. }
  592. format = surfFormats[0].format;
  593. }
  594. color_space = surfFormats[0].colorSpace;
  595. free(surfFormats);
  596. Error serr = _create_semaphores();
  597. if (serr) {
  598. return serr;
  599. }
  600. queues_initialized = true;
  601. return OK;
  602. }
  603. Error VulkanContext::_create_semaphores() {
  604. VkResult err;
  605. // Create semaphores to synchronize acquiring presentable buffers before
  606. // rendering and waiting for drawing to be complete before presenting
  607. VkSemaphoreCreateInfo semaphoreCreateInfo = {
  608. /*sType*/ VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
  609. /*pNext*/ nullptr,
  610. /*flags*/ 0,
  611. };
  612. // Create fences that we can use to throttle if we get too far
  613. // ahead of the image presents
  614. VkFenceCreateInfo fence_ci = {
  615. /*sType*/ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
  616. /*pNext*/ nullptr,
  617. /*flags*/ VK_FENCE_CREATE_SIGNALED_BIT
  618. };
  619. for (uint32_t i = 0; i < FRAME_LAG; i++) {
  620. err = vkCreateFence(device, &fence_ci, nullptr, &fences[i]);
  621. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  622. err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &image_acquired_semaphores[i]);
  623. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  624. err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &draw_complete_semaphores[i]);
  625. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  626. if (separate_present_queue) {
  627. err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &image_ownership_semaphores[i]);
  628. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  629. }
  630. }
  631. frame_index = 0;
  632. // Get Memory information and properties
  633. vkGetPhysicalDeviceMemoryProperties(gpu, &memory_properties);
  634. return OK;
  635. }
  636. Error VulkanContext::_window_create(DisplayServer::WindowID p_window_id, VkSurfaceKHR p_surface, int p_width, int p_height) {
  637. ERR_FAIL_COND_V(windows.has(p_window_id), ERR_INVALID_PARAMETER);
  638. if (!queues_initialized) {
  639. // We use a single GPU, but we need a surface to initialize the
  640. // queues, so this process must be deferred until a surface
  641. // is created.
  642. Error err = _initialize_queues(p_surface);
  643. ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);
  644. }
  645. Window window;
  646. window.surface = p_surface;
  647. window.width = p_width;
  648. window.height = p_height;
  649. Error err = _update_swap_chain(&window);
  650. ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);
  651. windows[p_window_id] = window;
  652. return OK;
  653. }
  654. void VulkanContext::window_resize(DisplayServer::WindowID p_window, int p_width, int p_height) {
  655. ERR_FAIL_COND(!windows.has(p_window));
  656. windows[p_window].width = p_width;
  657. windows[p_window].height = p_height;
  658. _update_swap_chain(&windows[p_window]);
  659. }
  660. int VulkanContext::window_get_width(DisplayServer::WindowID p_window) {
  661. ERR_FAIL_COND_V(!windows.has(p_window), -1);
  662. return windows[p_window].width;
  663. }
  664. int VulkanContext::window_get_height(DisplayServer::WindowID p_window) {
  665. ERR_FAIL_COND_V(!windows.has(p_window), -1);
  666. return windows[p_window].height;
  667. }
  668. VkRenderPass VulkanContext::window_get_render_pass(DisplayServer::WindowID p_window) {
  669. ERR_FAIL_COND_V(!windows.has(p_window), VK_NULL_HANDLE);
  670. Window *w = &windows[p_window];
  671. //vulkan use of currentbuffer
  672. return w->render_pass;
  673. }
  674. VkFramebuffer VulkanContext::window_get_framebuffer(DisplayServer::WindowID p_window) {
  675. ERR_FAIL_COND_V(!windows.has(p_window), VK_NULL_HANDLE);
  676. ERR_FAIL_COND_V(!buffers_prepared, VK_NULL_HANDLE);
  677. Window *w = &windows[p_window];
  678. //vulkan use of currentbuffer
  679. return w->swapchain_image_resources[w->current_buffer].framebuffer;
  680. }
  681. void VulkanContext::window_destroy(DisplayServer::WindowID p_window_id) {
  682. ERR_FAIL_COND(!windows.has(p_window_id));
  683. _clean_up_swap_chain(&windows[p_window_id]);
  684. vkDestroySurfaceKHR(inst, windows[p_window_id].surface, nullptr);
  685. windows.erase(p_window_id);
  686. }
  687. Error VulkanContext::_clean_up_swap_chain(Window *window) {
  688. if (!window->swapchain) {
  689. return OK;
  690. }
  691. vkDeviceWaitIdle(device);
  692. //this destroys images associated it seems
  693. fpDestroySwapchainKHR(device, window->swapchain, nullptr);
  694. window->swapchain = VK_NULL_HANDLE;
  695. vkDestroyRenderPass(device, window->render_pass, nullptr);
  696. if (window->swapchain_image_resources) {
  697. for (uint32_t i = 0; i < swapchainImageCount; i++) {
  698. vkDestroyImageView(device, window->swapchain_image_resources[i].view, nullptr);
  699. vkDestroyFramebuffer(device, window->swapchain_image_resources[i].framebuffer, nullptr);
  700. }
  701. free(window->swapchain_image_resources);
  702. window->swapchain_image_resources = nullptr;
  703. }
  704. if (separate_present_queue) {
  705. vkDestroyCommandPool(device, window->present_cmd_pool, nullptr);
  706. }
  707. return OK;
  708. }
Error VulkanContext::_update_swap_chain(Window *window) {
	// (Re)creates the swapchain for a window and everything derived from it:
	// image views, a render pass, one framebuffer per swapchain image, and —
	// when a separate present queue is used — pre-recorded queue-ownership
	// transfer command buffers. Called on window creation and on resize.
	// Returns OK with window->swapchain left VK_NULL_HANDLE if the window is
	// minimized (zero-sized).
	VkResult err;
	if (window->swapchain) {
		// Rebuilding: destroy the old swapchain and its resources first.
		_clean_up_swap_chain(window);
	}
	// Check the surface capabilities and formats
	VkSurfaceCapabilitiesKHR surfCapabilities;
	err = fpGetPhysicalDeviceSurfaceCapabilitiesKHR(gpu, window->surface, &surfCapabilities);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	uint32_t presentModeCount;
	err = fpGetPhysicalDeviceSurfacePresentModesKHR(gpu, window->surface, &presentModeCount, nullptr);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	VkPresentModeKHR *presentModes = (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
	ERR_FAIL_COND_V(!presentModes, ERR_CANT_CREATE);
	err = fpGetPhysicalDeviceSurfacePresentModesKHR(gpu, window->surface, &presentModeCount, presentModes);
	if (err) {
		free(presentModes);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}
	VkExtent2D swapchainExtent;
	// width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
	if (surfCapabilities.currentExtent.width == 0xFFFFFFFF) {
		// If the surface size is undefined, the size is set to the size
		// of the images requested, which must fit within the minimum and
		// maximum values.
		swapchainExtent.width = window->width;
		swapchainExtent.height = window->height;
		if (swapchainExtent.width < surfCapabilities.minImageExtent.width) {
			swapchainExtent.width = surfCapabilities.minImageExtent.width;
		} else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width) {
			swapchainExtent.width = surfCapabilities.maxImageExtent.width;
		}
		if (swapchainExtent.height < surfCapabilities.minImageExtent.height) {
			swapchainExtent.height = surfCapabilities.minImageExtent.height;
		} else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height) {
			swapchainExtent.height = surfCapabilities.maxImageExtent.height;
		}
	} else {
		// If the surface size is defined, the swap chain size must match
		swapchainExtent = surfCapabilities.currentExtent;
		window->width = surfCapabilities.currentExtent.width;
		window->height = surfCapabilities.currentExtent.height;
	}
	if (window->width == 0 || window->height == 0) {
		free(presentModes);
		//likely window minimized, no swapchain created
		return OK;
	}
	// The FIFO present mode is guaranteed by the spec to be supported
	// and to have no tearing. It's a great default present mode to use.
	VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
	// There are times when you may wish to use another present mode. The
	// following code shows how to select them, and the comments provide some
	// reasons you may wish to use them.
	//
	// It should be noted that Vulkan 1.0 doesn't provide a method for
	// synchronizing rendering with the presentation engine's display. There
	// is a method provided for throttling rendering with the display, but
	// there are some presentation engines for which this method will not work.
	// If an application doesn't throttle its rendering, and if it renders much
	// faster than the refresh rate of the display, this can waste power on
	// mobile devices. That is because power is being spent rendering images
	// that may never be seen.
	// VK_PRESENT_MODE_IMMEDIATE_KHR is for applications that don't care about
	// tearing, or have some way of synchronizing their rendering with the
	// display.
	// VK_PRESENT_MODE_MAILBOX_KHR may be useful for applications that
	// generally render a new presentable image every refresh cycle, but are
	// occasionally early. In this case, the application wants the new image
	// to be displayed instead of the previously-queued-for-presentation image
	// that has not yet been displayed.
	// VK_PRESENT_MODE_FIFO_RELAXED_KHR is for applications that generally
	// render a new presentable image every refresh cycle, but are occasionally
	// late. In this case (perhaps because of stuttering/latency concerns),
	// the application wants the late image to be immediately displayed, even
	// though that may mean some tearing.
	if (window->presentMode != swapchainPresentMode) {
		// Only switch away from FIFO if the requested mode is actually listed
		// as supported for this surface.
		for (size_t i = 0; i < presentModeCount; ++i) {
			if (presentModes[i] == window->presentMode) {
				swapchainPresentMode = window->presentMode;
				break;
			}
		}
	}
	free(presentModes);
	ERR_FAIL_COND_V_MSG(swapchainPresentMode != window->presentMode, ERR_CANT_CREATE, "Present mode specified is not supported\n");
	// Determine the number of VkImages to use in the swap chain.
	// Application desires to acquire 3 images at a time for triple
	// buffering
	uint32_t desiredNumOfSwapchainImages = 3;
	if (desiredNumOfSwapchainImages < surfCapabilities.minImageCount) {
		desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
	}
	// If maxImageCount is 0, we can ask for as many images as we want;
	// otherwise we're limited to maxImageCount
	if ((surfCapabilities.maxImageCount > 0) && (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
		// Application must settle for fewer images than desired:
		desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
	}
	VkSurfaceTransformFlagsKHR preTransform;
	if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
		preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
	} else {
		preTransform = surfCapabilities.currentTransform;
	}
	// Find a supported composite alpha mode - one of these is guaranteed to be set
	VkCompositeAlphaFlagBitsKHR compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
	VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
		VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
		VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
		VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
		VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
	};
	for (uint32_t i = 0; i < ARRAY_SIZE(compositeAlphaFlags); i++) {
		if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i]) {
			compositeAlpha = compositeAlphaFlags[i];
			break;
		}
	}
	VkSwapchainCreateInfoKHR swapchain_ci = {
		/*sType*/ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
		/*pNext*/ nullptr,
		/*flags*/ 0,
		/*surface*/ window->surface,
		/*minImageCount*/ desiredNumOfSwapchainImages,
		/*imageFormat*/ format,
		/*imageColorSpace*/ color_space,
		/*imageExtent*/ {
				/*width*/ swapchainExtent.width,
				/*height*/ swapchainExtent.height,
		},
		/*imageArrayLayers*/ 1,
		/*imageUsage*/ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
		/*imageSharingMode*/ VK_SHARING_MODE_EXCLUSIVE,
		/*queueFamilyIndexCount*/ 0,
		/*pQueueFamilyIndices*/ nullptr,
		/*preTransform*/ (VkSurfaceTransformFlagBitsKHR)preTransform,
		/*compositeAlpha*/ compositeAlpha,
		/*presentMode*/ swapchainPresentMode,
		/*clipped*/ true,
		/*oldSwapchain*/ VK_NULL_HANDLE,
	};
	err = fpCreateSwapchainKHR(device, &swapchain_ci, nullptr, &window->swapchain);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	uint32_t sp_image_count;
	err = fpGetSwapchainImagesKHR(device, window->swapchain, &sp_image_count, nullptr);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	// swapchainImageCount is shared across windows: set once by the first
	// swapchain, and every later swapchain must report the same count.
	if (swapchainImageCount == 0) {
		//assign here for the first time.
		swapchainImageCount = sp_image_count;
	} else {
		ERR_FAIL_COND_V(swapchainImageCount != sp_image_count, ERR_BUG);
	}
	VkImage *swapchainImages = (VkImage *)malloc(swapchainImageCount * sizeof(VkImage));
	ERR_FAIL_COND_V(!swapchainImages, ERR_CANT_CREATE);
	err = fpGetSwapchainImagesKHR(device, window->swapchain, &swapchainImageCount, swapchainImages);
	if (err) {
		free(swapchainImages);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}
	window->swapchain_image_resources =
			(SwapchainImageResources *)malloc(sizeof(SwapchainImageResources) * swapchainImageCount);
	if (!window->swapchain_image_resources) {
		free(swapchainImages);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}
	// One image view per swapchain image.
	for (uint32_t i = 0; i < swapchainImageCount; i++) {
		VkImageViewCreateInfo color_image_view = {
			/*sType*/ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*image*/ swapchainImages[i],
			/*viewType*/ VK_IMAGE_VIEW_TYPE_2D,
			/*format*/ format,
			/*components*/ {
					/*r*/ VK_COMPONENT_SWIZZLE_R,
					/*g*/ VK_COMPONENT_SWIZZLE_G,
					/*b*/ VK_COMPONENT_SWIZZLE_B,
					/*a*/ VK_COMPONENT_SWIZZLE_A,
			},
			/*subresourceRange*/ { /*aspectMask*/ VK_IMAGE_ASPECT_COLOR_BIT,
					/*baseMipLevel*/ 0,
					/*levelCount*/ 1,
					/*baseArrayLayer*/ 0,
					/*layerCount*/ 1 },
		};
		window->swapchain_image_resources[i].image = swapchainImages[i];
		color_image_view.image = window->swapchain_image_resources[i].image;
		err = vkCreateImageView(device, &color_image_view, nullptr, &window->swapchain_image_resources[i].view);
		if (err) {
			free(swapchainImages);
			ERR_FAIL_V(ERR_CANT_CREATE);
		}
	}
	free(swapchainImages);
	/******** FRAMEBUFFER ************/
	{
		// Single-subpass render pass: clear on load, transition the image to
		// PRESENT_SRC when the pass ends so it can be presented directly.
		const VkAttachmentDescription attachment = {
			/*flags*/ 0,
			/*format*/ format,
			/*samples*/ VK_SAMPLE_COUNT_1_BIT,
			/*loadOp*/ VK_ATTACHMENT_LOAD_OP_CLEAR,
			/*storeOp*/ VK_ATTACHMENT_STORE_OP_STORE,
			/*stencilLoadOp*/ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
			/*stencilStoreOp*/ VK_ATTACHMENT_STORE_OP_DONT_CARE,
			/*initialLayout*/ VK_IMAGE_LAYOUT_UNDEFINED,
			/*finalLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
		};
		const VkAttachmentReference color_reference = {
			/*attachment*/ 0,
			/*layout*/ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
		};
		const VkSubpassDescription subpass = {
			/*flags*/ 0,
			/*pipelineBindPoint*/ VK_PIPELINE_BIND_POINT_GRAPHICS,
			/*inputAttachmentCount*/ 0,
			/*pInputAttachments*/ nullptr,
			/*colorAttachmentCount*/ 1,
			/*pColorAttachments*/ &color_reference,
			/*pResolveAttachments*/ nullptr,
			/*pDepthStencilAttachment*/ nullptr,
			/*preserveAttachmentCount*/ 0,
			/*pPreserveAttachments*/ nullptr,
		};
		const VkRenderPassCreateInfo rp_info = {
			/*sTyp*/ VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*attachmentCount*/ 1,
			/*pAttachments*/ &attachment,
			/*subpassCount*/ 1,
			/*pSubpasses*/ &subpass,
			/*dependencyCount*/ 0,
			/*pDependencies*/ nullptr,
		};
		err = vkCreateRenderPass(device, &rp_info, nullptr, &window->render_pass);
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		// One framebuffer per swapchain image, all sharing the render pass.
		for (uint32_t i = 0; i < swapchainImageCount; i++) {
			const VkFramebufferCreateInfo fb_info = {
				/*sType*/ VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
				/*pNext*/ nullptr,
				/*flags*/ 0,
				/*renderPass*/ window->render_pass,
				/*attachmentCount*/ 1,
				/*pAttachments*/ &window->swapchain_image_resources[i].view,
				/*width*/ (uint32_t)window->width,
				/*height*/ (uint32_t)window->height,
				/*layers*/ 1,
			};
			err = vkCreateFramebuffer(device, &fb_info, nullptr, &window->swapchain_image_resources[i].framebuffer);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		}
	}
	/******** SEPARATE PRESENT QUEUE ************/
	if (separate_present_queue) {
		// Pre-record one command buffer per swapchain image that transfers
		// queue-family ownership from the graphics queue to the present queue;
		// submitted each frame by swap_buffers().
		const VkCommandPoolCreateInfo present_cmd_pool_info = {
			/*sType*/ VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*queueFamilyIndex*/ present_queue_family_index,
		};
		err = vkCreateCommandPool(device, &present_cmd_pool_info, nullptr, &window->present_cmd_pool);
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		const VkCommandBufferAllocateInfo present_cmd_info = {
			/*sType*/ VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
			/*pNext*/ nullptr,
			/*commandPool*/ window->present_cmd_pool,
			/*level*/ VK_COMMAND_BUFFER_LEVEL_PRIMARY,
			/*commandBufferCount*/ 1,
		};
		for (uint32_t i = 0; i < swapchainImageCount; i++) {
			err = vkAllocateCommandBuffers(device, &present_cmd_info,
					&window->swapchain_image_resources[i].graphics_to_present_cmd);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
			// SIMULTANEOUS_USE: the same pre-recorded buffer may be submitted
			// again before a previous submission has completed.
			const VkCommandBufferBeginInfo cmd_buf_info = {
				/*sType*/ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
				/*pNext*/ nullptr,
				/*flags*/ VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
				/*pInheritanceInfo*/ nullptr,
			};
			err = vkBeginCommandBuffer(window->swapchain_image_resources[i].graphics_to_present_cmd, &cmd_buf_info);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
			// Layout stays PRESENT_SRC; only the owning queue family changes.
			VkImageMemoryBarrier image_ownership_barrier = {
				/*sType*/ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
				/*pNext*/ nullptr,
				/*srcAccessMask*/ 0,
				/*dstAccessMask*/ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
				/*oldLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
				/*newLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
				/*srcQueueFamilyIndex*/ graphics_queue_family_index,
				/*dstQueueFamilyIndex*/ present_queue_family_index,
				/*image*/ window->swapchain_image_resources[i].image,
				/*subresourceRange*/ { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 }
			};
			vkCmdPipelineBarrier(window->swapchain_image_resources[i].graphics_to_present_cmd, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
					VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_ownership_barrier);
			err = vkEndCommandBuffer(window->swapchain_image_resources[i].graphics_to_present_cmd);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		}
	}
	//reset current buffer
	window->current_buffer = 0;
	return OK;
}
  1013. Error VulkanContext::initialize() {
  1014. Error err = _create_physical_device();
  1015. if (err) {
  1016. return err;
  1017. }
  1018. device_initialized = true;
  1019. return OK;
  1020. }
void VulkanContext::set_setup_buffer(const VkCommandBuffer &pCommandBuffer) {
	// Slot 0 of command_buffer_queue is reserved for the setup command buffer;
	// flush() and swap_buffers() submit it before the frame buffers and clear
	// the slot afterwards.
	command_buffer_queue.write[0] = pCommandBuffer;
}
  1024. void VulkanContext::append_command_buffer(const VkCommandBuffer &pCommandBuffer) {
  1025. if (command_buffer_queue.size() <= command_buffer_count) {
  1026. command_buffer_queue.resize(command_buffer_count + 1);
  1027. }
  1028. command_buffer_queue.write[command_buffer_count] = pCommandBuffer;
  1029. command_buffer_count++;
  1030. }
  1031. void VulkanContext::flush(bool p_flush_setup, bool p_flush_pending) {
  1032. // ensure everything else pending is executed
  1033. vkDeviceWaitIdle(device);
  1034. //flush the pending setup buffer
  1035. if (p_flush_setup && command_buffer_queue[0]) {
  1036. //use a fence to wait for everything done
  1037. VkSubmitInfo submit_info;
  1038. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1039. submit_info.pNext = nullptr;
  1040. submit_info.pWaitDstStageMask = nullptr;
  1041. submit_info.waitSemaphoreCount = 0;
  1042. submit_info.pWaitSemaphores = nullptr;
  1043. submit_info.commandBufferCount = 1;
  1044. submit_info.pCommandBuffers = command_buffer_queue.ptr();
  1045. submit_info.signalSemaphoreCount = 0;
  1046. submit_info.pSignalSemaphores = nullptr;
  1047. VkResult err = vkQueueSubmit(graphics_queue, 1, &submit_info, VK_NULL_HANDLE);
  1048. command_buffer_queue.write[0] = nullptr;
  1049. ERR_FAIL_COND(err);
  1050. vkDeviceWaitIdle(device);
  1051. }
  1052. if (p_flush_pending && command_buffer_count > 1) {
  1053. //use a fence to wait for everything done
  1054. VkSubmitInfo submit_info;
  1055. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1056. submit_info.pNext = nullptr;
  1057. submit_info.pWaitDstStageMask = nullptr;
  1058. submit_info.waitSemaphoreCount = 0;
  1059. submit_info.pWaitSemaphores = nullptr;
  1060. submit_info.commandBufferCount = command_buffer_count - 1;
  1061. submit_info.pCommandBuffers = command_buffer_queue.ptr() + 1;
  1062. submit_info.signalSemaphoreCount = 0;
  1063. submit_info.pSignalSemaphores = nullptr;
  1064. VkResult err = vkQueueSubmit(graphics_queue, 1, &submit_info, VK_NULL_HANDLE);
  1065. ERR_FAIL_COND(err);
  1066. vkDeviceWaitIdle(device);
  1067. command_buffer_count = 1;
  1068. }
  1069. }
  1070. Error VulkanContext::prepare_buffers() {
  1071. if (!queues_initialized) {
  1072. return OK;
  1073. }
  1074. VkResult err;
  1075. // Ensure no more than FRAME_LAG renderings are outstanding
  1076. vkWaitForFences(device, 1, &fences[frame_index], VK_TRUE, UINT64_MAX);
  1077. vkResetFences(device, 1, &fences[frame_index]);
  1078. for (Map<int, Window>::Element *E = windows.front(); E; E = E->next()) {
  1079. Window *w = &E->get();
  1080. if (w->swapchain == VK_NULL_HANDLE) {
  1081. continue;
  1082. }
  1083. do {
  1084. // Get the index of the next available swapchain image:
  1085. err =
  1086. fpAcquireNextImageKHR(device, w->swapchain, UINT64_MAX,
  1087. image_acquired_semaphores[frame_index], VK_NULL_HANDLE, &w->current_buffer);
  1088. if (err == VK_ERROR_OUT_OF_DATE_KHR) {
  1089. // swapchain is out of date (e.g. the window was resized) and
  1090. // must be recreated:
  1091. print_line("early out of data");
  1092. //resize_notify();
  1093. _update_swap_chain(w);
  1094. } else if (err == VK_SUBOPTIMAL_KHR) {
  1095. print_line("early suboptimal");
  1096. // swapchain is not as optimal as it could be, but the platform's
  1097. // presentation engine will still present the image correctly.
  1098. break;
  1099. } else {
  1100. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1101. }
  1102. } while (err != VK_SUCCESS);
  1103. }
  1104. buffers_prepared = true;
  1105. return OK;
  1106. }
  1107. Error VulkanContext::swap_buffers() {
  1108. if (!queues_initialized) {
  1109. return OK;
  1110. }
  1111. // print_line("swapbuffers?");
  1112. VkResult err;
  1113. #if 0
  1114. if (VK_GOOGLE_display_timing_enabled) {
  1115. // Look at what happened to previous presents, and make appropriate
  1116. // adjustments in timing:
  1117. DemoUpdateTargetIPD(demo);
  1118. // Note: a real application would position its geometry to that it's in
  1119. // the correct locatoin for when the next image is presented. It might
  1120. // also wait, so that there's less latency between any input and when
  1121. // the next image is rendered/presented. This demo program is so
  1122. // simple that it doesn't do either of those.
  1123. }
  1124. #endif
  1125. // Wait for the image acquired semaphore to be signaled to ensure
  1126. // that the image won't be rendered to until the presentation
  1127. // engine has fully released ownership to the application, and it is
  1128. // okay to render to the image.
  1129. const VkCommandBuffer *commands_ptr = nullptr;
  1130. uint32_t commands_to_submit = 0;
  1131. if (command_buffer_queue[0] == nullptr) {
  1132. //no setup command, but commands to submit, submit from the first and skip command
  1133. if (command_buffer_count > 1) {
  1134. commands_ptr = command_buffer_queue.ptr() + 1;
  1135. commands_to_submit = command_buffer_count - 1;
  1136. }
  1137. } else {
  1138. commands_ptr = command_buffer_queue.ptr();
  1139. commands_to_submit = command_buffer_count;
  1140. }
  1141. VkPipelineStageFlags pipe_stage_flags;
  1142. VkSubmitInfo submit_info;
  1143. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1144. submit_info.pNext = nullptr;
  1145. submit_info.pWaitDstStageMask = &pipe_stage_flags;
  1146. pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  1147. submit_info.waitSemaphoreCount = 1;
  1148. submit_info.pWaitSemaphores = &image_acquired_semaphores[frame_index];
  1149. submit_info.commandBufferCount = commands_to_submit;
  1150. submit_info.pCommandBuffers = commands_ptr;
  1151. submit_info.signalSemaphoreCount = 1;
  1152. submit_info.pSignalSemaphores = &draw_complete_semaphores[frame_index];
  1153. err = vkQueueSubmit(graphics_queue, 1, &submit_info, fences[frame_index]);
  1154. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1155. command_buffer_queue.write[0] = nullptr;
  1156. command_buffer_count = 1;
  1157. if (separate_present_queue) {
  1158. // If we are using separate queues, change image ownership to the
  1159. // present queue before presenting, waiting for the draw complete
  1160. // semaphore and signalling the ownership released semaphore when finished
  1161. VkFence nullFence = VK_NULL_HANDLE;
  1162. pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  1163. submit_info.waitSemaphoreCount = 1;
  1164. submit_info.pWaitSemaphores = &draw_complete_semaphores[frame_index];
  1165. submit_info.commandBufferCount = 0;
  1166. VkCommandBuffer *cmdbufptr = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer *) * windows.size());
  1167. submit_info.pCommandBuffers = cmdbufptr;
  1168. for (Map<int, Window>::Element *E = windows.front(); E; E = E->next()) {
  1169. Window *w = &E->get();
  1170. if (w->swapchain == VK_NULL_HANDLE) {
  1171. continue;
  1172. }
  1173. cmdbufptr[submit_info.commandBufferCount] = w->swapchain_image_resources[w->current_buffer].graphics_to_present_cmd;
  1174. submit_info.commandBufferCount++;
  1175. }
  1176. submit_info.signalSemaphoreCount = 1;
  1177. submit_info.pSignalSemaphores = &image_ownership_semaphores[frame_index];
  1178. err = vkQueueSubmit(present_queue, 1, &submit_info, nullFence);
  1179. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1180. }
  1181. // If we are using separate queues we have to wait for image ownership,
  1182. // otherwise wait for draw complete
  1183. VkPresentInfoKHR present = {
  1184. /*sType*/ VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
  1185. /*pNext*/ nullptr,
  1186. /*waitSemaphoreCount*/ 1,
  1187. /*pWaitSemaphores*/ (separate_present_queue) ? &image_ownership_semaphores[frame_index] : &draw_complete_semaphores[frame_index],
  1188. /*swapchainCount*/ 0,
  1189. /*pSwapchain*/ nullptr,
  1190. /*pImageIndices*/ nullptr,
  1191. /*pResults*/ nullptr,
  1192. };
  1193. VkSwapchainKHR *pSwapchains = (VkSwapchainKHR *)alloca(sizeof(VkSwapchainKHR *) * windows.size());
  1194. uint32_t *pImageIndices = (uint32_t *)alloca(sizeof(uint32_t *) * windows.size());
  1195. present.pSwapchains = pSwapchains;
  1196. present.pImageIndices = pImageIndices;
  1197. for (Map<int, Window>::Element *E = windows.front(); E; E = E->next()) {
  1198. Window *w = &E->get();
  1199. if (w->swapchain == VK_NULL_HANDLE) {
  1200. continue;
  1201. }
  1202. pSwapchains[present.swapchainCount] = w->swapchain;
  1203. pImageIndices[present.swapchainCount] = w->current_buffer;
  1204. present.swapchainCount++;
  1205. }
  1206. #if 0
  1207. if (VK_KHR_incremental_present_enabled) {
  1208. // If using VK_KHR_incremental_present, we provide a hint of the region
  1209. // that contains changed content relative to the previously-presented
  1210. // image. The implementation can use this hint in order to save
  1211. // work/power (by only copying the region in the hint). The
  1212. // implementation is free to ignore the hint though, and so we must
  1213. // ensure that the entire image has the correctly-drawn content.
  1214. uint32_t eighthOfWidth = width / 8;
  1215. uint32_t eighthOfHeight = height / 8;
  1216. VkRectLayerKHR rect = {
  1217. /*offset.x*/ eighthOfWidth,
  1218. /*offset.y*/ eighthOfHeight,
  1219. /*extent.width*/ eighthOfWidth * 6,
  1220. /*extent.height*/ eighthOfHeight * 6,
  1221. /*layer*/ 0,
  1222. };
  1223. VkPresentRegionKHR region = {
  1224. /*rectangleCount*/ 1,
  1225. /*pRectangles*/ &rect,
  1226. };
  1227. VkPresentRegionsKHR regions = {
  1228. /*sType*/ VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
  1229. /*pNext*/ present.pNext,
  1230. /*swapchainCount*/ present.swapchainCount,
  1231. /*pRegions*/ &region,
  1232. };
  1233. present.pNext = &regions;
  1234. }
  1235. #endif
  1236. #if 0
  1237. if (VK_GOOGLE_display_timing_enabled) {
  1238. VkPresentTimeGOOGLE ptime;
  1239. if (prev_desired_present_time == 0) {
  1240. // This must be the first present for this swapchain.
  1241. //
  1242. // We don't know where we are relative to the presentation engine's
  1243. // display's refresh cycle. We also don't know how long rendering
  1244. // takes. Let's make a grossly-simplified assumption that the
  1245. // desiredPresentTime should be half way between now and
  1246. // now+target_IPD. We will adjust over time.
  1247. uint64_t curtime = getTimeInNanoseconds();
  1248. if (curtime == 0) {
  1249. // Since we didn't find out the current time, don't give a
  1250. // desiredPresentTime:
  1251. ptime.desiredPresentTime = 0;
  1252. } else {
  1253. ptime.desiredPresentTime = curtime + (target_IPD >> 1);
  1254. }
  1255. } else {
  1256. ptime.desiredPresentTime = (prev_desired_present_time + target_IPD);
  1257. }
  1258. ptime.presentID = next_present_id++;
  1259. prev_desired_present_time = ptime.desiredPresentTime;
  1260. VkPresentTimesInfoGOOGLE present_time = {
  1261. /*sType*/ VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
  1262. /*pNext*/ present.pNext,
  1263. /*swapchainCount*/ present.swapchainCount,
  1264. /*pTimes*/ &ptime,
  1265. };
  1266. if (VK_GOOGLE_display_timing_enabled) {
  1267. present.pNext = &present_time;
  1268. }
  1269. }
  1270. #endif
  1271. static int total_frames = 0;
  1272. total_frames++;
  1273. // print_line("current buffer: " + itos(current_buffer));
  1274. err = fpQueuePresentKHR(present_queue, &present);
  1275. frame_index += 1;
  1276. frame_index %= FRAME_LAG;
  1277. if (err == VK_ERROR_OUT_OF_DATE_KHR) {
  1278. // swapchain is out of date (e.g. the window was resized) and
  1279. // must be recreated:
  1280. print_line("out of date");
  1281. resize_notify();
  1282. } else if (err == VK_SUBOPTIMAL_KHR) {
  1283. // swapchain is not as optimal as it could be, but the platform's
  1284. // presentation engine will still present the image correctly.
  1285. print_line("suboptimal");
  1286. } else {
  1287. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1288. }
  1289. buffers_prepared = false;
  1290. return OK;
  1291. }
// Called from swap_buffers() when presentation returns VK_ERROR_OUT_OF_DATE_KHR
// (e.g. the window was resized). The base implementation does nothing;
// presumably a hook for platform subclasses to react -- TODO confirm against
// overriders elsewhere in the codebase.
void VulkanContext::resize_notify() {
}
// Returns the logical Vulkan device owned by this context.
VkDevice VulkanContext::get_device() {
	return device;
}
// Returns the physical device (GPU) the logical device was created from.
VkPhysicalDevice VulkanContext::get_physical_device() {
	return gpu;
}
// Returns the number of images in each window's swapchain.
int VulkanContext::get_swapchain_image_count() const {
	return swapchainImageCount;
}
// Returns the queue *family* index used for graphics submission
// (note: despite the name, this is a family index, not a queue handle).
uint32_t VulkanContext::get_graphics_queue() const {
	return graphics_queue_family_index;
}
// Returns the pixel format chosen for the swapchain surface.
VkFormat VulkanContext::get_screen_format() const {
	return format;
}
// Returns the physical-device limits (max texture sizes, alignments, etc.)
// queried at initialization time.
VkPhysicalDeviceLimits VulkanContext::get_device_limits() const {
	return gpu_props.limits;
}
  1312. RID VulkanContext::local_device_create() {
  1313. LocalDevice ld;
  1314. { //create device
  1315. VkResult err;
  1316. float queue_priorities[1] = { 0.0 };
  1317. VkDeviceQueueCreateInfo queues[2];
  1318. queues[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  1319. queues[0].pNext = nullptr;
  1320. queues[0].queueFamilyIndex = graphics_queue_family_index;
  1321. queues[0].queueCount = 1;
  1322. queues[0].pQueuePriorities = queue_priorities;
  1323. queues[0].flags = 0;
  1324. VkDeviceCreateInfo sdevice = {
  1325. /*sType =*/VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
  1326. /*pNext */ nullptr,
  1327. /*flags */ 0,
  1328. /*queueCreateInfoCount */ 1,
  1329. /*pQueueCreateInfos */ queues,
  1330. /*enabledLayerCount */ 0,
  1331. /*ppEnabledLayerNames */ nullptr,
  1332. /*enabledExtensionCount */ enabled_extension_count,
  1333. /*ppEnabledExtensionNames */ (const char *const *)extension_names,
  1334. /*pEnabledFeatures */ &physical_device_features, // If specific features are required, pass them in here
  1335. };
  1336. err = vkCreateDevice(gpu, &sdevice, nullptr, &ld.device);
  1337. ERR_FAIL_COND_V(err, RID());
  1338. }
  1339. { //create graphics queue
  1340. vkGetDeviceQueue(ld.device, graphics_queue_family_index, 0, &ld.queue);
  1341. }
  1342. return local_device_owner.make_rid(ld);
  1343. }
  1344. VkDevice VulkanContext::local_device_get_vk_device(RID p_local_device) {
  1345. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1346. return ld->device;
  1347. }
  1348. void VulkanContext::local_device_push_command_buffers(RID p_local_device, const VkCommandBuffer *p_buffers, int p_count) {
  1349. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1350. ERR_FAIL_COND(ld->waiting);
  1351. VkSubmitInfo submit_info;
  1352. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1353. submit_info.pNext = nullptr;
  1354. submit_info.pWaitDstStageMask = nullptr;
  1355. submit_info.waitSemaphoreCount = 0;
  1356. submit_info.pWaitSemaphores = nullptr;
  1357. submit_info.commandBufferCount = p_count;
  1358. submit_info.pCommandBuffers = p_buffers;
  1359. submit_info.signalSemaphoreCount = 0;
  1360. submit_info.pSignalSemaphores = nullptr;
  1361. VkResult err = vkQueueSubmit(ld->queue, 1, &submit_info, VK_NULL_HANDLE);
  1362. if (err == VK_ERROR_OUT_OF_HOST_MEMORY) {
  1363. print_line("out of host memory");
  1364. }
  1365. if (err == VK_ERROR_OUT_OF_DEVICE_MEMORY) {
  1366. print_line("out of device memory");
  1367. }
  1368. if (err == VK_ERROR_DEVICE_LOST) {
  1369. print_line("device lost");
  1370. }
  1371. ERR_FAIL_COND(err);
  1372. ld->waiting = true;
  1373. }
  1374. void VulkanContext::local_device_sync(RID p_local_device) {
  1375. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1376. ERR_FAIL_COND(!ld->waiting);
  1377. vkDeviceWaitIdle(ld->device);
  1378. ld->waiting = false;
  1379. }
  1380. void VulkanContext::local_device_free(RID p_local_device) {
  1381. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1382. vkDestroyDevice(ld->device, nullptr);
  1383. local_device_owner.free(p_local_device);
  1384. }
  1385. void VulkanContext::command_begin_label(VkCommandBuffer p_command_buffer, String p_label_name, const Color p_color) {
  1386. if (!enabled_debug_utils) {
  1387. return;
  1388. }
  1389. VkDebugUtilsLabelEXT label;
  1390. label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
  1391. label.pNext = nullptr;
  1392. label.pLabelName = p_label_name.utf8().get_data();
  1393. label.color[0] = p_color[0];
  1394. label.color[1] = p_color[1];
  1395. label.color[2] = p_color[2];
  1396. label.color[3] = p_color[3];
  1397. CmdBeginDebugUtilsLabelEXT(p_command_buffer, &label);
  1398. }
  1399. void VulkanContext::command_insert_label(VkCommandBuffer p_command_buffer, String p_label_name, const Color p_color) {
  1400. if (!enabled_debug_utils) {
  1401. return;
  1402. }
  1403. VkDebugUtilsLabelEXT label;
  1404. label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
  1405. label.pNext = nullptr;
  1406. label.pLabelName = p_label_name.utf8().get_data();
  1407. label.color[0] = p_color[0];
  1408. label.color[1] = p_color[1];
  1409. label.color[2] = p_color[2];
  1410. label.color[3] = p_color[3];
  1411. CmdInsertDebugUtilsLabelEXT(p_command_buffer, &label);
  1412. }
  1413. void VulkanContext::command_end_label(VkCommandBuffer p_command_buffer) {
  1414. if (!enabled_debug_utils) {
  1415. return;
  1416. }
  1417. CmdEndDebugUtilsLabelEXT(p_command_buffer);
  1418. }
  1419. void VulkanContext::set_object_name(VkObjectType p_object_type, uint64_t p_object_handle, String p_object_name) {
  1420. if (!enabled_debug_utils) {
  1421. return;
  1422. }
  1423. VkDebugUtilsObjectNameInfoEXT name_info;
  1424. name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  1425. name_info.pNext = nullptr;
  1426. name_info.objectType = p_object_type;
  1427. name_info.objectHandle = p_object_handle;
  1428. CharString object_name = p_object_name.utf8();
  1429. name_info.pObjectName = object_name.get_data();
  1430. SetDebugUtilsObjectNameEXT(device, &name_info);
  1431. }
VulkanContext::VulkanContext() {
	// Honor the engine-wide switch for enabling Vulkan validation layers.
	use_validation_layers = Engine::get_singleton()->is_validation_layers_enabled();

	// Slot 0 of the queue is reserved for the setup command buffer and is
	// populated later; keep it null until then.
	command_buffer_queue.resize(1); // First one is always the setup command.
	command_buffer_queue.write[0] = nullptr;
}
VulkanContext::~VulkanContext() {
	// queue_props is released with free(); presumably it was malloc'ed at the
	// query site (allocation not visible in this chunk) -- verify they match.
	if (queue_props) {
		free(queue_props);
	}
	if (device_initialized) {
		// Per-frame synchronization objects must be destroyed before the
		// device they were created from.
		for (uint32_t i = 0; i < FRAME_LAG; i++) {
			vkDestroyFence(device, fences[i], nullptr);
			vkDestroySemaphore(device, image_acquired_semaphores[i], nullptr);
			vkDestroySemaphore(device, draw_complete_semaphores[i], nullptr);
			if (separate_present_queue) {
				// These only exist when the present queue family differs
				// from the graphics queue family.
				vkDestroySemaphore(device, image_ownership_semaphores[i], nullptr);
			}
		}
		if (inst_initialized && use_validation_layers) {
			// The debug messenger is only created alongside validation layers.
			DestroyDebugUtilsMessengerEXT(inst, dbg_messenger, nullptr);
		}
		vkDestroyDevice(device, nullptr);
	}
	if (inst_initialized) {
		// The instance goes last, after everything created from it.
		vkDestroyInstance(inst, nullptr);
	}
}