vulkan_context.cpp 58 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597
  1. /*************************************************************************/
  2. /* vulkan_context.cpp */
  3. /*************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /*************************************************************************/
  8. /* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
  9. /* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /*************************************************************************/
  30. #include "vulkan_context.h"
  31. #include "core/engine.h"
  32. #include "core/project_settings.h"
  33. #include "core/ustring.h"
  34. #include "core/version.h"
  35. #include "vk_enum_string_helper.h"
  36. #include <stdio.h>
  37. #include <stdlib.h>
  38. #include <string.h>
  39. #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
  40. #define APP_SHORT_NAME "GodotEngine"
  41. VKAPI_ATTR VkBool32 VKAPI_CALL VulkanContext::_debug_messenger_callback(
  42. VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
  43. VkDebugUtilsMessageTypeFlagsEXT messageType,
  44. const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
  45. void *pUserData) {
  46. // This error needs to be ignored because the AMD allocator will mix up memory types on IGP processors.
  47. if (strstr(pCallbackData->pMessage, "Mapping an image with layout") != nullptr &&
  48. strstr(pCallbackData->pMessage, "can result in undefined behavior if this memory is used by the device") != nullptr) {
  49. return VK_FALSE;
  50. }
  51. // This needs to be ignored because Validator is wrong here.
  52. if (strstr(pCallbackData->pMessage, "SPIR-V module not valid: Pointer operand") != nullptr &&
  53. strstr(pCallbackData->pMessage, "must be a memory object") != nullptr) {
  54. return VK_FALSE;
  55. }
  56. // Workaround for Vulkan-Loader usability bug: https://github.com/KhronosGroup/Vulkan-Loader/issues/262.
  57. if (strstr(pCallbackData->pMessage, "wrong ELF class: ELFCLASS32") != nullptr) {
  58. return VK_FALSE;
  59. }
  60. if (pCallbackData->pMessageIdName && strstr(pCallbackData->pMessageIdName, "UNASSIGNED-CoreValidation-DrawState-ClearCmdBeforeDraw") != nullptr) {
  61. return VK_FALSE;
  62. }
  63. String type_string;
  64. switch (messageType) {
  65. case (VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT):
  66. type_string = "GENERAL";
  67. break;
  68. case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT):
  69. type_string = "VALIDATION";
  70. break;
  71. case (VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
  72. type_string = "PERFORMANCE";
  73. break;
  74. case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
  75. type_string = "VALIDATION|PERFORMANCE";
  76. break;
  77. }
  78. String objects_string;
  79. if (pCallbackData->objectCount > 0) {
  80. objects_string = "\n\tObjects - " + String::num_int64(pCallbackData->objectCount);
  81. for (uint32_t object = 0; object < pCallbackData->objectCount; ++object) {
  82. objects_string +=
  83. "\n\t\tObject[" + String::num_int64(object) + "]" +
  84. " - " + string_VkObjectType(pCallbackData->pObjects[object].objectType) +
  85. ", Handle " + String::num_int64(pCallbackData->pObjects[object].objectHandle);
  86. if (nullptr != pCallbackData->pObjects[object].pObjectName && strlen(pCallbackData->pObjects[object].pObjectName) > 0) {
  87. objects_string += ", Name \"" + String(pCallbackData->pObjects[object].pObjectName) + "\"";
  88. }
  89. }
  90. }
  91. String labels_string;
  92. if (pCallbackData->cmdBufLabelCount > 0) {
  93. labels_string = "\n\tCommand Buffer Labels - " + String::num_int64(pCallbackData->cmdBufLabelCount);
  94. for (uint32_t cmd_buf_label = 0; cmd_buf_label < pCallbackData->cmdBufLabelCount; ++cmd_buf_label) {
  95. labels_string +=
  96. "\n\t\tLabel[" + String::num_int64(cmd_buf_label) + "]" +
  97. " - " + pCallbackData->pCmdBufLabels[cmd_buf_label].pLabelName +
  98. "{ ";
  99. for (int color_idx = 0; color_idx < 4; ++color_idx) {
  100. labels_string += String::num(pCallbackData->pCmdBufLabels[cmd_buf_label].color[color_idx]);
  101. if (color_idx < 3) {
  102. labels_string += ", ";
  103. }
  104. }
  105. labels_string += " }";
  106. }
  107. }
  108. String error_message(type_string +
  109. " - Message Id Number: " + String::num_int64(pCallbackData->messageIdNumber) +
  110. " | Message Id Name: " + pCallbackData->pMessageIdName +
  111. "\n\t" + pCallbackData->pMessage +
  112. objects_string + labels_string);
  113. // Convert VK severity to our own log macros.
  114. switch (messageSeverity) {
  115. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
  116. print_verbose(error_message);
  117. break;
  118. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
  119. print_line(error_message);
  120. break;
  121. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
  122. WARN_PRINT(error_message);
  123. break;
  124. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
  125. ERR_PRINT(error_message);
  126. CRASH_COND_MSG(Engine::get_singleton()->is_abort_on_gpu_errors_enabled(),
  127. "Crashing, because abort on GPU errors is enabled.");
  128. break;
  129. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT:
  130. break; // Shouldn't happen, only handling to make compilers happy.
  131. }
  132. return VK_FALSE;
  133. }
  134. VkBool32 VulkanContext::_check_layers(uint32_t check_count, const char **check_names, uint32_t layer_count, VkLayerProperties *layers) {
  135. for (uint32_t i = 0; i < check_count; i++) {
  136. VkBool32 found = 0;
  137. for (uint32_t j = 0; j < layer_count; j++) {
  138. if (!strcmp(check_names[i], layers[j].layerName)) {
  139. found = 1;
  140. break;
  141. }
  142. }
  143. if (!found) {
  144. WARN_PRINT("Can't find layer: " + String(check_names[i]));
  145. return 0;
  146. }
  147. }
  148. return 1;
  149. }
  150. Error VulkanContext::_create_validation_layers() {
  151. VkResult err;
  152. const char *instance_validation_layers_alt1[] = { "VK_LAYER_KHRONOS_validation" };
  153. const char *instance_validation_layers_alt2[] = { "VK_LAYER_LUNARG_standard_validation" };
  154. const char *instance_validation_layers_alt3[] = { "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation", "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation", "VK_LAYER_GOOGLE_unique_objects" };
  155. uint32_t instance_layer_count = 0;
  156. err = vkEnumerateInstanceLayerProperties(&instance_layer_count, nullptr);
  157. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  158. VkBool32 validation_found = 0;
  159. uint32_t validation_layer_count = 0;
  160. const char **instance_validation_layers = nullptr;
  161. if (instance_layer_count > 0) {
  162. VkLayerProperties *instance_layers = (VkLayerProperties *)malloc(sizeof(VkLayerProperties) * instance_layer_count);
  163. err = vkEnumerateInstanceLayerProperties(&instance_layer_count, instance_layers);
  164. if (err) {
  165. free(instance_layers);
  166. ERR_FAIL_V(ERR_CANT_CREATE);
  167. }
  168. validation_layer_count = ARRAY_SIZE(instance_validation_layers_alt1);
  169. instance_validation_layers = instance_validation_layers_alt1;
  170. validation_found = _check_layers(validation_layer_count, instance_validation_layers, instance_layer_count, instance_layers);
  171. // use alternative (deprecated, removed in SDK 1.1.126.0) set of validation layers
  172. if (!validation_found) {
  173. validation_layer_count = ARRAY_SIZE(instance_validation_layers_alt2);
  174. instance_validation_layers = instance_validation_layers_alt2;
  175. validation_found = _check_layers(validation_layer_count, instance_validation_layers, instance_layer_count, instance_layers);
  176. }
  177. // use alternative (deprecated, removed in SDK 1.1.121.1) set of validation layers
  178. if (!validation_found) {
  179. validation_layer_count = ARRAY_SIZE(instance_validation_layers_alt3);
  180. instance_validation_layers = instance_validation_layers_alt3;
  181. validation_found = _check_layers(validation_layer_count, instance_validation_layers, instance_layer_count, instance_layers);
  182. }
  183. free(instance_layers);
  184. }
  185. if (validation_found) {
  186. enabled_layer_count = validation_layer_count;
  187. for (uint32_t i = 0; i < validation_layer_count; i++) {
  188. enabled_layers[i] = instance_validation_layers[i];
  189. }
  190. } else {
  191. return ERR_CANT_CREATE;
  192. }
  193. return OK;
  194. }
  195. Error VulkanContext::_initialize_extensions() {
  196. VkResult err;
  197. uint32_t instance_extension_count = 0;
  198. enabled_extension_count = 0;
  199. enabled_layer_count = 0;
  200. /* Look for instance extensions */
  201. VkBool32 surfaceExtFound = 0;
  202. VkBool32 platformSurfaceExtFound = 0;
  203. memset(extension_names, 0, sizeof(extension_names));
  204. err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, nullptr);
  205. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  206. if (instance_extension_count > 0) {
  207. VkExtensionProperties *instance_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * instance_extension_count);
  208. err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions);
  209. if (err) {
  210. free(instance_extensions);
  211. ERR_FAIL_V(ERR_CANT_CREATE);
  212. }
  213. for (uint32_t i = 0; i < instance_extension_count; i++) {
  214. if (!strcmp(VK_KHR_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  215. surfaceExtFound = 1;
  216. extension_names[enabled_extension_count++] = VK_KHR_SURFACE_EXTENSION_NAME;
  217. }
  218. if (!strcmp(_get_platform_surface_extension(), instance_extensions[i].extensionName)) {
  219. platformSurfaceExtFound = 1;
  220. extension_names[enabled_extension_count++] = _get_platform_surface_extension();
  221. }
  222. if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  223. if (use_validation_layers) {
  224. extension_names[enabled_extension_count++] = VK_EXT_DEBUG_REPORT_EXTENSION_NAME;
  225. }
  226. }
  227. if (!strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  228. if (use_validation_layers) {
  229. extension_names[enabled_extension_count++] = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
  230. }
  231. }
  232. if (enabled_extension_count >= MAX_EXTENSIONS) {
  233. free(instance_extensions);
  234. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  235. }
  236. }
  237. free(instance_extensions);
  238. }
  239. ERR_FAIL_COND_V_MSG(!surfaceExtFound, ERR_CANT_CREATE, "No surface extension found, is a driver installed?");
  240. ERR_FAIL_COND_V_MSG(!platformSurfaceExtFound, ERR_CANT_CREATE, "No platform surface extension found, is a driver installed?");
  241. return OK;
  242. }
// Creates the Vulkan instance, selects the first enumerated physical device,
// enables required/optional device extensions (swapchain; incremental present
// and display timing when requested), installs the VK_EXT_debug_utils
// messenger when validation is enabled, and queries queue-family and feature
// info into members for later use.
Error VulkanContext::_create_physical_device() {
	/* Look for validation layers */
	if (use_validation_layers) {
		// NOTE(review): the Error returned here is discarded — if no validation
		// layer set is found, instance creation silently proceeds without layers.
		_create_validation_layers();
	}

	{
		Error err = _initialize_extensions();
		if (err != OK) {
			return err;
		}
	}

	// Identify the application (project name) and engine to the driver.
	CharString cs = ProjectSettings::get_singleton()->get("application/config/name").operator String().utf8();
	String name = "GodotEngine " + String(VERSION_FULL_NAME);
	CharString namecs = name.utf8();
	const VkApplicationInfo app = {
		/*sType*/ VK_STRUCTURE_TYPE_APPLICATION_INFO,
		/*pNext*/ nullptr,
		/*pApplicationName*/ cs.get_data(),
		/*applicationVersion*/ 0,
		/*pEngineName*/ namecs.get_data(),
		/*engineVersion*/ 0,
		/*apiVersion*/ VK_API_VERSION_1_0,
	};
	VkInstanceCreateInfo inst_info = {
		/*sType*/ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
		/*pNext*/ nullptr,
		/*flags*/ 0,
		/*pApplicationInfo*/ &app,
		// NOTE(review): _initialize_extensions() (called above) sets
		// enabled_layer_count back to 0, so any layers selected by
		// _create_validation_layers() are not counted here — verify intended.
		/*enabledLayerCount*/ enabled_layer_count,
		// NOTE(review): `instance_validation_layers` is not declared in this
		// function — presumably a member or file-scope pointer; confirm it is
		// kept in sync with `enabled_layers`.
		/*ppEnabledLayerNames*/ (const char *const *)instance_validation_layers,
		/*enabledExtensionCount*/ enabled_extension_count,
		/*ppEnabledExtensionNames*/ (const char *const *)extension_names,
	};

	/*
	 * This is info for a temp callback to use during CreateInstance.
	 * After the instance is created, we use the instance-based
	 * function to register the final callback.
	 */
	VkDebugUtilsMessengerCreateInfoEXT dbg_messenger_create_info;
	if (use_validation_layers) {
		// VK_EXT_debug_utils style
		dbg_messenger_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
		dbg_messenger_create_info.pNext = nullptr;
		dbg_messenger_create_info.flags = 0;
		dbg_messenger_create_info.messageSeverity =
				VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
		dbg_messenger_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
												VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
												VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
		dbg_messenger_create_info.pfnUserCallback = _debug_messenger_callback;
		dbg_messenger_create_info.pUserData = this;
		// Chaining into pNext lets instance creation itself report through our callback.
		inst_info.pNext = &dbg_messenger_create_info;
	}

	uint32_t gpu_count;

	VkResult err = vkCreateInstance(&inst_info, nullptr, &inst);
	ERR_FAIL_COND_V_MSG(err == VK_ERROR_INCOMPATIBLE_DRIVER, ERR_CANT_CREATE,
			"Cannot find a compatible Vulkan installable client driver (ICD).\n\n"
			"vkCreateInstance Failure");
	ERR_FAIL_COND_V_MSG(err == VK_ERROR_EXTENSION_NOT_PRESENT, ERR_CANT_CREATE,
			"Cannot find a specified extension library.\n"
			"Make sure your layers path is set appropriately.\n"
			"vkCreateInstance Failure");
	ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE,
			"vkCreateInstance failed.\n\n"
			"Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
			"Please look at the Getting Started guide for additional information.\n"
			"vkCreateInstance Failure");

	inst_initialized = true;

	/* Make initial call to query gpu_count, then second call for gpu info*/
	err = vkEnumeratePhysicalDevices(inst, &gpu_count, nullptr);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	ERR_FAIL_COND_V_MSG(gpu_count == 0, ERR_CANT_CREATE,
			"vkEnumeratePhysicalDevices reported zero accessible devices.\n\n"
			"Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
			"vkEnumeratePhysicalDevices Failure");

	VkPhysicalDevice *physical_devices = (VkPhysicalDevice *)malloc(sizeof(VkPhysicalDevice) * gpu_count);
	err = vkEnumeratePhysicalDevices(inst, &gpu_count, physical_devices);
	if (err) {
		free(physical_devices);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}
	/* for now, just grab the first physical device */
	gpu = physical_devices[0];
	free(physical_devices);

	/* Look for device extensions */
	uint32_t device_extension_count = 0;
	VkBool32 swapchainExtFound = 0;
	enabled_extension_count = 0;
	memset(extension_names, 0, sizeof(extension_names));

	err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, nullptr);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

	if (device_extension_count > 0) {
		VkExtensionProperties *device_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * device_extension_count);
		err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, device_extensions);
		if (err) {
			free(device_extensions);
			ERR_FAIL_V(ERR_CANT_CREATE);
		}

		// The swapchain extension is mandatory for presenting to a surface.
		for (uint32_t i = 0; i < device_extension_count; i++) {
			if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME, device_extensions[i].extensionName)) {
				swapchainExtFound = 1;
				extension_names[enabled_extension_count++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
			}
			if (enabled_extension_count >= MAX_EXTENSIONS) {
				free(device_extensions);
				ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
			}
		}

		if (VK_KHR_incremental_present_enabled) {
			// Even though the user "enabled" the extension via the command
			// line, we must make sure that it's enumerated for use with the
			// device. Therefore, disable it here, and re-enable it again if
			// enumerated.
			VK_KHR_incremental_present_enabled = false;
			for (uint32_t i = 0; i < device_extension_count; i++) {
				if (!strcmp(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, device_extensions[i].extensionName)) {
					extension_names[enabled_extension_count++] = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME;
					VK_KHR_incremental_present_enabled = true;
				}
				if (enabled_extension_count >= MAX_EXTENSIONS) {
					free(device_extensions);
					ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
				}
			}
		}

		if (VK_GOOGLE_display_timing_enabled) {
			// Even though the user "enabled" the extension via the command
			// line, we must make sure that it's enumerated for use with the
			// device. Therefore, disable it here, and re-enable it again if
			// enumerated.
			VK_GOOGLE_display_timing_enabled = false;
			for (uint32_t i = 0; i < device_extension_count; i++) {
				if (!strcmp(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, device_extensions[i].extensionName)) {
					extension_names[enabled_extension_count++] = VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME;
					VK_GOOGLE_display_timing_enabled = true;
				}
				if (enabled_extension_count >= MAX_EXTENSIONS) {
					free(device_extensions);
					ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
				}
			}
		}

		free(device_extensions);
	}

	ERR_FAIL_COND_V_MSG(!swapchainExtFound, ERR_CANT_CREATE,
			"vkEnumerateDeviceExtensionProperties failed to find the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
			" extension.\n\nDo you have a compatible Vulkan installable client driver (ICD) installed?\n"
			"vkCreateInstance Failure");

	if (use_validation_layers) {
		// Setup VK_EXT_debug_utils function pointers always (we use them for
		// debug labels and names).
		CreateDebugUtilsMessengerEXT =
				(PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkCreateDebugUtilsMessengerEXT");
		DestroyDebugUtilsMessengerEXT =
				(PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkDestroyDebugUtilsMessengerEXT");
		SubmitDebugUtilsMessageEXT =
				(PFN_vkSubmitDebugUtilsMessageEXT)vkGetInstanceProcAddr(inst, "vkSubmitDebugUtilsMessageEXT");
		CmdBeginDebugUtilsLabelEXT =
				(PFN_vkCmdBeginDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdBeginDebugUtilsLabelEXT");
		CmdEndDebugUtilsLabelEXT =
				(PFN_vkCmdEndDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdEndDebugUtilsLabelEXT");
		CmdInsertDebugUtilsLabelEXT =
				(PFN_vkCmdInsertDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdInsertDebugUtilsLabelEXT");
		SetDebugUtilsObjectNameEXT =
				(PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(inst, "vkSetDebugUtilsObjectNameEXT");
		if (nullptr == CreateDebugUtilsMessengerEXT || nullptr == DestroyDebugUtilsMessengerEXT ||
				nullptr == SubmitDebugUtilsMessageEXT || nullptr == CmdBeginDebugUtilsLabelEXT ||
				nullptr == CmdEndDebugUtilsLabelEXT || nullptr == CmdInsertDebugUtilsLabelEXT ||
				nullptr == SetDebugUtilsObjectNameEXT) {
			ERR_FAIL_V_MSG(ERR_CANT_CREATE,
					"GetProcAddr: Failed to init VK_EXT_debug_utils\n"
					"GetProcAddr: Failure");
		}

		// Register the permanent, instance-based debug messenger (the temp one
		// chained into inst_info only covered vkCreateInstance itself).
		err = CreateDebugUtilsMessengerEXT(inst, &dbg_messenger_create_info, nullptr, &dbg_messenger);
		switch (err) {
			case VK_SUCCESS:
				break;
			case VK_ERROR_OUT_OF_HOST_MEMORY:
				ERR_FAIL_V_MSG(ERR_CANT_CREATE,
						"CreateDebugUtilsMessengerEXT: out of host memory\n"
						"CreateDebugUtilsMessengerEXT Failure");
				break;
			default:
				ERR_FAIL_V_MSG(ERR_CANT_CREATE,
						"CreateDebugUtilsMessengerEXT: unknown failure\n"
						"CreateDebugUtilsMessengerEXT Failure");
				// NOTE(review): unreachable — the macro above already returned.
				ERR_FAIL_V(ERR_CANT_CREATE);
				break;
		}
	}
	vkGetPhysicalDeviceProperties(gpu, &gpu_props);

	/* Call with NULL data to get count */
	vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_family_count, nullptr);
	ERR_FAIL_COND_V(queue_family_count == 0, ERR_CANT_CREATE);

	// NOTE(review): queue_props is malloc'ed here and not freed in this
	// function — presumably released elsewhere (destructor); confirm.
	queue_props = (VkQueueFamilyProperties *)malloc(queue_family_count * sizeof(VkQueueFamilyProperties));
	vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_family_count, queue_props);

	// Query fine-grained feature support for this device.
	// If app has specific feature requirements it should check supported
	// features based on this query
	vkGetPhysicalDeviceFeatures(gpu, &physical_device_features);

// Resolves an instance-level entry point into the matching fp* member,
// failing hard if the loader does not provide it.
#define GET_INSTANCE_PROC_ADDR(inst, entrypoint)                                            \
	{                                                                                       \
		fp##entrypoint = (PFN_vk##entrypoint)vkGetInstanceProcAddr(inst, "vk" #entrypoint); \
		ERR_FAIL_COND_V_MSG(fp##entrypoint == nullptr, ERR_CANT_CREATE,                     \
				"vkGetInstanceProcAddr failed to find vk" #entrypoint);                     \
	}

	GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceSupportKHR);
	GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceCapabilitiesKHR);
	GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceFormatsKHR);
	GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfacePresentModesKHR);
	GET_INSTANCE_PROC_ADDR(inst, GetSwapchainImagesKHR);

	return OK;
}
  456. Error VulkanContext::_create_device() {
  457. VkResult err;
  458. float queue_priorities[1] = { 0.0 };
  459. VkDeviceQueueCreateInfo queues[2];
  460. queues[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  461. queues[0].pNext = nullptr;
  462. queues[0].queueFamilyIndex = graphics_queue_family_index;
  463. queues[0].queueCount = 1;
  464. queues[0].pQueuePriorities = queue_priorities;
  465. queues[0].flags = 0;
  466. VkDeviceCreateInfo sdevice = {
  467. /*sType*/ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
  468. /*pNext*/ nullptr,
  469. /*flags*/ 0,
  470. /*queueCreateInfoCount*/ 1,
  471. /*pQueueCreateInfos*/ queues,
  472. /*enabledLayerCount*/ 0,
  473. /*ppEnabledLayerNames*/ nullptr,
  474. /*enabledExtensionCount*/ enabled_extension_count,
  475. /*ppEnabledExtensionNames*/ (const char *const *)extension_names,
  476. /*pEnabledFeatures*/ &physical_device_features, // If specific features are required, pass them in here
  477. };
  478. if (separate_present_queue) {
  479. queues[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  480. queues[1].pNext = nullptr;
  481. queues[1].queueFamilyIndex = present_queue_family_index;
  482. queues[1].queueCount = 1;
  483. queues[1].pQueuePriorities = queue_priorities;
  484. queues[1].flags = 0;
  485. sdevice.queueCreateInfoCount = 2;
  486. }
  487. err = vkCreateDevice(gpu, &sdevice, nullptr, &device);
  488. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  489. return OK;
  490. }
  491. Error VulkanContext::_initialize_queues(VkSurfaceKHR surface) {
  492. // Iterate over each queue to learn whether it supports presenting:
  493. VkBool32 *supportsPresent = (VkBool32 *)malloc(queue_family_count * sizeof(VkBool32));
  494. for (uint32_t i = 0; i < queue_family_count; i++) {
  495. fpGetPhysicalDeviceSurfaceSupportKHR(gpu, i, surface, &supportsPresent[i]);
  496. }
  497. // Search for a graphics and a present queue in the array of queue
  498. // families, try to find one that supports both
  499. uint32_t graphicsQueueFamilyIndex = UINT32_MAX;
  500. uint32_t presentQueueFamilyIndex = UINT32_MAX;
  501. for (uint32_t i = 0; i < queue_family_count; i++) {
  502. if ((queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
  503. if (graphicsQueueFamilyIndex == UINT32_MAX) {
  504. graphicsQueueFamilyIndex = i;
  505. }
  506. if (supportsPresent[i] == VK_TRUE) {
  507. graphicsQueueFamilyIndex = i;
  508. presentQueueFamilyIndex = i;
  509. break;
  510. }
  511. }
  512. }
  513. if (presentQueueFamilyIndex == UINT32_MAX) {
  514. // If didn't find a queue that supports both graphics and present, then
  515. // find a separate present queue.
  516. for (uint32_t i = 0; i < queue_family_count; ++i) {
  517. if (supportsPresent[i] == VK_TRUE) {
  518. presentQueueFamilyIndex = i;
  519. break;
  520. }
  521. }
  522. }
  523. free(supportsPresent);
  524. // Generate error if could not find both a graphics and a present queue
  525. ERR_FAIL_COND_V_MSG(graphicsQueueFamilyIndex == UINT32_MAX || presentQueueFamilyIndex == UINT32_MAX, ERR_CANT_CREATE,
  526. "Could not find both graphics and present queues\n");
  527. graphics_queue_family_index = graphicsQueueFamilyIndex;
  528. present_queue_family_index = presentQueueFamilyIndex;
  529. separate_present_queue = (graphics_queue_family_index != present_queue_family_index);
  530. _create_device();
  531. static PFN_vkGetDeviceProcAddr g_gdpa = nullptr;
  532. #define GET_DEVICE_PROC_ADDR(dev, entrypoint) \
  533. { \
  534. if (!g_gdpa) \
  535. g_gdpa = (PFN_vkGetDeviceProcAddr)vkGetInstanceProcAddr(inst, "vkGetDeviceProcAddr"); \
  536. fp##entrypoint = (PFN_vk##entrypoint)g_gdpa(dev, "vk" #entrypoint); \
  537. ERR_FAIL_COND_V_MSG(fp##entrypoint == nullptr, ERR_CANT_CREATE, \
  538. "vkGetDeviceProcAddr failed to find vk" #entrypoint); \
  539. }
  540. GET_DEVICE_PROC_ADDR(device, CreateSwapchainKHR);
  541. GET_DEVICE_PROC_ADDR(device, DestroySwapchainKHR);
  542. GET_DEVICE_PROC_ADDR(device, GetSwapchainImagesKHR);
  543. GET_DEVICE_PROC_ADDR(device, AcquireNextImageKHR);
  544. GET_DEVICE_PROC_ADDR(device, QueuePresentKHR);
  545. if (VK_GOOGLE_display_timing_enabled) {
  546. GET_DEVICE_PROC_ADDR(device, GetRefreshCycleDurationGOOGLE);
  547. GET_DEVICE_PROC_ADDR(device, GetPastPresentationTimingGOOGLE);
  548. }
  549. vkGetDeviceQueue(device, graphics_queue_family_index, 0, &graphics_queue);
  550. if (!separate_present_queue) {
  551. present_queue = graphics_queue;
  552. } else {
  553. vkGetDeviceQueue(device, present_queue_family_index, 0, &present_queue);
  554. }
  555. // Get the list of VkFormat's that are supported:
  556. uint32_t formatCount;
  557. VkResult err = fpGetPhysicalDeviceSurfaceFormatsKHR(gpu, surface, &formatCount, nullptr);
  558. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  559. VkSurfaceFormatKHR *surfFormats = (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
  560. err = fpGetPhysicalDeviceSurfaceFormatsKHR(gpu, surface, &formatCount, surfFormats);
  561. if (err) {
  562. free(surfFormats);
  563. ERR_FAIL_V(ERR_CANT_CREATE);
  564. }
  565. // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
  566. // the surface has no preferred format. Otherwise, at least one
  567. // supported format will be returned.
  568. if (true || (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED)) {
  569. format = VK_FORMAT_B8G8R8A8_UNORM;
  570. } else {
  571. if (formatCount < 1) {
  572. free(surfFormats);
  573. ERR_FAIL_V_MSG(ERR_CANT_CREATE, "formatCount less than 1");
  574. }
  575. format = surfFormats[0].format;
  576. }
  577. color_space = surfFormats[0].colorSpace;
  578. free(surfFormats);
  579. Error serr = _create_semaphores();
  580. if (serr) {
  581. return serr;
  582. }
  583. queues_initialized = true;
  584. return OK;
  585. }
  586. Error VulkanContext::_create_semaphores() {
  587. VkResult err;
  588. // Create semaphores to synchronize acquiring presentable buffers before
  589. // rendering and waiting for drawing to be complete before presenting
  590. VkSemaphoreCreateInfo semaphoreCreateInfo = {
  591. /*sType*/ VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
  592. /*pNext*/ nullptr,
  593. /*flags*/ 0,
  594. };
  595. // Create fences that we can use to throttle if we get too far
  596. // ahead of the image presents
  597. VkFenceCreateInfo fence_ci = {
  598. /*sType*/ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
  599. /*pNext*/ nullptr,
  600. /*flags*/ VK_FENCE_CREATE_SIGNALED_BIT
  601. };
  602. for (uint32_t i = 0; i < FRAME_LAG; i++) {
  603. err = vkCreateFence(device, &fence_ci, nullptr, &fences[i]);
  604. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  605. err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &image_acquired_semaphores[i]);
  606. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  607. err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &draw_complete_semaphores[i]);
  608. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  609. if (separate_present_queue) {
  610. err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &image_ownership_semaphores[i]);
  611. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  612. }
  613. }
  614. frame_index = 0;
  615. // Get Memory information and properties
  616. vkGetPhysicalDeviceMemoryProperties(gpu, &memory_properties);
  617. return OK;
  618. }
  619. Error VulkanContext::_window_create(DisplayServer::WindowID p_window_id, VkSurfaceKHR p_surface, int p_width, int p_height) {
  620. ERR_FAIL_COND_V(windows.has(p_window_id), ERR_INVALID_PARAMETER);
  621. if (!queues_initialized) {
  622. // We use a single GPU, but we need a surface to initialize the
  623. // queues, so this process must be deferred until a surface
  624. // is created.
  625. _initialize_queues(p_surface);
  626. }
  627. Window window;
  628. window.surface = p_surface;
  629. window.width = p_width;
  630. window.height = p_height;
  631. Error err = _update_swap_chain(&window);
  632. ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);
  633. windows[p_window_id] = window;
  634. return OK;
  635. }
  636. void VulkanContext::window_resize(DisplayServer::WindowID p_window, int p_width, int p_height) {
  637. ERR_FAIL_COND(!windows.has(p_window));
  638. windows[p_window].width = p_width;
  639. windows[p_window].height = p_height;
  640. _update_swap_chain(&windows[p_window]);
  641. }
  642. int VulkanContext::window_get_width(DisplayServer::WindowID p_window) {
  643. ERR_FAIL_COND_V(!windows.has(p_window), -1);
  644. return windows[p_window].width;
  645. }
  646. int VulkanContext::window_get_height(DisplayServer::WindowID p_window) {
  647. ERR_FAIL_COND_V(!windows.has(p_window), -1);
  648. return windows[p_window].height;
  649. }
  650. VkRenderPass VulkanContext::window_get_render_pass(DisplayServer::WindowID p_window) {
  651. ERR_FAIL_COND_V(!windows.has(p_window), VK_NULL_HANDLE);
  652. Window *w = &windows[p_window];
  653. //vulkan use of currentbuffer
  654. return w->render_pass;
  655. }
  656. VkFramebuffer VulkanContext::window_get_framebuffer(DisplayServer::WindowID p_window) {
  657. ERR_FAIL_COND_V(!windows.has(p_window), VK_NULL_HANDLE);
  658. ERR_FAIL_COND_V(!buffers_prepared, VK_NULL_HANDLE);
  659. Window *w = &windows[p_window];
  660. //vulkan use of currentbuffer
  661. return w->swapchain_image_resources[w->current_buffer].framebuffer;
  662. }
  663. void VulkanContext::window_destroy(DisplayServer::WindowID p_window_id) {
  664. ERR_FAIL_COND(!windows.has(p_window_id));
  665. _clean_up_swap_chain(&windows[p_window_id]);
  666. vkDestroySurfaceKHR(inst, windows[p_window_id].surface, nullptr);
  667. windows.erase(p_window_id);
  668. }
Error VulkanContext::_clean_up_swap_chain(Window *window) {
	// Destroys one window's swapchain and every resource derived from it
	// (image views, framebuffers, render pass, present command pool).
	// No-op when the window has no swapchain. Waits for the device to go
	// idle first, so nothing in flight still references these objects.
	if (!window->swapchain) {
		return OK;
	}
	vkDeviceWaitIdle(device);

	//this destroys images associated it seems
	fpDestroySwapchainKHR(device, window->swapchain, nullptr);
	window->swapchain = VK_NULL_HANDLE;
	// NOTE(review): render_pass is destroyed but not reset to
	// VK_NULL_HANDLE here; _update_swap_chain() recreates it right after.
	vkDestroyRenderPass(device, window->render_pass, nullptr);
	if (window->swapchain_image_resources) {
		// swapchainImageCount is the context-wide image count established on
		// first swapchain creation (see _update_swap_chain).
		for (uint32_t i = 0; i < swapchainImageCount; i++) {
			vkDestroyImageView(device, window->swapchain_image_resources[i].view, nullptr);
			vkDestroyFramebuffer(device, window->swapchain_image_resources[i].framebuffer, nullptr);
		}

		free(window->swapchain_image_resources);
		window->swapchain_image_resources = nullptr;
	}

	if (separate_present_queue) {
		// Command buffers allocated from the pool are freed along with it.
		vkDestroyCommandPool(device, window->present_cmd_pool, nullptr);
	}
	return OK;
}
Error VulkanContext::_update_swap_chain(Window *window) {
	// (Re)creates the swapchain for one window at its current size, plus the
	// per-image views, a render pass, framebuffers and — when a separate
	// present queue is used — pre-recorded ownership-transfer command
	// buffers. Returns OK with no swapchain when the window is zero-sized
	// (e.g. minimized).
	VkResult err;

	if (window->swapchain) {
		// Rebuilding: destroy the previous swapchain resources first.
		_clean_up_swap_chain(window);
	}

	// Check the surface capabilities and formats
	VkSurfaceCapabilitiesKHR surfCapabilities;
	err = fpGetPhysicalDeviceSurfaceCapabilitiesKHR(gpu, window->surface, &surfCapabilities);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

	uint32_t presentModeCount;
	err = fpGetPhysicalDeviceSurfacePresentModesKHR(gpu, window->surface, &presentModeCount, nullptr);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	VkPresentModeKHR *presentModes = (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
	ERR_FAIL_COND_V(!presentModes, ERR_CANT_CREATE);
	err = fpGetPhysicalDeviceSurfacePresentModesKHR(gpu, window->surface, &presentModeCount, presentModes);
	if (err) {
		free(presentModes);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}

	VkExtent2D swapchainExtent;
	// width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
	if (surfCapabilities.currentExtent.width == 0xFFFFFFFF) {
		// If the surface size is undefined, the size is set to the size
		// of the images requested, which must fit within the minimum and
		// maximum values.
		swapchainExtent.width = window->width;
		swapchainExtent.height = window->height;

		// Clamp the requested extent into [minImageExtent, maxImageExtent].
		if (swapchainExtent.width < surfCapabilities.minImageExtent.width) {
			swapchainExtent.width = surfCapabilities.minImageExtent.width;
		} else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width) {
			swapchainExtent.width = surfCapabilities.maxImageExtent.width;
		}

		if (swapchainExtent.height < surfCapabilities.minImageExtent.height) {
			swapchainExtent.height = surfCapabilities.minImageExtent.height;
		} else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height) {
			swapchainExtent.height = surfCapabilities.maxImageExtent.height;
		}
	} else {
		// If the surface size is defined, the swap chain size must match
		swapchainExtent = surfCapabilities.currentExtent;
		window->width = surfCapabilities.currentExtent.width;
		window->height = surfCapabilities.currentExtent.height;
	}

	if (window->width == 0 || window->height == 0) {
		free(presentModes);
		//likely window minimized, no swapchain created
		return OK;
	}

	// The FIFO present mode is guaranteed by the spec to be supported
	// and to have no tearing. It's a great default present mode to use.
	VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;

	// There are times when you may wish to use another present mode. The
	// following code shows how to select them, and the comments provide some
	// reasons you may wish to use them.
	//
	// It should be noted that Vulkan 1.0 doesn't provide a method for
	// synchronizing rendering with the presentation engine's display. There
	// is a method provided for throttling rendering with the display, but
	// there are some presentation engines for which this method will not work.
	// If an application doesn't throttle its rendering, and if it renders much
	// faster than the refresh rate of the display, this can waste power on
	// mobile devices. That is because power is being spent rendering images
	// that may never be seen.

	// VK_PRESENT_MODE_IMMEDIATE_KHR is for applications that don't care about
	// tearing, or have some way of synchronizing their rendering with the
	// display.
	// VK_PRESENT_MODE_MAILBOX_KHR may be useful for applications that
	// generally render a new presentable image every refresh cycle, but are
	// occasionally early. In this case, the application wants the new image
	// to be displayed instead of the previously-queued-for-presentation image
	// that has not yet been displayed.
	// VK_PRESENT_MODE_FIFO_RELAXED_KHR is for applications that generally
	// render a new presentable image every refresh cycle, but are occasionally
	// late. In this case (perhaps because of stuttering/latency concerns),
	// the application wants the late image to be immediately displayed, even
	// though that may mean some tearing.

	// Honor the window's requested present mode only if the surface
	// actually supports it; otherwise keep FIFO and fail below.
	if (window->presentMode != swapchainPresentMode) {
		for (size_t i = 0; i < presentModeCount; ++i) {
			if (presentModes[i] == window->presentMode) {
				swapchainPresentMode = window->presentMode;
				break;
			}
		}
	}

	free(presentModes);
	ERR_FAIL_COND_V_MSG(swapchainPresentMode != window->presentMode, ERR_CANT_CREATE, "Present mode specified is not supported\n");

	// Determine the number of VkImages to use in the swap chain.
	// Application desires to acquire 3 images at a time for triple
	// buffering
	uint32_t desiredNumOfSwapchainImages = 3;
	if (desiredNumOfSwapchainImages < surfCapabilities.minImageCount) {
		desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
	}
	// If maxImageCount is 0, we can ask for as many images as we want;
	// otherwise we're limited to maxImageCount
	if ((surfCapabilities.maxImageCount > 0) && (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
		// Application must settle for fewer images than desired:
		desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
	}

	// Prefer no transform; fall back to whatever the surface reports.
	VkSurfaceTransformFlagsKHR preTransform;
	if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
		preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
	} else {
		preTransform = surfCapabilities.currentTransform;
	}

	// Find a supported composite alpha mode - one of these is guaranteed to be set
	VkCompositeAlphaFlagBitsKHR compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
	VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
		VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
		VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
		VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
		VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
	};
	for (uint32_t i = 0; i < ARRAY_SIZE(compositeAlphaFlags); i++) {
		if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i]) {
			compositeAlpha = compositeAlphaFlags[i];
			break;
		}
	}

	// format and color_space were chosen once in _initialize_queues().
	VkSwapchainCreateInfoKHR swapchain_ci = {
		/*sType*/ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
		/*pNext*/ nullptr,
		/*flags*/ 0,
		/*surface*/ window->surface,
		/*minImageCount*/ desiredNumOfSwapchainImages,
		/*imageFormat*/ format,
		/*imageColorSpace*/ color_space,
		/*imageExtent*/ {
				/*width*/ swapchainExtent.width,
				/*height*/ swapchainExtent.height,
		},
		/*imageArrayLayers*/ 1,
		/*imageUsage*/ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
		/*imageSharingMode*/ VK_SHARING_MODE_EXCLUSIVE,
		/*queueFamilyIndexCount*/ 0,
		/*pQueueFamilyIndices*/ nullptr,
		/*preTransform*/ (VkSurfaceTransformFlagBitsKHR)preTransform,
		/*compositeAlpha*/ compositeAlpha,
		/*presentMode*/ swapchainPresentMode,
		/*clipped*/ true,
		/*oldSwapchain*/ VK_NULL_HANDLE,
	};

	err = fpCreateSwapchainKHR(device, &swapchain_ci, nullptr, &window->swapchain);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

	uint32_t sp_image_count;
	err = fpGetSwapchainImagesKHR(device, window->swapchain, &sp_image_count, nullptr);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

	// All windows are expected to end up with the same image count; the
	// first swapchain created fixes the context-wide value.
	if (swapchainImageCount == 0) {
		//assign here for the first time.
		swapchainImageCount = sp_image_count;
	} else {
		ERR_FAIL_COND_V(swapchainImageCount != sp_image_count, ERR_BUG);
	}

	VkImage *swapchainImages = (VkImage *)malloc(swapchainImageCount * sizeof(VkImage));
	ERR_FAIL_COND_V(!swapchainImages, ERR_CANT_CREATE);
	err = fpGetSwapchainImagesKHR(device, window->swapchain, &swapchainImageCount, swapchainImages);
	if (err) {
		free(swapchainImages);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}

	window->swapchain_image_resources =
			(SwapchainImageResources *)malloc(sizeof(SwapchainImageResources) * swapchainImageCount);
	if (!window->swapchain_image_resources) {
		free(swapchainImages);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}

	// One image view per swapchain image, identity swizzle, single mip/layer.
	for (uint32_t i = 0; i < swapchainImageCount; i++) {
		VkImageViewCreateInfo color_image_view = {
			/*sType*/ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*image*/ swapchainImages[i],
			/*viewType*/ VK_IMAGE_VIEW_TYPE_2D,
			/*format*/ format,
			/*components*/ {
					/*r*/ VK_COMPONENT_SWIZZLE_R,
					/*g*/ VK_COMPONENT_SWIZZLE_G,
					/*b*/ VK_COMPONENT_SWIZZLE_B,
					/*a*/ VK_COMPONENT_SWIZZLE_A,
			},
			/*subresourceRange*/ { /*aspectMask*/ VK_IMAGE_ASPECT_COLOR_BIT,
					/*baseMipLevel*/ 0,
					/*levelCount*/ 1,
					/*baseArrayLayer*/ 0,
					/*layerCount*/ 1 },
		};

		window->swapchain_image_resources[i].image = swapchainImages[i];

		color_image_view.image = window->swapchain_image_resources[i].image;

		err = vkCreateImageView(device, &color_image_view, nullptr, &window->swapchain_image_resources[i].view);
		if (err) {
			// NOTE(review): views/resources created in earlier iterations are
			// not destroyed here; they are reclaimed by the next
			// _clean_up_swap_chain() — confirm against teardown paths.
			free(swapchainImages);
			ERR_FAIL_V(ERR_CANT_CREATE);
		}
	}

	free(swapchainImages);

	/******** FRAMEBUFFER ************/
	{
		// Single-color-attachment render pass that clears on load and leaves
		// the image ready for presentation.
		const VkAttachmentDescription attachment = {
			/*flags*/ 0,
			/*format*/ format,
			/*samples*/ VK_SAMPLE_COUNT_1_BIT,
			/*loadOp*/ VK_ATTACHMENT_LOAD_OP_CLEAR,
			/*storeOp*/ VK_ATTACHMENT_STORE_OP_STORE,
			/*stencilLoadOp*/ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
			/*stencilStoreOp*/ VK_ATTACHMENT_STORE_OP_DONT_CARE,
			/*initialLayout*/ VK_IMAGE_LAYOUT_UNDEFINED,
			/*finalLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
		};
		const VkAttachmentReference color_reference = {
			/*attachment*/ 0,
			/*layout*/ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
		};

		const VkSubpassDescription subpass = {
			/*flags*/ 0,
			/*pipelineBindPoint*/ VK_PIPELINE_BIND_POINT_GRAPHICS,
			/*inputAttachmentCount*/ 0,
			/*pInputAttachments*/ nullptr,
			/*colorAttachmentCount*/ 1,
			/*pColorAttachments*/ &color_reference,
			/*pResolveAttachments*/ nullptr,
			/*pDepthStencilAttachment*/ nullptr,
			/*preserveAttachmentCount*/ 0,
			/*pPreserveAttachments*/ nullptr,
		};
		const VkRenderPassCreateInfo rp_info = {
			/*sTyp*/ VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*attachmentCount*/ 1,
			/*pAttachments*/ &attachment,
			/*subpassCount*/ 1,
			/*pSubpasses*/ &subpass,
			/*dependencyCount*/ 0,
			/*pDependencies*/ nullptr,
		};

		err = vkCreateRenderPass(device, &rp_info, nullptr, &window->render_pass);
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

		// One framebuffer per swapchain image view, matching window size.
		for (uint32_t i = 0; i < swapchainImageCount; i++) {
			const VkFramebufferCreateInfo fb_info = {
				/*sType*/ VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
				/*pNext*/ nullptr,
				/*flags*/ 0,
				/*renderPass*/ window->render_pass,
				/*attachmentCount*/ 1,
				/*pAttachments*/ &window->swapchain_image_resources[i].view,
				/*width*/ (uint32_t)window->width,
				/*height*/ (uint32_t)window->height,
				/*layers*/ 1,
			};

			err = vkCreateFramebuffer(device, &fb_info, nullptr, &window->swapchain_image_resources[i].framebuffer);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		}
	}

	/******** SEPARATE PRESENT QUEUE ************/

	if (separate_present_queue) {
		// Pre-record one command buffer per swapchain image that transfers
		// queue-family ownership of the image from the graphics queue to the
		// present queue (layout stays PRESENT_SRC_KHR).
		const VkCommandPoolCreateInfo present_cmd_pool_info = {
			/*sType*/ VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*queueFamilyIndex*/ present_queue_family_index,
		};

		err = vkCreateCommandPool(device, &present_cmd_pool_info, nullptr, &window->present_cmd_pool);
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		const VkCommandBufferAllocateInfo present_cmd_info = {
			/*sType*/ VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
			/*pNext*/ nullptr,
			/*commandPool*/ window->present_cmd_pool,
			/*level*/ VK_COMMAND_BUFFER_LEVEL_PRIMARY,
			/*commandBufferCount*/ 1,
		};
		for (uint32_t i = 0; i < swapchainImageCount; i++) {
			err = vkAllocateCommandBuffers(device, &present_cmd_info,
					&window->swapchain_image_resources[i].graphics_to_present_cmd);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

			// SIMULTANEOUS_USE: the same buffer may be submitted for
			// consecutive frames before earlier submissions retire.
			const VkCommandBufferBeginInfo cmd_buf_info = {
				/*sType*/ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
				/*pNext*/ nullptr,
				/*flags*/ VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
				/*pInheritanceInfo*/ nullptr,
			};
			err = vkBeginCommandBuffer(window->swapchain_image_resources[i].graphics_to_present_cmd, &cmd_buf_info);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

			VkImageMemoryBarrier image_ownership_barrier = {
				/*sType*/ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
				/*pNext*/ nullptr,
				/*srcAccessMask*/ 0,
				/*dstAccessMask*/ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
				/*oldLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
				/*newLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
				/*srcQueueFamilyIndex*/ graphics_queue_family_index,
				/*dstQueueFamilyIndex*/ present_queue_family_index,
				/*image*/ window->swapchain_image_resources[i].image,
				/*subresourceRange*/ { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 }
			};

			vkCmdPipelineBarrier(window->swapchain_image_resources[i].graphics_to_present_cmd, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
					VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_ownership_barrier);
			err = vkEndCommandBuffer(window->swapchain_image_resources[i].graphics_to_present_cmd);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		}
	}

	//reset current buffer
	window->current_buffer = 0;

	return OK;
}
  995. Error VulkanContext::initialize() {
  996. Error err = _create_physical_device();
  997. if (err) {
  998. return err;
  999. }
  1000. device_initialized = true;
  1001. return OK;
  1002. }
void VulkanContext::set_setup_buffer(const VkCommandBuffer &pCommandBuffer) {
	// Slot 0 of the command buffer queue is reserved for the setup buffer;
	// flush() and swap_buffers() submit it ahead of the appended buffers.
	command_buffer_queue.write[0] = pCommandBuffer;
}
  1006. void VulkanContext::append_command_buffer(const VkCommandBuffer &pCommandBuffer) {
  1007. if (command_buffer_queue.size() <= command_buffer_count) {
  1008. command_buffer_queue.resize(command_buffer_count + 1);
  1009. }
  1010. command_buffer_queue.write[command_buffer_count] = pCommandBuffer;
  1011. command_buffer_count++;
  1012. }
void VulkanContext::flush(bool p_flush_setup, bool p_flush_pending) {
	// Synchronously submits the queued setup buffer (slot 0) and/or the
	// pending frame buffers (slots 1..count-1) on the graphics queue,
	// waiting for the device to go idle after each submission. Used outside
	// the normal prepare/swap cycle.
	// ensure everything else pending is executed
	vkDeviceWaitIdle(device);

	//flush the pending setup buffer
	if (p_flush_setup && command_buffer_queue[0]) {
		//use a fence to wait for everything done
		VkSubmitInfo submit_info;
		submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
		submit_info.pNext = nullptr;
		submit_info.pWaitDstStageMask = nullptr;
		submit_info.waitSemaphoreCount = 0;
		submit_info.pWaitSemaphores = nullptr;
		submit_info.commandBufferCount = 1;
		submit_info.pCommandBuffers = command_buffer_queue.ptr();
		submit_info.signalSemaphoreCount = 0;
		submit_info.pSignalSemaphores = nullptr;
		VkResult err = vkQueueSubmit(graphics_queue, 1, &submit_info, VK_NULL_HANDLE);
		// Clear the setup slot before checking the result so it is never
		// resubmitted, even on error.
		command_buffer_queue.write[0] = nullptr;
		ERR_FAIL_COND(err);
		vkDeviceWaitIdle(device);
	}

	// count > 1 means at least one buffer beyond the reserved setup slot.
	if (p_flush_pending && command_buffer_count > 1) {
		//use a fence to wait for everything done
		VkSubmitInfo submit_info;
		submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
		submit_info.pNext = nullptr;
		submit_info.pWaitDstStageMask = nullptr;
		submit_info.waitSemaphoreCount = 0;
		submit_info.pWaitSemaphores = nullptr;
		submit_info.commandBufferCount = command_buffer_count - 1;
		submit_info.pCommandBuffers = command_buffer_queue.ptr() + 1;
		submit_info.signalSemaphoreCount = 0;
		submit_info.pSignalSemaphores = nullptr;
		VkResult err = vkQueueSubmit(graphics_queue, 1, &submit_info, VK_NULL_HANDLE);
		ERR_FAIL_COND(err);
		vkDeviceWaitIdle(device);

		command_buffer_count = 1;
	}
}
Error VulkanContext::prepare_buffers() {
	// Acquires the next presentable image for every window's swapchain and
	// throttles the CPU so at most FRAME_LAG frames are in flight. Must be
	// paired with swap_buffers(), which signals fences[frame_index].
	if (!queues_initialized) {
		return OK;
	}

	VkResult err;

	// Ensure no more than FRAME_LAG renderings are outstanding
	vkWaitForFences(device, 1, &fences[frame_index], VK_TRUE, UINT64_MAX);
	vkResetFences(device, 1, &fences[frame_index]);

	for (Map<int, Window>::Element *E = windows.front(); E; E = E->next()) {
		Window *w = &E->get();

		if (w->swapchain == VK_NULL_HANDLE) {
			// No swapchain (e.g. zero-sized/minimized window) — nothing to acquire.
			continue;
		}

		do {
			// Get the index of the next available swapchain image:
			err =
					fpAcquireNextImageKHR(device, w->swapchain, UINT64_MAX,
							image_acquired_semaphores[frame_index], VK_NULL_HANDLE, &w->current_buffer);

			if (err == VK_ERROR_OUT_OF_DATE_KHR) {
				// swapchain is out of date (e.g. the window was resized) and
				// must be recreated: rebuild it, then retry the acquire.
				print_line("early out of data");
				//resize_notify();
				_update_swap_chain(w);
			} else if (err == VK_SUBOPTIMAL_KHR) {
				print_line("early suboptimal");
				// swapchain is not as optimal as it could be, but the platform's
				// presentation engine will still present the image correctly.
				break;
			} else {
				ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
			}
		} while (err != VK_SUCCESS);
	}

	// Framebuffer accessors are now valid until the next swap_buffers().
	buffers_prepared = true;

	return OK;
}
  1089. Error VulkanContext::swap_buffers() {
  1090. if (!queues_initialized) {
  1091. return OK;
  1092. }
  1093. // print_line("swapbuffers?");
  1094. VkResult err;
  1095. #if 0
  1096. if (VK_GOOGLE_display_timing_enabled) {
  1097. // Look at what happened to previous presents, and make appropriate
  1098. // adjustments in timing:
  1099. DemoUpdateTargetIPD(demo);
  1100. // Note: a real application would position its geometry to that it's in
  1101. // the correct locatoin for when the next image is presented. It might
  1102. // also wait, so that there's less latency between any input and when
  1103. // the next image is rendered/presented. This demo program is so
  1104. // simple that it doesn't do either of those.
  1105. }
  1106. #endif
  1107. // Wait for the image acquired semaphore to be signaled to ensure
  1108. // that the image won't be rendered to until the presentation
  1109. // engine has fully released ownership to the application, and it is
  1110. // okay to render to the image.
  1111. const VkCommandBuffer *commands_ptr = nullptr;
  1112. uint32_t commands_to_submit = 0;
  1113. if (command_buffer_queue[0] == nullptr) {
  1114. //no setup command, but commands to submit, submit from the first and skip command
  1115. if (command_buffer_count > 1) {
  1116. commands_ptr = command_buffer_queue.ptr() + 1;
  1117. commands_to_submit = command_buffer_count - 1;
  1118. }
  1119. } else {
  1120. commands_ptr = command_buffer_queue.ptr();
  1121. commands_to_submit = command_buffer_count;
  1122. }
  1123. VkPipelineStageFlags pipe_stage_flags;
  1124. VkSubmitInfo submit_info;
  1125. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1126. submit_info.pNext = nullptr;
  1127. submit_info.pWaitDstStageMask = &pipe_stage_flags;
  1128. pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  1129. submit_info.waitSemaphoreCount = 1;
  1130. submit_info.pWaitSemaphores = &image_acquired_semaphores[frame_index];
  1131. submit_info.commandBufferCount = commands_to_submit;
  1132. submit_info.pCommandBuffers = commands_ptr;
  1133. submit_info.signalSemaphoreCount = 1;
  1134. submit_info.pSignalSemaphores = &draw_complete_semaphores[frame_index];
  1135. err = vkQueueSubmit(graphics_queue, 1, &submit_info, fences[frame_index]);
  1136. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1137. command_buffer_queue.write[0] = nullptr;
  1138. command_buffer_count = 1;
  1139. if (separate_present_queue) {
  1140. // If we are using separate queues, change image ownership to the
  1141. // present queue before presenting, waiting for the draw complete
  1142. // semaphore and signalling the ownership released semaphore when finished
  1143. VkFence nullFence = VK_NULL_HANDLE;
  1144. pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  1145. submit_info.waitSemaphoreCount = 1;
  1146. submit_info.pWaitSemaphores = &draw_complete_semaphores[frame_index];
  1147. submit_info.commandBufferCount = 0;
  1148. VkCommandBuffer *cmdbufptr = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer *) * windows.size());
  1149. submit_info.pCommandBuffers = cmdbufptr;
  1150. for (Map<int, Window>::Element *E = windows.front(); E; E = E->next()) {
  1151. Window *w = &E->get();
  1152. if (w->swapchain == VK_NULL_HANDLE) {
  1153. continue;
  1154. }
  1155. cmdbufptr[submit_info.commandBufferCount] = w->swapchain_image_resources[w->current_buffer].graphics_to_present_cmd;
  1156. submit_info.commandBufferCount++;
  1157. }
  1158. submit_info.signalSemaphoreCount = 1;
  1159. submit_info.pSignalSemaphores = &image_ownership_semaphores[frame_index];
  1160. err = vkQueueSubmit(present_queue, 1, &submit_info, nullFence);
  1161. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1162. }
  1163. // If we are using separate queues we have to wait for image ownership,
  1164. // otherwise wait for draw complete
  1165. VkPresentInfoKHR present = {
  1166. /*sType*/ VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
  1167. /*pNext*/ nullptr,
  1168. /*waitSemaphoreCount*/ 1,
  1169. /*pWaitSemaphores*/ (separate_present_queue) ? &image_ownership_semaphores[frame_index] : &draw_complete_semaphores[frame_index],
  1170. /*swapchainCount*/ 0,
  1171. /*pSwapchain*/ nullptr,
  1172. /*pImageIndices*/ nullptr,
  1173. /*pResults*/ nullptr,
  1174. };
  1175. VkSwapchainKHR *pSwapchains = (VkSwapchainKHR *)alloca(sizeof(VkSwapchainKHR *) * windows.size());
  1176. uint32_t *pImageIndices = (uint32_t *)alloca(sizeof(uint32_t *) * windows.size());
  1177. present.pSwapchains = pSwapchains;
  1178. present.pImageIndices = pImageIndices;
  1179. for (Map<int, Window>::Element *E = windows.front(); E; E = E->next()) {
  1180. Window *w = &E->get();
  1181. if (w->swapchain == VK_NULL_HANDLE) {
  1182. continue;
  1183. }
  1184. pSwapchains[present.swapchainCount] = w->swapchain;
  1185. pImageIndices[present.swapchainCount] = w->current_buffer;
  1186. present.swapchainCount++;
  1187. }
  1188. #if 0
  1189. if (VK_KHR_incremental_present_enabled) {
  1190. // If using VK_KHR_incremental_present, we provide a hint of the region
  1191. // that contains changed content relative to the previously-presented
  1192. // image. The implementation can use this hint in order to save
  1193. // work/power (by only copying the region in the hint). The
  1194. // implementation is free to ignore the hint though, and so we must
  1195. // ensure that the entire image has the correctly-drawn content.
  1196. uint32_t eighthOfWidth = width / 8;
  1197. uint32_t eighthOfHeight = height / 8;
  1198. VkRectLayerKHR rect = {
  1199. /*offset.x*/ eighthOfWidth,
  1200. /*offset.y*/ eighthOfHeight,
  1201. /*extent.width*/ eighthOfWidth * 6,
  1202. /*extent.height*/ eighthOfHeight * 6,
  1203. /*layer*/ 0,
  1204. };
  1205. VkPresentRegionKHR region = {
  1206. /*rectangleCount*/ 1,
  1207. /*pRectangles*/ &rect,
  1208. };
  1209. VkPresentRegionsKHR regions = {
  1210. /*sType*/ VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
  1211. /*pNext*/ present.pNext,
  1212. /*swapchainCount*/ present.swapchainCount,
  1213. /*pRegions*/ &region,
  1214. };
  1215. present.pNext = &regions;
  1216. }
  1217. #endif
  1218. #if 0
  1219. if (VK_GOOGLE_display_timing_enabled) {
  1220. VkPresentTimeGOOGLE ptime;
  1221. if (prev_desired_present_time == 0) {
  1222. // This must be the first present for this swapchain.
  1223. //
  1224. // We don't know where we are relative to the presentation engine's
  1225. // display's refresh cycle. We also don't know how long rendering
  1226. // takes. Let's make a grossly-simplified assumption that the
  1227. // desiredPresentTime should be half way between now and
  1228. // now+target_IPD. We will adjust over time.
  1229. uint64_t curtime = getTimeInNanoseconds();
  1230. if (curtime == 0) {
  1231. // Since we didn't find out the current time, don't give a
  1232. // desiredPresentTime:
  1233. ptime.desiredPresentTime = 0;
  1234. } else {
  1235. ptime.desiredPresentTime = curtime + (target_IPD >> 1);
  1236. }
  1237. } else {
  1238. ptime.desiredPresentTime = (prev_desired_present_time + target_IPD);
  1239. }
  1240. ptime.presentID = next_present_id++;
  1241. prev_desired_present_time = ptime.desiredPresentTime;
  1242. VkPresentTimesInfoGOOGLE present_time = {
  1243. /*sType*/ VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
  1244. /*pNext*/ present.pNext,
  1245. /*swapchainCount*/ present.swapchainCount,
  1246. /*pTimes*/ &ptime,
  1247. };
  1248. if (VK_GOOGLE_display_timing_enabled) {
  1249. present.pNext = &present_time;
  1250. }
  1251. }
  1252. #endif
  1253. static int total_frames = 0;
  1254. total_frames++;
  1255. // print_line("current buffer: " + itos(current_buffer));
  1256. err = fpQueuePresentKHR(present_queue, &present);
  1257. frame_index += 1;
  1258. frame_index %= FRAME_LAG;
  1259. if (err == VK_ERROR_OUT_OF_DATE_KHR) {
  1260. // swapchain is out of date (e.g. the window was resized) and
  1261. // must be recreated:
  1262. print_line("out of date");
  1263. resize_notify();
  1264. } else if (err == VK_SUBOPTIMAL_KHR) {
  1265. // swapchain is not as optimal as it could be, but the platform's
  1266. // presentation engine will still present the image correctly.
  1267. print_line("suboptimal");
  1268. } else {
  1269. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1270. }
  1271. buffers_prepared = false;
  1272. return OK;
  1273. }
// Hook called when presentation reports VK_ERROR_OUT_OF_DATE_KHR (window
// resized, etc.). Intentionally empty here; presumably platform-specific
// subclasses override it to recreate the swapchain — TODO confirm overriders.
void VulkanContext::resize_notify() {
}
// Returns the Vulkan logical device handle owned by this context.
VkDevice VulkanContext::get_device() {
	return device;
}
// Returns the physical device (GPU) this context was created on.
VkPhysicalDevice VulkanContext::get_physical_device() {
	return gpu;
}
// Returns the number of images in the swapchain(s) managed by this context.
int VulkanContext::get_swapchain_image_count() const {
	return swapchainImageCount;
}
// Despite the name, this returns the graphics queue FAMILY INDEX
// (as used for vkGetDeviceQueue / VkDeviceQueueCreateInfo), not a VkQueue.
uint32_t VulkanContext::get_graphics_queue() const {
	return graphics_queue_family_index;
}
// Returns the surface/swapchain color format selected for presentation.
VkFormat VulkanContext::get_screen_format() const {
	return format;
}
// Returns the physical-device limits (max texture sizes, alignments, etc.)
// queried at initialization; returned by value (the struct is plain data).
VkPhysicalDeviceLimits VulkanContext::get_device_limits() const {
	return gpu_props.limits;
}
  1294. RID VulkanContext::local_device_create() {
  1295. LocalDevice ld;
  1296. { //create device
  1297. VkResult err;
  1298. float queue_priorities[1] = { 0.0 };
  1299. VkDeviceQueueCreateInfo queues[2];
  1300. queues[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  1301. queues[0].pNext = nullptr;
  1302. queues[0].queueFamilyIndex = graphics_queue_family_index;
  1303. queues[0].queueCount = 1;
  1304. queues[0].pQueuePriorities = queue_priorities;
  1305. queues[0].flags = 0;
  1306. VkDeviceCreateInfo sdevice = {
  1307. /*sType =*/VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
  1308. /*pNext */ nullptr,
  1309. /*flags */ 0,
  1310. /*queueCreateInfoCount */ 1,
  1311. /*pQueueCreateInfos */ queues,
  1312. /*enabledLayerCount */ 0,
  1313. /*ppEnabledLayerNames */ nullptr,
  1314. /*enabledExtensionCount */ enabled_extension_count,
  1315. /*ppEnabledExtensionNames */ (const char *const *)extension_names,
  1316. /*pEnabledFeatures */ &physical_device_features, // If specific features are required, pass them in here
  1317. };
  1318. err = vkCreateDevice(gpu, &sdevice, nullptr, &ld.device);
  1319. ERR_FAIL_COND_V(err, RID());
  1320. }
  1321. { //create graphics queue
  1322. vkGetDeviceQueue(ld.device, graphics_queue_family_index, 0, &ld.queue);
  1323. }
  1324. return local_device_owner.make_rid(ld);
  1325. }
  1326. VkDevice VulkanContext::local_device_get_vk_device(RID p_local_device) {
  1327. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1328. return ld->device;
  1329. }
  1330. void VulkanContext::local_device_push_command_buffers(RID p_local_device, const VkCommandBuffer *p_buffers, int p_count) {
  1331. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1332. ERR_FAIL_COND(ld->waiting);
  1333. VkSubmitInfo submit_info;
  1334. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1335. submit_info.pNext = nullptr;
  1336. submit_info.pWaitDstStageMask = nullptr;
  1337. submit_info.waitSemaphoreCount = 0;
  1338. submit_info.pWaitSemaphores = nullptr;
  1339. submit_info.commandBufferCount = p_count;
  1340. submit_info.pCommandBuffers = p_buffers;
  1341. submit_info.signalSemaphoreCount = 0;
  1342. submit_info.pSignalSemaphores = nullptr;
  1343. VkResult err = vkQueueSubmit(ld->queue, 1, &submit_info, VK_NULL_HANDLE);
  1344. if (err == VK_ERROR_OUT_OF_HOST_MEMORY) {
  1345. print_line("out of host memory");
  1346. }
  1347. if (err == VK_ERROR_OUT_OF_DEVICE_MEMORY) {
  1348. print_line("out of device memory");
  1349. }
  1350. if (err == VK_ERROR_DEVICE_LOST) {
  1351. print_line("device lost");
  1352. }
  1353. ERR_FAIL_COND(err);
  1354. ld->waiting = true;
  1355. }
// Blocks until all work previously pushed to the local device has completed.
// Uses a whole-device wait (vkDeviceWaitIdle) rather than a fence, then
// clears the in-flight flag so another push is allowed.
void VulkanContext::local_device_sync(RID p_local_device) {
	LocalDevice *ld = local_device_owner.getornull(p_local_device);
	ERR_FAIL_COND(!ld->waiting); // Nothing was submitted (or already synced).
	vkDeviceWaitIdle(ld->device);
	ld->waiting = false;
}
// Destroys the local device's VkDevice and releases its RID.
// NOTE(review): no idle wait is performed here; presumably callers invoke
// local_device_sync() first, since vkDestroyDevice requires all queue work
// to have finished — TODO confirm call sites.
void VulkanContext::local_device_free(RID p_local_device) {
	LocalDevice *ld = local_device_owner.getornull(p_local_device);
	vkDestroyDevice(ld->device, nullptr);
	local_device_owner.free(p_local_device);
}
// Reads the validation-layer setting from the Engine singleton and reserves
// slot 0 of the command buffer queue for the setup command buffer.
VulkanContext::VulkanContext() {
	use_validation_layers = Engine::get_singleton()->is_validation_layers_enabled();

	command_buffer_queue.resize(1); // First one is always the setup command.
	command_buffer_queue.write[0] = nullptr;
}
// Tears down Vulkan objects in reverse creation order, guarded by the
// device_initialized / inst_initialized flags so a partially-initialized
// context destructs cleanly.
VulkanContext::~VulkanContext() {
	// queue_props appears to be a malloc'd array (freed with free(), not a
	// Godot allocator) — TODO confirm the allocation site matches.
	if (queue_props) {
		free(queue_props);
	}
	if (device_initialized) {
		// Per-frame synchronization objects.
		for (uint32_t i = 0; i < FRAME_LAG; i++) {
			vkDestroyFence(device, fences[i], nullptr);
			vkDestroySemaphore(device, image_acquired_semaphores[i], nullptr);
			vkDestroySemaphore(device, draw_complete_semaphores[i], nullptr);
			if (separate_present_queue) {
				vkDestroySemaphore(device, image_ownership_semaphores[i], nullptr);
			}
		}
		// NOTE(review): the debug messenger is an instance-level object, yet it
		// is only destroyed when device_initialized is true. If instance init
		// succeeded but device init failed, the messenger would leak — consider
		// moving this under the inst_initialized check below. TODO confirm the
		// messenger's creation order relative to device creation.
		if (inst_initialized && use_validation_layers) {
			DestroyDebugUtilsMessengerEXT(inst, dbg_messenger, nullptr);
		}
		vkDestroyDevice(device, nullptr);
	}
	if (inst_initialized) {
		vkDestroyInstance(inst, nullptr);
	}
}