vulkan_context.cpp

  1. /*************************************************************************/
  2. /* vulkan_context.cpp */
  3. /*************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /*************************************************************************/
  8. /* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */
  9. /* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /*************************************************************************/
  30. #include "vulkan_context.h"
  31. #include "core/config/engine.h"
  32. #include "core/config/project_settings.h"
  33. #include "core/string/ustring.h"
  34. #include "core/templates/local_vector.h"
  35. #include "core/version.h"
  36. #include "servers/rendering/rendering_device.h"
  37. #include "vk_enum_string_helper.h"
  38. #include <stdio.h>
  39. #include <stdlib.h>
  40. #include <string.h>
  41. #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
  42. #define APP_SHORT_NAME "GodotEngine"
  43. VKAPI_ATTR VkBool32 VKAPI_CALL VulkanContext::_debug_messenger_callback(
  44. VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
  45. VkDebugUtilsMessageTypeFlagsEXT messageType,
  46. const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
  47. void *pUserData) {
  48. // This error needs to be ignored because the AMD allocator will mix up memory types on IGP processors.
  49. if (strstr(pCallbackData->pMessage, "Mapping an image with layout") != nullptr &&
  50. strstr(pCallbackData->pMessage, "can result in undefined behavior if this memory is used by the device") != nullptr) {
  51. return VK_FALSE;
  52. }
  53. // This needs to be ignored because Validator is wrong here.
  54. if (strstr(pCallbackData->pMessage, "Invalid SPIR-V binary version 1.3") != nullptr) {
  55. return VK_FALSE;
  56. }
  57. // This needs to be ignored because Validator is wrong here.
  58. if (strstr(pCallbackData->pMessage, "Shader requires flag") != nullptr) {
  59. return VK_FALSE;
  60. }
  61. // This needs to be ignored because Validator is wrong here.
  62. if (strstr(pCallbackData->pMessage, "SPIR-V module not valid: Pointer operand") != nullptr &&
  63. strstr(pCallbackData->pMessage, "must be a memory object") != nullptr) {
  64. return VK_FALSE;
  65. }
  66. /*
  67. // This is a valid warning because it's illegal in Vulkan, but in practice it should work according to VK_KHR_maintenance2
  68. if (strstr(pCallbackData->pMessage, "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes VK_IMAGE_USAGE_STORAGE_BIT") != nullptr) {
  69. return VK_FALSE;
  70. }
  71. if (strstr(pCallbackData->pMessage, "VK_FORMAT_R4G4B4A4_UNORM_PACK16 with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes VK_IMAGE_USAGE_STORAGE_BIT") != nullptr) {
  72. return VK_FALSE;
  73. }
  74. */
  75. // Workaround for Vulkan-Loader usability bug: https://github.com/KhronosGroup/Vulkan-Loader/issues/262.
  76. if (strstr(pCallbackData->pMessage, "wrong ELF class: ELFCLASS32") != nullptr) {
  77. return VK_FALSE;
  78. }
  79. if (pCallbackData->pMessageIdName && strstr(pCallbackData->pMessageIdName, "UNASSIGNED-CoreValidation-DrawState-ClearCmdBeforeDraw") != nullptr) {
  80. return VK_FALSE;
  81. }
  82. String type_string;
  83. switch (messageType) {
  84. case (VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT):
  85. type_string = "GENERAL";
  86. break;
  87. case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT):
  88. type_string = "VALIDATION";
  89. break;
  90. case (VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
  91. type_string = "PERFORMANCE";
  92. break;
  93. case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
  94. type_string = "VALIDATION|PERFORMANCE";
  95. break;
  96. }
  97. String objects_string;
  98. if (pCallbackData->objectCount > 0) {
  99. objects_string = "\n\tObjects - " + String::num_int64(pCallbackData->objectCount);
  100. for (uint32_t object = 0; object < pCallbackData->objectCount; ++object) {
  101. objects_string +=
  102. "\n\t\tObject[" + String::num_int64(object) + "]" +
  103. " - " + string_VkObjectType(pCallbackData->pObjects[object].objectType) +
  104. ", Handle " + String::num_int64(pCallbackData->pObjects[object].objectHandle);
  105. if (nullptr != pCallbackData->pObjects[object].pObjectName && strlen(pCallbackData->pObjects[object].pObjectName) > 0) {
  106. objects_string += ", Name \"" + String(pCallbackData->pObjects[object].pObjectName) + "\"";
  107. }
  108. }
  109. }
  110. String labels_string;
  111. if (pCallbackData->cmdBufLabelCount > 0) {
  112. labels_string = "\n\tCommand Buffer Labels - " + String::num_int64(pCallbackData->cmdBufLabelCount);
  113. for (uint32_t cmd_buf_label = 0; cmd_buf_label < pCallbackData->cmdBufLabelCount; ++cmd_buf_label) {
  114. labels_string +=
  115. "\n\t\tLabel[" + String::num_int64(cmd_buf_label) + "]" +
  116. " - " + pCallbackData->pCmdBufLabels[cmd_buf_label].pLabelName +
  117. "{ ";
  118. for (int color_idx = 0; color_idx < 4; ++color_idx) {
  119. labels_string += String::num(pCallbackData->pCmdBufLabels[cmd_buf_label].color[color_idx]);
  120. if (color_idx < 3) {
  121. labels_string += ", ";
  122. }
  123. }
  124. labels_string += " }";
  125. }
  126. }
  127. String error_message(type_string +
  128. " - Message Id Number: " + String::num_int64(pCallbackData->messageIdNumber) +
  129. " | Message Id Name: " + pCallbackData->pMessageIdName +
  130. "\n\t" + pCallbackData->pMessage +
  131. objects_string + labels_string);
  132. // Convert VK severity to our own log macros.
  133. switch (messageSeverity) {
  134. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
  135. print_verbose(error_message);
  136. break;
  137. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
  138. print_line(error_message);
  139. break;
  140. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
  141. WARN_PRINT(error_message);
  142. break;
  143. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
  144. ERR_PRINT(error_message);
  145. CRASH_COND_MSG(Engine::get_singleton()->is_abort_on_gpu_errors_enabled(),
  146. "Crashing, because abort on GPU errors is enabled.");
  147. break;
  148. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT:
  149. break; // Shouldn't happen, only handling to make compilers happy.
  150. }
  151. return VK_FALSE;
  152. }
  153. VKAPI_ATTR VkBool32 VKAPI_CALL VulkanContext::_debug_report_callback(
  154. VkDebugReportFlagsEXT flags,
  155. VkDebugReportObjectTypeEXT objectType,
  156. uint64_t object,
  157. size_t location,
  158. int32_t messageCode,
  159. const char *pLayerPrefix,
  160. const char *pMessage,
  161. void *pUserData) {
  162. String debugMessage = String("Vulkan Debug Report: object - ") +
  163. String::num_int64(object) + "\n" + pMessage;
  164. switch (flags) {
  165. case VK_DEBUG_REPORT_DEBUG_BIT_EXT:
  166. case VK_DEBUG_REPORT_INFORMATION_BIT_EXT:
  167. print_line(debugMessage);
  168. break;
  169. case VK_DEBUG_REPORT_WARNING_BIT_EXT:
  170. case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT:
  171. WARN_PRINT(debugMessage);
  172. break;
  173. case VK_DEBUG_REPORT_ERROR_BIT_EXT:
  174. ERR_PRINT(debugMessage);
  175. break;
  176. }
  177. return VK_FALSE;
  178. }
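// Returns true only if every requested layer name is present in the enumerated layer list; warns about the first missing layer otherwise.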
  179. VkBool32 VulkanContext::_check_layers(uint32_t check_count, const char *const *check_names, uint32_t layer_count, VkLayerProperties *layers) {
  180. for (uint32_t i = 0; i < check_count; i++) {
  181. VkBool32 found = 0;
  182. for (uint32_t j = 0; j < layer_count; j++) {
  183. if (!strcmp(check_names[i], layers[j].layerName)) {
  184. found = 1;
  185. break;
  186. }
  187. }
  188. if (!found) {
  189. WARN_PRINT("Can't find layer: " + String(check_names[i]));
  190. return 0;
  191. }
  192. }
  193. return 1;
  194. }
  195. Error VulkanContext::_get_preferred_validation_layers(uint32_t *count, const char *const **names) {
  196. static const LocalVector<LocalVector<const char *>> instance_validation_layers_alt{
  197. // Preferred set of validation layers
  198. { "VK_LAYER_KHRONOS_validation" },
  199. // Alternative (deprecated, removed in SDK 1.1.126.0) set of validation layers
  200. { "VK_LAYER_LUNARG_standard_validation" },
  201. // Alternative (deprecated, removed in SDK 1.1.121.1) set of validation layers
  202. { "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation", "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation", "VK_LAYER_GOOGLE_unique_objects" }
  203. };
  204. // Clear out-arguments
  205. *count = 0;
  206. if (names != nullptr) {
  207. *names = nullptr;
  208. }
  209. VkResult err;
  210. uint32_t instance_layer_count;
  211. err = vkEnumerateInstanceLayerProperties(&instance_layer_count, nullptr);
  212. if (err) {
  213. ERR_FAIL_V(ERR_CANT_CREATE);
  214. }
  215. if (instance_layer_count < 1) {
  216. return OK;
  217. }
  218. VkLayerProperties *instance_layers = (VkLayerProperties *)malloc(sizeof(VkLayerProperties) * instance_layer_count);
  219. err = vkEnumerateInstanceLayerProperties(&instance_layer_count, instance_layers);
  220. if (err) {
  221. free(instance_layers);
  222. ERR_FAIL_V(ERR_CANT_CREATE);
  223. }
  224. for (uint32_t i = 0; i < instance_validation_layers_alt.size(); i++) {
  225. if (_check_layers(instance_validation_layers_alt[i].size(), instance_validation_layers_alt[i].ptr(), instance_layer_count, instance_layers)) {
  226. *count = instance_validation_layers_alt[i].size();
  227. if (names != nullptr) {
  228. *names = instance_validation_layers_alt[i].ptr();
  229. }
  230. break;
  231. }
  232. }
  233. free(instance_layers);
  234. return OK;
  235. }
  236. typedef VkResult(VKAPI_PTR *_vkEnumerateInstanceVersion)(uint32_t *);
  237. Error VulkanContext::_obtain_vulkan_version() {
  238. // https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkApplicationInfo.html#_description
  239. // For Vulkan 1.0, vkEnumerateInstanceVersion is not available, not even in the loader we compile against on Android.
  240. _vkEnumerateInstanceVersion func = (_vkEnumerateInstanceVersion)vkGetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion");
  241. if (func != nullptr) {
  242. uint32_t api_version;
  243. VkResult res = func(&api_version);
  244. if (res == VK_SUCCESS) {
  245. vulkan_major = VK_VERSION_MAJOR(api_version);
  246. vulkan_minor = VK_VERSION_MINOR(api_version);
  247. vulkan_patch = VK_VERSION_PATCH(api_version);
  248. } else {
  249. // according to the documentation this shouldn't fail with anything except a memory allocation error
  250. // in which case we're in deep trouble anyway
  251. ERR_FAIL_V(ERR_CANT_CREATE);
  252. }
  253. } else {
  254. print_line("vkEnumerateInstanceVersion not available, assuming Vulkan 1.0.");
  255. }
  256. // we don't go above 1.2
  257. if ((vulkan_major > 1) || (vulkan_major == 1 && vulkan_minor > 2)) {
  258. vulkan_major = 1;
  259. vulkan_minor = 2;
  260. vulkan_patch = 0;
  261. }
  262. return OK;
  263. }
  264. Error VulkanContext::_initialize_extensions() {
  265. uint32_t instance_extension_count = 0;
  266. enabled_extension_count = 0;
  267. enabled_debug_utils = false;
  268. enabled_debug_report = false;
  269. /* Look for instance extensions */
  270. VkBool32 surfaceExtFound = 0;
  271. VkBool32 platformSurfaceExtFound = 0;
  272. memset(extension_names, 0, sizeof(extension_names));
  273. VkResult err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, nullptr);
  274. ERR_FAIL_COND_V(err != VK_SUCCESS && err != VK_INCOMPLETE, ERR_CANT_CREATE);
  275. if (instance_extension_count > 0) {
  276. VkExtensionProperties *instance_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * instance_extension_count);
  277. err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions);
  278. if (err != VK_SUCCESS && err != VK_INCOMPLETE) {
  279. free(instance_extensions);
  280. ERR_FAIL_V(ERR_CANT_CREATE);
  281. }
  282. for (uint32_t i = 0; i < instance_extension_count; i++) {
  283. if (!strcmp(VK_KHR_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  284. surfaceExtFound = 1;
  285. extension_names[enabled_extension_count++] = VK_KHR_SURFACE_EXTENSION_NAME;
  286. }
  287. if (!strcmp(_get_platform_surface_extension(), instance_extensions[i].extensionName)) {
  288. platformSurfaceExtFound = 1;
  289. extension_names[enabled_extension_count++] = _get_platform_surface_extension();
  290. }
  291. if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  292. if (_use_validation_layers()) {
  293. extension_names[enabled_extension_count++] = VK_EXT_DEBUG_REPORT_EXTENSION_NAME;
  294. enabled_debug_report = true;
  295. }
  296. }
  297. if (!strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  298. extension_names[enabled_extension_count++] = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
  299. enabled_debug_utils = true;
  300. }
  301. if (!strcmp(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  302. extension_names[enabled_extension_count++] = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME;
  303. }
  304. if (enabled_extension_count >= MAX_EXTENSIONS) {
  305. free(instance_extensions);
  306. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  307. }
  308. }
  309. free(instance_extensions);
  310. }
  311. ERR_FAIL_COND_V_MSG(!surfaceExtFound, ERR_CANT_CREATE, "No surface extension found, is a driver installed?");
  312. ERR_FAIL_COND_V_MSG(!platformSurfaceExtFound, ERR_CANT_CREATE, "No platform surface extension found, is a driver installed?");
  313. return OK;
  314. }
  315. uint32_t VulkanContext::SubgroupCapabilities::supported_stages_flags_rd() const {
  316. uint32_t flags = 0;
  317. if (supportedStages & VK_SHADER_STAGE_VERTEX_BIT) {
  318. flags += RenderingDevice::ShaderStage::SHADER_STAGE_VERTEX_BIT;
  319. }
  320. if (supportedStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
  321. flags += RenderingDevice::ShaderStage::SHADER_STAGE_TESSELATION_CONTROL_BIT;
  322. }
  323. if (supportedStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
  324. flags += RenderingDevice::ShaderStage::SHADER_STAGE_TESSELATION_EVALUATION_BIT;
  325. }
  326. // if (supportedStages & VK_SHADER_STAGE_GEOMETRY_BIT) {
  327. // flags += RenderingDevice::ShaderStage::SHADER_STAGE_GEOMETRY_BIT;
  328. // }
  329. if (supportedStages & VK_SHADER_STAGE_FRAGMENT_BIT) {
  330. flags += RenderingDevice::ShaderStage::SHADER_STAGE_FRAGMENT_BIT;
  331. }
  332. if (supportedStages & VK_SHADER_STAGE_COMPUTE_BIT) {
  333. flags += RenderingDevice::ShaderStage::SHADER_STAGE_COMPUTE_BIT;
  334. }
  335. return flags;
  336. }
  337. String VulkanContext::SubgroupCapabilities::supported_stages_desc() const {
  338. String res;
  339. if (supportedStages & VK_SHADER_STAGE_VERTEX_BIT) {
  340. res += ", STAGE_VERTEX";
  341. }
  342. if (supportedStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
  343. res += ", STAGE_TESSELLATION_CONTROL";
  344. }
  345. if (supportedStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
  346. res += ", STAGE_TESSELLATION_EVALUATION";
  347. }
  348. if (supportedStages & VK_SHADER_STAGE_GEOMETRY_BIT) {
  349. res += ", STAGE_GEOMETRY";
  350. }
  351. if (supportedStages & VK_SHADER_STAGE_FRAGMENT_BIT) {
  352. res += ", STAGE_FRAGMENT";
  353. }
  354. if (supportedStages & VK_SHADER_STAGE_COMPUTE_BIT) {
  355. res += ", STAGE_COMPUTE";
  356. }
  357. /* these are not defined on Android GRMBL */
  358. if (supportedStages & 0x00000100 /* VK_SHADER_STAGE_RAYGEN_BIT_KHR */) {
  359. res += ", STAGE_RAYGEN_KHR";
  360. }
  361. if (supportedStages & 0x00000200 /* VK_SHADER_STAGE_ANY_HIT_BIT_KHR */) {
  362. res += ", STAGE_ANY_HIT_KHR";
  363. }
  364. if (supportedStages & 0x00000400 /* VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR */) {
  365. res += ", STAGE_CLOSEST_HIT_KHR";
  366. }
  367. if (supportedStages & 0x00000800 /* VK_SHADER_STAGE_MISS_BIT_KHR */) {
  368. res += ", STAGE_MISS_KHR";
  369. }
  370. if (supportedStages & 0x00001000 /* VK_SHADER_STAGE_INTERSECTION_BIT_KHR */) {
  371. res += ", STAGE_INTERSECTION_KHR";
  372. }
  373. if (supportedStages & 0x00002000 /* VK_SHADER_STAGE_CALLABLE_BIT_KHR */) {
  374. res += ", STAGE_CALLABLE_KHR";
  375. }
  376. if (supportedStages & 0x00000040 /* VK_SHADER_STAGE_TASK_BIT_NV */) {
  377. res += ", STAGE_TASK_NV";
  378. }
  379. if (supportedStages & 0x00000080 /* VK_SHADER_STAGE_MESH_BIT_NV */) {
  380. res += ", STAGE_MESH_NV";
  381. }
  382. return res.substr(2); // remove first ", "
  383. }
  384. uint32_t VulkanContext::SubgroupCapabilities::supported_operations_flags_rd() const {
  385. uint32_t flags = 0;
  386. if (supportedOperations & VK_SUBGROUP_FEATURE_BASIC_BIT) {
  387. flags += RenderingDevice::SubgroupOperations::SUBGROUP_BASIC_BIT;
  388. }
  389. if (supportedOperations & VK_SUBGROUP_FEATURE_VOTE_BIT) {
  390. flags += RenderingDevice::SubgroupOperations::SUBGROUP_VOTE_BIT;
  391. }
  392. if (supportedOperations & VK_SUBGROUP_FEATURE_ARITHMETIC_BIT) {
  393. flags += RenderingDevice::SubgroupOperations::SUBGROUP_ARITHMETIC_BIT;
  394. }
  395. if (supportedOperations & VK_SUBGROUP_FEATURE_BALLOT_BIT) {
  396. flags += RenderingDevice::SubgroupOperations::SUBGROUP_BALLOT_BIT;
  397. }
  398. if (supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_BIT) {
  399. flags += RenderingDevice::SubgroupOperations::SUBGROUP_SHUFFLE_BIT;
  400. }
  401. if (supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT) {
  402. flags += RenderingDevice::SubgroupOperations::SUBGROUP_SHUFFLE_RELATIVE_BIT;
  403. }
  404. if (supportedOperations & VK_SUBGROUP_FEATURE_CLUSTERED_BIT) {
  405. flags += RenderingDevice::SubgroupOperations::SUBGROUP_CLUSTERED_BIT;
  406. }
  407. if (supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT) {
  408. flags += RenderingDevice::SubgroupOperations::SUBGROUP_QUAD_BIT;
  409. }
  410. return flags;
  411. }
  412. String VulkanContext::SubgroupCapabilities::supported_operations_desc() const {
  413. String res;
  414. if (supportedOperations & VK_SUBGROUP_FEATURE_BASIC_BIT) {
  415. res += ", FEATURE_BASIC";
  416. }
  417. if (supportedOperations & VK_SUBGROUP_FEATURE_VOTE_BIT) {
  418. res += ", FEATURE_VOTE";
  419. }
  420. if (supportedOperations & VK_SUBGROUP_FEATURE_ARITHMETIC_BIT) {
  421. res += ", FEATURE_ARITHMETIC";
  422. }
  423. if (supportedOperations & VK_SUBGROUP_FEATURE_BALLOT_BIT) {
  424. res += ", FEATURE_BALLOT";
  425. }
  426. if (supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_BIT) {
  427. res += ", FEATURE_SHUFFLE";
  428. }
  429. if (supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT) {
  430. res += ", FEATURE_SHUFFLE_RELATIVE";
  431. }
  432. if (supportedOperations & VK_SUBGROUP_FEATURE_CLUSTERED_BIT) {
  433. res += ", FEATURE_CLUSTERED";
  434. }
  435. if (supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT) {
  436. res += ", FEATURE_QUAD";
  437. }
  438. if (supportedOperations & VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV) {
  439. res += ", FEATURE_PARTITIONED_NV";
  440. }
  441. return res.substr(2); // remove first ", "
  442. }
  443. Error VulkanContext::_check_capabilities() {
  444. // https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VK_KHR_multiview.html
  445. // https://www.khronos.org/blog/vulkan-subgroup-tutorial
  446. // For Vulkan 1.0, vkGetPhysicalDeviceProperties2 is not available, not even in the loader we compile against on Android,
  447. // so we check whether the functions are accessible by getting their function pointers and skip the checks if not
  448. // (note that the desktop loader does a better job here, but the Android loader doesn't).
  449. // assume not supported until proven otherwise
  450. multiview_capabilities.is_supported = false;
  451. multiview_capabilities.geometry_shader_is_supported = false;
  452. multiview_capabilities.tessellation_shader_is_supported = false;
  453. multiview_capabilities.max_view_count = 0;
  454. multiview_capabilities.max_instance_count = 0;
  455. subgroup_capabilities.size = 0;
  456. subgroup_capabilities.supportedStages = 0;
  457. subgroup_capabilities.supportedOperations = 0;
  458. subgroup_capabilities.quadOperationsInAllStages = false;
  459. // check for extended features
  460. PFN_vkGetPhysicalDeviceFeatures2 device_features_func = (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceFeatures2");
  461. if (device_features_func == nullptr) {
  462. // In Vulkan 1.0 might be accessible under its original extension name
  463. device_features_func = (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceFeatures2KHR");
  464. }
  465. if (device_features_func != nullptr) {
  466. // check our extended features
  467. VkPhysicalDeviceMultiviewFeatures multiview_features;
  468. multiview_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
  469. multiview_features.pNext = nullptr;
  470. VkPhysicalDeviceFeatures2 device_features;
  471. device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
  472. device_features.pNext = &multiview_features;
  473. device_features_func(gpu, &device_features);
  474. multiview_capabilities.is_supported = multiview_features.multiview;
  475. multiview_capabilities.geometry_shader_is_supported = multiview_features.multiviewGeometryShader;
  476. multiview_capabilities.tessellation_shader_is_supported = multiview_features.multiviewTessellationShader;
  477. VkPhysicalDeviceShaderFloat16Int8FeaturesKHR shader_features;
  478. shader_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR;
  479. shader_features.pNext = nullptr;
  480. device_features.pNext = &shader_features;
  481. device_features_func(gpu, &device_features);
  482. shader_capabilities.shader_float16_is_supported = shader_features.shaderFloat16;
  483. VkPhysicalDevice16BitStorageFeaturesKHR storage_feature;
  484. storage_feature.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR;
  485. storage_feature.pNext = nullptr;
  486. device_features.pNext = &storage_feature;
  487. device_features_func(gpu, &device_features);
  488. storage_buffer_capabilities.storage_buffer_16_bit_access_is_supported = storage_feature.storageBuffer16BitAccess;
  489. }
  490. // check extended properties
  491. PFN_vkGetPhysicalDeviceProperties2 device_properties_func = (PFN_vkGetPhysicalDeviceProperties2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceProperties2");
  492. if (device_properties_func == nullptr) {
  493. // In Vulkan 1.0 might be accessible under its original extension name
  494. device_properties_func = (PFN_vkGetPhysicalDeviceProperties2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceProperties2KHR");
  495. }
  496. if (device_properties_func != nullptr) {
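// Chain the query structs through pNext so a single vkGetPhysicalDeviceProperties2 call fills the subgroup (and, when supported, multiview) properties.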
  497. VkPhysicalDeviceMultiviewProperties multiviewProperties;
  498. VkPhysicalDeviceSubgroupProperties subgroupProperties;
  499. VkPhysicalDeviceProperties2 physicalDeviceProperties;
  500. subgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
  501. subgroupProperties.pNext = nullptr;
  502. physicalDeviceProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  503. if (multiview_capabilities.is_supported) {
  504. multiviewProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;
  505. multiviewProperties.pNext = &subgroupProperties;
  506. physicalDeviceProperties.pNext = &multiviewProperties;
  507. } else {
  508. physicalDeviceProperties.pNext = &subgroupProperties;
  509. }
  510. device_properties_func(gpu, &physicalDeviceProperties);
  511. subgroup_capabilities.size = subgroupProperties.subgroupSize;
  512. subgroup_capabilities.supportedStages = subgroupProperties.supportedStages;
  513. subgroup_capabilities.supportedOperations = subgroupProperties.supportedOperations;
  514. // Note: quadOperationsInAllStages will be true if:
  515. // - supportedStages has VK_SHADER_STAGE_ALL_GRAPHICS + VK_SHADER_STAGE_COMPUTE_BIT
  516. // - supportedOperations has VK_SUBGROUP_FEATURE_QUAD_BIT
  517. subgroup_capabilities.quadOperationsInAllStages = subgroupProperties.quadOperationsInAllStages;
  518. if (multiview_capabilities.is_supported) {
  519. multiview_capabilities.max_view_count = multiviewProperties.maxMultiviewViewCount;
  520. multiview_capabilities.max_instance_count = multiviewProperties.maxMultiviewInstanceIndex;
  521. print_verbose("- Vulkan multiview supported:");
  522. print_verbose(" max view count: " + itos(multiview_capabilities.max_view_count));
  523. print_verbose(" max instances: " + itos(multiview_capabilities.max_instance_count));
  524. } else {
  525. print_verbose("- Vulkan multiview not supported");
  526. }
  527. print_verbose("- Vulkan subgroup:");
  528. print_verbose(" size: " + itos(subgroup_capabilities.size));
  529. print_verbose(" stages: " + subgroup_capabilities.supported_stages_desc());
  530. print_verbose(" supported ops: " + subgroup_capabilities.supported_operations_desc());
  531. if (subgroup_capabilities.quadOperationsInAllStages) {
  532. print_verbose(" quad operations in all stages");
  533. }
  534. } else {
  535. print_verbose("- Couldn't call vkGetPhysicalDeviceProperties2");
  536. }
  537. return OK;
  538. }
  539. Error VulkanContext::_create_instance() {
  540. /* obtain version */
  541. _obtain_vulkan_version();
  542. /* initialise extensions */
  543. {
  544. Error err = _initialize_extensions();
  545. if (err != OK) {
  546. return err;
  547. }
  548. }
  549. CharString cs = ProjectSettings::get_singleton()->get("application/config/name").operator String().utf8();
  550. const VkApplicationInfo app = {
  551. /*sType*/ VK_STRUCTURE_TYPE_APPLICATION_INFO,
  552. /*pNext*/ nullptr,
  553. /*pApplicationName*/ cs.get_data(),
  554. /*applicationVersion*/ 0,
  555. /*pEngineName*/ VERSION_NAME,
  556. /*engineVersion*/ VK_MAKE_VERSION(VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH),
  557. /*apiVersion*/ VK_MAKE_VERSION(vulkan_major, vulkan_minor, 0)
  558. };
  559. VkInstanceCreateInfo inst_info{};
  560. inst_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
  561. inst_info.pApplicationInfo = &app;
  562. inst_info.enabledExtensionCount = enabled_extension_count;
  563. inst_info.ppEnabledExtensionNames = (const char *const *)extension_names;
  564. if (_use_validation_layers()) {
  565. _get_preferred_validation_layers(&inst_info.enabledLayerCount, &inst_info.ppEnabledLayerNames);
  566. }
  567. /*
  568. * This is info for a temp callback to use during CreateInstance.
  569. * After the instance is created, we use the instance-based
  570. * function to register the final callback.
  571. */
  572. VkDebugUtilsMessengerCreateInfoEXT dbg_messenger_create_info;
  573. VkDebugReportCallbackCreateInfoEXT dbg_report_callback_create_info{};
  574. if (enabled_debug_utils) {
  575. // VK_EXT_debug_utils style
  576. dbg_messenger_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
  577. dbg_messenger_create_info.pNext = nullptr;
  578. dbg_messenger_create_info.flags = 0;
  579. dbg_messenger_create_info.messageSeverity =
  580. VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
  581. dbg_messenger_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
  582. VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
  583. VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
  584. dbg_messenger_create_info.pfnUserCallback = _debug_messenger_callback;
  585. dbg_messenger_create_info.pUserData = this;
  586. inst_info.pNext = &dbg_messenger_create_info;
  587. } else if (enabled_debug_report) {
  588. dbg_report_callback_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
  589. dbg_report_callback_create_info.flags = VK_DEBUG_REPORT_INFORMATION_BIT_EXT |
  590. VK_DEBUG_REPORT_WARNING_BIT_EXT |
  591. VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
  592. VK_DEBUG_REPORT_ERROR_BIT_EXT |
  593. VK_DEBUG_REPORT_DEBUG_BIT_EXT;
  594. dbg_report_callback_create_info.pfnCallback = _debug_report_callback;
  595. dbg_report_callback_create_info.pUserData = this;
  596. inst_info.pNext = &dbg_report_callback_create_info;
  597. }
  598. VkResult err = vkCreateInstance(&inst_info, nullptr, &inst);
  599. ERR_FAIL_COND_V_MSG(err == VK_ERROR_INCOMPATIBLE_DRIVER, ERR_CANT_CREATE,
  600. "Cannot find a compatible Vulkan installable client driver (ICD).\n\n"
  601. "vkCreateInstance Failure");
  602. ERR_FAIL_COND_V_MSG(err == VK_ERROR_EXTENSION_NOT_PRESENT, ERR_CANT_CREATE,
  603. "Cannot find a specified extension library.\n"
  604. "Make sure your layers path is set appropriately.\n"
  605. "vkCreateInstance Failure");
  606. ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE,
  607. "vkCreateInstance failed.\n\n"
  608. "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
  609. "Please look at the Getting Started guide for additional information.\n"
  610. "vkCreateInstance Failure");
  611. inst_initialized = true;
  612. #ifdef USE_VOLK
  613. volkLoadInstance(inst);
  614. #endif
  615. if (enabled_debug_utils) {
  616. // Set up the VK_EXT_debug_utils function pointers (we use them for
  617. // debug labels and names).
  618. CreateDebugUtilsMessengerEXT =
  619. (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkCreateDebugUtilsMessengerEXT");
  620. DestroyDebugUtilsMessengerEXT =
  621. (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkDestroyDebugUtilsMessengerEXT");
  622. SubmitDebugUtilsMessageEXT =
  623. (PFN_vkSubmitDebugUtilsMessageEXT)vkGetInstanceProcAddr(inst, "vkSubmitDebugUtilsMessageEXT");
  624. CmdBeginDebugUtilsLabelEXT =
  625. (PFN_vkCmdBeginDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdBeginDebugUtilsLabelEXT");
  626. CmdEndDebugUtilsLabelEXT =
  627. (PFN_vkCmdEndDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdEndDebugUtilsLabelEXT");
  628. CmdInsertDebugUtilsLabelEXT =
  629. (PFN_vkCmdInsertDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdInsertDebugUtilsLabelEXT");
  630. SetDebugUtilsObjectNameEXT =
  631. (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(inst, "vkSetDebugUtilsObjectNameEXT");
  632. if (nullptr == CreateDebugUtilsMessengerEXT || nullptr == DestroyDebugUtilsMessengerEXT ||
  633. nullptr == SubmitDebugUtilsMessageEXT || nullptr == CmdBeginDebugUtilsLabelEXT ||
  634. nullptr == CmdEndDebugUtilsLabelEXT || nullptr == CmdInsertDebugUtilsLabelEXT ||
  635. nullptr == SetDebugUtilsObjectNameEXT) {
  636. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  637. "GetProcAddr: Failed to init VK_EXT_debug_utils\n"
  638. "GetProcAddr: Failure");
  639. }
  640. err = CreateDebugUtilsMessengerEXT(inst, &dbg_messenger_create_info, nullptr, &dbg_messenger);
  641. switch (err) {
  642. case VK_SUCCESS:
  643. break;
  644. case VK_ERROR_OUT_OF_HOST_MEMORY:
  645. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  646. "CreateDebugUtilsMessengerEXT: out of host memory\n"
  647. "CreateDebugUtilsMessengerEXT Failure");
  648. break;
  649. default:
  650. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  651. "CreateDebugUtilsMessengerEXT: unknown failure\n"
  652. "CreateDebugUtilsMessengerEXT Failure");
  653. ERR_FAIL_V(ERR_CANT_CREATE);
  654. break;
  655. }
  656. } else if (enabled_debug_report) {
  657. CreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(inst, "vkCreateDebugReportCallbackEXT");
  658. DebugReportMessageEXT = (PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(inst, "vkDebugReportMessageEXT");
  659. DestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(inst, "vkDestroyDebugReportCallbackEXT");
  660. if (nullptr == CreateDebugReportCallbackEXT || nullptr == DebugReportMessageEXT || nullptr == DestroyDebugReportCallbackEXT) {
  661. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  662. "GetProcAddr: Failed to init VK_EXT_debug_report\n"
  663. "GetProcAddr: Failure");
  664. }
  665. err = CreateDebugReportCallbackEXT(inst, &dbg_report_callback_create_info, nullptr, &dbg_debug_report);
  666. switch (err) {
  667. case VK_SUCCESS:
  668. break;
  669. case VK_ERROR_OUT_OF_HOST_MEMORY:
  670. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  671. "CreateDebugReportCallbackEXT: out of host memory\n"
  672. "CreateDebugReportCallbackEXT Failure");
  673. break;
  674. default:
  675. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  676. "CreateDebugReportCallbackEXT: unknown failure\n"
  677. "CreateDebugReportCallbackEXT Failure");
  678. ERR_FAIL_V(ERR_CANT_CREATE);
  679. break;
  680. }
  681. }
  682. return OK;
  683. }
  684. Error VulkanContext::_create_physical_device(VkSurfaceKHR p_surface) {
  685. /* Make initial call to query gpu_count, then second call for gpu info. */
  686. uint32_t gpu_count = 0;
  687. VkResult err = vkEnumeratePhysicalDevices(inst, &gpu_count, nullptr);
  688. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  689. ERR_FAIL_COND_V_MSG(gpu_count == 0, ERR_CANT_CREATE,
  690. "vkEnumeratePhysicalDevices reported zero accessible devices.\n\n"
  691. "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
  692. "vkEnumeratePhysicalDevices Failure");
  693. VkPhysicalDevice *physical_devices = (VkPhysicalDevice *)malloc(sizeof(VkPhysicalDevice) * gpu_count);
  694. err = vkEnumeratePhysicalDevices(inst, &gpu_count, physical_devices);
  695. if (err) {
  696. free(physical_devices);
  697. ERR_FAIL_V(ERR_CANT_CREATE);
  698. }
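// Known PCI vendor IDs, used to map a device's vendorID to a readable vendor name in the logs.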
  699. static const struct {
  700. uint32_t id;
  701. const char *name;
  702. } vendor_names[] = {
  703. { 0x1002, "AMD" },
  704. { 0x1010, "ImgTec" },
  705. { 0x106B, "Apple" },
  706. { 0x10DE, "NVIDIA" },
  707. { 0x13B5, "ARM" },
  708. { 0x5143, "Qualcomm" },
  709. { 0x8086, "Intel" },
  710. { 0, nullptr },
  711. };
  712. // TODO: At least on Linux laptops, integrated GPUs fail with Vulkan in many instances.
  713. // The device should really be a preference, but for now choosing a discrete GPU over the
  714. // integrated one is better than the default.
  715. int32_t device_index = -1;
  716. int type_selected = -1;
  717. print_verbose("Vulkan devices:");
  718. for (uint32_t i = 0; i < gpu_count; ++i) {
  719. VkPhysicalDeviceProperties props;
  720. vkGetPhysicalDeviceProperties(physical_devices[i], &props);
  721. bool present_supported = false;
  722. uint32_t device_queue_family_count = 0;
  723. vkGetPhysicalDeviceQueueFamilyProperties(physical_devices[i], &device_queue_family_count, nullptr);
  724. VkQueueFamilyProperties *device_queue_props = (VkQueueFamilyProperties *)malloc(device_queue_family_count * sizeof(VkQueueFamilyProperties));
  725. vkGetPhysicalDeviceQueueFamilyProperties(physical_devices[i], &device_queue_family_count, device_queue_props);
  726. for (uint32_t j = 0; j < device_queue_family_count; j++) {
  727. VkBool32 supports;
  728. vkGetPhysicalDeviceSurfaceSupportKHR(physical_devices[i], j, p_surface, &supports);
  729. if (supports && ((device_queue_props[j].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)) {
  730. present_supported = true;
  731. } else {
  732. continue;
  733. }
  734. }
  735. String name = props.deviceName;
  736. String vendor = "Unknown";
  737. String dev_type;
  738. switch (props.deviceType) {
  739. case VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU: {
  740. dev_type = "Discrete";
  741. } break;
  742. case VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU: {
  743. dev_type = "Integrated";
  744. } break;
  745. case VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU: {
  746. dev_type = "Virtual";
  747. } break;
  748. case VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_CPU: {
  749. dev_type = "CPU";
  750. } break;
  751. default: {
  752. dev_type = "Other";
  753. } break;
  754. }
  755. uint32_t vendor_idx = 0;
  756. while (vendor_names[vendor_idx].name != nullptr) {
  757. if (props.vendorID == vendor_names[vendor_idx].id) {
  758. vendor = vendor_names[vendor_idx].name;
  759. break;
  760. }
  761. vendor_idx++;
  762. }
  763. free(device_queue_props);
  764. print_verbose(" #" + itos(i) + ": " + vendor + " " + name + " - " + (present_supported ? "Supported" : "Unsupported") + ", " + dev_type);
  765. if (present_supported) { // Select the first supported device of the preferred type: Discrete > Integrated > Virtual > CPU > Other.
  766. switch (props.deviceType) {
  767. case VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU: {
  768. if (type_selected < 4) {
  769. type_selected = 4;
  770. device_index = i;
  771. }
  772. } break;
  773. case VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU: {
  774. if (type_selected < 3) {
  775. type_selected = 3;
  776. device_index = i;
  777. }
  778. } break;
  779. case VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU: {
  780. if (type_selected < 2) {
  781. type_selected = 2;
  782. device_index = i;
  783. }
  784. } break;
  785. case VkPhysicalDeviceType::VK_PHYSICAL_DEVICE_TYPE_CPU: {
  786. if (type_selected < 1) {
  787. type_selected = 1;
  788. device_index = i;
  789. }
  790. } break;
  791. default: {
  792. if (type_selected < 0) {
  793. type_selected = 0;
  794. device_index = i;
  795. }
  796. } break;
  797. }
  798. }
  799. }
  800. int32_t user_device_index = Engine::get_singleton()->get_gpu_index(); // Force user selected GPU.
  801. if (user_device_index >= 0 && user_device_index < (int32_t)gpu_count) {
  802. device_index = user_device_index;
  803. }
  804. ERR_FAIL_COND_V_MSG(device_index == -1, ERR_CANT_CREATE, "None of the Vulkan devices supports both graphics and present queues.");
  805. gpu = physical_devices[device_index];
  806. free(physical_devices);
  807. /* Look for device extensions */
  808. uint32_t device_extension_count = 0;
  809. VkBool32 swapchainExtFound = 0;
  810. enabled_extension_count = 0;
  811. memset(extension_names, 0, sizeof(extension_names));
  812. /* Get identifier properties */
  813. vkGetPhysicalDeviceProperties(gpu, &gpu_props);
  814. device_name = gpu_props.deviceName;
  815. device_type = gpu_props.deviceType;
  816. pipeline_cache_id = String::hex_encode_buffer(gpu_props.pipelineCacheUUID, VK_UUID_SIZE);
  817. pipeline_cache_id += "-driver-" + itos(gpu_props.driverVersion);
  818. {
  819. device_vendor = "Unknown";
  820. uint32_t vendor_idx = 0;
  821. while (vendor_names[vendor_idx].name != nullptr) {
  822. if (gpu_props.vendorID == vendor_names[vendor_idx].id) {
  823. device_vendor = vendor_names[vendor_idx].name;
  824. break;
  825. }
  826. vendor_idx++;
  827. }
  828. }
  829. print_line(
  830. "Vulkan API " + itos(vulkan_major) + "." + itos(vulkan_minor) + "." + itos(vulkan_patch) +
  831. " - " + "Using Vulkan Device #" + itos(device_index) + ": " + device_vendor + " - " + device_name);
  832. device_api_version = gpu_props.apiVersion;
  833. err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, nullptr);
  834. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  835. if (device_extension_count > 0) {
  836. VkExtensionProperties *device_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * device_extension_count);
  837. err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, device_extensions);
  838. if (err) {
  839. free(device_extensions);
  840. ERR_FAIL_V(ERR_CANT_CREATE);
  841. }
  842. for (uint32_t i = 0; i < device_extension_count; i++) {
  843. if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME, device_extensions[i].extensionName)) {
  844. swapchainExtFound = 1;
  845. extension_names[enabled_extension_count++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
  846. }
  847. if (!strcmp(VK_KHR_MULTIVIEW_EXTENSION_NAME, device_extensions[i].extensionName)) {
  848. // if multiview is supported, enable it
  849. extension_names[enabled_extension_count++] = VK_KHR_MULTIVIEW_EXTENSION_NAME;
  850. }
  851. if (enabled_extension_count >= MAX_EXTENSIONS) {
  852. free(device_extensions);
  853. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  854. }
  855. }
  856. if (VK_KHR_incremental_present_enabled) {
  857. // Even though the user "enabled" the extension via the command
  858. // line, we must make sure that it's enumerated for use with the
  859. // device. Therefore, disable it here, and re-enable it again if
  860. // enumerated.
  861. VK_KHR_incremental_present_enabled = false;
  862. for (uint32_t i = 0; i < device_extension_count; i++) {
  863. if (!strcmp(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, device_extensions[i].extensionName)) {
  864. extension_names[enabled_extension_count++] = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME;
  865. VK_KHR_incremental_present_enabled = true;
  866. }
  867. if (enabled_extension_count >= MAX_EXTENSIONS) {
  868. free(device_extensions);
  869. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  870. }
  871. }
  872. }
  873. if (VK_GOOGLE_display_timing_enabled) {
  874. // Even though the user "enabled" the extension via the command
  875. // line, we must make sure that it's enumerated for use with the
  876. // device. Therefore, disable it here, and re-enable it again if
  877. // enumerated.
  878. VK_GOOGLE_display_timing_enabled = false;
  879. for (uint32_t i = 0; i < device_extension_count; i++) {
  880. if (!strcmp(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, device_extensions[i].extensionName)) {
  881. extension_names[enabled_extension_count++] = VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME;
  882. VK_GOOGLE_display_timing_enabled = true;
  883. }
  884. if (enabled_extension_count >= MAX_EXTENSIONS) {
  885. free(device_extensions);
  886. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  887. }
  888. }
  889. }
  890. free(device_extensions);
  891. }
  892. ERR_FAIL_COND_V_MSG(!swapchainExtFound, ERR_CANT_CREATE,
  893. "vkEnumerateDeviceExtensionProperties failed to find the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
  894. " extension.\n\nDo you have a compatible Vulkan installable client driver (ICD) installed?\n"
  895. "vkCreateInstance Failure");
  896. /* Call with nullptr data to get count */
  897. vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_family_count, nullptr);
  898. ERR_FAIL_COND_V(queue_family_count == 0, ERR_CANT_CREATE);
  899. queue_props = (VkQueueFamilyProperties *)malloc(queue_family_count * sizeof(VkQueueFamilyProperties));
  900. vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_family_count, queue_props);
  901. // Query fine-grained feature support for this device.
  902. // If app has specific feature requirements it should check supported
  903. // features based on this query
  904. vkGetPhysicalDeviceFeatures(gpu, &physical_device_features);
  905. physical_device_features.robustBufferAccess = false; //turn off robust buffer access, which can hamper performance on some hardware
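// Resolve instance-level surface/swapchain query entry points via vkGetInstanceProcAddr; a missing entry point aborts with ERR_CANT_CREATE.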
  906. #define GET_INSTANCE_PROC_ADDR(inst, entrypoint) \
  907. { \
  908. fp##entrypoint = (PFN_vk##entrypoint)vkGetInstanceProcAddr(inst, "vk" #entrypoint); \
  909. ERR_FAIL_COND_V_MSG(fp##entrypoint == nullptr, ERR_CANT_CREATE, \
  910. "vkGetInstanceProcAddr failed to find vk" #entrypoint); \
  911. }
  912. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceSupportKHR);
  913. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceCapabilitiesKHR);
  914. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceFormatsKHR);
  915. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfacePresentModesKHR);
  916. GET_INSTANCE_PROC_ADDR(inst, GetSwapchainImagesKHR);
  917. // Get info about what our Vulkan driver is capable of.
  918. {
  919. Error res = _check_capabilities();
  920. if (res != OK) {
  921. return res;
  922. }
  923. }
  924. device_initialized = true;
  925. return OK;
  926. }
  927. Error VulkanContext::_create_device() {
  928. VkResult err;
  929. float queue_priorities[1] = { 0.0 };
  930. VkDeviceQueueCreateInfo queues[2];
  931. queues[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  932. queues[0].pNext = nullptr;
  933. queues[0].queueFamilyIndex = graphics_queue_family_index;
  934. queues[0].queueCount = 1;
  935. queues[0].pQueuePriorities = queue_priorities;
  936. queues[0].flags = 0;
  937. VkDeviceCreateInfo sdevice = {
  938. /*sType*/ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
  939. /*pNext*/ nullptr,
  940. /*flags*/ 0,
  941. /*queueCreateInfoCount*/ 1,
  942. /*pQueueCreateInfos*/ queues,
  943. /*enabledLayerCount*/ 0,
  944. /*ppEnabledLayerNames*/ nullptr,
  945. /*enabledExtensionCount*/ enabled_extension_count,
  946. /*ppEnabledExtensionNames*/ (const char *const *)extension_names,
  947. /*pEnabledFeatures*/ &physical_device_features, // If specific features are required, pass them in here
  948. };
  949. if (separate_present_queue) {
  950. queues[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  951. queues[1].pNext = nullptr;
  952. queues[1].queueFamilyIndex = present_queue_family_index;
  953. queues[1].queueCount = 1;
  954. queues[1].pQueuePriorities = queue_priorities;
  955. queues[1].flags = 0;
  956. sdevice.queueCreateInfoCount = 2;
  957. }
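// On Vulkan 1.2+ request features via VkPhysicalDeviceVulkan11Features; on Vulkan 1.1 fall back to the VkPhysicalDeviceMultiviewFeatures struct.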
	VkPhysicalDeviceVulkan11Features vulkan11features;
	VkPhysicalDeviceMultiviewFeatures multiview_features;
	if (vulkan_major > 1 || vulkan_minor >= 2) {
		vulkan11features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
		vulkan11features.pNext = nullptr;
		// !BAS! Need to figure out which of these we want enabled...
		vulkan11features.storageBuffer16BitAccess = 0;
		vulkan11features.uniformAndStorageBuffer16BitAccess = 0;
		vulkan11features.storagePushConstant16 = 0;
		vulkan11features.storageInputOutput16 = 0;
		vulkan11features.multiview = multiview_capabilities.is_supported;
		vulkan11features.multiviewGeometryShader = multiview_capabilities.geometry_shader_is_supported;
		vulkan11features.multiviewTessellationShader = multiview_capabilities.tessellation_shader_is_supported;
		vulkan11features.variablePointersStorageBuffer = 0;
		vulkan11features.variablePointers = 0;
		vulkan11features.protectedMemory = 0;
		vulkan11features.samplerYcbcrConversion = 0;
		vulkan11features.shaderDrawParameters = 0;
		sdevice.pNext = &vulkan11features;
	} else if (vulkan_major == 1 && vulkan_minor == 1) {
		multiview_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
		multiview_features.pNext = nullptr;
		multiview_features.multiview = multiview_capabilities.is_supported;
		multiview_features.multiviewGeometryShader = multiview_capabilities.geometry_shader_is_supported;
		multiview_features.multiviewTessellationShader = multiview_capabilities.tessellation_shader_is_supported;
		sdevice.pNext = &multiview_features;
	}

	err = vkCreateDevice(gpu, &sdevice, nullptr, &device);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

	return OK;
}

Error VulkanContext::_initialize_queues(VkSurfaceKHR p_surface) {
	// Iterate over each queue to learn whether it supports presenting:
	VkBool32 *supportsPresent = (VkBool32 *)malloc(queue_family_count * sizeof(VkBool32));
	for (uint32_t i = 0; i < queue_family_count; i++) {
		fpGetPhysicalDeviceSurfaceSupportKHR(gpu, i, p_surface, &supportsPresent[i]);
	}

	// Search the array of queue families for a graphics and a present queue,
	// trying to find one family that supports both.
	uint32_t graphicsQueueFamilyIndex = UINT32_MAX;
	uint32_t presentQueueFamilyIndex = UINT32_MAX;
	for (uint32_t i = 0; i < queue_family_count; i++) {
		if ((queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
			if (graphicsQueueFamilyIndex == UINT32_MAX) {
				graphicsQueueFamilyIndex = i;
			}
			if (supportsPresent[i] == VK_TRUE) {
				graphicsQueueFamilyIndex = i;
				presentQueueFamilyIndex = i;
				break;
			}
		}
	}

	if (presentQueueFamilyIndex == UINT32_MAX) {
		// If we didn't find a queue that supports both graphics and present,
		// then find a separate present queue.
		for (uint32_t i = 0; i < queue_family_count; ++i) {
			if (supportsPresent[i] == VK_TRUE) {
				presentQueueFamilyIndex = i;
				break;
			}
		}
	}

	free(supportsPresent);

	// Generate an error if we could not find both a graphics and a present queue.
	ERR_FAIL_COND_V_MSG(graphicsQueueFamilyIndex == UINT32_MAX || presentQueueFamilyIndex == UINT32_MAX, ERR_CANT_CREATE,
			"Could not find both graphics and present queues\n");

	graphics_queue_family_index = graphicsQueueFamilyIndex;
	present_queue_family_index = presentQueueFamilyIndex;
	separate_present_queue = (graphics_queue_family_index != present_queue_family_index);
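
	// When the graphics and present queues come from different families, swapchain
	// images have to be handed over between the two families before presenting; the
	// ownership-transfer command buffers for that are recorded in _update_swap_chain().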
	_create_device();

	static PFN_vkGetDeviceProcAddr g_gdpa = nullptr;
#define GET_DEVICE_PROC_ADDR(dev, entrypoint)                                                     \
	{                                                                                             \
		if (!g_gdpa)                                                                              \
			g_gdpa = (PFN_vkGetDeviceProcAddr)vkGetInstanceProcAddr(inst, "vkGetDeviceProcAddr"); \
		fp##entrypoint = (PFN_vk##entrypoint)g_gdpa(dev, "vk" #entrypoint);                       \
		ERR_FAIL_COND_V_MSG(fp##entrypoint == nullptr, ERR_CANT_CREATE,                           \
				"vkGetDeviceProcAddr failed to find vk" #entrypoint);                             \
	}
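
	// For example, GET_DEVICE_PROC_ADDR(device, CreateSwapchainKHR) resolves the
	// "vkCreateSwapchainKHR" entry point on this device and stores it in fpCreateSwapchainKHR.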
	GET_DEVICE_PROC_ADDR(device, CreateSwapchainKHR);
	GET_DEVICE_PROC_ADDR(device, DestroySwapchainKHR);
	GET_DEVICE_PROC_ADDR(device, GetSwapchainImagesKHR);
	GET_DEVICE_PROC_ADDR(device, AcquireNextImageKHR);
	GET_DEVICE_PROC_ADDR(device, QueuePresentKHR);
	if (VK_GOOGLE_display_timing_enabled) {
		GET_DEVICE_PROC_ADDR(device, GetRefreshCycleDurationGOOGLE);
		GET_DEVICE_PROC_ADDR(device, GetPastPresentationTimingGOOGLE);
	}

	vkGetDeviceQueue(device, graphics_queue_family_index, 0, &graphics_queue);

	if (!separate_present_queue) {
		present_queue = graphics_queue;
	} else {
		vkGetDeviceQueue(device, present_queue_family_index, 0, &present_queue);
	}

	// Get the list of VkFormats that are supported:
	uint32_t formatCount;
	VkResult err = fpGetPhysicalDeviceSurfaceFormatsKHR(gpu, p_surface, &formatCount, nullptr);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	VkSurfaceFormatKHR *surfFormats = (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
	err = fpGetPhysicalDeviceSurfaceFormatsKHR(gpu, p_surface, &formatCount, surfFormats);
	if (err) {
		free(surfFormats);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}
	// If the format list includes just one entry of VK_FORMAT_UNDEFINED,
	// the surface has no preferred format. Otherwise, at least one
	// supported format will be returned.
	if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED) {
		format = VK_FORMAT_B8G8R8A8_UNORM;
		color_space = surfFormats[0].colorSpace;
	} else {
		// These should be ordered with the ones we want to use on top and fallback modes further down.
		// We want a 32-bit RGBA unsigned normalized buffer or similar.
		const VkFormat allowed_formats[] = {
			VK_FORMAT_B8G8R8A8_UNORM,
			VK_FORMAT_R8G8B8A8_UNORM
		};
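		// B8G8R8A8_UNORM and R8G8B8A8_UNORM are, in practice, the most widely supported
		// swapchain formats, which is why they are the only candidates listed above.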
		uint32_t allowed_formats_count = sizeof(allowed_formats) / sizeof(VkFormat);

		if (formatCount < 1) {
			free(surfFormats);
			ERR_FAIL_V_MSG(ERR_CANT_CREATE, "formatCount less than 1");
		}

		// Find the first format that we support.
		format = VK_FORMAT_UNDEFINED;
		for (uint32_t af = 0; af < allowed_formats_count && format == VK_FORMAT_UNDEFINED; af++) {
			for (uint32_t sf = 0; sf < formatCount && format == VK_FORMAT_UNDEFINED; sf++) {
				if (surfFormats[sf].format == allowed_formats[af]) {
					format = surfFormats[sf].format;
					color_space = surfFormats[sf].colorSpace;
				}
			}
		}

		if (format == VK_FORMAT_UNDEFINED) {
			free(surfFormats);
			ERR_FAIL_V_MSG(ERR_CANT_CREATE, "No usable surface format found.");
		}
	}

	free(surfFormats);

	Error serr = _create_semaphores();
	if (serr) {
		return serr;
	}

	queues_initialized = true;
	return OK;
}

Error VulkanContext::_create_semaphores() {
	VkResult err;

	// Create semaphores to synchronize acquiring presentable buffers before
	// rendering and waiting for drawing to be complete before presenting.
	VkSemaphoreCreateInfo semaphoreCreateInfo = {
		/*sType*/ VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
		/*pNext*/ nullptr,
		/*flags*/ 0,
	};

	// Create fences that we can use to throttle if we get too far
	// ahead of the image presents.
	VkFenceCreateInfo fence_ci = {
		/*sType*/ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
		/*pNext*/ nullptr,
		/*flags*/ VK_FENCE_CREATE_SIGNALED_BIT
	};
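	// The fences are created pre-signaled so the very first vkWaitForFences()
	// in prepare_buffers() returns immediately instead of stalling forever.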
	for (uint32_t i = 0; i < FRAME_LAG; i++) {
		err = vkCreateFence(device, &fence_ci, nullptr, &fences[i]);
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

		err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &draw_complete_semaphores[i]);
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

		if (separate_present_queue) {
			err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &image_ownership_semaphores[i]);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		}
	}
	frame_index = 0;

	// Get memory information and properties.
	vkGetPhysicalDeviceMemoryProperties(gpu, &memory_properties);

	return OK;
}

bool VulkanContext::_use_validation_layers() {
	return Engine::get_singleton()->is_validation_layers_enabled();
}

Error VulkanContext::_window_create(DisplayServer::WindowID p_window_id, DisplayServer::VSyncMode p_vsync_mode, VkSurfaceKHR p_surface, int p_width, int p_height) {
	ERR_FAIL_COND_V(windows.has(p_window_id), ERR_INVALID_PARAMETER);

	if (!device_initialized) {
		Error err = _create_physical_device(p_surface);
		ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);
	}

	if (!queues_initialized) {
		// We use a single GPU, but we need a surface to initialize the
		// queues, so this process must be deferred until a surface
		// is created.
		Error err = _initialize_queues(p_surface);
		ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);
	}

	Window window;
	window.surface = p_surface;
	window.width = p_width;
	window.height = p_height;
	window.vsync_mode = p_vsync_mode;
	Error err = _update_swap_chain(&window);
	ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);

	VkSemaphoreCreateInfo semaphoreCreateInfo = {
		/*sType*/ VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
		/*pNext*/ nullptr,
		/*flags*/ 0,
	};

	for (uint32_t i = 0; i < FRAME_LAG; i++) {
		VkResult vkerr = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &window.image_acquired_semaphores[i]);
		ERR_FAIL_COND_V(vkerr, ERR_CANT_CREATE);
	}

	windows[p_window_id] = window;
	return OK;
}

void VulkanContext::window_resize(DisplayServer::WindowID p_window, int p_width, int p_height) {
	ERR_FAIL_COND(!windows.has(p_window));
	windows[p_window].width = p_width;
	windows[p_window].height = p_height;
	_update_swap_chain(&windows[p_window]);
}

int VulkanContext::window_get_width(DisplayServer::WindowID p_window) {
	ERR_FAIL_COND_V(!windows.has(p_window), -1);
	return windows[p_window].width;
}

int VulkanContext::window_get_height(DisplayServer::WindowID p_window) {
	ERR_FAIL_COND_V(!windows.has(p_window), -1);
	return windows[p_window].height;
}

VkRenderPass VulkanContext::window_get_render_pass(DisplayServer::WindowID p_window) {
	ERR_FAIL_COND_V(!windows.has(p_window), VK_NULL_HANDLE);
	Window *w = &windows[p_window];
	// The render pass is shared by all of this window's swapchain framebuffers.
	return w->render_pass;
}

VkFramebuffer VulkanContext::window_get_framebuffer(DisplayServer::WindowID p_window) {
	ERR_FAIL_COND_V(!windows.has(p_window), VK_NULL_HANDLE);
	ERR_FAIL_COND_V(!buffers_prepared, VK_NULL_HANDLE);
	Window *w = &windows[p_window];
	// Return the framebuffer for the swapchain image acquired this frame.
	return w->swapchain_image_resources[w->current_buffer].framebuffer;
}

void VulkanContext::window_destroy(DisplayServer::WindowID p_window_id) {
	ERR_FAIL_COND(!windows.has(p_window_id));
	_clean_up_swap_chain(&windows[p_window_id]);

	for (uint32_t i = 0; i < FRAME_LAG; i++) {
		vkDestroySemaphore(device, windows[p_window_id].image_acquired_semaphores[i], nullptr);
	}

	vkDestroySurfaceKHR(inst, windows[p_window_id].surface, nullptr);
	windows.erase(p_window_id);
}

Error VulkanContext::_clean_up_swap_chain(Window *window) {
	if (!window->swapchain) {
		return OK;
	}
	vkDeviceWaitIdle(device);

	// Destroying the swapchain also destroys the images it owns.
	fpDestroySwapchainKHR(device, window->swapchain, nullptr);
	window->swapchain = VK_NULL_HANDLE;
	vkDestroyRenderPass(device, window->render_pass, nullptr);
	if (window->swapchain_image_resources) {
		for (uint32_t i = 0; i < swapchainImageCount; i++) {
			vkDestroyImageView(device, window->swapchain_image_resources[i].view, nullptr);
			vkDestroyFramebuffer(device, window->swapchain_image_resources[i].framebuffer, nullptr);
		}
		free(window->swapchain_image_resources);
		window->swapchain_image_resources = nullptr;
	}
	if (separate_present_queue) {
		vkDestroyCommandPool(device, window->present_cmd_pool, nullptr);
	}
	return OK;
}

Error VulkanContext::_update_swap_chain(Window *window) {
	VkResult err;

	if (window->swapchain) {
		_clean_up_swap_chain(window);
	}

	// Check the surface capabilities and formats.
	VkSurfaceCapabilitiesKHR surfCapabilities;
	err = fpGetPhysicalDeviceSurfaceCapabilitiesKHR(gpu, window->surface, &surfCapabilities);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

	uint32_t presentModeCount;
	err = fpGetPhysicalDeviceSurfacePresentModesKHR(gpu, window->surface, &presentModeCount, nullptr);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	VkPresentModeKHR *presentModes = (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
	ERR_FAIL_COND_V(!presentModes, ERR_CANT_CREATE);
	err = fpGetPhysicalDeviceSurfacePresentModesKHR(gpu, window->surface, &presentModeCount, presentModes);
	if (err) {
		free(presentModes);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}

	VkExtent2D swapchainExtent;
	// width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
	if (surfCapabilities.currentExtent.width == 0xFFFFFFFF) {
		// If the surface size is undefined, the size is set to the size
		// of the images requested, which must fit within the minimum and
		// maximum values.
		swapchainExtent.width = window->width;
		swapchainExtent.height = window->height;

		if (swapchainExtent.width < surfCapabilities.minImageExtent.width) {
			swapchainExtent.width = surfCapabilities.minImageExtent.width;
		} else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width) {
			swapchainExtent.width = surfCapabilities.maxImageExtent.width;
		}

		if (swapchainExtent.height < surfCapabilities.minImageExtent.height) {
			swapchainExtent.height = surfCapabilities.minImageExtent.height;
		} else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height) {
			swapchainExtent.height = surfCapabilities.maxImageExtent.height;
		}
	} else {
		// If the surface size is defined, the swap chain size must match.
		swapchainExtent = surfCapabilities.currentExtent;
		window->width = surfCapabilities.currentExtent.width;
		window->height = surfCapabilities.currentExtent.height;
	}

	if (window->width == 0 || window->height == 0) {
		free(presentModes);
		// Likely the window is minimized; don't create a swapchain.
		return OK;
	}

	// The FIFO present mode is guaranteed by the spec to be supported
	// and to have no tearing. It's a great default present mode to use.

	// There are times when you may wish to use another present mode. The
	// following code shows how to select them, and the comments provide some
	// reasons you may wish to use them.
	//
	// It should be noted that Vulkan 1.0 doesn't provide a method for
	// synchronizing rendering with the presentation engine's display. There
	// is a method provided for throttling rendering with the display, but
	// there are some presentation engines for which this method will not work.
	// If an application doesn't throttle its rendering, and if it renders much
	// faster than the refresh rate of the display, this can waste power on
	// mobile devices. That is because power is being spent rendering images
	// that may never be seen.

	// VK_PRESENT_MODE_IMMEDIATE_KHR is for applications that don't care about
	// tearing, or have some way of synchronizing their rendering with the
	// display.
	// VK_PRESENT_MODE_MAILBOX_KHR may be useful for applications that
	// generally render a new presentable image every refresh cycle, but are
	// occasionally early. In this case, the application wants the new image
	// to be displayed instead of the previously-queued-for-presentation image
	// that has not yet been displayed.
	// VK_PRESENT_MODE_FIFO_RELAXED_KHR is for applications that generally
	// render a new presentable image every refresh cycle, but are occasionally
	// late. In this case (perhaps because of stuttering/latency concerns),
	// the application wants the late image to be immediately displayed, even
	// though that may mean some tearing.

	VkPresentModeKHR requested_present_mode = VkPresentModeKHR::VK_PRESENT_MODE_FIFO_KHR;
	switch (window->vsync_mode) {
		case DisplayServer::VSYNC_MAILBOX:
			requested_present_mode = VkPresentModeKHR::VK_PRESENT_MODE_MAILBOX_KHR;
			break;
		case DisplayServer::VSYNC_ADAPTIVE:
			requested_present_mode = VkPresentModeKHR::VK_PRESENT_MODE_FIFO_RELAXED_KHR;
			break;
		case DisplayServer::VSYNC_ENABLED:
			requested_present_mode = VkPresentModeKHR::VK_PRESENT_MODE_FIFO_KHR;
			break;
		case DisplayServer::VSYNC_DISABLED:
			requested_present_mode = VkPresentModeKHR::VK_PRESENT_MODE_IMMEDIATE_KHR;
			break;
	}

	// Check if the requested mode is available.
	bool present_mode_available = false;
	for (uint32_t i = 0; i < presentModeCount; i++) {
		if (presentModes[i] == requested_present_mode) {
			present_mode_available = true;
		}
	}

	// Set the window's present mode if it is available, otherwise FIFO is used (guaranteed to be supported).
	if (present_mode_available) {
		window->presentMode = requested_present_mode;
	} else {
		WARN_PRINT("Requested VSync mode is not available!");
		window->vsync_mode = DisplayServer::VSYNC_ENABLED; // Set to default.
	}

	print_verbose("Using present mode: " + String(string_VkPresentModeKHR(window->presentMode)));

	free(presentModes);

	// Determine the number of VkImages to use in the swap chain.
	// Application desires to acquire 3 images at a time for triple
	// buffering.
	uint32_t desiredNumOfSwapchainImages = 3;
	if (desiredNumOfSwapchainImages < surfCapabilities.minImageCount) {
		desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
	}
	// If maxImageCount is 0, we can ask for as many images as we want;
	// otherwise we're limited to maxImageCount.
	if ((surfCapabilities.maxImageCount > 0) && (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
		// Application must settle for fewer images than desired:
		desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
	}

	VkSurfaceTransformFlagsKHR preTransform;
	if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
		preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
	} else {
		preTransform = surfCapabilities.currentTransform;
	}

	// Find a supported composite alpha mode - one of these is guaranteed to be set.
	VkCompositeAlphaFlagBitsKHR compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
	VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
		VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
		VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
		VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
		VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
	};
	for (uint32_t i = 0; i < ARRAY_SIZE(compositeAlphaFlags); i++) {
		if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i]) {
			compositeAlpha = compositeAlphaFlags[i];
			break;
		}
	}

	VkSwapchainCreateInfoKHR swapchain_ci = {
		/*sType*/ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
		/*pNext*/ nullptr,
		/*flags*/ 0,
		/*surface*/ window->surface,
		/*minImageCount*/ desiredNumOfSwapchainImages,
		/*imageFormat*/ format,
		/*imageColorSpace*/ color_space,
		/*imageExtent*/ {
				/*width*/ swapchainExtent.width,
				/*height*/ swapchainExtent.height,
		},
		/*imageArrayLayers*/ 1,
		/*imageUsage*/ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
		/*imageSharingMode*/ VK_SHARING_MODE_EXCLUSIVE,
		/*queueFamilyIndexCount*/ 0,
		/*pQueueFamilyIndices*/ nullptr,
		/*preTransform*/ (VkSurfaceTransformFlagBitsKHR)preTransform,
		/*compositeAlpha*/ compositeAlpha,
		/*presentMode*/ window->presentMode,
		/*clipped*/ true,
		/*oldSwapchain*/ VK_NULL_HANDLE,
	};

	err = fpCreateSwapchainKHR(device, &swapchain_ci, nullptr, &window->swapchain);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

	uint32_t sp_image_count;
	err = fpGetSwapchainImagesKHR(device, window->swapchain, &sp_image_count, nullptr);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

	if (swapchainImageCount == 0) {
		// Assign here for the first time.
		swapchainImageCount = sp_image_count;
	} else {
		ERR_FAIL_COND_V(swapchainImageCount != sp_image_count, ERR_BUG);
	}
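
	// All windows share the single swapchainImageCount member, so every window's
	// swapchain is expected to report the same image count; a mismatch is treated
	// as a bug above.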

	VkImage *swapchainImages = (VkImage *)malloc(swapchainImageCount * sizeof(VkImage));
	ERR_FAIL_COND_V(!swapchainImages, ERR_CANT_CREATE);
	err = fpGetSwapchainImagesKHR(device, window->swapchain, &swapchainImageCount, swapchainImages);
	if (err) {
		free(swapchainImages);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}

	window->swapchain_image_resources =
			(SwapchainImageResources *)malloc(sizeof(SwapchainImageResources) * swapchainImageCount);
	if (!window->swapchain_image_resources) {
		free(swapchainImages);
		ERR_FAIL_V(ERR_CANT_CREATE);
	}

	for (uint32_t i = 0; i < swapchainImageCount; i++) {
		VkImageViewCreateInfo color_image_view = {
			/*sType*/ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*image*/ swapchainImages[i],
			/*viewType*/ VK_IMAGE_VIEW_TYPE_2D,
			/*format*/ format,
			/*components*/ {
					/*r*/ VK_COMPONENT_SWIZZLE_R,
					/*g*/ VK_COMPONENT_SWIZZLE_G,
					/*b*/ VK_COMPONENT_SWIZZLE_B,
					/*a*/ VK_COMPONENT_SWIZZLE_A,
			},
			/*subresourceRange*/ { /*aspectMask*/ VK_IMAGE_ASPECT_COLOR_BIT,
					/*baseMipLevel*/ 0,
					/*levelCount*/ 1,
					/*baseArrayLayer*/ 0,
					/*layerCount*/ 1 },
		};

		window->swapchain_image_resources[i].image = swapchainImages[i];

		color_image_view.image = window->swapchain_image_resources[i].image;

		err = vkCreateImageView(device, &color_image_view, nullptr, &window->swapchain_image_resources[i].view);
		if (err) {
			free(swapchainImages);
			ERR_FAIL_V(ERR_CANT_CREATE);
		}
	}

	free(swapchainImages);

	/******** FRAMEBUFFER ************/
	{
		const VkAttachmentDescription attachment = {
			/*flags*/ 0,
			/*format*/ format,
			/*samples*/ VK_SAMPLE_COUNT_1_BIT,
			/*loadOp*/ VK_ATTACHMENT_LOAD_OP_CLEAR,
			/*storeOp*/ VK_ATTACHMENT_STORE_OP_STORE,
			/*stencilLoadOp*/ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
			/*stencilStoreOp*/ VK_ATTACHMENT_STORE_OP_DONT_CARE,
			/*initialLayout*/ VK_IMAGE_LAYOUT_UNDEFINED,
			/*finalLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
		};
		const VkAttachmentReference color_reference = {
			/*attachment*/ 0,
			/*layout*/ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
		};

		const VkSubpassDescription subpass = {
			/*flags*/ 0,
			/*pipelineBindPoint*/ VK_PIPELINE_BIND_POINT_GRAPHICS,
			/*inputAttachmentCount*/ 0,
			/*pInputAttachments*/ nullptr,
			/*colorAttachmentCount*/ 1,
			/*pColorAttachments*/ &color_reference,
			/*pResolveAttachments*/ nullptr,
			/*pDepthStencilAttachment*/ nullptr,
			/*preserveAttachmentCount*/ 0,
			/*pPreserveAttachments*/ nullptr,
		};
		const VkRenderPassCreateInfo rp_info = {
			/*sType*/ VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*attachmentCount*/ 1,
			/*pAttachments*/ &attachment,
			/*subpassCount*/ 1,
			/*pSubpasses*/ &subpass,
			/*dependencyCount*/ 0,
			/*pDependencies*/ nullptr,
		};

		err = vkCreateRenderPass(device, &rp_info, nullptr, &window->render_pass);
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

		for (uint32_t i = 0; i < swapchainImageCount; i++) {
			const VkFramebufferCreateInfo fb_info = {
				/*sType*/ VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
				/*pNext*/ nullptr,
				/*flags*/ 0,
				/*renderPass*/ window->render_pass,
				/*attachmentCount*/ 1,
				/*pAttachments*/ &window->swapchain_image_resources[i].view,
				/*width*/ (uint32_t)window->width,
				/*height*/ (uint32_t)window->height,
				/*layers*/ 1,
			};

			err = vkCreateFramebuffer(device, &fb_info, nullptr, &window->swapchain_image_resources[i].framebuffer);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		}
	}

	/******** SEPARATE PRESENT QUEUE ************/

	if (separate_present_queue) {
		const VkCommandPoolCreateInfo present_cmd_pool_info = {
			/*sType*/ VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*queueFamilyIndex*/ present_queue_family_index,
		};
		err = vkCreateCommandPool(device, &present_cmd_pool_info, nullptr, &window->present_cmd_pool);
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		const VkCommandBufferAllocateInfo present_cmd_info = {
			/*sType*/ VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
			/*pNext*/ nullptr,
			/*commandPool*/ window->present_cmd_pool,
			/*level*/ VK_COMMAND_BUFFER_LEVEL_PRIMARY,
			/*commandBufferCount*/ 1,
		};
		for (uint32_t i = 0; i < swapchainImageCount; i++) {
			err = vkAllocateCommandBuffers(device, &present_cmd_info,
					&window->swapchain_image_resources[i].graphics_to_present_cmd);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

			const VkCommandBufferBeginInfo cmd_buf_info = {
				/*sType*/ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
				/*pNext*/ nullptr,
				/*flags*/ VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
				/*pInheritanceInfo*/ nullptr,
			};
			err = vkBeginCommandBuffer(window->swapchain_image_resources[i].graphics_to_present_cmd, &cmd_buf_info);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

			VkImageMemoryBarrier image_ownership_barrier = {
				/*sType*/ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
				/*pNext*/ nullptr,
				/*srcAccessMask*/ 0,
				/*dstAccessMask*/ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
				/*oldLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
				/*newLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
				/*srcQueueFamilyIndex*/ graphics_queue_family_index,
				/*dstQueueFamilyIndex*/ present_queue_family_index,
				/*image*/ window->swapchain_image_resources[i].image,
				/*subresourceRange*/ { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 }
			};

			vkCmdPipelineBarrier(window->swapchain_image_resources[i].graphics_to_present_cmd, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
					VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_ownership_barrier);
			err = vkEndCommandBuffer(window->swapchain_image_resources[i].graphics_to_present_cmd);
			ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
		}
	}

	// Reset current buffer.
	window->current_buffer = 0;

	return OK;
}

Error VulkanContext::initialize() {
#ifdef USE_VOLK
	if (volkInitialize() != VK_SUCCESS) {
		return FAILED;
	}
#endif

	Error err = _create_instance();
	if (err != OK) {
		return err;
	}

	return OK;
}

void VulkanContext::set_setup_buffer(const VkCommandBuffer &pCommandBuffer) {
	command_buffer_queue.write[0] = pCommandBuffer;
}

void VulkanContext::append_command_buffer(const VkCommandBuffer &pCommandBuffer) {
	if (command_buffer_queue.size() <= command_buffer_count) {
		command_buffer_queue.resize(command_buffer_count + 1);
	}

	command_buffer_queue.write[command_buffer_count] = pCommandBuffer;
	command_buffer_count++;
}
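
// Slot 0 of command_buffer_queue always holds the optional setup command buffer
// (see set_setup_buffer() above); the frame command buffers follow from index 1.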
void VulkanContext::flush(bool p_flush_setup, bool p_flush_pending) {
	// Ensure everything else pending is executed.
	vkDeviceWaitIdle(device);

	// Flush the pending setup buffer.
	if (p_flush_setup && command_buffer_queue[0]) {
		// Submit the setup command buffer and wait for the device to finish it.
		VkSubmitInfo submit_info;
		submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
		submit_info.pNext = nullptr;
		submit_info.pWaitDstStageMask = nullptr;
		submit_info.waitSemaphoreCount = 0;
		submit_info.pWaitSemaphores = nullptr;
		submit_info.commandBufferCount = 1;
		submit_info.pCommandBuffers = command_buffer_queue.ptr();
		submit_info.signalSemaphoreCount = 0;
		submit_info.pSignalSemaphores = nullptr;
		VkResult err = vkQueueSubmit(graphics_queue, 1, &submit_info, VK_NULL_HANDLE);
		command_buffer_queue.write[0] = nullptr;
		ERR_FAIL_COND(err);
		vkDeviceWaitIdle(device);
	}

	if (p_flush_pending && command_buffer_count > 1) {
		// Submit the pending frame command buffers and wait for the device to finish them.
		VkSubmitInfo submit_info;
		submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
		submit_info.pNext = nullptr;
		submit_info.pWaitDstStageMask = nullptr;
		submit_info.waitSemaphoreCount = 0;
		submit_info.pWaitSemaphores = nullptr;
		submit_info.commandBufferCount = command_buffer_count - 1;
		submit_info.pCommandBuffers = command_buffer_queue.ptr() + 1;
		submit_info.signalSemaphoreCount = 0;
		submit_info.pSignalSemaphores = nullptr;
		VkResult err = vkQueueSubmit(graphics_queue, 1, &submit_info, VK_NULL_HANDLE);
		ERR_FAIL_COND(err);
		vkDeviceWaitIdle(device);

		command_buffer_count = 1;
	}
}

Error VulkanContext::prepare_buffers() {
	if (!queues_initialized) {
		return OK;
	}

	VkResult err;

	// Ensure no more than FRAME_LAG renderings are outstanding.
	vkWaitForFences(device, 1, &fences[frame_index], VK_TRUE, UINT64_MAX);
	vkResetFences(device, 1, &fences[frame_index]);

	for (KeyValue<int, Window> &E : windows) {
		Window *w = &E.value;

		w->semaphore_acquired = false;

		if (w->swapchain == VK_NULL_HANDLE) {
			continue;
		}

		do {
			// Get the index of the next available swapchain image:
			err =
					fpAcquireNextImageKHR(device, w->swapchain, UINT64_MAX,
							w->image_acquired_semaphores[frame_index], VK_NULL_HANDLE, &w->current_buffer);

			if (err == VK_ERROR_OUT_OF_DATE_KHR) {
				// Swapchain is out of date (e.g. the window was resized) and
				// must be recreated:
				print_verbose("Vulkan: Early out of date swapchain, recreating.");
				//resize_notify();
				_update_swap_chain(w);
			} else if (err == VK_SUBOPTIMAL_KHR) {
				// Swapchain is not as optimal as it could be, but the platform's
				// presentation engine will still present the image correctly.
				print_verbose("Vulkan: Early suboptimal swapchain.");
				break;
			} else if (err != VK_SUCCESS) {
				ERR_BREAK_MSG(err != VK_SUCCESS, "Vulkan: Did not create swapchain successfully.");
			} else {
				w->semaphore_acquired = true;
			}
		} while (err != VK_SUCCESS);
	}
	buffers_prepared = true;

	return OK;
}

Error VulkanContext::swap_buffers() {
	if (!queues_initialized) {
		return OK;
	}

	// print_line("swapbuffers?");
	VkResult err;

#if 0
	if (VK_GOOGLE_display_timing_enabled) {
		// Look at what happened to previous presents, and make appropriate
		// adjustments in timing:
		DemoUpdateTargetIPD(demo);

		// Note: a real application would position its geometry so that it's in
		// the correct location for when the next image is presented. It might
		// also wait, so that there's less latency between any input and when
		// the next image is rendered/presented. This demo program is so
		// simple that it doesn't do either of those.
	}
#endif
	// Wait for the image acquired semaphore to be signalled to ensure
	// that the image won't be rendered to until the presentation
	// engine has fully released ownership to the application, and it is
	// okay to render to the image.

	const VkCommandBuffer *commands_ptr = nullptr;
	uint32_t commands_to_submit = 0;

	if (command_buffer_queue[0] == nullptr) {
		// No setup command buffer was queued; if there are frame commands to
		// submit, start from index 1 and skip the empty setup slot.
		if (command_buffer_count > 1) {
			commands_ptr = command_buffer_queue.ptr() + 1;
			commands_to_submit = command_buffer_count - 1;
		}
	} else {
		commands_ptr = command_buffer_queue.ptr();
		commands_to_submit = command_buffer_count;
	}

	VkSemaphore *semaphores_to_acquire = (VkSemaphore *)alloca(windows.size() * sizeof(VkSemaphore));
	uint32_t semaphores_to_acquire_count = 0;

	for (KeyValue<int, Window> &E : windows) {
		Window *w = &E.value;

		if (w->semaphore_acquired) {
			semaphores_to_acquire[semaphores_to_acquire_count++] = w->image_acquired_semaphores[frame_index];
		}
	}
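
	// Only wait on the image-acquired semaphores of windows that actually acquired
	// an image this frame; minimized windows are skipped in prepare_buffers().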
	VkPipelineStageFlags pipe_stage_flags;
	VkSubmitInfo submit_info;
	submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
	submit_info.pNext = nullptr;
	submit_info.pWaitDstStageMask = &pipe_stage_flags;
	pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
	submit_info.waitSemaphoreCount = semaphores_to_acquire_count;
	submit_info.pWaitSemaphores = semaphores_to_acquire;
	submit_info.commandBufferCount = commands_to_submit;
	submit_info.pCommandBuffers = commands_ptr;
	submit_info.signalSemaphoreCount = 1;
	submit_info.pSignalSemaphores = &draw_complete_semaphores[frame_index];
	err = vkQueueSubmit(graphics_queue, 1, &submit_info, fences[frame_index]);
	ERR_FAIL_COND_V(err, ERR_CANT_CREATE);

	command_buffer_queue.write[0] = nullptr;
	command_buffer_count = 1;

	if (separate_present_queue) {
		// If we are using separate queues, change image ownership to the
		// present queue before presenting, waiting for the draw complete
		// semaphore and signalling the ownership released semaphore when finished.
		VkFence nullFence = VK_NULL_HANDLE;
		pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		submit_info.waitSemaphoreCount = 1;
		submit_info.pWaitSemaphores = &draw_complete_semaphores[frame_index];
		submit_info.commandBufferCount = 0;

		VkCommandBuffer *cmdbufptr = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer) * windows.size());
		submit_info.pCommandBuffers = cmdbufptr;

		for (KeyValue<int, Window> &E : windows) {
			Window *w = &E.value;

			if (w->swapchain == VK_NULL_HANDLE) {
				continue;
			}
			cmdbufptr[submit_info.commandBufferCount] = w->swapchain_image_resources[w->current_buffer].graphics_to_present_cmd;
			submit_info.commandBufferCount++;
		}

		submit_info.signalSemaphoreCount = 1;
		submit_info.pSignalSemaphores = &image_ownership_semaphores[frame_index];
		err = vkQueueSubmit(present_queue, 1, &submit_info, nullFence);
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	}

	// If we are using separate queues, we have to wait for image ownership;
	// otherwise wait for draw complete.
	VkPresentInfoKHR present = {
		/*sType*/ VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
		/*pNext*/ nullptr,
		/*waitSemaphoreCount*/ 1,
		/*pWaitSemaphores*/ (separate_present_queue) ? &image_ownership_semaphores[frame_index] : &draw_complete_semaphores[frame_index],
		/*swapchainCount*/ 0,
		/*pSwapchains*/ nullptr,
		/*pImageIndices*/ nullptr,
		/*pResults*/ nullptr,
	};

	VkSwapchainKHR *pSwapchains = (VkSwapchainKHR *)alloca(sizeof(VkSwapchainKHR) * windows.size());
	uint32_t *pImageIndices = (uint32_t *)alloca(sizeof(uint32_t) * windows.size());

	present.pSwapchains = pSwapchains;
	present.pImageIndices = pImageIndices;

	for (KeyValue<int, Window> &E : windows) {
		Window *w = &E.value;

		if (w->swapchain == VK_NULL_HANDLE) {
			continue;
		}
		pSwapchains[present.swapchainCount] = w->swapchain;
		pImageIndices[present.swapchainCount] = w->current_buffer;
		present.swapchainCount++;
	}

#if 0
	if (VK_KHR_incremental_present_enabled) {
		// If using VK_KHR_incremental_present, we provide a hint of the region
		// that contains changed content relative to the previously-presented
		// image. The implementation can use this hint in order to save
		// work/power (by only copying the region in the hint). The
		// implementation is free to ignore the hint though, and so we must
		// ensure that the entire image has the correctly-drawn content.
		uint32_t eighthOfWidth = width / 8;
		uint32_t eighthOfHeight = height / 8;
		VkRectLayerKHR rect = {
			/*offset.x*/ eighthOfWidth,
			/*offset.y*/ eighthOfHeight,
			/*extent.width*/ eighthOfWidth * 6,
			/*extent.height*/ eighthOfHeight * 6,
			/*layer*/ 0,
		};
		VkPresentRegionKHR region = {
			/*rectangleCount*/ 1,
			/*pRectangles*/ &rect,
		};
		VkPresentRegionsKHR regions = {
			/*sType*/ VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
			/*pNext*/ present.pNext,
			/*swapchainCount*/ present.swapchainCount,
			/*pRegions*/ &region,
		};
		present.pNext = &regions;
	}
#endif

#if 0
	if (VK_GOOGLE_display_timing_enabled) {
		VkPresentTimeGOOGLE ptime;
		if (prev_desired_present_time == 0) {
			// This must be the first present for this swapchain.
			//
			// We don't know where we are relative to the presentation engine's
			// display's refresh cycle. We also don't know how long rendering
			// takes. Let's make a grossly-simplified assumption that the
			// desiredPresentTime should be half way between now and
			// now+target_IPD. We will adjust over time.
			uint64_t curtime = getTimeInNanoseconds();
			if (curtime == 0) {
				// Since we didn't find out the current time, don't give a
				// desiredPresentTime:
				ptime.desiredPresentTime = 0;
			} else {
				ptime.desiredPresentTime = curtime + (target_IPD >> 1);
			}
		} else {
			ptime.desiredPresentTime = (prev_desired_present_time + target_IPD);
		}
		ptime.presentID = next_present_id++;
		prev_desired_present_time = ptime.desiredPresentTime;

		VkPresentTimesInfoGOOGLE present_time = {
			/*sType*/ VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
			/*pNext*/ present.pNext,
			/*swapchainCount*/ present.swapchainCount,
			/*pTimes*/ &ptime,
		};
		if (VK_GOOGLE_display_timing_enabled) {
			present.pNext = &present_time;
		}
	}
#endif

	static int total_frames = 0;
	total_frames++;
	// print_line("current buffer: " + itos(current_buffer));
	err = fpQueuePresentKHR(present_queue, &present);

	frame_index += 1;
	frame_index %= FRAME_LAG;
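	// Advance to the next frame slot; up to FRAME_LAG frames can be in flight at
	// once, throttled by the per-frame fences waited on in prepare_buffers().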

	if (err == VK_ERROR_OUT_OF_DATE_KHR) {
		// Swapchain is out of date (e.g. the window was resized) and
		// must be recreated:
		print_verbose("Vulkan: Swapchain is out of date, recreating.");
		resize_notify();
	} else if (err == VK_SUBOPTIMAL_KHR) {
		// Swapchain is not as optimal as it could be, but the platform's
		// presentation engine will still present the image correctly.
		print_verbose("Vulkan: Swapchain is suboptimal.");
	} else {
		ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
	}

	buffers_prepared = false;
	return OK;
}

void VulkanContext::resize_notify() {
}

VkDevice VulkanContext::get_device() {
	return device;
}

VkPhysicalDevice VulkanContext::get_physical_device() {
	return gpu;
}

int VulkanContext::get_swapchain_image_count() const {
	return swapchainImageCount;
}

VkQueue VulkanContext::get_graphics_queue() const {
	return graphics_queue;
}

uint32_t VulkanContext::get_graphics_queue_family_index() const {
	return graphics_queue_family_index;
}

VkFormat VulkanContext::get_screen_format() const {
	return format;
}

VkPhysicalDeviceLimits VulkanContext::get_device_limits() const {
	return gpu_props.limits;
}
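
// A "local device" is a second, standalone VkDevice created on the same physical
// GPU and graphics queue family. It is used for work submitted outside the main
// rendering loop (see local_device_push_command_buffers / local_device_sync below).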
RID VulkanContext::local_device_create() {
	LocalDevice ld;

	{ // Create device.
		VkResult err;
		float queue_priorities[1] = { 0.0 };
		VkDeviceQueueCreateInfo queues[2];
		queues[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
		queues[0].pNext = nullptr;
		queues[0].queueFamilyIndex = graphics_queue_family_index;
		queues[0].queueCount = 1;
		queues[0].pQueuePriorities = queue_priorities;
		queues[0].flags = 0;

		VkDeviceCreateInfo sdevice = {
			/*sType*/ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
			/*pNext*/ nullptr,
			/*flags*/ 0,
			/*queueCreateInfoCount*/ 1,
			/*pQueueCreateInfos*/ queues,
			/*enabledLayerCount*/ 0,
			/*ppEnabledLayerNames*/ nullptr,
			/*enabledExtensionCount*/ enabled_extension_count,
			/*ppEnabledExtensionNames*/ (const char *const *)extension_names,
			/*pEnabledFeatures*/ &physical_device_features, // If specific features are required, pass them in here
		};

		err = vkCreateDevice(gpu, &sdevice, nullptr, &ld.device);
		ERR_FAIL_COND_V(err, RID());
	}

	{ // Create graphics queue.
		vkGetDeviceQueue(ld.device, graphics_queue_family_index, 0, &ld.queue);
	}

	return local_device_owner.make_rid(ld);
}

VkDevice VulkanContext::local_device_get_vk_device(RID p_local_device) {
	LocalDevice *ld = local_device_owner.get_or_null(p_local_device);
	return ld->device;
}

void VulkanContext::local_device_push_command_buffers(RID p_local_device, const VkCommandBuffer *p_buffers, int p_count) {
	LocalDevice *ld = local_device_owner.get_or_null(p_local_device);
	ERR_FAIL_COND(ld->waiting);

	VkSubmitInfo submit_info;
	submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
	submit_info.pNext = nullptr;
	submit_info.pWaitDstStageMask = nullptr;
	submit_info.waitSemaphoreCount = 0;
	submit_info.pWaitSemaphores = nullptr;
	submit_info.commandBufferCount = p_count;
	submit_info.pCommandBuffers = p_buffers;
	submit_info.signalSemaphoreCount = 0;
	submit_info.pSignalSemaphores = nullptr;

	VkResult err = vkQueueSubmit(ld->queue, 1, &submit_info, VK_NULL_HANDLE);
	if (err == VK_ERROR_OUT_OF_HOST_MEMORY) {
		print_line("Vulkan: Out of host memory!");
	}
	if (err == VK_ERROR_OUT_OF_DEVICE_MEMORY) {
		print_line("Vulkan: Out of device memory!");
	}
	if (err == VK_ERROR_DEVICE_LOST) {
		print_line("Vulkan: Device lost!");
	}
	ERR_FAIL_COND(err);

	ld->waiting = true;
}

void VulkanContext::local_device_sync(RID p_local_device) {
	LocalDevice *ld = local_device_owner.get_or_null(p_local_device);
	ERR_FAIL_COND(!ld->waiting);

	vkDeviceWaitIdle(ld->device);
	ld->waiting = false;
}

void VulkanContext::local_device_free(RID p_local_device) {
	LocalDevice *ld = local_device_owner.get_or_null(p_local_device);
	vkDestroyDevice(ld->device, nullptr);
	local_device_owner.free(p_local_device);
}

void VulkanContext::command_begin_label(VkCommandBuffer p_command_buffer, String p_label_name, const Color p_color) {
	if (!enabled_debug_utils) {
		return;
	}
	CharString cs = p_label_name.utf8().get_data();
	VkDebugUtilsLabelEXT label;
	label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
	label.pNext = nullptr;
	label.pLabelName = cs.get_data();
	label.color[0] = p_color[0];
	label.color[1] = p_color[1];
	label.color[2] = p_color[2];
	label.color[3] = p_color[3];
	CmdBeginDebugUtilsLabelEXT(p_command_buffer, &label);
}

void VulkanContext::command_insert_label(VkCommandBuffer p_command_buffer, String p_label_name, const Color p_color) {
	if (!enabled_debug_utils) {
		return;
	}
	CharString cs = p_label_name.utf8().get_data();
	VkDebugUtilsLabelEXT label;
	label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
	label.pNext = nullptr;
	label.pLabelName = cs.get_data();
	label.color[0] = p_color[0];
	label.color[1] = p_color[1];
	label.color[2] = p_color[2];
	label.color[3] = p_color[3];
	CmdInsertDebugUtilsLabelEXT(p_command_buffer, &label);
}

void VulkanContext::command_end_label(VkCommandBuffer p_command_buffer) {
	if (!enabled_debug_utils) {
		return;
	}
	CmdEndDebugUtilsLabelEXT(p_command_buffer);
}

void VulkanContext::set_object_name(VkObjectType p_object_type, uint64_t p_object_handle, String p_object_name) {
	if (!enabled_debug_utils) {
		return;
	}
	CharString obj_data = p_object_name.utf8();
	VkDebugUtilsObjectNameInfoEXT name_info;
	name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
	name_info.pNext = nullptr;
	name_info.objectType = p_object_type;
	name_info.objectHandle = p_object_handle;
	name_info.pObjectName = obj_data.get_data();
	SetDebugUtilsObjectNameEXT(device, &name_info);
}

String VulkanContext::get_device_vendor_name() const {
	return device_vendor;
}

String VulkanContext::get_device_name() const {
	return device_name;
}

RenderingDevice::DeviceType VulkanContext::get_device_type() const {
	return RenderingDevice::DeviceType(device_type);
}

String VulkanContext::get_device_pipeline_cache_uuid() const {
	return pipeline_cache_id;
}

DisplayServer::VSyncMode VulkanContext::get_vsync_mode(DisplayServer::WindowID p_window) const {
	ERR_FAIL_COND_V_MSG(!windows.has(p_window), DisplayServer::VSYNC_ENABLED, "Could not get VSync mode for window with WindowID " + itos(p_window) + " because it does not exist.");
	return windows[p_window].vsync_mode;
}

void VulkanContext::set_vsync_mode(DisplayServer::WindowID p_window, DisplayServer::VSyncMode p_mode) {
	ERR_FAIL_COND_MSG(!windows.has(p_window), "Could not set VSync mode for window with WindowID " + itos(p_window) + " because it does not exist.");
	windows[p_window].vsync_mode = p_mode;
	_update_swap_chain(&windows[p_window]);
}

VulkanContext::VulkanContext() {
	command_buffer_queue.resize(1); // First one is always the setup command.
	command_buffer_queue.write[0] = nullptr;
}

VulkanContext::~VulkanContext() {
	if (queue_props) {
		free(queue_props);
	}
	if (device_initialized) {
		for (uint32_t i = 0; i < FRAME_LAG; i++) {
			vkDestroyFence(device, fences[i], nullptr);
			vkDestroySemaphore(device, draw_complete_semaphores[i], nullptr);
			if (separate_present_queue) {
				vkDestroySemaphore(device, image_ownership_semaphores[i], nullptr);
			}
		}
		if (inst_initialized && enabled_debug_utils) {
			DestroyDebugUtilsMessengerEXT(inst, dbg_messenger, nullptr);
		}
		if (inst_initialized && dbg_debug_report != VK_NULL_HANDLE) {
			DestroyDebugReportCallbackEXT(inst, dbg_debug_report, nullptr);
		}
		vkDestroyDevice(device, nullptr);
	}
	if (inst_initialized) {
		vkDestroyInstance(inst, nullptr);
	}
}