vulkan_context.cpp 76 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094
  1. /*************************************************************************/
  2. /* vulkan_context.cpp */
  3. /*************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /*************************************************************************/
  8. /* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur. */
  9. /* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /*************************************************************************/
  30. #include "vulkan_context.h"
  31. #include "core/config/engine.h"
  32. #include "core/config/project_settings.h"
  33. #include "core/string/ustring.h"
  34. #include "core/version.h"
  35. #include "servers/rendering/rendering_device.h"
  36. #include "vk_enum_string_helper.h"
  37. #include <stdio.h>
  38. #include <stdlib.h>
  39. #include <string.h>
  40. #include <vector>
  41. #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
  42. #define APP_SHORT_NAME "GodotEngine"
  43. VKAPI_ATTR VkBool32 VKAPI_CALL VulkanContext::_debug_messenger_callback(
  44. VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
  45. VkDebugUtilsMessageTypeFlagsEXT messageType,
  46. const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
  47. void *pUserData) {
  48. // This error needs to be ignored because the AMD allocator will mix up memory types on IGP processors.
  49. if (strstr(pCallbackData->pMessage, "Mapping an image with layout") != nullptr &&
  50. strstr(pCallbackData->pMessage, "can result in undefined behavior if this memory is used by the device") != nullptr) {
  51. return VK_FALSE;
  52. }
  53. // This needs to be ignored because Validator is wrong here.
  54. if (strstr(pCallbackData->pMessage, "Invalid SPIR-V binary version 1.3") != nullptr) {
  55. return VK_FALSE;
  56. }
  57. // This needs to be ignored because Validator is wrong here.
  58. if (strstr(pCallbackData->pMessage, "Shader requires flag") != nullptr) {
  59. return VK_FALSE;
  60. }
  61. // This needs to be ignored because Validator is wrong here.
  62. if (strstr(pCallbackData->pMessage, "SPIR-V module not valid: Pointer operand") != nullptr &&
  63. strstr(pCallbackData->pMessage, "must be a memory object") != nullptr) {
  64. return VK_FALSE;
  65. }
  66. /*
  67. // This is a valid warning because its illegal in Vulkan, but in practice it should work according to VK_KHR_maintenance2
  68. if (strstr(pCallbackData->pMessage, "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes VK_IMAGE_USAGE_STORAGE_BIT") != nullptr) {
  69. return VK_FALSE;
  70. }
  71. if (strstr(pCallbackData->pMessage, "VK_FORMAT_R4G4B4A4_UNORM_PACK16 with tiling VK_IMAGE_TILING_OPTIMAL does not support usage that includes VK_IMAGE_USAGE_STORAGE_BIT") != nullptr) {
  72. return VK_FALSE;
  73. }
  74. */
  75. // Workaround for Vulkan-Loader usability bug: https://github.com/KhronosGroup/Vulkan-Loader/issues/262.
  76. if (strstr(pCallbackData->pMessage, "wrong ELF class: ELFCLASS32") != nullptr) {
  77. return VK_FALSE;
  78. }
  79. if (pCallbackData->pMessageIdName && strstr(pCallbackData->pMessageIdName, "UNASSIGNED-CoreValidation-DrawState-ClearCmdBeforeDraw") != nullptr) {
  80. return VK_FALSE;
  81. }
  82. String type_string;
  83. switch (messageType) {
  84. case (VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT):
  85. type_string = "GENERAL";
  86. break;
  87. case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT):
  88. type_string = "VALIDATION";
  89. break;
  90. case (VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
  91. type_string = "PERFORMANCE";
  92. break;
  93. case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
  94. type_string = "VALIDATION|PERFORMANCE";
  95. break;
  96. }
  97. String objects_string;
  98. if (pCallbackData->objectCount > 0) {
  99. objects_string = "\n\tObjects - " + String::num_int64(pCallbackData->objectCount);
  100. for (uint32_t object = 0; object < pCallbackData->objectCount; ++object) {
  101. objects_string +=
  102. "\n\t\tObject[" + String::num_int64(object) + "]" +
  103. " - " + string_VkObjectType(pCallbackData->pObjects[object].objectType) +
  104. ", Handle " + String::num_int64(pCallbackData->pObjects[object].objectHandle);
  105. if (nullptr != pCallbackData->pObjects[object].pObjectName && strlen(pCallbackData->pObjects[object].pObjectName) > 0) {
  106. objects_string += ", Name \"" + String(pCallbackData->pObjects[object].pObjectName) + "\"";
  107. }
  108. }
  109. }
  110. String labels_string;
  111. if (pCallbackData->cmdBufLabelCount > 0) {
  112. labels_string = "\n\tCommand Buffer Labels - " + String::num_int64(pCallbackData->cmdBufLabelCount);
  113. for (uint32_t cmd_buf_label = 0; cmd_buf_label < pCallbackData->cmdBufLabelCount; ++cmd_buf_label) {
  114. labels_string +=
  115. "\n\t\tLabel[" + String::num_int64(cmd_buf_label) + "]" +
  116. " - " + pCallbackData->pCmdBufLabels[cmd_buf_label].pLabelName +
  117. "{ ";
  118. for (int color_idx = 0; color_idx < 4; ++color_idx) {
  119. labels_string += String::num(pCallbackData->pCmdBufLabels[cmd_buf_label].color[color_idx]);
  120. if (color_idx < 3) {
  121. labels_string += ", ";
  122. }
  123. }
  124. labels_string += " }";
  125. }
  126. }
  127. String error_message(type_string +
  128. " - Message Id Number: " + String::num_int64(pCallbackData->messageIdNumber) +
  129. " | Message Id Name: " + pCallbackData->pMessageIdName +
  130. "\n\t" + pCallbackData->pMessage +
  131. objects_string + labels_string);
  132. // Convert VK severity to our own log macros.
  133. switch (messageSeverity) {
  134. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
  135. print_verbose(error_message);
  136. break;
  137. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
  138. print_line(error_message);
  139. break;
  140. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
  141. WARN_PRINT(error_message);
  142. break;
  143. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
  144. ERR_PRINT(error_message);
  145. CRASH_COND_MSG(Engine::get_singleton()->is_abort_on_gpu_errors_enabled(),
  146. "Crashing, because abort on GPU errors is enabled.");
  147. break;
  148. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT:
  149. break; // Shouldn't happen, only handling to make compilers happy.
  150. }
  151. return VK_FALSE;
  152. }
  153. VKAPI_ATTR VkBool32 VKAPI_CALL VulkanContext::_debug_report_callback(
  154. VkDebugReportFlagsEXT flags,
  155. VkDebugReportObjectTypeEXT objectType,
  156. uint64_t object,
  157. size_t location,
  158. int32_t messageCode,
  159. const char *pLayerPrefix,
  160. const char *pMessage,
  161. void *pUserData) {
  162. String debugMessage = String("Vulkan Debug Report: object - ") +
  163. String::num_int64(object) + "\n" + pMessage;
  164. switch (flags) {
  165. case VK_DEBUG_REPORT_DEBUG_BIT_EXT:
  166. case VK_DEBUG_REPORT_INFORMATION_BIT_EXT:
  167. print_line(debugMessage);
  168. break;
  169. case VK_DEBUG_REPORT_WARNING_BIT_EXT:
  170. case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT:
  171. WARN_PRINT(debugMessage);
  172. break;
  173. case VK_DEBUG_REPORT_ERROR_BIT_EXT:
  174. ERR_PRINT(debugMessage);
  175. break;
  176. }
  177. return VK_FALSE;
  178. }
  179. VkBool32 VulkanContext::_check_layers(uint32_t check_count, const char *const *check_names, uint32_t layer_count, VkLayerProperties *layers) {
  180. for (uint32_t i = 0; i < check_count; i++) {
  181. VkBool32 found = 0;
  182. for (uint32_t j = 0; j < layer_count; j++) {
  183. if (!strcmp(check_names[i], layers[j].layerName)) {
  184. found = 1;
  185. break;
  186. }
  187. }
  188. if (!found) {
  189. WARN_PRINT("Can't find layer: " + String(check_names[i]));
  190. return 0;
  191. }
  192. }
  193. return 1;
  194. }
  195. Error VulkanContext::_get_preferred_validation_layers(uint32_t *count, const char *const **names) {
  196. static const std::vector<std::vector<const char *>> instance_validation_layers_alt{
  197. // Preferred set of validation layers
  198. { "VK_LAYER_KHRONOS_validation" },
  199. // Alternative (deprecated, removed in SDK 1.1.126.0) set of validation layers
  200. { "VK_LAYER_LUNARG_standard_validation" },
  201. // Alternative (deprecated, removed in SDK 1.1.121.1) set of validation layers
  202. { "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation", "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation", "VK_LAYER_GOOGLE_unique_objects" }
  203. };
  204. // Clear out-arguments
  205. *count = 0;
  206. if (names != nullptr) {
  207. *names = nullptr;
  208. }
  209. VkResult err;
  210. uint32_t instance_layer_count;
  211. err = vkEnumerateInstanceLayerProperties(&instance_layer_count, nullptr);
  212. if (err) {
  213. ERR_FAIL_V(ERR_CANT_CREATE);
  214. }
  215. if (instance_layer_count < 1) {
  216. return OK;
  217. }
  218. VkLayerProperties *instance_layers = (VkLayerProperties *)malloc(sizeof(VkLayerProperties) * instance_layer_count);
  219. err = vkEnumerateInstanceLayerProperties(&instance_layer_count, instance_layers);
  220. if (err) {
  221. free(instance_layers);
  222. ERR_FAIL_V(ERR_CANT_CREATE);
  223. }
  224. for (uint32_t i = 0; i < instance_validation_layers_alt.size(); i++) {
  225. if (_check_layers(instance_validation_layers_alt[i].size(), instance_validation_layers_alt[i].data(), instance_layer_count, instance_layers)) {
  226. *count = instance_validation_layers_alt[i].size();
  227. if (names != nullptr) {
  228. *names = instance_validation_layers_alt[i].data();
  229. }
  230. break;
  231. }
  232. }
  233. free(instance_layers);
  234. return OK;
  235. }
  236. typedef VkResult(VKAPI_PTR *_vkEnumerateInstanceVersion)(uint32_t *);
  237. Error VulkanContext::_obtain_vulkan_version() {
  238. // https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkApplicationInfo.html#_description
  239. // for Vulkan 1.0 vkEnumerateInstanceVersion is not available, including not in the loader we compile against on Android.
  240. _vkEnumerateInstanceVersion func = (_vkEnumerateInstanceVersion)vkGetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion");
  241. if (func != nullptr) {
  242. uint32_t api_version;
  243. VkResult res = func(&api_version);
  244. if (res == VK_SUCCESS) {
  245. vulkan_major = VK_VERSION_MAJOR(api_version);
  246. vulkan_minor = VK_VERSION_MINOR(api_version);
  247. uint32_t vulkan_patch = VK_VERSION_PATCH(api_version);
  248. print_line("Vulkan API " + itos(vulkan_major) + "." + itos(vulkan_minor) + "." + itos(vulkan_patch));
  249. } else {
  250. // according to the documentation this shouldn't fail with anything except a memory allocation error
  251. // in which case we're in deep trouble anyway
  252. ERR_FAIL_V(ERR_CANT_CREATE);
  253. }
  254. } else {
  255. print_line("vkEnumerateInstanceVersion not available, assuming Vulkan 1.0");
  256. }
  257. // we don't go above 1.2
  258. if ((vulkan_major > 1) || (vulkan_major == 1 && vulkan_minor > 2)) {
  259. vulkan_major = 1;
  260. vulkan_minor = 2;
  261. }
  262. return OK;
  263. }
  264. Error VulkanContext::_initialize_extensions() {
  265. uint32_t instance_extension_count = 0;
  266. enabled_extension_count = 0;
  267. enabled_debug_utils = false;
  268. enabled_debug_report = false;
  269. /* Look for instance extensions */
  270. VkBool32 surfaceExtFound = 0;
  271. VkBool32 platformSurfaceExtFound = 0;
  272. memset(extension_names, 0, sizeof(extension_names));
  273. VkResult err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, nullptr);
  274. ERR_FAIL_COND_V(err != VK_SUCCESS && err != VK_INCOMPLETE, ERR_CANT_CREATE);
  275. if (instance_extension_count > 0) {
  276. VkExtensionProperties *instance_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * instance_extension_count);
  277. err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions);
  278. if (err != VK_SUCCESS && err != VK_INCOMPLETE) {
  279. free(instance_extensions);
  280. ERR_FAIL_V(ERR_CANT_CREATE);
  281. }
  282. for (uint32_t i = 0; i < instance_extension_count; i++) {
  283. if (!strcmp(VK_KHR_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  284. surfaceExtFound = 1;
  285. extension_names[enabled_extension_count++] = VK_KHR_SURFACE_EXTENSION_NAME;
  286. }
  287. if (!strcmp(_get_platform_surface_extension(), instance_extensions[i].extensionName)) {
  288. platformSurfaceExtFound = 1;
  289. extension_names[enabled_extension_count++] = _get_platform_surface_extension();
  290. }
  291. if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  292. if (_use_validation_layers()) {
  293. extension_names[enabled_extension_count++] = VK_EXT_DEBUG_REPORT_EXTENSION_NAME;
  294. enabled_debug_report = true;
  295. }
  296. }
  297. if (!strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instance_extensions[i].extensionName)) {
  298. extension_names[enabled_extension_count++] = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
  299. enabled_debug_utils = true;
  300. }
  301. if (enabled_extension_count >= MAX_EXTENSIONS) {
  302. free(instance_extensions);
  303. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  304. }
  305. }
  306. free(instance_extensions);
  307. }
  308. ERR_FAIL_COND_V_MSG(!surfaceExtFound, ERR_CANT_CREATE, "No surface extension found, is a driver installed?");
  309. ERR_FAIL_COND_V_MSG(!platformSurfaceExtFound, ERR_CANT_CREATE, "No platform surface extension found, is a driver installed?");
  310. return OK;
  311. }
  312. uint32_t VulkanContext::SubgroupCapabilities::supported_stages_flags_rd() const {
  313. uint32_t flags = 0;
  314. if (supportedStages & VK_SHADER_STAGE_VERTEX_BIT) {
  315. flags += RenderingDevice::ShaderStage::SHADER_STAGE_VERTEX_BIT;
  316. }
  317. if (supportedStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
  318. flags += RenderingDevice::ShaderStage::SHADER_STAGE_TESSELATION_CONTROL_BIT;
  319. }
  320. if (supportedStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
  321. flags += RenderingDevice::ShaderStage::SHADER_STAGE_TESSELATION_EVALUATION_BIT;
  322. }
  323. // if (supportedStages & VK_SHADER_STAGE_GEOMETRY_BIT) {
  324. // flags += RenderingDevice::ShaderStage::SHADER_STAGE_GEOMETRY_BIT;
  325. // }
  326. if (supportedStages & VK_SHADER_STAGE_FRAGMENT_BIT) {
  327. flags += RenderingDevice::ShaderStage::SHADER_STAGE_FRAGMENT_BIT;
  328. }
  329. if (supportedStages & VK_SHADER_STAGE_COMPUTE_BIT) {
  330. flags += RenderingDevice::ShaderStage::SHADER_STAGE_COMPUTE_BIT;
  331. }
  332. return flags;
  333. }
  334. String VulkanContext::SubgroupCapabilities::supported_stages_desc() const {
  335. String res;
  336. if (supportedStages & VK_SHADER_STAGE_VERTEX_BIT) {
  337. res += ", STAGE_VERTEX";
  338. }
  339. if (supportedStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
  340. res += ", STAGE_TESSELLATION_CONTROL";
  341. }
  342. if (supportedStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
  343. res += ", STAGE_TESSELLATION_EVALUATION";
  344. }
  345. if (supportedStages & VK_SHADER_STAGE_GEOMETRY_BIT) {
  346. res += ", STAGE_GEOMETRY";
  347. }
  348. if (supportedStages & VK_SHADER_STAGE_FRAGMENT_BIT) {
  349. res += ", STAGE_FRAGMENT";
  350. }
  351. if (supportedStages & VK_SHADER_STAGE_COMPUTE_BIT) {
  352. res += ", STAGE_COMPUTE";
  353. }
  354. /* these are not defined on Android GRMBL */
  355. if (supportedStages & 0x00000100 /* VK_SHADER_STAGE_RAYGEN_BIT_KHR */) {
  356. res += ", STAGE_RAYGEN_KHR";
  357. }
  358. if (supportedStages & 0x00000200 /* VK_SHADER_STAGE_ANY_HIT_BIT_KHR */) {
  359. res += ", STAGE_ANY_HIT_KHR";
  360. }
  361. if (supportedStages & 0x00000400 /* VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR */) {
  362. res += ", STAGE_CLOSEST_HIT_KHR";
  363. }
  364. if (supportedStages & 0x00000800 /* VK_SHADER_STAGE_MISS_BIT_KHR */) {
  365. res += ", STAGE_MISS_KHR";
  366. }
  367. if (supportedStages & 0x00001000 /* VK_SHADER_STAGE_INTERSECTION_BIT_KHR */) {
  368. res += ", STAGE_INTERSECTION_KHR";
  369. }
  370. if (supportedStages & 0x00002000 /* VK_SHADER_STAGE_CALLABLE_BIT_KHR */) {
  371. res += ", STAGE_CALLABLE_KHR";
  372. }
  373. if (supportedStages & 0x00000040 /* VK_SHADER_STAGE_TASK_BIT_NV */) {
  374. res += ", STAGE_TASK_NV";
  375. }
  376. if (supportedStages & 0x00000080 /* VK_SHADER_STAGE_MESH_BIT_NV */) {
  377. res += ", STAGE_MESH_NV";
  378. }
  379. return res.substr(2); // remove first ", "
  380. }
  381. uint32_t VulkanContext::SubgroupCapabilities::supported_operations_flags_rd() const {
  382. uint32_t flags = 0;
  383. if (supportedOperations & VK_SUBGROUP_FEATURE_BASIC_BIT) {
  384. flags += RenderingDevice::SubgroupOperations::SUBGROUP_BASIC_BIT;
  385. }
  386. if (supportedOperations & VK_SUBGROUP_FEATURE_VOTE_BIT) {
  387. flags += RenderingDevice::SubgroupOperations::SUBGROUP_VOTE_BIT;
  388. }
  389. if (supportedOperations & VK_SUBGROUP_FEATURE_ARITHMETIC_BIT) {
  390. flags += RenderingDevice::SubgroupOperations::SUBGROUP_ARITHMETIC_BIT;
  391. }
  392. if (supportedOperations & VK_SUBGROUP_FEATURE_BALLOT_BIT) {
  393. flags += RenderingDevice::SubgroupOperations::SUBGROUP_BALLOT_BIT;
  394. }
  395. if (supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_BIT) {
  396. flags += RenderingDevice::SubgroupOperations::SUBGROUP_SHUFFLE_BIT;
  397. }
  398. if (supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT) {
  399. flags += RenderingDevice::SubgroupOperations::SUBGROUP_SHUFFLE_RELATIVE_BIT;
  400. }
  401. if (supportedOperations & VK_SUBGROUP_FEATURE_CLUSTERED_BIT) {
  402. flags += RenderingDevice::SubgroupOperations::SUBGROUP_CLUSTERED_BIT;
  403. }
  404. if (supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT) {
  405. flags += RenderingDevice::SubgroupOperations::SUBGROUP_QUAD_BIT;
  406. }
  407. return flags;
  408. }
  409. String VulkanContext::SubgroupCapabilities::supported_operations_desc() const {
  410. String res;
  411. if (supportedOperations & VK_SUBGROUP_FEATURE_BASIC_BIT) {
  412. res += ", FEATURE_BASIC";
  413. }
  414. if (supportedOperations & VK_SUBGROUP_FEATURE_VOTE_BIT) {
  415. res += ", FEATURE_VOTE";
  416. }
  417. if (supportedOperations & VK_SUBGROUP_FEATURE_ARITHMETIC_BIT) {
  418. res += ", FEATURE_ARITHMETIC";
  419. }
  420. if (supportedOperations & VK_SUBGROUP_FEATURE_BALLOT_BIT) {
  421. res += ", FEATURE_BALLOT";
  422. }
  423. if (supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_BIT) {
  424. res += ", FEATURE_SHUFFLE";
  425. }
  426. if (supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT) {
  427. res += ", FEATURE_SHUFFLE_RELATIVE";
  428. }
  429. if (supportedOperations & VK_SUBGROUP_FEATURE_CLUSTERED_BIT) {
  430. res += ", FEATURE_CLUSTERED";
  431. }
  432. if (supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT) {
  433. res += ", FEATURE_QUAD";
  434. }
  435. if (supportedOperations & VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV) {
  436. res += ", FEATURE_PARTITIONED_NV";
  437. }
  438. return res.substr(2); // remove first ", "
  439. }
Error VulkanContext::_check_capabilities() {
	// Queries optional device capabilities (multiview and subgroup operations)
	// via the *2 property/feature entry points and caches the results in
	// multiview_capabilities / subgroup_capabilities.
	//
	// https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VK_KHR_multiview.html
	// https://www.khronos.org/blog/vulkan-subgroup-tutorial
	// for Vulkan 1.0 vkGetPhysicalDeviceProperties2 is not available, including not in the loader we compile against on Android.
	// so we check if the functions are accessible by getting their function pointers and skipping if not
	// (note that the desktop loader does a better job here but the android loader doesn't)

	// assume not supported until proven otherwise
	multiview_capabilities.is_supported = false;
	multiview_capabilities.max_view_count = 0;
	multiview_capabilities.max_instance_count = 0;
	subgroup_capabilities.size = 0;
	subgroup_capabilities.supportedStages = 0;
	subgroup_capabilities.supportedOperations = 0;
	subgroup_capabilities.quadOperationsInAllStages = false;

	// check for extended features
	PFN_vkGetPhysicalDeviceFeatures2 device_features_func = (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceFeatures2");
	if (device_features_func == nullptr) {
		// In Vulkan 1.0 might be accessible under its original extension name
		device_features_func = (PFN_vkGetPhysicalDeviceFeatures2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceFeatures2KHR");
	}
	if (device_features_func != nullptr) {
		// check our extended features
		VkPhysicalDeviceMultiviewFeatures multiview_features;
		multiview_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
		multiview_features.pNext = NULL;

		// Chain the multiview feature struct so one call fills both structs.
		VkPhysicalDeviceFeatures2 device_features;
		device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
		device_features.pNext = &multiview_features;

		device_features_func(gpu, &device_features);
		multiview_capabilities.is_supported = multiview_features.multiview;
		// For now we ignore if multiview is available in geometry and tesselation as we do not currently support those
	}

	// check extended properties
	PFN_vkGetPhysicalDeviceProperties2 device_properties_func = (PFN_vkGetPhysicalDeviceProperties2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceProperties2");
	if (device_properties_func == nullptr) {
		// In Vulkan 1.0 might be accessible under its original extension name
		device_properties_func = (PFN_vkGetPhysicalDeviceProperties2)vkGetInstanceProcAddr(inst, "vkGetPhysicalDeviceProperties2KHR");
	}
	if (device_properties_func != nullptr) {
		VkPhysicalDeviceMultiviewProperties multiviewProperties;
		VkPhysicalDeviceSubgroupProperties subgroupProperties;
		VkPhysicalDeviceProperties2 physicalDeviceProperties;

		subgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
		subgroupProperties.pNext = nullptr;

		physicalDeviceProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;

		// Only chain the multiview property struct when the feature query above
		// reported multiview support; the subgroup struct is always queried.
		if (multiview_capabilities.is_supported) {
			multiviewProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;
			multiviewProperties.pNext = &subgroupProperties;

			physicalDeviceProperties.pNext = &multiviewProperties;
		} else {
			physicalDeviceProperties.pNext = &subgroupProperties;
		}

		device_properties_func(gpu, &physicalDeviceProperties);

		subgroup_capabilities.size = subgroupProperties.subgroupSize;
		subgroup_capabilities.supportedStages = subgroupProperties.supportedStages;
		subgroup_capabilities.supportedOperations = subgroupProperties.supportedOperations;
		// Note: quadOperationsInAllStages will be true if:
		// - supportedStages has VK_SHADER_STAGE_ALL_GRAPHICS + VK_SHADER_STAGE_COMPUTE_BIT
		// - supportedOperations has VK_SUBGROUP_FEATURE_QUAD_BIT
		subgroup_capabilities.quadOperationsInAllStages = subgroupProperties.quadOperationsInAllStages;

		if (multiview_capabilities.is_supported) {
			multiview_capabilities.max_view_count = multiviewProperties.maxMultiviewViewCount;
			multiview_capabilities.max_instance_count = multiviewProperties.maxMultiviewInstanceIndex;

			// NOTE: the `} else {` below is intentionally INSIDE the #ifdef.
			// In release builds both print branches compile away and the `if`
			// above ends at the closing brace after #endif; do not "fix" the
			// apparent brace mismatch without accounting for both builds.
#ifdef DEBUG_ENABLED
			print_line("- Vulkan multiview supported:");
			print_line(" max views: " + itos(multiview_capabilities.max_view_count));
			print_line(" max instances: " + itos(multiview_capabilities.max_instance_count));
		} else {
			print_line("- Vulkan multiview not supported");
#endif
		}

		// Same #ifdef trick as above: the `} else {` that logs the failure to
		// resolve vkGetPhysicalDeviceProperties2 only exists in debug builds.
#ifdef DEBUG_ENABLED
		print_line("- Vulkan subgroup:");
		print_line(" size: " + itos(subgroup_capabilities.size));
		print_line(" stages: " + subgroup_capabilities.supported_stages_desc());
		print_line(" supported ops: " + subgroup_capabilities.supported_operations_desc());
		if (subgroup_capabilities.quadOperationsInAllStages) {
			print_line(" quad operations in all stages");
		}
	} else {
		print_line("- Couldn't call vkGetPhysicalDeviceProperties2");
#endif
	}

	return OK;
}
  525. Error VulkanContext::_create_physical_device() {
  526. /* obtain version */
  527. _obtain_vulkan_version();
  528. /* initialise extensions */
  529. {
  530. Error err = _initialize_extensions();
  531. if (err != OK) {
  532. return err;
  533. }
  534. }
  535. CharString cs = ProjectSettings::get_singleton()->get("application/config/name").operator String().utf8();
  536. String name = "GodotEngine " + String(VERSION_FULL_NAME);
  537. CharString namecs = name.utf8();
  538. const VkApplicationInfo app = {
  539. /*sType*/ VK_STRUCTURE_TYPE_APPLICATION_INFO,
  540. /*pNext*/ nullptr,
  541. /*pApplicationName*/ cs.get_data(),
  542. /*applicationVersion*/ 0,
  543. /*pEngineName*/ namecs.get_data(),
  544. /*engineVersion*/ 0,
  545. /*apiVersion*/ VK_MAKE_VERSION(vulkan_major, vulkan_minor, 0)
  546. };
  547. VkInstanceCreateInfo inst_info{};
  548. inst_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
  549. inst_info.pApplicationInfo = &app;
  550. inst_info.enabledExtensionCount = enabled_extension_count;
  551. inst_info.ppEnabledExtensionNames = (const char *const *)extension_names;
  552. if (_use_validation_layers()) {
  553. _get_preferred_validation_layers(&inst_info.enabledLayerCount, &inst_info.ppEnabledLayerNames);
  554. }
  555. /*
  556. * This is info for a temp callback to use during CreateInstance.
  557. * After the instance is created, we use the instance-based
  558. * function to register the final callback.
  559. */
  560. VkDebugUtilsMessengerCreateInfoEXT dbg_messenger_create_info;
  561. VkDebugReportCallbackCreateInfoEXT dbg_report_callback_create_info{};
  562. if (enabled_debug_utils) {
  563. // VK_EXT_debug_utils style
  564. dbg_messenger_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
  565. dbg_messenger_create_info.pNext = nullptr;
  566. dbg_messenger_create_info.flags = 0;
  567. dbg_messenger_create_info.messageSeverity =
  568. VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
  569. dbg_messenger_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
  570. VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
  571. VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
  572. dbg_messenger_create_info.pfnUserCallback = _debug_messenger_callback;
  573. dbg_messenger_create_info.pUserData = this;
  574. inst_info.pNext = &dbg_messenger_create_info;
  575. } else if (enabled_debug_report) {
  576. dbg_report_callback_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
  577. dbg_report_callback_create_info.flags = VK_DEBUG_REPORT_INFORMATION_BIT_EXT |
  578. VK_DEBUG_REPORT_WARNING_BIT_EXT |
  579. VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
  580. VK_DEBUG_REPORT_ERROR_BIT_EXT |
  581. VK_DEBUG_REPORT_DEBUG_BIT_EXT;
  582. dbg_report_callback_create_info.pfnCallback = _debug_report_callback;
  583. dbg_report_callback_create_info.pUserData = this;
  584. inst_info.pNext = &dbg_report_callback_create_info;
  585. }
  586. uint32_t gpu_count;
  587. VkResult err = vkCreateInstance(&inst_info, nullptr, &inst);
  588. ERR_FAIL_COND_V_MSG(err == VK_ERROR_INCOMPATIBLE_DRIVER, ERR_CANT_CREATE,
  589. "Cannot find a compatible Vulkan installable client driver (ICD).\n\n"
  590. "vkCreateInstance Failure");
  591. ERR_FAIL_COND_V_MSG(err == VK_ERROR_EXTENSION_NOT_PRESENT, ERR_CANT_CREATE,
  592. "Cannot find a specified extension library.\n"
  593. "Make sure your layers path is set appropriately.\n"
  594. "vkCreateInstance Failure");
  595. ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE,
  596. "vkCreateInstance failed.\n\n"
  597. "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
  598. "Please look at the Getting Started guide for additional information.\n"
  599. "vkCreateInstance Failure");
  600. inst_initialized = true;
  601. /* Make initial call to query gpu_count, then second call for gpu info*/
  602. err = vkEnumeratePhysicalDevices(inst, &gpu_count, nullptr);
  603. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  604. ERR_FAIL_COND_V_MSG(gpu_count == 0, ERR_CANT_CREATE,
  605. "vkEnumeratePhysicalDevices reported zero accessible devices.\n\n"
  606. "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
  607. "vkEnumeratePhysicalDevices Failure");
  608. VkPhysicalDevice *physical_devices = (VkPhysicalDevice *)malloc(sizeof(VkPhysicalDevice) * gpu_count);
  609. err = vkEnumeratePhysicalDevices(inst, &gpu_count, physical_devices);
  610. if (err) {
  611. free(physical_devices);
  612. ERR_FAIL_V(ERR_CANT_CREATE);
  613. }
  614. /* for now, just grab the first physical device */
  615. uint32_t device_index = 0;
  616. gpu = physical_devices[device_index];
  617. free(physical_devices);
  618. /* Look for device extensions */
  619. uint32_t device_extension_count = 0;
  620. VkBool32 swapchainExtFound = 0;
  621. enabled_extension_count = 0;
  622. memset(extension_names, 0, sizeof(extension_names));
  623. /* Get identifier properties */
  624. vkGetPhysicalDeviceProperties(gpu, &gpu_props);
  625. static const struct {
  626. uint32_t id;
  627. const char *name;
  628. } vendor_names[] = {
  629. { 0x1002, "AMD" },
  630. { 0x1010, "ImgTec" },
  631. { 0x10DE, "NVIDIA" },
  632. { 0x13B5, "ARM" },
  633. { 0x5143, "Qualcomm" },
  634. { 0x8086, "INTEL" },
  635. { 0, nullptr },
  636. };
  637. device_name = gpu_props.deviceName;
  638. pipeline_cache_id = String::hex_encode_buffer(gpu_props.pipelineCacheUUID, VK_UUID_SIZE);
  639. pipeline_cache_id += "-driver-" + itos(gpu_props.driverVersion);
  640. {
  641. device_vendor = "Unknown";
  642. uint32_t vendor_idx = 0;
  643. while (vendor_names[vendor_idx].name != nullptr) {
  644. if (gpu_props.vendorID == vendor_names[vendor_idx].id) {
  645. device_vendor = vendor_names[vendor_idx].name;
  646. break;
  647. }
  648. vendor_idx++;
  649. }
  650. }
  651. #ifdef DEBUG_ENABLED
  652. print_line("Using Vulkan Device #" + itos(device_index) + ": " + device_vendor + " - " + device_name);
  653. #endif
  654. device_api_version = gpu_props.apiVersion;
  655. err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, nullptr);
  656. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  657. if (device_extension_count > 0) {
  658. VkExtensionProperties *device_extensions = (VkExtensionProperties *)malloc(sizeof(VkExtensionProperties) * device_extension_count);
  659. err = vkEnumerateDeviceExtensionProperties(gpu, nullptr, &device_extension_count, device_extensions);
  660. if (err) {
  661. free(device_extensions);
  662. ERR_FAIL_V(ERR_CANT_CREATE);
  663. }
  664. for (uint32_t i = 0; i < device_extension_count; i++) {
  665. if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME, device_extensions[i].extensionName)) {
  666. swapchainExtFound = 1;
  667. extension_names[enabled_extension_count++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
  668. }
  669. if (enabled_extension_count >= MAX_EXTENSIONS) {
  670. free(device_extensions);
  671. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  672. }
  673. }
  674. if (VK_KHR_incremental_present_enabled) {
  675. // Even though the user "enabled" the extension via the command
  676. // line, we must make sure that it's enumerated for use with the
  677. // device. Therefore, disable it here, and re-enable it again if
  678. // enumerated.
  679. VK_KHR_incremental_present_enabled = false;
  680. for (uint32_t i = 0; i < device_extension_count; i++) {
  681. if (!strcmp(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, device_extensions[i].extensionName)) {
  682. extension_names[enabled_extension_count++] = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME;
  683. VK_KHR_incremental_present_enabled = true;
  684. }
  685. if (enabled_extension_count >= MAX_EXTENSIONS) {
  686. free(device_extensions);
  687. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  688. }
  689. }
  690. }
  691. if (VK_GOOGLE_display_timing_enabled) {
  692. // Even though the user "enabled" the extension via the command
  693. // line, we must make sure that it's enumerated for use with the
  694. // device. Therefore, disable it here, and re-enable it again if
  695. // enumerated.
  696. VK_GOOGLE_display_timing_enabled = false;
  697. for (uint32_t i = 0; i < device_extension_count; i++) {
  698. if (!strcmp(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, device_extensions[i].extensionName)) {
  699. extension_names[enabled_extension_count++] = VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME;
  700. VK_GOOGLE_display_timing_enabled = true;
  701. }
  702. if (enabled_extension_count >= MAX_EXTENSIONS) {
  703. free(device_extensions);
  704. ERR_FAIL_V_MSG(ERR_BUG, "Enabled extension count reaches MAX_EXTENSIONS, BUG");
  705. }
  706. }
  707. }
  708. free(device_extensions);
  709. }
  710. ERR_FAIL_COND_V_MSG(!swapchainExtFound, ERR_CANT_CREATE,
  711. "vkEnumerateDeviceExtensionProperties failed to find the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
  712. " extension.\n\nDo you have a compatible Vulkan installable client driver (ICD) installed?\n"
  713. "vkCreateInstance Failure");
  714. if (enabled_debug_utils) {
  715. // Setup VK_EXT_debug_utils function pointers always (we use them for
  716. // debug labels and names).
  717. CreateDebugUtilsMessengerEXT =
  718. (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkCreateDebugUtilsMessengerEXT");
  719. DestroyDebugUtilsMessengerEXT =
  720. (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(inst, "vkDestroyDebugUtilsMessengerEXT");
  721. SubmitDebugUtilsMessageEXT =
  722. (PFN_vkSubmitDebugUtilsMessageEXT)vkGetInstanceProcAddr(inst, "vkSubmitDebugUtilsMessageEXT");
  723. CmdBeginDebugUtilsLabelEXT =
  724. (PFN_vkCmdBeginDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdBeginDebugUtilsLabelEXT");
  725. CmdEndDebugUtilsLabelEXT =
  726. (PFN_vkCmdEndDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdEndDebugUtilsLabelEXT");
  727. CmdInsertDebugUtilsLabelEXT =
  728. (PFN_vkCmdInsertDebugUtilsLabelEXT)vkGetInstanceProcAddr(inst, "vkCmdInsertDebugUtilsLabelEXT");
  729. SetDebugUtilsObjectNameEXT =
  730. (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(inst, "vkSetDebugUtilsObjectNameEXT");
  731. if (nullptr == CreateDebugUtilsMessengerEXT || nullptr == DestroyDebugUtilsMessengerEXT ||
  732. nullptr == SubmitDebugUtilsMessageEXT || nullptr == CmdBeginDebugUtilsLabelEXT ||
  733. nullptr == CmdEndDebugUtilsLabelEXT || nullptr == CmdInsertDebugUtilsLabelEXT ||
  734. nullptr == SetDebugUtilsObjectNameEXT) {
  735. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  736. "GetProcAddr: Failed to init VK_EXT_debug_utils\n"
  737. "GetProcAddr: Failure");
  738. }
  739. err = CreateDebugUtilsMessengerEXT(inst, &dbg_messenger_create_info, nullptr, &dbg_messenger);
  740. switch (err) {
  741. case VK_SUCCESS:
  742. break;
  743. case VK_ERROR_OUT_OF_HOST_MEMORY:
  744. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  745. "CreateDebugUtilsMessengerEXT: out of host memory\n"
  746. "CreateDebugUtilsMessengerEXT Failure");
  747. break;
  748. default:
  749. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  750. "CreateDebugUtilsMessengerEXT: unknown failure\n"
  751. "CreateDebugUtilsMessengerEXT Failure");
  752. ERR_FAIL_V(ERR_CANT_CREATE);
  753. break;
  754. }
  755. } else if (enabled_debug_report) {
  756. CreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(inst, "vkCreateDebugReportCallbackEXT");
  757. DebugReportMessageEXT = (PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(inst, "vkDebugReportMessageEXT");
  758. DestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(inst, "vkDestroyDebugReportCallbackEXT");
  759. if (nullptr == CreateDebugReportCallbackEXT || nullptr == DebugReportMessageEXT || nullptr == DestroyDebugReportCallbackEXT) {
  760. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  761. "GetProcAddr: Failed to init VK_EXT_debug_report\n"
  762. "GetProcAddr: Failure");
  763. }
  764. err = CreateDebugReportCallbackEXT(inst, &dbg_report_callback_create_info, nullptr, &dbg_debug_report);
  765. switch (err) {
  766. case VK_SUCCESS:
  767. break;
  768. case VK_ERROR_OUT_OF_HOST_MEMORY:
  769. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  770. "CreateDebugReportCallbackEXT: out of host memory\n"
  771. "CreateDebugReportCallbackEXT Failure");
  772. break;
  773. default:
  774. ERR_FAIL_V_MSG(ERR_CANT_CREATE,
  775. "CreateDebugReportCallbackEXT: unknown failure\n"
  776. "CreateDebugReportCallbackEXT Failure");
  777. ERR_FAIL_V(ERR_CANT_CREATE);
  778. break;
  779. }
  780. }
  781. /* Call with nullptr data to get count */
  782. vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_family_count, nullptr);
  783. ERR_FAIL_COND_V(queue_family_count == 0, ERR_CANT_CREATE);
  784. queue_props = (VkQueueFamilyProperties *)malloc(queue_family_count * sizeof(VkQueueFamilyProperties));
  785. vkGetPhysicalDeviceQueueFamilyProperties(gpu, &queue_family_count, queue_props);
  786. // Query fine-grained feature support for this device.
  787. // If app has specific feature requirements it should check supported
  788. // features based on this query
  789. vkGetPhysicalDeviceFeatures(gpu, &physical_device_features);
  790. physical_device_features.robustBufferAccess = false; //turn off robust buffer access, which can hamper performance on some hardware
  791. #define GET_INSTANCE_PROC_ADDR(inst, entrypoint) \
  792. { \
  793. fp##entrypoint = (PFN_vk##entrypoint)vkGetInstanceProcAddr(inst, "vk" #entrypoint); \
  794. ERR_FAIL_COND_V_MSG(fp##entrypoint == nullptr, ERR_CANT_CREATE, \
  795. "vkGetInstanceProcAddr failed to find vk" #entrypoint); \
  796. }
  797. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceSupportKHR);
  798. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceCapabilitiesKHR);
  799. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfaceFormatsKHR);
  800. GET_INSTANCE_PROC_ADDR(inst, GetPhysicalDeviceSurfacePresentModesKHR);
  801. GET_INSTANCE_PROC_ADDR(inst, GetSwapchainImagesKHR);
  802. // get info about what our vulkan driver is capable off
  803. {
  804. Error res = _check_capabilities();
  805. if (res != OK) {
  806. return res;
  807. }
  808. }
  809. return OK;
  810. }
  811. Error VulkanContext::_create_device() {
  812. VkResult err;
  813. float queue_priorities[1] = { 0.0 };
  814. VkDeviceQueueCreateInfo queues[2];
  815. queues[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  816. queues[0].pNext = nullptr;
  817. queues[0].queueFamilyIndex = graphics_queue_family_index;
  818. queues[0].queueCount = 1;
  819. queues[0].pQueuePriorities = queue_priorities;
  820. queues[0].flags = 0;
  821. VkDeviceCreateInfo sdevice = {
  822. /*sType*/ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
  823. /*pNext*/ nullptr,
  824. /*flags*/ 0,
  825. /*queueCreateInfoCount*/ 1,
  826. /*pQueueCreateInfos*/ queues,
  827. /*enabledLayerCount*/ 0,
  828. /*ppEnabledLayerNames*/ nullptr,
  829. /*enabledExtensionCount*/ enabled_extension_count,
  830. /*ppEnabledExtensionNames*/ (const char *const *)extension_names,
  831. /*pEnabledFeatures*/ &physical_device_features, // If specific features are required, pass them in here
  832. };
  833. if (separate_present_queue) {
  834. queues[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  835. queues[1].pNext = nullptr;
  836. queues[1].queueFamilyIndex = present_queue_family_index;
  837. queues[1].queueCount = 1;
  838. queues[1].pQueuePriorities = queue_priorities;
  839. queues[1].flags = 0;
  840. sdevice.queueCreateInfoCount = 2;
  841. }
  842. err = vkCreateDevice(gpu, &sdevice, nullptr, &device);
  843. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  844. return OK;
  845. }
  846. Error VulkanContext::_initialize_queues(VkSurfaceKHR surface) {
  847. // Iterate over each queue to learn whether it supports presenting:
  848. VkBool32 *supportsPresent = (VkBool32 *)malloc(queue_family_count * sizeof(VkBool32));
  849. for (uint32_t i = 0; i < queue_family_count; i++) {
  850. fpGetPhysicalDeviceSurfaceSupportKHR(gpu, i, surface, &supportsPresent[i]);
  851. }
  852. // Search for a graphics and a present queue in the array of queue
  853. // families, try to find one that supports both
  854. uint32_t graphicsQueueFamilyIndex = UINT32_MAX;
  855. uint32_t presentQueueFamilyIndex = UINT32_MAX;
  856. for (uint32_t i = 0; i < queue_family_count; i++) {
  857. if ((queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
  858. if (graphicsQueueFamilyIndex == UINT32_MAX) {
  859. graphicsQueueFamilyIndex = i;
  860. }
  861. if (supportsPresent[i] == VK_TRUE) {
  862. graphicsQueueFamilyIndex = i;
  863. presentQueueFamilyIndex = i;
  864. break;
  865. }
  866. }
  867. }
  868. if (presentQueueFamilyIndex == UINT32_MAX) {
  869. // If didn't find a queue that supports both graphics and present, then
  870. // find a separate present queue.
  871. for (uint32_t i = 0; i < queue_family_count; ++i) {
  872. if (supportsPresent[i] == VK_TRUE) {
  873. presentQueueFamilyIndex = i;
  874. break;
  875. }
  876. }
  877. }
  878. free(supportsPresent);
  879. // Generate error if could not find both a graphics and a present queue
  880. ERR_FAIL_COND_V_MSG(graphicsQueueFamilyIndex == UINT32_MAX || presentQueueFamilyIndex == UINT32_MAX, ERR_CANT_CREATE,
  881. "Could not find both graphics and present queues\n");
  882. graphics_queue_family_index = graphicsQueueFamilyIndex;
  883. present_queue_family_index = presentQueueFamilyIndex;
  884. separate_present_queue = (graphics_queue_family_index != present_queue_family_index);
  885. _create_device();
  886. static PFN_vkGetDeviceProcAddr g_gdpa = nullptr;
  887. #define GET_DEVICE_PROC_ADDR(dev, entrypoint) \
  888. { \
  889. if (!g_gdpa) \
  890. g_gdpa = (PFN_vkGetDeviceProcAddr)vkGetInstanceProcAddr(inst, "vkGetDeviceProcAddr"); \
  891. fp##entrypoint = (PFN_vk##entrypoint)g_gdpa(dev, "vk" #entrypoint); \
  892. ERR_FAIL_COND_V_MSG(fp##entrypoint == nullptr, ERR_CANT_CREATE, \
  893. "vkGetDeviceProcAddr failed to find vk" #entrypoint); \
  894. }
  895. GET_DEVICE_PROC_ADDR(device, CreateSwapchainKHR);
  896. GET_DEVICE_PROC_ADDR(device, DestroySwapchainKHR);
  897. GET_DEVICE_PROC_ADDR(device, GetSwapchainImagesKHR);
  898. GET_DEVICE_PROC_ADDR(device, AcquireNextImageKHR);
  899. GET_DEVICE_PROC_ADDR(device, QueuePresentKHR);
  900. if (VK_GOOGLE_display_timing_enabled) {
  901. GET_DEVICE_PROC_ADDR(device, GetRefreshCycleDurationGOOGLE);
  902. GET_DEVICE_PROC_ADDR(device, GetPastPresentationTimingGOOGLE);
  903. }
  904. vkGetDeviceQueue(device, graphics_queue_family_index, 0, &graphics_queue);
  905. if (!separate_present_queue) {
  906. present_queue = graphics_queue;
  907. } else {
  908. vkGetDeviceQueue(device, present_queue_family_index, 0, &present_queue);
  909. }
  910. // Get the list of VkFormat's that are supported:
  911. uint32_t formatCount;
  912. VkResult err = fpGetPhysicalDeviceSurfaceFormatsKHR(gpu, surface, &formatCount, nullptr);
  913. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  914. VkSurfaceFormatKHR *surfFormats = (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
  915. err = fpGetPhysicalDeviceSurfaceFormatsKHR(gpu, surface, &formatCount, surfFormats);
  916. if (err) {
  917. free(surfFormats);
  918. ERR_FAIL_V(ERR_CANT_CREATE);
  919. }
  920. // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
  921. // the surface has no preferred format. Otherwise, at least one
  922. // supported format will be returned.
  923. if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED) {
  924. format = VK_FORMAT_B8G8R8A8_UNORM;
  925. color_space = surfFormats[0].colorSpace;
  926. } else {
  927. // These should be ordered with the ones we want to use on top and fallback modes further down
  928. // we want an 32bit RGBA unsigned normalised buffer or similar
  929. const VkFormat allowed_formats[] = {
  930. VK_FORMAT_B8G8R8A8_UNORM,
  931. VK_FORMAT_R8G8B8A8_UNORM
  932. };
  933. uint32_t allowed_formats_count = sizeof(allowed_formats) / sizeof(VkFormat);
  934. if (formatCount < 1) {
  935. free(surfFormats);
  936. ERR_FAIL_V_MSG(ERR_CANT_CREATE, "formatCount less than 1");
  937. }
  938. // Find the first format that we support
  939. format = VK_FORMAT_UNDEFINED;
  940. for (uint32_t af = 0; af < allowed_formats_count && format == VK_FORMAT_UNDEFINED; af++) {
  941. for (uint32_t sf = 0; sf < formatCount && format == VK_FORMAT_UNDEFINED; sf++) {
  942. if (surfFormats[sf].format == allowed_formats[af]) {
  943. format = surfFormats[sf].format;
  944. color_space = surfFormats[sf].colorSpace;
  945. }
  946. }
  947. }
  948. if (format == VK_FORMAT_UNDEFINED) {
  949. free(surfFormats);
  950. ERR_FAIL_V_MSG(ERR_CANT_CREATE, "No usable surface format found.");
  951. }
  952. }
  953. free(surfFormats);
  954. Error serr = _create_semaphores();
  955. if (serr) {
  956. return serr;
  957. }
  958. queues_initialized = true;
  959. return OK;
  960. }
  961. Error VulkanContext::_create_semaphores() {
  962. VkResult err;
  963. // Create semaphores to synchronize acquiring presentable buffers before
  964. // rendering and waiting for drawing to be complete before presenting
  965. VkSemaphoreCreateInfo semaphoreCreateInfo = {
  966. /*sType*/ VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
  967. /*pNext*/ nullptr,
  968. /*flags*/ 0,
  969. };
  970. // Create fences that we can use to throttle if we get too far
  971. // ahead of the image presents
  972. VkFenceCreateInfo fence_ci = {
  973. /*sType*/ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
  974. /*pNext*/ nullptr,
  975. /*flags*/ VK_FENCE_CREATE_SIGNALED_BIT
  976. };
  977. for (uint32_t i = 0; i < FRAME_LAG; i++) {
  978. err = vkCreateFence(device, &fence_ci, nullptr, &fences[i]);
  979. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  980. err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &image_acquired_semaphores[i]);
  981. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  982. err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &draw_complete_semaphores[i]);
  983. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  984. if (separate_present_queue) {
  985. err = vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &image_ownership_semaphores[i]);
  986. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  987. }
  988. }
  989. frame_index = 0;
  990. // Get Memory information and properties
  991. vkGetPhysicalDeviceMemoryProperties(gpu, &memory_properties);
  992. return OK;
  993. }
  994. bool VulkanContext::_use_validation_layers() {
  995. return Engine::get_singleton()->is_validation_layers_enabled();
  996. }
  997. Error VulkanContext::_window_create(DisplayServer::WindowID p_window_id, VkSurfaceKHR p_surface, int p_width, int p_height) {
  998. ERR_FAIL_COND_V(windows.has(p_window_id), ERR_INVALID_PARAMETER);
  999. if (!queues_initialized) {
  1000. // We use a single GPU, but we need a surface to initialize the
  1001. // queues, so this process must be deferred until a surface
  1002. // is created.
  1003. Error err = _initialize_queues(p_surface);
  1004. ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);
  1005. }
  1006. Window window;
  1007. window.surface = p_surface;
  1008. window.width = p_width;
  1009. window.height = p_height;
  1010. Error err = _update_swap_chain(&window);
  1011. ERR_FAIL_COND_V(err != OK, ERR_CANT_CREATE);
  1012. windows[p_window_id] = window;
  1013. return OK;
  1014. }
  1015. void VulkanContext::window_resize(DisplayServer::WindowID p_window, int p_width, int p_height) {
  1016. ERR_FAIL_COND(!windows.has(p_window));
  1017. windows[p_window].width = p_width;
  1018. windows[p_window].height = p_height;
  1019. _update_swap_chain(&windows[p_window]);
  1020. }
  1021. int VulkanContext::window_get_width(DisplayServer::WindowID p_window) {
  1022. ERR_FAIL_COND_V(!windows.has(p_window), -1);
  1023. return windows[p_window].width;
  1024. }
  1025. int VulkanContext::window_get_height(DisplayServer::WindowID p_window) {
  1026. ERR_FAIL_COND_V(!windows.has(p_window), -1);
  1027. return windows[p_window].height;
  1028. }
  1029. VkRenderPass VulkanContext::window_get_render_pass(DisplayServer::WindowID p_window) {
  1030. ERR_FAIL_COND_V(!windows.has(p_window), VK_NULL_HANDLE);
  1031. Window *w = &windows[p_window];
  1032. //vulkan use of currentbuffer
  1033. return w->render_pass;
  1034. }
  1035. VkFramebuffer VulkanContext::window_get_framebuffer(DisplayServer::WindowID p_window) {
  1036. ERR_FAIL_COND_V(!windows.has(p_window), VK_NULL_HANDLE);
  1037. ERR_FAIL_COND_V(!buffers_prepared, VK_NULL_HANDLE);
  1038. Window *w = &windows[p_window];
  1039. //vulkan use of currentbuffer
  1040. return w->swapchain_image_resources[w->current_buffer].framebuffer;
  1041. }
  1042. void VulkanContext::window_destroy(DisplayServer::WindowID p_window_id) {
  1043. ERR_FAIL_COND(!windows.has(p_window_id));
  1044. _clean_up_swap_chain(&windows[p_window_id]);
  1045. vkDestroySurfaceKHR(inst, windows[p_window_id].surface, nullptr);
  1046. windows.erase(p_window_id);
  1047. }
  1048. Error VulkanContext::_clean_up_swap_chain(Window *window) {
  1049. if (!window->swapchain) {
  1050. return OK;
  1051. }
  1052. vkDeviceWaitIdle(device);
  1053. //this destroys images associated it seems
  1054. fpDestroySwapchainKHR(device, window->swapchain, nullptr);
  1055. window->swapchain = VK_NULL_HANDLE;
  1056. vkDestroyRenderPass(device, window->render_pass, nullptr);
  1057. if (window->swapchain_image_resources) {
  1058. for (uint32_t i = 0; i < swapchainImageCount; i++) {
  1059. vkDestroyImageView(device, window->swapchain_image_resources[i].view, nullptr);
  1060. vkDestroyFramebuffer(device, window->swapchain_image_resources[i].framebuffer, nullptr);
  1061. }
  1062. free(window->swapchain_image_resources);
  1063. window->swapchain_image_resources = nullptr;
  1064. }
  1065. if (separate_present_queue) {
  1066. vkDestroyCommandPool(device, window->present_cmd_pool, nullptr);
  1067. }
  1068. return OK;
  1069. }
  1070. Error VulkanContext::_update_swap_chain(Window *window) {
  1071. VkResult err;
  1072. if (window->swapchain) {
  1073. _clean_up_swap_chain(window);
  1074. }
  1075. // Check the surface capabilities and formats
  1076. VkSurfaceCapabilitiesKHR surfCapabilities;
  1077. err = fpGetPhysicalDeviceSurfaceCapabilitiesKHR(gpu, window->surface, &surfCapabilities);
  1078. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1079. uint32_t presentModeCount;
  1080. err = fpGetPhysicalDeviceSurfacePresentModesKHR(gpu, window->surface, &presentModeCount, nullptr);
  1081. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1082. VkPresentModeKHR *presentModes = (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
  1083. ERR_FAIL_COND_V(!presentModes, ERR_CANT_CREATE);
  1084. err = fpGetPhysicalDeviceSurfacePresentModesKHR(gpu, window->surface, &presentModeCount, presentModes);
  1085. if (err) {
  1086. free(presentModes);
  1087. ERR_FAIL_V(ERR_CANT_CREATE);
  1088. }
  1089. VkExtent2D swapchainExtent;
  1090. // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
  1091. if (surfCapabilities.currentExtent.width == 0xFFFFFFFF) {
  1092. // If the surface size is undefined, the size is set to the size
  1093. // of the images requested, which must fit within the minimum and
  1094. // maximum values.
  1095. swapchainExtent.width = window->width;
  1096. swapchainExtent.height = window->height;
  1097. if (swapchainExtent.width < surfCapabilities.minImageExtent.width) {
  1098. swapchainExtent.width = surfCapabilities.minImageExtent.width;
  1099. } else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width) {
  1100. swapchainExtent.width = surfCapabilities.maxImageExtent.width;
  1101. }
  1102. if (swapchainExtent.height < surfCapabilities.minImageExtent.height) {
  1103. swapchainExtent.height = surfCapabilities.minImageExtent.height;
  1104. } else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height) {
  1105. swapchainExtent.height = surfCapabilities.maxImageExtent.height;
  1106. }
  1107. } else {
  1108. // If the surface size is defined, the swap chain size must match
  1109. swapchainExtent = surfCapabilities.currentExtent;
  1110. window->width = surfCapabilities.currentExtent.width;
  1111. window->height = surfCapabilities.currentExtent.height;
  1112. }
  1113. if (window->width == 0 || window->height == 0) {
  1114. free(presentModes);
  1115. //likely window minimized, no swapchain created
  1116. return OK;
  1117. }
  1118. // The FIFO present mode is guaranteed by the spec to be supported
  1119. // and to have no tearing. It's a great default present mode to use.
  1120. VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
  1121. // There are times when you may wish to use another present mode. The
  1122. // following code shows how to select them, and the comments provide some
  1123. // reasons you may wish to use them.
  1124. //
  1125. // It should be noted that Vulkan 1.0 doesn't provide a method for
  1126. // synchronizing rendering with the presentation engine's display. There
  1127. // is a method provided for throttling rendering with the display, but
  1128. // there are some presentation engines for which this method will not work.
  1129. // If an application doesn't throttle its rendering, and if it renders much
  1130. // faster than the refresh rate of the display, this can waste power on
  1131. // mobile devices. That is because power is being spent rendering images
  1132. // that may never be seen.
  1133. // VK_PRESENT_MODE_IMMEDIATE_KHR is for applications that don't care about
  1134. // tearing, or have some way of synchronizing their rendering with the
  1135. // display.
  1136. // VK_PRESENT_MODE_MAILBOX_KHR may be useful for applications that
  1137. // generally render a new presentable image every refresh cycle, but are
  1138. // occasionally early. In this case, the application wants the new image
  1139. // to be displayed instead of the previously-queued-for-presentation image
  1140. // that has not yet been displayed.
  1141. // VK_PRESENT_MODE_FIFO_RELAXED_KHR is for applications that generally
  1142. // render a new presentable image every refresh cycle, but are occasionally
  1143. // late. In this case (perhaps because of stuttering/latency concerns),
  1144. // the application wants the late image to be immediately displayed, even
  1145. // though that may mean some tearing.
  1146. if (window->presentMode != swapchainPresentMode) {
  1147. for (size_t i = 0; i < presentModeCount; ++i) {
  1148. if (presentModes[i] == window->presentMode) {
  1149. swapchainPresentMode = window->presentMode;
  1150. break;
  1151. }
  1152. }
  1153. }
  1154. free(presentModes);
  1155. ERR_FAIL_COND_V_MSG(swapchainPresentMode != window->presentMode, ERR_CANT_CREATE, "Present mode specified is not supported\n");
  1156. // Determine the number of VkImages to use in the swap chain.
  1157. // Application desires to acquire 3 images at a time for triple
  1158. // buffering
  1159. uint32_t desiredNumOfSwapchainImages = 3;
  1160. if (desiredNumOfSwapchainImages < surfCapabilities.minImageCount) {
  1161. desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
  1162. }
  1163. // If maxImageCount is 0, we can ask for as many images as we want;
  1164. // otherwise we're limited to maxImageCount
  1165. if ((surfCapabilities.maxImageCount > 0) && (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
  1166. // Application must settle for fewer images than desired:
  1167. desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
  1168. }
  1169. VkSurfaceTransformFlagsKHR preTransform;
  1170. if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
  1171. preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
  1172. } else {
  1173. preTransform = surfCapabilities.currentTransform;
  1174. }
  1175. // Find a supported composite alpha mode - one of these is guaranteed to be set
  1176. VkCompositeAlphaFlagBitsKHR compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
  1177. VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
  1178. VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
  1179. VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
  1180. VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
  1181. VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
  1182. };
  1183. for (uint32_t i = 0; i < ARRAY_SIZE(compositeAlphaFlags); i++) {
  1184. if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i]) {
  1185. compositeAlpha = compositeAlphaFlags[i];
  1186. break;
  1187. }
  1188. }
  1189. VkSwapchainCreateInfoKHR swapchain_ci = {
  1190. /*sType*/ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
  1191. /*pNext*/ nullptr,
  1192. /*flags*/ 0,
  1193. /*surface*/ window->surface,
  1194. /*minImageCount*/ desiredNumOfSwapchainImages,
  1195. /*imageFormat*/ format,
  1196. /*imageColorSpace*/ color_space,
  1197. /*imageExtent*/ {
  1198. /*width*/ swapchainExtent.width,
  1199. /*height*/ swapchainExtent.height,
  1200. },
  1201. /*imageArrayLayers*/ 1,
  1202. /*imageUsage*/ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
  1203. /*imageSharingMode*/ VK_SHARING_MODE_EXCLUSIVE,
  1204. /*queueFamilyIndexCount*/ 0,
  1205. /*pQueueFamilyIndices*/ nullptr,
  1206. /*preTransform*/ (VkSurfaceTransformFlagBitsKHR)preTransform,
  1207. /*compositeAlpha*/ compositeAlpha,
  1208. /*presentMode*/ swapchainPresentMode,
  1209. /*clipped*/ true,
  1210. /*oldSwapchain*/ VK_NULL_HANDLE,
  1211. };
  1212. err = fpCreateSwapchainKHR(device, &swapchain_ci, nullptr, &window->swapchain);
  1213. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1214. uint32_t sp_image_count;
  1215. err = fpGetSwapchainImagesKHR(device, window->swapchain, &sp_image_count, nullptr);
  1216. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1217. if (swapchainImageCount == 0) {
  1218. //assign here for the first time.
  1219. swapchainImageCount = sp_image_count;
  1220. } else {
  1221. ERR_FAIL_COND_V(swapchainImageCount != sp_image_count, ERR_BUG);
  1222. }
  1223. VkImage *swapchainImages = (VkImage *)malloc(swapchainImageCount * sizeof(VkImage));
  1224. ERR_FAIL_COND_V(!swapchainImages, ERR_CANT_CREATE);
  1225. err = fpGetSwapchainImagesKHR(device, window->swapchain, &swapchainImageCount, swapchainImages);
  1226. if (err) {
  1227. free(swapchainImages);
  1228. ERR_FAIL_V(ERR_CANT_CREATE);
  1229. }
  1230. window->swapchain_image_resources =
  1231. (SwapchainImageResources *)malloc(sizeof(SwapchainImageResources) * swapchainImageCount);
  1232. if (!window->swapchain_image_resources) {
  1233. free(swapchainImages);
  1234. ERR_FAIL_V(ERR_CANT_CREATE);
  1235. }
  1236. for (uint32_t i = 0; i < swapchainImageCount; i++) {
  1237. VkImageViewCreateInfo color_image_view = {
  1238. /*sType*/ VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
  1239. /*pNext*/ nullptr,
  1240. /*flags*/ 0,
  1241. /*image*/ swapchainImages[i],
  1242. /*viewType*/ VK_IMAGE_VIEW_TYPE_2D,
  1243. /*format*/ format,
  1244. /*components*/ {
  1245. /*r*/ VK_COMPONENT_SWIZZLE_R,
  1246. /*g*/ VK_COMPONENT_SWIZZLE_G,
  1247. /*b*/ VK_COMPONENT_SWIZZLE_B,
  1248. /*a*/ VK_COMPONENT_SWIZZLE_A,
  1249. },
  1250. /*subresourceRange*/ { /*aspectMask*/ VK_IMAGE_ASPECT_COLOR_BIT,
  1251. /*baseMipLevel*/ 0,
  1252. /*levelCount*/ 1,
  1253. /*baseArrayLayer*/ 0,
  1254. /*layerCount*/ 1 },
  1255. };
  1256. window->swapchain_image_resources[i].image = swapchainImages[i];
  1257. color_image_view.image = window->swapchain_image_resources[i].image;
  1258. err = vkCreateImageView(device, &color_image_view, nullptr, &window->swapchain_image_resources[i].view);
  1259. if (err) {
  1260. free(swapchainImages);
  1261. ERR_FAIL_V(ERR_CANT_CREATE);
  1262. }
  1263. }
  1264. free(swapchainImages);
  1265. /******** FRAMEBUFFER ************/
  1266. {
  1267. const VkAttachmentDescription attachment = {
  1268. /*flags*/ 0,
  1269. /*format*/ format,
  1270. /*samples*/ VK_SAMPLE_COUNT_1_BIT,
  1271. /*loadOp*/ VK_ATTACHMENT_LOAD_OP_CLEAR,
  1272. /*storeOp*/ VK_ATTACHMENT_STORE_OP_STORE,
  1273. /*stencilLoadOp*/ VK_ATTACHMENT_LOAD_OP_DONT_CARE,
  1274. /*stencilStoreOp*/ VK_ATTACHMENT_STORE_OP_DONT_CARE,
  1275. /*initialLayout*/ VK_IMAGE_LAYOUT_UNDEFINED,
  1276. /*finalLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
  1277. };
  1278. const VkAttachmentReference color_reference = {
  1279. /*attachment*/ 0,
  1280. /*layout*/ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
  1281. };
  1282. const VkSubpassDescription subpass = {
  1283. /*flags*/ 0,
  1284. /*pipelineBindPoint*/ VK_PIPELINE_BIND_POINT_GRAPHICS,
  1285. /*inputAttachmentCount*/ 0,
  1286. /*pInputAttachments*/ nullptr,
  1287. /*colorAttachmentCount*/ 1,
  1288. /*pColorAttachments*/ &color_reference,
  1289. /*pResolveAttachments*/ nullptr,
  1290. /*pDepthStencilAttachment*/ nullptr,
  1291. /*preserveAttachmentCount*/ 0,
  1292. /*pPreserveAttachments*/ nullptr,
  1293. };
  1294. const VkRenderPassCreateInfo rp_info = {
  1295. /*sTyp*/ VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
  1296. /*pNext*/ nullptr,
  1297. /*flags*/ 0,
  1298. /*attachmentCount*/ 1,
  1299. /*pAttachments*/ &attachment,
  1300. /*subpassCount*/ 1,
  1301. /*pSubpasses*/ &subpass,
  1302. /*dependencyCount*/ 0,
  1303. /*pDependencies*/ nullptr,
  1304. };
  1305. err = vkCreateRenderPass(device, &rp_info, nullptr, &window->render_pass);
  1306. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1307. for (uint32_t i = 0; i < swapchainImageCount; i++) {
  1308. const VkFramebufferCreateInfo fb_info = {
  1309. /*sType*/ VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
  1310. /*pNext*/ nullptr,
  1311. /*flags*/ 0,
  1312. /*renderPass*/ window->render_pass,
  1313. /*attachmentCount*/ 1,
  1314. /*pAttachments*/ &window->swapchain_image_resources[i].view,
  1315. /*width*/ (uint32_t)window->width,
  1316. /*height*/ (uint32_t)window->height,
  1317. /*layers*/ 1,
  1318. };
  1319. err = vkCreateFramebuffer(device, &fb_info, nullptr, &window->swapchain_image_resources[i].framebuffer);
  1320. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1321. }
  1322. }
  1323. /******** SEPARATE PRESENT QUEUE ************/
  1324. if (separate_present_queue) {
  1325. const VkCommandPoolCreateInfo present_cmd_pool_info = {
  1326. /*sType*/ VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
  1327. /*pNext*/ nullptr,
  1328. /*flags*/ 0,
  1329. /*queueFamilyIndex*/ present_queue_family_index,
  1330. };
  1331. err = vkCreateCommandPool(device, &present_cmd_pool_info, nullptr, &window->present_cmd_pool);
  1332. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1333. const VkCommandBufferAllocateInfo present_cmd_info = {
  1334. /*sType*/ VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
  1335. /*pNext*/ nullptr,
  1336. /*commandPool*/ window->present_cmd_pool,
  1337. /*level*/ VK_COMMAND_BUFFER_LEVEL_PRIMARY,
  1338. /*commandBufferCount*/ 1,
  1339. };
  1340. for (uint32_t i = 0; i < swapchainImageCount; i++) {
  1341. err = vkAllocateCommandBuffers(device, &present_cmd_info,
  1342. &window->swapchain_image_resources[i].graphics_to_present_cmd);
  1343. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1344. const VkCommandBufferBeginInfo cmd_buf_info = {
  1345. /*sType*/ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
  1346. /*pNext*/ nullptr,
  1347. /*flags*/ VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
  1348. /*pInheritanceInfo*/ nullptr,
  1349. };
  1350. err = vkBeginCommandBuffer(window->swapchain_image_resources[i].graphics_to_present_cmd, &cmd_buf_info);
  1351. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1352. VkImageMemoryBarrier image_ownership_barrier = {
  1353. /*sType*/ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
  1354. /*pNext*/ nullptr,
  1355. /*srcAccessMask*/ 0,
  1356. /*dstAccessMask*/ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
  1357. /*oldLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
  1358. /*newLayout*/ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
  1359. /*srcQueueFamilyIndex*/ graphics_queue_family_index,
  1360. /*dstQueueFamilyIndex*/ present_queue_family_index,
  1361. /*image*/ window->swapchain_image_resources[i].image,
  1362. /*subresourceRange*/ { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 }
  1363. };
  1364. vkCmdPipelineBarrier(window->swapchain_image_resources[i].graphics_to_present_cmd, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
  1365. VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, 1, &image_ownership_barrier);
  1366. err = vkEndCommandBuffer(window->swapchain_image_resources[i].graphics_to_present_cmd);
  1367. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1368. }
  1369. }
  1370. //reset current buffer
  1371. window->current_buffer = 0;
  1372. return OK;
  1373. }
  1374. Error VulkanContext::initialize() {
  1375. Error err = _create_physical_device();
  1376. if (err) {
  1377. return err;
  1378. }
  1379. device_initialized = true;
  1380. return OK;
  1381. }
  1382. void VulkanContext::set_setup_buffer(const VkCommandBuffer &pCommandBuffer) {
  1383. command_buffer_queue.write[0] = pCommandBuffer;
  1384. }
  1385. void VulkanContext::append_command_buffer(const VkCommandBuffer &pCommandBuffer) {
  1386. if (command_buffer_queue.size() <= command_buffer_count) {
  1387. command_buffer_queue.resize(command_buffer_count + 1);
  1388. }
  1389. command_buffer_queue.write[command_buffer_count] = pCommandBuffer;
  1390. command_buffer_count++;
  1391. }
  1392. void VulkanContext::flush(bool p_flush_setup, bool p_flush_pending) {
  1393. // ensure everything else pending is executed
  1394. vkDeviceWaitIdle(device);
  1395. //flush the pending setup buffer
  1396. if (p_flush_setup && command_buffer_queue[0]) {
  1397. //use a fence to wait for everything done
  1398. VkSubmitInfo submit_info;
  1399. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1400. submit_info.pNext = nullptr;
  1401. submit_info.pWaitDstStageMask = nullptr;
  1402. submit_info.waitSemaphoreCount = 0;
  1403. submit_info.pWaitSemaphores = nullptr;
  1404. submit_info.commandBufferCount = 1;
  1405. submit_info.pCommandBuffers = command_buffer_queue.ptr();
  1406. submit_info.signalSemaphoreCount = 0;
  1407. submit_info.pSignalSemaphores = nullptr;
  1408. VkResult err = vkQueueSubmit(graphics_queue, 1, &submit_info, VK_NULL_HANDLE);
  1409. command_buffer_queue.write[0] = nullptr;
  1410. ERR_FAIL_COND(err);
  1411. vkDeviceWaitIdle(device);
  1412. }
  1413. if (p_flush_pending && command_buffer_count > 1) {
  1414. //use a fence to wait for everything done
  1415. VkSubmitInfo submit_info;
  1416. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1417. submit_info.pNext = nullptr;
  1418. submit_info.pWaitDstStageMask = nullptr;
  1419. submit_info.waitSemaphoreCount = 0;
  1420. submit_info.pWaitSemaphores = nullptr;
  1421. submit_info.commandBufferCount = command_buffer_count - 1;
  1422. submit_info.pCommandBuffers = command_buffer_queue.ptr() + 1;
  1423. submit_info.signalSemaphoreCount = 0;
  1424. submit_info.pSignalSemaphores = nullptr;
  1425. VkResult err = vkQueueSubmit(graphics_queue, 1, &submit_info, VK_NULL_HANDLE);
  1426. ERR_FAIL_COND(err);
  1427. vkDeviceWaitIdle(device);
  1428. command_buffer_count = 1;
  1429. }
  1430. }
Error VulkanContext::prepare_buffers() {
	// Acquires the next swapchain image for every window, throttling to at
	// most FRAME_LAG frames in flight. Must be called before recording a new
	// frame; sets buffers_prepared on success.
	if (!queues_initialized) {
		return OK;
	}

	VkResult err;

	// Ensure no more than FRAME_LAG renderings are outstanding: wait on the
	// fence signalled by the submission FRAME_LAG frames ago, then re-arm it.
	vkWaitForFences(device, 1, &fences[frame_index], VK_TRUE, UINT64_MAX);
	vkResetFences(device, 1, &fences[frame_index]);

	for (Map<int, Window>::Element *E = windows.front(); E; E = E->next()) {
		Window *w = &E->get();

		// Windows without a swapchain (e.g. minimized) have nothing to acquire.
		if (w->swapchain == VK_NULL_HANDLE) {
			continue;
		}

		// Retry acquisition until it succeeds; an out-of-date swapchain is
		// recreated in place and the acquire is attempted again.
		do {
			// Get the index of the next available swapchain image:
			err =
					fpAcquireNextImageKHR(device, w->swapchain, UINT64_MAX,
							image_acquired_semaphores[frame_index], VK_NULL_HANDLE, &w->current_buffer);

			if (err == VK_ERROR_OUT_OF_DATE_KHR) {
				// swapchain is out of date (e.g. the window was resized) and
				// must be recreated:
				print_verbose("Vulkan: Early out of date swapchain, recreating.");
				//resize_notify();
				_update_swap_chain(w);
			} else if (err == VK_SUBOPTIMAL_KHR) {
				// swapchain is not as optimal as it could be, but the platform's
				// presentation engine will still present the image correctly.
				// The acquired image is still usable, so stop retrying.
				print_verbose("Vulkan: Early suboptimal swapchain.");
				break;
			} else {
				ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
			}
		} while (err != VK_SUCCESS);
	}

	buffers_prepared = true;
	return OK;
}
  1468. Error VulkanContext::swap_buffers() {
  1469. if (!queues_initialized) {
  1470. return OK;
  1471. }
  1472. // print_line("swapbuffers?");
  1473. VkResult err;
  1474. #if 0
  1475. if (VK_GOOGLE_display_timing_enabled) {
  1476. // Look at what happened to previous presents, and make appropriate
  1477. // adjustments in timing:
  1478. DemoUpdateTargetIPD(demo);
  1479. // Note: a real application would position its geometry to that it's in
  1480. // the correct location for when the next image is presented. It might
  1481. // also wait, so that there's less latency between any input and when
  1482. // the next image is rendered/presented. This demo program is so
  1483. // simple that it doesn't do either of those.
  1484. }
  1485. #endif
  1486. // Wait for the image acquired semaphore to be signalled to ensure
  1487. // that the image won't be rendered to until the presentation
  1488. // engine has fully released ownership to the application, and it is
  1489. // okay to render to the image.
  1490. const VkCommandBuffer *commands_ptr = nullptr;
  1491. uint32_t commands_to_submit = 0;
  1492. if (command_buffer_queue[0] == nullptr) {
  1493. //no setup command, but commands to submit, submit from the first and skip command
  1494. if (command_buffer_count > 1) {
  1495. commands_ptr = command_buffer_queue.ptr() + 1;
  1496. commands_to_submit = command_buffer_count - 1;
  1497. }
  1498. } else {
  1499. commands_ptr = command_buffer_queue.ptr();
  1500. commands_to_submit = command_buffer_count;
  1501. }
  1502. VkPipelineStageFlags pipe_stage_flags;
  1503. VkSubmitInfo submit_info;
  1504. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1505. submit_info.pNext = nullptr;
  1506. submit_info.pWaitDstStageMask = &pipe_stage_flags;
  1507. pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  1508. submit_info.waitSemaphoreCount = 1;
  1509. submit_info.pWaitSemaphores = &image_acquired_semaphores[frame_index];
  1510. submit_info.commandBufferCount = commands_to_submit;
  1511. submit_info.pCommandBuffers = commands_ptr;
  1512. submit_info.signalSemaphoreCount = 1;
  1513. submit_info.pSignalSemaphores = &draw_complete_semaphores[frame_index];
  1514. err = vkQueueSubmit(graphics_queue, 1, &submit_info, fences[frame_index]);
  1515. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1516. command_buffer_queue.write[0] = nullptr;
  1517. command_buffer_count = 1;
  1518. if (separate_present_queue) {
  1519. // If we are using separate queues, change image ownership to the
  1520. // present queue before presenting, waiting for the draw complete
  1521. // semaphore and signalling the ownership released semaphore when finished
  1522. VkFence nullFence = VK_NULL_HANDLE;
  1523. pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
  1524. submit_info.waitSemaphoreCount = 1;
  1525. submit_info.pWaitSemaphores = &draw_complete_semaphores[frame_index];
  1526. submit_info.commandBufferCount = 0;
  1527. VkCommandBuffer *cmdbufptr = (VkCommandBuffer *)alloca(sizeof(VkCommandBuffer *) * windows.size());
  1528. submit_info.pCommandBuffers = cmdbufptr;
  1529. for (Map<int, Window>::Element *E = windows.front(); E; E = E->next()) {
  1530. Window *w = &E->get();
  1531. if (w->swapchain == VK_NULL_HANDLE) {
  1532. continue;
  1533. }
  1534. cmdbufptr[submit_info.commandBufferCount] = w->swapchain_image_resources[w->current_buffer].graphics_to_present_cmd;
  1535. submit_info.commandBufferCount++;
  1536. }
  1537. submit_info.signalSemaphoreCount = 1;
  1538. submit_info.pSignalSemaphores = &image_ownership_semaphores[frame_index];
  1539. err = vkQueueSubmit(present_queue, 1, &submit_info, nullFence);
  1540. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1541. }
  1542. // If we are using separate queues, we have to wait for image ownership,
  1543. // otherwise wait for draw complete
  1544. VkPresentInfoKHR present = {
  1545. /*sType*/ VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
  1546. /*pNext*/ nullptr,
  1547. /*waitSemaphoreCount*/ 1,
  1548. /*pWaitSemaphores*/ (separate_present_queue) ? &image_ownership_semaphores[frame_index] : &draw_complete_semaphores[frame_index],
  1549. /*swapchainCount*/ 0,
  1550. /*pSwapchain*/ nullptr,
  1551. /*pImageIndices*/ nullptr,
  1552. /*pResults*/ nullptr,
  1553. };
  1554. VkSwapchainKHR *pSwapchains = (VkSwapchainKHR *)alloca(sizeof(VkSwapchainKHR *) * windows.size());
  1555. uint32_t *pImageIndices = (uint32_t *)alloca(sizeof(uint32_t *) * windows.size());
  1556. present.pSwapchains = pSwapchains;
  1557. present.pImageIndices = pImageIndices;
  1558. for (Map<int, Window>::Element *E = windows.front(); E; E = E->next()) {
  1559. Window *w = &E->get();
  1560. if (w->swapchain == VK_NULL_HANDLE) {
  1561. continue;
  1562. }
  1563. pSwapchains[present.swapchainCount] = w->swapchain;
  1564. pImageIndices[present.swapchainCount] = w->current_buffer;
  1565. present.swapchainCount++;
  1566. }
  1567. #if 0
  1568. if (VK_KHR_incremental_present_enabled) {
  1569. // If using VK_KHR_incremental_present, we provide a hint of the region
  1570. // that contains changed content relative to the previously-presented
  1571. // image. The implementation can use this hint in order to save
  1572. // work/power (by only copying the region in the hint). The
  1573. // implementation is free to ignore the hint though, and so we must
  1574. // ensure that the entire image has the correctly-drawn content.
  1575. uint32_t eighthOfWidth = width / 8;
  1576. uint32_t eighthOfHeight = height / 8;
  1577. VkRectLayerKHR rect = {
  1578. /*offset.x*/ eighthOfWidth,
  1579. /*offset.y*/ eighthOfHeight,
  1580. /*extent.width*/ eighthOfWidth * 6,
  1581. /*extent.height*/ eighthOfHeight * 6,
  1582. /*layer*/ 0,
  1583. };
  1584. VkPresentRegionKHR region = {
  1585. /*rectangleCount*/ 1,
  1586. /*pRectangles*/ &rect,
  1587. };
  1588. VkPresentRegionsKHR regions = {
  1589. /*sType*/ VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
  1590. /*pNext*/ present.pNext,
  1591. /*swapchainCount*/ present.swapchainCount,
  1592. /*pRegions*/ &region,
  1593. };
  1594. present.pNext = &regions;
  1595. }
  1596. #endif
  1597. #if 0
  1598. if (VK_GOOGLE_display_timing_enabled) {
  1599. VkPresentTimeGOOGLE ptime;
  1600. if (prev_desired_present_time == 0) {
  1601. // This must be the first present for this swapchain.
  1602. //
  1603. // We don't know where we are relative to the presentation engine's
  1604. // display's refresh cycle. We also don't know how long rendering
  1605. // takes. Let's make a grossly-simplified assumption that the
  1606. // desiredPresentTime should be half way between now and
  1607. // now+target_IPD. We will adjust over time.
  1608. uint64_t curtime = getTimeInNanoseconds();
  1609. if (curtime == 0) {
  1610. // Since we didn't find out the current time, don't give a
  1611. // desiredPresentTime:
  1612. ptime.desiredPresentTime = 0;
  1613. } else {
  1614. ptime.desiredPresentTime = curtime + (target_IPD >> 1);
  1615. }
  1616. } else {
  1617. ptime.desiredPresentTime = (prev_desired_present_time + target_IPD);
  1618. }
  1619. ptime.presentID = next_present_id++;
  1620. prev_desired_present_time = ptime.desiredPresentTime;
  1621. VkPresentTimesInfoGOOGLE present_time = {
  1622. /*sType*/ VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
  1623. /*pNext*/ present.pNext,
  1624. /*swapchainCount*/ present.swapchainCount,
  1625. /*pTimes*/ &ptime,
  1626. };
  1627. if (VK_GOOGLE_display_timing_enabled) {
  1628. present.pNext = &present_time;
  1629. }
  1630. }
  1631. #endif
  1632. static int total_frames = 0;
  1633. total_frames++;
  1634. // print_line("current buffer: " + itos(current_buffer));
  1635. err = fpQueuePresentKHR(present_queue, &present);
  1636. frame_index += 1;
  1637. frame_index %= FRAME_LAG;
  1638. if (err == VK_ERROR_OUT_OF_DATE_KHR) {
  1639. // swapchain is out of date (e.g. the window was resized) and
  1640. // must be recreated:
  1641. print_verbose("Vulkan: Swapchain is out of date, recreating.");
  1642. resize_notify();
  1643. } else if (err == VK_SUBOPTIMAL_KHR) {
  1644. // swapchain is not as optimal as it could be, but the platform's
  1645. // presentation engine will still present the image correctly.
  1646. print_verbose("Vulkan: Swapchain is suboptimal.");
  1647. } else {
  1648. ERR_FAIL_COND_V(err, ERR_CANT_CREATE);
  1649. }
  1650. buffers_prepared = false;
  1651. return OK;
  1652. }
  1653. void VulkanContext::resize_notify() {
  1654. }
  1655. VkDevice VulkanContext::get_device() {
  1656. return device;
  1657. }
  1658. VkPhysicalDevice VulkanContext::get_physical_device() {
  1659. return gpu;
  1660. }
  1661. int VulkanContext::get_swapchain_image_count() const {
  1662. return swapchainImageCount;
  1663. }
  1664. uint32_t VulkanContext::get_graphics_queue() const {
  1665. return graphics_queue_family_index;
  1666. }
  1667. VkFormat VulkanContext::get_screen_format() const {
  1668. return format;
  1669. }
  1670. VkPhysicalDeviceLimits VulkanContext::get_device_limits() const {
  1671. return gpu_props.limits;
  1672. }
  1673. RID VulkanContext::local_device_create() {
  1674. LocalDevice ld;
  1675. { //create device
  1676. VkResult err;
  1677. float queue_priorities[1] = { 0.0 };
  1678. VkDeviceQueueCreateInfo queues[2];
  1679. queues[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  1680. queues[0].pNext = nullptr;
  1681. queues[0].queueFamilyIndex = graphics_queue_family_index;
  1682. queues[0].queueCount = 1;
  1683. queues[0].pQueuePriorities = queue_priorities;
  1684. queues[0].flags = 0;
  1685. VkDeviceCreateInfo sdevice = {
  1686. /*sType =*/VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
  1687. /*pNext */ nullptr,
  1688. /*flags */ 0,
  1689. /*queueCreateInfoCount */ 1,
  1690. /*pQueueCreateInfos */ queues,
  1691. /*enabledLayerCount */ 0,
  1692. /*ppEnabledLayerNames */ nullptr,
  1693. /*enabledExtensionCount */ enabled_extension_count,
  1694. /*ppEnabledExtensionNames */ (const char *const *)extension_names,
  1695. /*pEnabledFeatures */ &physical_device_features, // If specific features are required, pass them in here
  1696. };
  1697. err = vkCreateDevice(gpu, &sdevice, nullptr, &ld.device);
  1698. ERR_FAIL_COND_V(err, RID());
  1699. }
  1700. { //create graphics queue
  1701. vkGetDeviceQueue(ld.device, graphics_queue_family_index, 0, &ld.queue);
  1702. }
  1703. return local_device_owner.make_rid(ld);
  1704. }
  1705. VkDevice VulkanContext::local_device_get_vk_device(RID p_local_device) {
  1706. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1707. return ld->device;
  1708. }
  1709. void VulkanContext::local_device_push_command_buffers(RID p_local_device, const VkCommandBuffer *p_buffers, int p_count) {
  1710. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1711. ERR_FAIL_COND(ld->waiting);
  1712. VkSubmitInfo submit_info;
  1713. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1714. submit_info.pNext = nullptr;
  1715. submit_info.pWaitDstStageMask = nullptr;
  1716. submit_info.waitSemaphoreCount = 0;
  1717. submit_info.pWaitSemaphores = nullptr;
  1718. submit_info.commandBufferCount = p_count;
  1719. submit_info.pCommandBuffers = p_buffers;
  1720. submit_info.signalSemaphoreCount = 0;
  1721. submit_info.pSignalSemaphores = nullptr;
  1722. VkResult err = vkQueueSubmit(ld->queue, 1, &submit_info, VK_NULL_HANDLE);
  1723. if (err == VK_ERROR_OUT_OF_HOST_MEMORY) {
  1724. print_line("Vulkan: Out of host memory!");
  1725. }
  1726. if (err == VK_ERROR_OUT_OF_DEVICE_MEMORY) {
  1727. print_line("Vulkan: Out of device memory!");
  1728. }
  1729. if (err == VK_ERROR_DEVICE_LOST) {
  1730. print_line("Vulkan: Device lost!");
  1731. }
  1732. ERR_FAIL_COND(err);
  1733. ld->waiting = true;
  1734. }
  1735. void VulkanContext::local_device_sync(RID p_local_device) {
  1736. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1737. ERR_FAIL_COND(!ld->waiting);
  1738. vkDeviceWaitIdle(ld->device);
  1739. ld->waiting = false;
  1740. }
  1741. void VulkanContext::local_device_free(RID p_local_device) {
  1742. LocalDevice *ld = local_device_owner.getornull(p_local_device);
  1743. vkDestroyDevice(ld->device, nullptr);
  1744. local_device_owner.free(p_local_device);
  1745. }
  1746. void VulkanContext::command_begin_label(VkCommandBuffer p_command_buffer, String p_label_name, const Color p_color) {
  1747. if (!enabled_debug_utils) {
  1748. return;
  1749. }
  1750. CharString cs = p_label_name.utf8().get_data();
  1751. VkDebugUtilsLabelEXT label;
  1752. label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
  1753. label.pNext = nullptr;
  1754. label.pLabelName = cs.get_data();
  1755. label.color[0] = p_color[0];
  1756. label.color[1] = p_color[1];
  1757. label.color[2] = p_color[2];
  1758. label.color[3] = p_color[3];
  1759. CmdBeginDebugUtilsLabelEXT(p_command_buffer, &label);
  1760. }
  1761. void VulkanContext::command_insert_label(VkCommandBuffer p_command_buffer, String p_label_name, const Color p_color) {
  1762. if (!enabled_debug_utils) {
  1763. return;
  1764. }
  1765. CharString cs = p_label_name.utf8().get_data();
  1766. VkDebugUtilsLabelEXT label;
  1767. label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
  1768. label.pNext = nullptr;
  1769. label.pLabelName = cs.get_data();
  1770. label.color[0] = p_color[0];
  1771. label.color[1] = p_color[1];
  1772. label.color[2] = p_color[2];
  1773. label.color[3] = p_color[3];
  1774. CmdInsertDebugUtilsLabelEXT(p_command_buffer, &label);
  1775. }
  1776. void VulkanContext::command_end_label(VkCommandBuffer p_command_buffer) {
  1777. if (!enabled_debug_utils) {
  1778. return;
  1779. }
  1780. CmdEndDebugUtilsLabelEXT(p_command_buffer);
  1781. }
  1782. void VulkanContext::set_object_name(VkObjectType p_object_type, uint64_t p_object_handle, String p_object_name) {
  1783. if (!enabled_debug_utils) {
  1784. return;
  1785. }
  1786. CharString obj_data = p_object_name.utf8();
  1787. VkDebugUtilsObjectNameInfoEXT name_info;
  1788. name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  1789. name_info.pNext = nullptr;
  1790. name_info.objectType = p_object_type;
  1791. name_info.objectHandle = p_object_handle;
  1792. name_info.pObjectName = obj_data.get_data();
  1793. SetDebugUtilsObjectNameEXT(device, &name_info);
  1794. }
  1795. String VulkanContext::get_device_vendor_name() const {
  1796. return device_vendor;
  1797. }
  1798. String VulkanContext::get_device_name() const {
  1799. return device_name;
  1800. }
  1801. String VulkanContext::get_device_pipeline_cache_uuid() const {
  1802. return pipeline_cache_id;
  1803. }
  1804. VulkanContext::VulkanContext() {
  1805. command_buffer_queue.resize(1); // First one is always the setup command.
  1806. command_buffer_queue.write[0] = nullptr;
  1807. }
VulkanContext::~VulkanContext() {
	// Tears down everything created for rendering: per-frame sync objects,
	// debug messengers/callbacks, the logical device, and finally the instance.
	// queue_props was malloc'ed during setup, so it is free'd here.
	if (queue_props) {
		free(queue_props);
	}
	if (device_initialized) {
		// Destroy the per-frame-in-flight fences and semaphores before the
		// device that owns them.
		for (uint32_t i = 0; i < FRAME_LAG; i++) {
			vkDestroyFence(device, fences[i], nullptr);
			vkDestroySemaphore(device, image_acquired_semaphores[i], nullptr);
			vkDestroySemaphore(device, draw_complete_semaphores[i], nullptr);
			if (separate_present_queue) {
				// Ownership-transfer semaphores only exist with a separate present queue.
				vkDestroySemaphore(device, image_ownership_semaphores[i], nullptr);
			}
		}
		// Debug messengers/callbacks are instance-level objects; only destroy
		// them if the instance was actually created.
		if (inst_initialized && enabled_debug_utils) {
			DestroyDebugUtilsMessengerEXT(inst, dbg_messenger, nullptr);
		}
		if (inst_initialized && dbg_debug_report != VK_NULL_HANDLE) {
			DestroyDebugReportCallbackEXT(inst, dbg_debug_report, nullptr);
		}
		vkDestroyDevice(device, nullptr);
	}
	// The instance is destroyed last, after everything it owns.
	if (inst_initialized) {
		vkDestroyInstance(inst, nullptr);
	}
}