/**************************************************************************/
/*  rendering_context_driver_vulkan.cpp                                   */
/**************************************************************************/
/*                         This file is part of:                          */
/*                             GODOT ENGINE                               */
/*                        https://godotengine.org                         */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur.                  */
/*                                                                        */
/* Permission is hereby granted, free of charge, to any person obtaining  */
/* a copy of this software and associated documentation files (the        */
/* "Software"), to deal in the Software without restriction, including    */
/* without limitation the rights to use, copy, modify, merge, publish,    */
/* distribute, sublicense, and/or sell copies of the Software, and to     */
/* permit persons to whom the Software is furnished to do so, subject to  */
/* the following conditions:                                              */
/*                                                                        */
/* The above copyright notice and this permission notice shall be         */
/* included in all copies or substantial portions of the Software.        */
/*                                                                        */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,        */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF     */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY   */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,   */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE      */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                 */
/**************************************************************************/

#ifdef VULKAN_ENABLED

#include "rendering_context_driver_vulkan.h"

#include "vk_enum_string_helper.h"

#include "core/config/project_settings.h"
#include "core/version.h"
#include "rendering_device_driver_vulkan.h"
#include "vulkan_hooks.h"

#if defined(VK_TRACK_DRIVER_MEMORY)
/*************************************************/
// Driver memory tracking
/*************************************************/
// Total driver memory and allocation amount.
SafeNumeric<size_t> driver_memory_total_memory;
SafeNumeric<size_t> driver_memory_total_alloc_count;
// Amount of driver memory for every object type and allocation scope.
SafeNumeric<size_t> driver_memory_tracker[RenderingContextDriverVulkan::VK_TRACKED_OBJECT_TYPE_COUNT][RenderingContextDriverVulkan::VK_TRACKED_SYSTEM_ALLOCATION_SCOPE_COUNT];
// Amount of allocations for every object type and allocation scope.
SafeNumeric<uint32_t> driver_memory_allocation_count[RenderingContextDriverVulkan::VK_TRACKED_OBJECT_TYPE_COUNT][RenderingContextDriverVulkan::VK_TRACKED_SYSTEM_ALLOCATION_SCOPE_COUNT];
#endif

#if defined(VK_TRACK_DEVICE_MEMORY)
/*************************************************/
// Device memory report
/*************************************************/
// Table of device memory allocation sizes, keyed by memory object ID.
HashMap<uint64_t, size_t> memory_report_table;
// Total memory and allocation amount.
SafeNumeric<uint64_t> memory_report_total_memory;
SafeNumeric<uint64_t> memory_report_total_alloc_count;
// Amount of device memory for every object type.
SafeNumeric<size_t> memory_report_mem_usage[RenderingContextDriverVulkan::VK_TRACKED_OBJECT_TYPE_COUNT];
// Amount of device memory allocations for every object type.
SafeNumeric<size_t> memory_report_allocation_count[RenderingContextDriverVulkan::VK_TRACKED_OBJECT_TYPE_COUNT];
#endif

const char *RenderingContextDriverVulkan::get_tracked_object_name(uint32_t p_type_index) const {
#if defined(VK_TRACK_DRIVER_MEMORY) || defined(VK_TRACK_DEVICE_MEMORY)
	static constexpr const char *vkTrackedObjectTypeNames[] = { "UNKNOWN",
		"INSTANCE",
		"PHYSICAL_DEVICE",
		"DEVICE",
		"QUEUE",
		"SEMAPHORE",
		"COMMAND_BUFFER",
		"FENCE",
		"DEVICE_MEMORY",
		"BUFFER",
		"IMAGE",
		"EVENT",
		"QUERY_POOL",
		"BUFFER_VIEW",
		"IMAGE_VIEW",
		"SHADER_MODULE",
		"PIPELINE_CACHE",
		"PIPELINE_LAYOUT",
		"RENDER_PASS",
		"PIPELINE",
		"DESCRIPTOR_SET_LAYOUT",
		"SAMPLER",
		"DESCRIPTOR_POOL",
		"DESCRIPTOR_SET",
		"FRAMEBUFFER",
		"COMMAND_POOL",
		"DESCRIPTOR_UPDATE_TEMPLATE_KHR",
		"SURFACE_KHR",
		"SWAPCHAIN_KHR",
		"DEBUG_UTILS_MESSENGER_EXT",
		"DEBUG_REPORT_CALLBACK_EXT",
		"ACCELERATION_STRUCTURE",
		"VMA_BUFFER_OR_IMAGE" };

	return vkTrackedObjectTypeNames[p_type_index];
#else
	return "VK_TRACK_DRIVER_* disabled at build time";
#endif
}

#if defined(VK_TRACK_DRIVER_MEMORY) || defined(VK_TRACK_DEVICE_MEMORY)
uint64_t RenderingContextDriverVulkan::get_tracked_object_type_count() const {
	return VK_TRACKED_OBJECT_TYPE_COUNT;
}
#endif

#if defined(VK_TRACK_DRIVER_MEMORY) || defined(VK_TRACK_DEVICE_MEMORY)
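// Collapses the sparse VkObjectType enum (core values plus the KHR/EXT extension values
// handled in the switch below) into the dense VkTrackedObjectType index used by the
// tracking arrays above. Unrecognized values fall back to the UNKNOWN slot.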
RenderingContextDriverVulkan::VkTrackedObjectType vk_object_to_tracked_object(VkObjectType p_type) {
	if (p_type > VK_OBJECT_TYPE_COMMAND_POOL && p_type != (VkObjectType)RenderingContextDriverVulkan::VK_TRACKED_OBJECT_TYPE_VMA) {
		switch (p_type) {
			case VK_OBJECT_TYPE_SURFACE_KHR:
				return RenderingContextDriverVulkan::VK_TRACKED_OBJECT_TYPE_SURFACE;
			case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
				return RenderingContextDriverVulkan::VK_TRACKED_OBJECT_TYPE_SWAPCHAIN;
			case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
				return RenderingContextDriverVulkan::VK_TRACKED_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT;
			case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
				return RenderingContextDriverVulkan::VK_TRACKED_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT;
			default:
				_err_print_error(FUNCTION_STR, __FILE__, __LINE__,
						"Unknown VkObjectType enum value " + itos((uint32_t)p_type) +
								". Please add it to VkTrackedObjectType, switch statement in "
								"vk_object_to_tracked_object and get_tracked_object_name.");
				return (RenderingContextDriverVulkan::VkTrackedObjectType)VK_OBJECT_TYPE_UNKNOWN;
		}
	}
	return (RenderingContextDriverVulkan::VkTrackedObjectType)p_type;
}
#endif

#if defined(VK_TRACK_DEVICE_MEMORY)
uint64_t RenderingContextDriverVulkan::get_device_total_memory() const {
	return memory_report_total_memory.get();
}

uint64_t RenderingContextDriverVulkan::get_device_allocation_count() const {
	return memory_report_total_alloc_count.get();
}

uint64_t RenderingContextDriverVulkan::get_device_memory_by_object_type(uint32_t p_type) const {
	return memory_report_mem_usage[p_type].get();
}

uint64_t RenderingContextDriverVulkan::get_device_allocs_by_object_type(uint32_t p_type) const {
	return memory_report_allocation_count[p_type].get();
}
#endif

#if defined(VK_TRACK_DRIVER_MEMORY)
uint64_t RenderingContextDriverVulkan::get_driver_total_memory() const {
	return driver_memory_total_memory.get();
}

uint64_t RenderingContextDriverVulkan::get_driver_allocation_count() const {
	return driver_memory_total_alloc_count.get();
}

uint64_t RenderingContextDriverVulkan::get_driver_memory_by_object_type(uint32_t p_type) const {
	uint64_t ret = 0;
	for (uint32_t i = 0; i < VK_TRACKED_SYSTEM_ALLOCATION_SCOPE_COUNT; i++) {
		ret += driver_memory_tracker[p_type][i].get();
	}
	return ret;
}

uint64_t RenderingContextDriverVulkan::get_driver_allocs_by_object_type(uint32_t p_type) const {
	uint64_t ret = 0;
	for (uint32_t i = 0; i < VK_TRACKED_SYSTEM_ALLOCATION_SCOPE_COUNT; i++) {
		ret += driver_memory_allocation_count[p_type][i].get();
	}
	return ret;
}
#endif

#if defined(VK_TRACK_DEVICE_MEMORY)
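// Device memory report callback (VK_EXT_device_memory_report). Keeps the totals and the
// per-object-type counters above in sync with the driver's allocate/free events.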
void RenderingContextDriverVulkan::memory_report_callback(const VkDeviceMemoryReportCallbackDataEXT *p_callback_data, void *p_user_data) {
	if (!p_callback_data) {
		return;
	}

	const RenderingContextDriverVulkan::VkTrackedObjectType obj_type = vk_object_to_tracked_object(p_callback_data->objectType);
	uint64_t obj_id = p_callback_data->memoryObjectId;

	if (p_callback_data->type == VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT) {
		// Realloc, update size.
		if (memory_report_table.has(obj_id)) {
			memory_report_total_memory.sub(memory_report_table[obj_id]);
			memory_report_mem_usage[obj_type].sub(memory_report_table[obj_id]);
			memory_report_total_memory.add(p_callback_data->size);
			memory_report_mem_usage[obj_type].add(p_callback_data->size);
			memory_report_table[p_callback_data->memoryObjectId] = p_callback_data->size;
		} else {
			memory_report_table[obj_id] = p_callback_data->size;
			memory_report_total_alloc_count.increment();
			memory_report_allocation_count[obj_type].increment();
			memory_report_mem_usage[obj_type].add(p_callback_data->size);
			memory_report_total_memory.add(p_callback_data->size);
		}
	} else if (p_callback_data->type == VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT) {
		if (memory_report_table.has(obj_id)) {
			memory_report_total_alloc_count.decrement();
			memory_report_allocation_count[obj_type].decrement();
			memory_report_mem_usage[obj_type].sub(p_callback_data->size);
			memory_report_total_memory.sub(p_callback_data->size);
			memory_report_table.remove(memory_report_table.find(obj_id));
		}
	}
}
#endif
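
// Returns the host-memory allocation callbacks used for driver memory tracking. When
// VK_TRACK_DRIVER_MEMORY is disabled this returns nullptr, so Vulkan falls back to its
// default allocator. The Vulkan calls in this file pass the callbacks returned here so
// host allocations can be attributed per object type.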
VkAllocationCallbacks *RenderingContextDriverVulkan::get_allocation_callbacks(VkObjectType p_type) {
#if !defined(VK_TRACK_DRIVER_MEMORY)
	return nullptr;
#else
	struct TrackedMemHeader {
		size_t size;
		VkSystemAllocationScope allocation_scope;
		VkTrackedObjectType type;
	};
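
	// Layout of a tracked allocation, as implemented by the callbacks below:
	//
	//   [TrackedMemHeader][padding][alignment : size_t][user data...]
	//   ^ block start                                  ^ pointer returned to Vulkan
	//
	// The header sits at the start of the block, the requested alignment is stored in the
	// size_t immediately before the user pointer, and the user pointer is block + alignment
	// (alignment is raised to at least tracking_data_size so both fit).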
	VkAllocationCallbacks tracking_callbacks = {
		// pUserData, set per object type below.
		nullptr,
		// Allocation function.
		[](
				void *p_user_data,
				size_t size,
				size_t alignment,
				VkSystemAllocationScope allocation_scope) -> void * {
			static constexpr size_t tracking_data_size = 32;
			VkTrackedObjectType type = static_cast<VkTrackedObjectType>(*reinterpret_cast<VkTrackedObjectType *>(p_user_data));

			driver_memory_total_memory.add(size);
			driver_memory_total_alloc_count.increment();
			driver_memory_tracker[type][allocation_scope].add(size);
			driver_memory_allocation_count[type][allocation_scope].increment();

			alignment = MAX(alignment, tracking_data_size);

			uint8_t *ret = reinterpret_cast<uint8_t *>(Memory::alloc_aligned_static(size + alignment, alignment));
			if (ret == nullptr) {
				return nullptr;
			}
			// Track allocation.
			TrackedMemHeader *header = reinterpret_cast<TrackedMemHeader *>(ret);
			header->size = size;
			header->allocation_scope = allocation_scope;
			header->type = type;
			*reinterpret_cast<size_t *>(ret + alignment - sizeof(size_t)) = alignment;

			// Return first available chunk of memory.
			return ret + alignment;
		},
		// Reallocation function.
		[](
				void *p_user_data,
				void *p_original,
				size_t size,
				size_t alignment,
				VkSystemAllocationScope allocation_scope) -> void * {
			if (p_original == nullptr) {
				VkObjectType type = static_cast<VkObjectType>(*reinterpret_cast<uint32_t *>(p_user_data));
				return get_allocation_callbacks(type)->pfnAllocation(p_user_data, size, alignment, allocation_scope);
			}

			uint8_t *mem = reinterpret_cast<uint8_t *>(p_original);
			// Retrieve alignment.
			alignment = *reinterpret_cast<size_t *>(mem - sizeof(size_t));
			// Retrieve allocation data.
			TrackedMemHeader *header = reinterpret_cast<TrackedMemHeader *>(mem - alignment);

			// Update allocation size.
			driver_memory_total_memory.sub(header->size);
			driver_memory_total_memory.add(size);
			driver_memory_tracker[header->type][header->allocation_scope].sub(header->size);
			driver_memory_tracker[header->type][header->allocation_scope].add(size);

			uint8_t *ret = reinterpret_cast<uint8_t *>(Memory::realloc_aligned_static(header, size + alignment, header->size + alignment, alignment));
			if (ret == nullptr) {
				return nullptr;
			}
			// Update tracker.
			header = reinterpret_cast<TrackedMemHeader *>(ret);
			header->size = size;
			return ret + alignment;
		},
		// Free function.
		[](
				void *p_user_data,
				void *p_memory) {
			if (!p_memory) {
				return;
			}

			uint8_t *mem = reinterpret_cast<uint8_t *>(p_memory);
			size_t alignment = *reinterpret_cast<size_t *>(mem - sizeof(size_t));
			TrackedMemHeader *header = reinterpret_cast<TrackedMemHeader *>(mem - alignment);

			driver_memory_total_alloc_count.decrement();
			driver_memory_total_memory.sub(header->size);
			driver_memory_tracker[header->type][header->allocation_scope].sub(header->size);
			driver_memory_allocation_count[header->type][header->allocation_scope].decrement();

			Memory::free_aligned_static(header);
		},
		// Internal allocation / deallocation. We don't track them as they cannot really be controlled or optimized by the programmer.
		[](
				void *p_user_data,
				size_t size,
				VkInternalAllocationType allocation_type,
				VkSystemAllocationScope allocation_scope) {
		},
		[](
				void *p_user_data,
				size_t size,
				VkInternalAllocationType allocation_type,
				VkSystemAllocationScope allocation_scope) {
		},
	};

	// Create a callback per object type.
	static VkAllocationCallbacks object_callbacks[VK_TRACKED_OBJECT_TYPE_COUNT] = {};
	static uint32_t object_user_data[VK_TRACKED_OBJECT_TYPE_COUNT] = {};

	// Only build the first time.
	if (!object_callbacks[0].pfnAllocation) {
		for (uint32_t c = 0; c < VK_TRACKED_OBJECT_TYPE_COUNT; ++c) {
			object_callbacks[c] = tracking_callbacks;
			object_user_data[c] = c;
			object_callbacks[c].pUserData = &object_user_data[c];

			for (uint32_t i = 0; i < VK_TRACKED_SYSTEM_ALLOCATION_SCOPE_COUNT; i++) {
				driver_memory_tracker[c][i].set(0);
				driver_memory_allocation_count[c][i].set(0);
			}
		}
	}

	uint32_t type_index = vk_object_to_tracked_object(p_type);
	return &object_callbacks[type_index];
#endif
}

RenderingContextDriverVulkan::RenderingContextDriverVulkan() {
	// Empty constructor.
}

RenderingContextDriverVulkan::~RenderingContextDriverVulkan() {
	if (debug_messenger != VK_NULL_HANDLE && functions.DestroyDebugUtilsMessengerEXT != nullptr) {
		functions.DestroyDebugUtilsMessengerEXT(instance, debug_messenger, get_allocation_callbacks(VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT));
	}

	if (debug_report != VK_NULL_HANDLE && functions.DestroyDebugReportCallbackEXT != nullptr) {
		functions.DestroyDebugReportCallbackEXT(instance, debug_report, get_allocation_callbacks(VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT));
	}

	if (instance != VK_NULL_HANDLE) {
		vkDestroyInstance(instance, get_allocation_callbacks(VK_OBJECT_TYPE_INSTANCE));
	}
}

Error RenderingContextDriverVulkan::_initialize_vulkan_version() {
	// https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkApplicationInfo.html#_description
	// For Vulkan 1.0, vkEnumerateInstanceVersion is not available, including in the loader we compile against on Android.
	typedef VkResult(VKAPI_PTR *_vkEnumerateInstanceVersion)(uint32_t *);
	_vkEnumerateInstanceVersion func = (_vkEnumerateInstanceVersion)vkGetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion");
	if (func != nullptr) {
		uint32_t api_version;
		VkResult res = func(&api_version);
		if (res == VK_SUCCESS) {
			instance_api_version = api_version;
		} else {
			// According to the documentation, this shouldn't fail with anything except a memory allocation error,
			// in which case we're in deep trouble anyway.
			ERR_FAIL_V(ERR_CANT_CREATE);
		}
	} else {
		print_line("vkEnumerateInstanceVersion not available, assuming Vulkan 1.0.");
		instance_api_version = VK_API_VERSION_1_0;
	}

	return OK;
}

void RenderingContextDriverVulkan::_register_requested_instance_extension(const CharString &p_extension_name, bool p_required) {
	ERR_FAIL_COND(requested_instance_extensions.has(p_extension_name));
	requested_instance_extensions[p_extension_name] = p_required;
}

Error RenderingContextDriverVulkan::_initialize_instance_extensions() {
	enabled_instance_extension_names.clear();

	// The surface extension and the platform-specific surface extension are core requirements.
	_register_requested_instance_extension(VK_KHR_SURFACE_EXTENSION_NAME, true);
	if (_get_platform_surface_extension()) {
		_register_requested_instance_extension(_get_platform_surface_extension(), true);
	}

	if (_use_validation_layers()) {
		_register_requested_instance_extension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, false);
	}

	// This extension allows us to use the properties2 features to query additional device capabilities.
	_register_requested_instance_extension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, false);

#if defined(USE_VOLK) && (defined(MACOS_ENABLED) || defined(IOS_ENABLED))
	_register_requested_instance_extension(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME, true);
#endif

	// Only enable debug utils in verbose mode or DEV_ENABLED.
	// End users would get spammed with messages of varying verbosity due to the
	// mess that thirdparty layers/extensions and drivers seem to leave in their
	// wake, making the Windows registry a bottomless pit of broken layer JSON.
#ifdef DEV_ENABLED
	bool want_debug_utils = true;
#else
	bool want_debug_utils = OS::get_singleton()->is_stdout_verbose();
#endif
	if (want_debug_utils) {
		_register_requested_instance_extension(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, false);
	}

	// Load instance extensions that are available.
	uint32_t instance_extension_count = 0;
	VkResult err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, nullptr);
	ERR_FAIL_COND_V(err != VK_SUCCESS && err != VK_INCOMPLETE, ERR_CANT_CREATE);
	ERR_FAIL_COND_V_MSG(instance_extension_count == 0, ERR_CANT_CREATE, "No instance extensions were found.");

	TightLocalVector<VkExtensionProperties> instance_extensions;
	instance_extensions.resize(instance_extension_count);
	err = vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions.ptr());
	if (err != VK_SUCCESS && err != VK_INCOMPLETE) {
		ERR_FAIL_V(ERR_CANT_CREATE);
	}

#ifdef DEV_ENABLED
	for (uint32_t i = 0; i < instance_extension_count; i++) {
		print_verbose(String("VULKAN: Found instance extension ") + String::utf8(instance_extensions[i].extensionName) + String("."));
	}
#endif

	// Enable all extensions that are supported and requested.
	for (uint32_t i = 0; i < instance_extension_count; i++) {
		CharString extension_name(instance_extensions[i].extensionName);
		if (requested_instance_extensions.has(extension_name)) {
			enabled_instance_extension_names.insert(extension_name);
		}
	}

	// Now check our requested extensions.
	for (KeyValue<CharString, bool> &requested_extension : requested_instance_extensions) {
		if (!enabled_instance_extension_names.has(requested_extension.key)) {
			if (requested_extension.value) {
				ERR_FAIL_V_MSG(ERR_BUG, String("Required extension ") + String::utf8(requested_extension.key) + String(" not found."));
			} else {
				print_verbose(String("Optional extension ") + String::utf8(requested_extension.key) + String(" not found."));
			}
		}
	}

	return OK;
}

Error RenderingContextDriverVulkan::_find_validation_layers(TightLocalVector<const char *> &r_layer_names) const {
	r_layer_names.clear();

	uint32_t instance_layer_count = 0;
	VkResult err = vkEnumerateInstanceLayerProperties(&instance_layer_count, nullptr);
	ERR_FAIL_COND_V(err != VK_SUCCESS, ERR_CANT_CREATE);

	if (instance_layer_count > 0) {
		TightLocalVector<VkLayerProperties> layer_properties;
		layer_properties.resize(instance_layer_count);
		err = vkEnumerateInstanceLayerProperties(&instance_layer_count, layer_properties.ptr());
		ERR_FAIL_COND_V(err != VK_SUCCESS, ERR_CANT_CREATE);

		// Preferred set of validation layers.
		const std::initializer_list<const char *> preferred = { "VK_LAYER_KHRONOS_validation" };

		// Alternative (deprecated, removed in SDK 1.1.126.0) set of validation layers.
		const std::initializer_list<const char *> lunarg = { "VK_LAYER_LUNARG_standard_validation" };

		// Alternative (deprecated, removed in SDK 1.1.121.1) set of validation layers.
		const std::initializer_list<const char *> google = { "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation", "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_core_validation", "VK_LAYER_GOOGLE_unique_objects" };

		// Verify all the layers of the list are present; the first fully available list is used.
		for (const std::initializer_list<const char *> &list : { preferred, lunarg, google }) {
			bool layers_found = false;
			for (const char *layer_name : list) {
				layers_found = false;

				for (const VkLayerProperties &properties : layer_properties) {
					if (!strcmp(properties.layerName, layer_name)) {
						layers_found = true;
						break;
					}
				}

				if (!layers_found) {
					break;
				}
			}

			if (layers_found) {
				r_layer_names.reserve(list.size());
				for (const char *layer_name : list) {
					r_layer_names.push_back(layer_name);
				}
				break;
			}
		}
	}

	return OK;
}
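
// VK_EXT_debug_utils messenger callback. Known false positives are filtered out first; the
// remaining messages are routed to Godot's logging macros according to their severity.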
VKAPI_ATTR VkBool32 VKAPI_CALL RenderingContextDriverVulkan::_debug_messenger_callback(VkDebugUtilsMessageSeverityFlagBitsEXT p_message_severity, VkDebugUtilsMessageTypeFlagsEXT p_message_type, const VkDebugUtilsMessengerCallbackDataEXT *p_callback_data, void *p_user_data) {
	// This error needs to be ignored because the AMD allocator will mix up memory types on IGP processors.
	if (strstr(p_callback_data->pMessage, "Mapping an image with layout") != nullptr && strstr(p_callback_data->pMessage, "can result in undefined behavior if this memory is used by the device") != nullptr) {
		return VK_FALSE;
	}

	// This needs to be ignored because Validator is wrong here.
	if (strstr(p_callback_data->pMessage, "Invalid SPIR-V binary version 1.3") != nullptr) {
		return VK_FALSE;
	}

	// This needs to be ignored because Validator is wrong here.
	if (strstr(p_callback_data->pMessage, "Shader requires flag") != nullptr) {
		return VK_FALSE;
	}

	// This needs to be ignored because Validator is wrong here.
	if (strstr(p_callback_data->pMessage, "SPIR-V module not valid: Pointer operand") != nullptr && strstr(p_callback_data->pMessage, "must be a memory object") != nullptr) {
		return VK_FALSE;
	}

	if (p_callback_data->pMessageIdName && strstr(p_callback_data->pMessageIdName, "UNASSIGNED-CoreValidation-DrawState-ClearCmdBeforeDraw") != nullptr) {
		return VK_FALSE;
	}

	String type_string;
	switch (p_message_type) {
		case (VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT):
			type_string = "GENERAL";
			break;
		case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT):
			type_string = "VALIDATION";
			break;
		case (VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
			type_string = "PERFORMANCE";
			break;
		case (VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT):
			type_string = "VALIDATION|PERFORMANCE";
			break;
	}

	String objects_string;
	if (p_callback_data->objectCount > 0) {
		objects_string = "\n\tObjects - " + String::num_int64(p_callback_data->objectCount);
		for (uint32_t object = 0; object < p_callback_data->objectCount; ++object) {
			objects_string +=
					"\n\t\tObject[" + String::num_int64(object) + "]" +
					" - " + string_VkObjectType(p_callback_data->pObjects[object].objectType) +
					", Handle " + String::num_int64(p_callback_data->pObjects[object].objectHandle);

			if (p_callback_data->pObjects[object].pObjectName != nullptr && strlen(p_callback_data->pObjects[object].pObjectName) > 0) {
				objects_string += ", Name \"" + String(p_callback_data->pObjects[object].pObjectName) + "\"";
			}
		}
	}

	String labels_string;
	if (p_callback_data->cmdBufLabelCount > 0) {
		labels_string = "\n\tCommand Buffer Labels - " + String::num_int64(p_callback_data->cmdBufLabelCount);
		for (uint32_t cmd_buf_label = 0; cmd_buf_label < p_callback_data->cmdBufLabelCount; ++cmd_buf_label) {
			labels_string +=
					"\n\t\tLabel[" + String::num_int64(cmd_buf_label) + "]" +
					" - " + p_callback_data->pCmdBufLabels[cmd_buf_label].pLabelName +
					"{ ";
			for (int color_idx = 0; color_idx < 4; ++color_idx) {
				labels_string += String::num(p_callback_data->pCmdBufLabels[cmd_buf_label].color[color_idx]);
				if (color_idx < 3) {
					labels_string += ", ";
				}
			}
			labels_string += " }";
		}
	}

	String error_message(type_string +
			" - Message Id Number: " + String::num_int64(p_callback_data->messageIdNumber) +
			" | Message Id Name: " + p_callback_data->pMessageIdName +
			"\n\t" + p_callback_data->pMessage +
			objects_string + labels_string);

	// Convert VK severity to our own log macros.
	switch (p_message_severity) {
		case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
			print_verbose(error_message);
			break;
		case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
			print_line(error_message);
			break;
		case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
			WARN_PRINT(error_message);
			break;
		case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
			ERR_PRINT(error_message);
			CRASH_COND_MSG(Engine::get_singleton()->is_abort_on_gpu_errors_enabled(), "Crashing, because abort on GPU errors is enabled.");
			break;
		case VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT:
			break; // Shouldn't happen, only handling to make compilers happy.
	}

	return VK_FALSE;
}

VKAPI_ATTR VkBool32 VKAPI_CALL RenderingContextDriverVulkan::_debug_report_callback(VkDebugReportFlagsEXT p_flags, VkDebugReportObjectTypeEXT p_object_type, uint64_t p_object, size_t p_location, int32_t p_message_code, const char *p_layer_prefix, const char *p_message, void *p_user_data) {
	String debug_message = String("Vulkan Debug Report: object - ") + String::num_int64(p_object) + "\n" + p_message;

	switch (p_flags) {
		case VK_DEBUG_REPORT_DEBUG_BIT_EXT:
		case VK_DEBUG_REPORT_INFORMATION_BIT_EXT:
			print_line(debug_message);
			break;
		case VK_DEBUG_REPORT_WARNING_BIT_EXT:
		case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT:
			WARN_PRINT(debug_message);
			break;
		case VK_DEBUG_REPORT_ERROR_BIT_EXT:
			ERR_PRINT(debug_message);
			break;
	}

	return VK_FALSE;
}

Error RenderingContextDriverVulkan::_initialize_instance() {
	Error err;
	TightLocalVector<const char *> enabled_extension_names;
	enabled_extension_names.reserve(enabled_instance_extension_names.size());
	for (const CharString &extension_name : enabled_instance_extension_names) {
		enabled_extension_names.push_back(extension_name.ptr());
	}

	// We'll set the application version to the Vulkan version we're developing against; even if our instance is based on an older
	// Vulkan version, devices can still support newer versions of Vulkan. The exception is when we're on Vulkan 1.0, in which case
	// we should not set this to anything but 1.0. Note that this value is only used by validation layers to warn us about version issues.
	uint32_t application_api_version = instance_api_version == VK_API_VERSION_1_0 ? VK_API_VERSION_1_0 : VK_API_VERSION_1_2;

	CharString cs = GLOBAL_GET("application/config/name").operator String().utf8();
	VkApplicationInfo app_info = {};
	app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
	app_info.pApplicationName = cs.get_data();
	app_info.pEngineName = VERSION_NAME;
	app_info.engineVersion = VK_MAKE_VERSION(VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH);
	app_info.apiVersion = application_api_version;

	TightLocalVector<const char *> enabled_layer_names;
	if (_use_validation_layers()) {
		err = _find_validation_layers(enabled_layer_names);
		ERR_FAIL_COND_V(err != OK, err);
	}

	VkInstanceCreateInfo instance_info = {};
	instance_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
#if defined(USE_VOLK) && (defined(MACOS_ENABLED) || defined(IOS_ENABLED))
	instance_info.flags = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
#endif
	instance_info.pApplicationInfo = &app_info;
	instance_info.enabledExtensionCount = enabled_extension_names.size();
	instance_info.ppEnabledExtensionNames = enabled_extension_names.ptr();
	instance_info.enabledLayerCount = enabled_layer_names.size();
	instance_info.ppEnabledLayerNames = enabled_layer_names.ptr();

	// This is info for a temp callback to use during CreateInstance. After the instance is created, we use the instance-based function to register the final callback.
	VkDebugUtilsMessengerCreateInfoEXT debug_messenger_create_info = {};
	VkDebugReportCallbackCreateInfoEXT debug_report_callback_create_info = {};
	const bool has_debug_utils_extension = enabled_instance_extension_names.has(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
	const bool has_debug_report_extension = enabled_instance_extension_names.has(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
	if (has_debug_utils_extension) {
		debug_messenger_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
		debug_messenger_create_info.pNext = nullptr;
		debug_messenger_create_info.flags = 0;
		debug_messenger_create_info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
		debug_messenger_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
		debug_messenger_create_info.pfnUserCallback = _debug_messenger_callback;
		debug_messenger_create_info.pUserData = this;
		instance_info.pNext = &debug_messenger_create_info;
	} else if (has_debug_report_extension) {
		debug_report_callback_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
		debug_report_callback_create_info.flags = VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
		debug_report_callback_create_info.pfnCallback = _debug_report_callback;
		debug_report_callback_create_info.pUserData = this;
		instance_info.pNext = &debug_report_callback_create_info;
	}

	err = _create_vulkan_instance(&instance_info, &instance);
	ERR_FAIL_COND_V(err != OK, err);

#ifdef USE_VOLK
	volkLoadInstance(instance);
#endif

	// Physical device.
	if (enabled_instance_extension_names.has(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
		functions.GetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2(vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceFeatures2"));
		functions.GetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2(vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties2"));

		// In Vulkan 1.0, the functions might be accessible under their original extension names.
		if (functions.GetPhysicalDeviceFeatures2 == nullptr) {
			functions.GetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2(vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceFeatures2KHR"));
		}

		if (functions.GetPhysicalDeviceProperties2 == nullptr) {
			functions.GetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2(vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties2KHR"));
		}
	}

	// Device.
	functions.GetDeviceProcAddr = PFN_vkGetDeviceProcAddr(vkGetInstanceProcAddr(instance, "vkGetDeviceProcAddr"));

	// Surfaces.
	functions.GetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR(vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceSurfaceSupportKHR"));
	functions.GetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR(vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR"));
	functions.GetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"));
	functions.GetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR(vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR"));

	// Debug utils and report.
	if (has_debug_utils_extension) {
		// Setup VK_EXT_debug_utils function pointers always (we use them for debug labels and names).
		functions.CreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT");
		functions.DestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugUtilsMessengerEXT");
		functions.CmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT)vkGetInstanceProcAddr(instance, "vkCmdBeginDebugUtilsLabelEXT");
		functions.CmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT)vkGetInstanceProcAddr(instance, "vkCmdEndDebugUtilsLabelEXT");
		functions.SetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(instance, "vkSetDebugUtilsObjectNameEXT");
		if (!functions.debug_util_functions_available()) {
			ERR_FAIL_V_MSG(ERR_CANT_CREATE, "GetProcAddr: Failed to init VK_EXT_debug_utils\nGetProcAddr: Failure");
		}

		VkResult res = functions.CreateDebugUtilsMessengerEXT(instance, &debug_messenger_create_info, get_allocation_callbacks(VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT), &debug_messenger);
		switch (res) {
			case VK_SUCCESS:
				break;
			case VK_ERROR_OUT_OF_HOST_MEMORY:
				ERR_FAIL_V_MSG(ERR_CANT_CREATE, "CreateDebugUtilsMessengerEXT: out of host memory\nCreateDebugUtilsMessengerEXT Failure");
				break;
			default:
				ERR_FAIL_V_MSG(ERR_CANT_CREATE, "CreateDebugUtilsMessengerEXT: unknown failure\nCreateDebugUtilsMessengerEXT Failure");
				break;
		}
	} else if (has_debug_report_extension) {
		functions.CreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugReportCallbackEXT");
		functions.DebugReportMessageEXT = (PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(instance, "vkDebugReportMessageEXT");
		functions.DestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugReportCallbackEXT");
		if (!functions.debug_report_functions_available()) {
			ERR_FAIL_V_MSG(ERR_CANT_CREATE, "GetProcAddr: Failed to init VK_EXT_debug_report\nGetProcAddr: Failure");
		}

		VkResult res = functions.CreateDebugReportCallbackEXT(instance, &debug_report_callback_create_info, get_allocation_callbacks(VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT), &debug_report);
		switch (res) {
			case VK_SUCCESS:
				break;
			case VK_ERROR_OUT_OF_HOST_MEMORY:
				ERR_FAIL_V_MSG(ERR_CANT_CREATE, "CreateDebugReportCallbackEXT: out of host memory\nCreateDebugReportCallbackEXT Failure");
				break;
			default:
				ERR_FAIL_V_MSG(ERR_CANT_CREATE, "CreateDebugReportCallbackEXT: unknown failure\nCreateDebugReportCallbackEXT Failure");
				break;
		}
	}

	return OK;
}

Error RenderingContextDriverVulkan::_initialize_devices() {
	if (VulkanHooks::get_singleton() != nullptr) {
		VkPhysicalDevice physical_device;
		bool device_retrieved = VulkanHooks::get_singleton()->get_physical_device(&physical_device);
		ERR_FAIL_COND_V(!device_retrieved, ERR_CANT_CREATE);

		// When a hook is active, pretend the device returned by the hook is the only device available.
		driver_devices.resize(1);
		physical_devices.resize(1);
		device_queue_families.resize(1);
		physical_devices[0] = physical_device;
	} else {
		uint32_t physical_device_count = 0;
		VkResult err = vkEnumeratePhysicalDevices(instance, &physical_device_count, nullptr);
		ERR_FAIL_COND_V(err != VK_SUCCESS, ERR_CANT_CREATE);
		ERR_FAIL_COND_V_MSG(physical_device_count == 0, ERR_CANT_CREATE, "vkEnumeratePhysicalDevices reported zero accessible devices.\n\nDo you have a compatible Vulkan installable client driver (ICD) installed?\nvkEnumeratePhysicalDevices Failure.");

		driver_devices.resize(physical_device_count);
		physical_devices.resize(physical_device_count);
		device_queue_families.resize(physical_device_count);
		err = vkEnumeratePhysicalDevices(instance, &physical_device_count, physical_devices.ptr());
		ERR_FAIL_COND_V(err != VK_SUCCESS, ERR_CANT_CREATE);
	}

	// Fill the list of driver devices with the properties from the physical devices.
	for (uint32_t i = 0; i < physical_devices.size(); i++) {
		VkPhysicalDeviceProperties props;
		vkGetPhysicalDeviceProperties(physical_devices[i], &props);

		Device &driver_device = driver_devices[i];
		driver_device.name = String::utf8(props.deviceName);
		driver_device.vendor = Vendor(props.vendorID);
		driver_device.type = DeviceType(props.deviceType);
		driver_device.workarounds = Workarounds();

		_check_driver_workarounds(props, driver_device);

		uint32_t queue_family_properties_count = 0;
		vkGetPhysicalDeviceQueueFamilyProperties(physical_devices[i], &queue_family_properties_count, nullptr);

		if (queue_family_properties_count > 0) {
			device_queue_families[i].properties.resize(queue_family_properties_count);
			vkGetPhysicalDeviceQueueFamilyProperties(physical_devices[i], &queue_family_properties_count, device_queue_families[i].properties.ptr());
		}
	}

	return OK;
}

void RenderingContextDriverVulkan::_check_driver_workarounds(const VkPhysicalDeviceProperties &p_device_properties, Device &r_device) {
	// Workaround for the Adreno 6XX family of devices.
	//
	// There's a known issue with the Vulkan driver in this family of devices where it'll crash if a dynamic state for drawing is
	// used in a command buffer before a dispatch call is issued. As both dynamic scissor and viewport are basic requirements for
	// the engine to not bake this state into the PSO, the only known way to fix this issue is to reset the command buffer entirely.
	//
	// As the render graph has no built-in limitations of whether it'll issue compute work before anything needs to draw on the
	// frame, and there's no guarantee that compute work will never be dependent on rasterization in the future, this workaround
	// will end recording on the current command buffer any time a compute list is encountered after a draw list was executed.
	// A new command buffer will be created afterwards and the appropriate synchronization primitives will be inserted.
	//
	// Executing this workaround has the added cost of synchronization between all the command buffers that are created as well as
	// all the individual submissions. This performance hit is accepted for the sake of being able to support these devices without
	// limiting the design of the renderer.
	//
	// This bug was fixed in driver version 512.503.0, so we only enable the workaround on driver versions older than that.
	//
	r_device.workarounds.avoid_compute_after_draw =
			r_device.vendor == VENDOR_QUALCOMM &&
			p_device_properties.deviceID >= 0x6000000 && // Adreno 6xx
			p_device_properties.driverVersion < VK_MAKE_VERSION(512, 503, 0) &&
			r_device.name.find("Turnip") < 0;
}

bool RenderingContextDriverVulkan::_use_validation_layers() const {
	return Engine::get_singleton()->is_validation_layers_enabled();
}

Error RenderingContextDriverVulkan::_create_vulkan_instance(const VkInstanceCreateInfo *p_create_info, VkInstance *r_instance) {
	if (VulkanHooks::get_singleton() != nullptr) {
		return VulkanHooks::get_singleton()->create_vulkan_instance(p_create_info, r_instance) ? OK : ERR_CANT_CREATE;
	} else {
		VkResult err = vkCreateInstance(p_create_info, get_allocation_callbacks(VK_OBJECT_TYPE_INSTANCE), r_instance);
		ERR_FAIL_COND_V_MSG(err == VK_ERROR_INCOMPATIBLE_DRIVER, ERR_CANT_CREATE,
				"Cannot find a compatible Vulkan installable client driver (ICD).\n\n"
				"vkCreateInstance Failure");
		ERR_FAIL_COND_V_MSG(err == VK_ERROR_EXTENSION_NOT_PRESENT, ERR_CANT_CREATE,
				"Cannot find a specified extension library.\n"
				"Make sure your layers path is set appropriately.\n"
				"vkCreateInstance Failure");
		ERR_FAIL_COND_V_MSG(err, ERR_CANT_CREATE,
				"vkCreateInstance failed.\n\n"
				"Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
				"Please look at the Getting Started guide for additional information.\n"
				"vkCreateInstance Failure");
	}

	return OK;
}

Error RenderingContextDriverVulkan::initialize() {
	Error err;

#ifdef USE_VOLK
	if (volkInitialize() != VK_SUCCESS) {
		return FAILED;
	}
#endif

	err = _initialize_vulkan_version();
	ERR_FAIL_COND_V(err != OK, err);

	err = _initialize_instance_extensions();
	ERR_FAIL_COND_V(err != OK, err);

	err = _initialize_instance();
	ERR_FAIL_COND_V(err != OK, err);

	err = _initialize_devices();
	ERR_FAIL_COND_V(err != OK, err);

	return OK;
}

const RenderingContextDriver::Device &RenderingContextDriverVulkan::device_get(uint32_t p_device_index) const {
	DEV_ASSERT(p_device_index < driver_devices.size());
	return driver_devices[p_device_index];
}

uint32_t RenderingContextDriverVulkan::device_get_count() const {
	return driver_devices.size();
}

bool RenderingContextDriverVulkan::device_supports_present(uint32_t p_device_index, SurfaceID p_surface) const {
	DEV_ASSERT(p_device_index < physical_devices.size());

	// Check if any of the queues supported by the device supports presenting to the window's surface.
	const VkPhysicalDevice physical_device = physical_devices[p_device_index];
	const DeviceQueueFamilies &queue_families = device_queue_families[p_device_index];
	for (uint32_t i = 0; i < queue_families.properties.size(); i++) {
		if ((queue_families.properties[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) && queue_family_supports_present(physical_device, i, p_surface)) {
			return true;
		}
	}

	return false;
}

RenderingDeviceDriver *RenderingContextDriverVulkan::driver_create() {
	return memnew(RenderingDeviceDriverVulkan(this));
}

void RenderingContextDriverVulkan::driver_free(RenderingDeviceDriver *p_driver) {
	memdelete(p_driver);
}

RenderingContextDriver::SurfaceID RenderingContextDriverVulkan::surface_create(const void *p_platform_data) {
	DEV_ASSERT(false && "Surface creation should not be called on the platform-agnostic version of the driver.");
	return SurfaceID();
}
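
// A SurfaceID is simply the Surface pointer created by the platform-specific subclass, so the
// accessors below cast the handle back to a Surface to read or update its state.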
void RenderingContextDriverVulkan::surface_set_size(SurfaceID p_surface, uint32_t p_width, uint32_t p_height) {
	Surface *surface = (Surface *)(p_surface);
	surface->width = p_width;
	surface->height = p_height;
	surface->needs_resize = true;
}

void RenderingContextDriverVulkan::surface_set_vsync_mode(SurfaceID p_surface, DisplayServer::VSyncMode p_vsync_mode) {
	Surface *surface = (Surface *)(p_surface);
	surface->vsync_mode = p_vsync_mode;
	surface->needs_resize = true;
}

DisplayServer::VSyncMode RenderingContextDriverVulkan::surface_get_vsync_mode(SurfaceID p_surface) const {
	Surface *surface = (Surface *)(p_surface);
	return surface->vsync_mode;
}

uint32_t RenderingContextDriverVulkan::surface_get_width(SurfaceID p_surface) const {
	Surface *surface = (Surface *)(p_surface);
	return surface->width;
}

uint32_t RenderingContextDriverVulkan::surface_get_height(SurfaceID p_surface) const {
	Surface *surface = (Surface *)(p_surface);
	return surface->height;
}

void RenderingContextDriverVulkan::surface_set_needs_resize(SurfaceID p_surface, bool p_needs_resize) {
	Surface *surface = (Surface *)(p_surface);
	surface->needs_resize = p_needs_resize;
}

bool RenderingContextDriverVulkan::surface_get_needs_resize(SurfaceID p_surface) const {
	Surface *surface = (Surface *)(p_surface);
	return surface->needs_resize;
}

void RenderingContextDriverVulkan::surface_destroy(SurfaceID p_surface) {
	Surface *surface = (Surface *)(p_surface);
	vkDestroySurfaceKHR(instance, surface->vk_surface, get_allocation_callbacks(VK_OBJECT_TYPE_SURFACE_KHR));
	memdelete(surface);
}

bool RenderingContextDriverVulkan::is_debug_utils_enabled() const {
	return enabled_instance_extension_names.has(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
}

VkInstance RenderingContextDriverVulkan::instance_get() const {
	return instance;
}

VkPhysicalDevice RenderingContextDriverVulkan::physical_device_get(uint32_t p_device_index) const {
	DEV_ASSERT(p_device_index < physical_devices.size());
	return physical_devices[p_device_index];
}

uint32_t RenderingContextDriverVulkan::queue_family_get_count(uint32_t p_device_index) const {
	DEV_ASSERT(p_device_index < physical_devices.size());
	return device_queue_families[p_device_index].properties.size();
}

VkQueueFamilyProperties RenderingContextDriverVulkan::queue_family_get(uint32_t p_device_index, uint32_t p_queue_family_index) const {
	DEV_ASSERT(p_device_index < physical_devices.size());
	DEV_ASSERT(p_queue_family_index < queue_family_get_count(p_device_index));
	return device_queue_families[p_device_index].properties[p_queue_family_index];
}

bool RenderingContextDriverVulkan::queue_family_supports_present(VkPhysicalDevice p_physical_device, uint32_t p_queue_family_index, SurfaceID p_surface) const {
	DEV_ASSERT(p_physical_device != VK_NULL_HANDLE);
	DEV_ASSERT(p_surface != 0);

	Surface *surface = (Surface *)(p_surface);
	VkBool32 present_supported = false;
	VkResult err = vkGetPhysicalDeviceSurfaceSupportKHR(p_physical_device, p_queue_family_index, surface->vk_surface, &present_supported);
	return err == VK_SUCCESS && present_supported;
}

const RenderingContextDriverVulkan::Functions &RenderingContextDriverVulkan::functions_get() const {
	return functions;
}

#endif // VULKAN_ENABLED