VkBootstrap.cpp 88 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004
  1. /*
  2. * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
  3. * documentation files (the “Software”), to deal in the Software without restriction, including without
  4. * limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
  5. * of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
  6. *
  7. * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
  8. *
  9. * THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
  10. * LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
  11. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
  12. * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  13. *
  14. * Copyright © 2020 Charles Giessen ([email protected])
  15. */
  16. #include "VkBootstrap.h"
  17. #include <cstring>
  18. #if defined(_WIN32)
  19. #include <fcntl.h>
  20. #ifndef NOMINMAX
  21. #define NOMINMAX
  22. #endif
  23. #include <windows.h>
  24. #endif // _WIN32
  25. #if defined(__linux__) || defined(__APPLE__)
  26. #include <dlfcn.h>
  27. #endif
  28. #include <mutex>
  29. #include <algorithm>
  30. namespace vkb {
  31. namespace detail {
// Fill every feature field with all-bits-set (any nonzero VkBool32 reads as "enabled"),
// so a default-constructed node requests/advertises everything until overwritten.
GenericFeaturesPNextNode::GenericFeaturesPNextNode() { memset(fields, UINT8_MAX, sizeof(VkBool32) * field_capacity); }
// Returns true when every feature enabled in `requested` is also enabled in
// `supported`. Both nodes must describe the same Vulkan feature struct
// (matching sType), which is enforced by the assert below.
bool GenericFeaturesPNextNode::match(GenericFeaturesPNextNode const& requested, GenericFeaturesPNextNode const& supported) noexcept {
    assert(requested.sType == supported.sType && "Non-matching sTypes in features nodes!");
    for (uint32_t i = 0; i < field_capacity; i++) {
        // A requested feature that the device does not support fails the match.
        if (requested.fields[i] && !supported.fields[i]) return false;
    }
    return true;
}
// Process-wide table of dynamically resolved Vulkan entry points.
// Loads the platform's Vulkan loader library on demand (or accepts a
// caller-supplied vkGetInstanceProcAddr) and resolves global-, instance-,
// and device-level function pointers through it.
class VulkanFunctions {
    private:
    // Serializes the one-time loading performed by init_vulkan_funcs().
    std::mutex init_mutex;
    // Thin wrapper over the platform dynamic-library handle for the Vulkan loader.
    struct VulkanLibrary {
#if defined(__linux__) || defined(__APPLE__)
    void* library;
#elif defined(_WIN32)
    HMODULE library;
#endif
    // Root entry point; all other Vulkan functions are resolved through it.
    PFN_vkGetInstanceProcAddr ptr_vkGetInstanceProcAddr = VK_NULL_HANDLE;
    VulkanLibrary() {
    // Prefer the versioned library name, then fall back to the unversioned one.
#if defined(__linux__)
    library = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL);
    if (!library) library = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL);
#elif defined(__APPLE__)
    library = dlopen("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL);
    if (!library) library = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL);
#elif defined(_WIN32)
    library = LoadLibrary(TEXT("vulkan-1.dll"));
#else
    assert(false && "Unsupported platform");
#endif
    if (!library) return;
    load_func(ptr_vkGetInstanceProcAddr, "vkGetInstanceProcAddr");
    }
    // Resolves `func_name` from the loaded library into `func_dest`
    // (left null on failure).
    template <typename T> void load_func(T& func_dest, const char* func_name) {
#if defined(__linux__) || defined(__APPLE__)
    func_dest = reinterpret_cast<T>(dlsym(library, func_name));
#elif defined(_WIN32)
    func_dest = reinterpret_cast<T>(GetProcAddress(library, func_name));
#endif
    }
    // Releases the library handle. NOTE(review): not invoked anywhere in this
    // chunk — the static library object appears to live for the process lifetime.
    void close() {
#if defined(__linux__) || defined(__APPLE__)
    dlclose(library);
#elif defined(_WIN32)
    FreeLibrary(library);
#endif
    library = 0;
    }
    };
    // Lazily constructed singleton holding the loaded Vulkan library.
    VulkanLibrary& get_vulkan_library() {
    static VulkanLibrary lib;
    return lib;
    }
    // Selects the vkGetInstanceProcAddr to use: the caller-provided pointer if
    // any, otherwise the one resolved from the dynamically loaded library.
    // Returns false when no usable entry point is available.
    bool load_vulkan(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr) {
    if (fp_vkGetInstanceProcAddr != nullptr) {
    ptr_vkGetInstanceProcAddr = fp_vkGetInstanceProcAddr;
    return true;
    } else {
    auto& lib = get_vulkan_library();
    ptr_vkGetInstanceProcAddr = lib.ptr_vkGetInstanceProcAddr;
    return lib.library != nullptr && lib.ptr_vkGetInstanceProcAddr != VK_NULL_HANDLE;
    }
    }
    // Resolves the global-level (pre-instance) functions, fetched with a
    // VK_NULL_HANDLE instance as the Vulkan spec requires.
    void init_pre_instance_funcs() {
    fp_vkEnumerateInstanceExtensionProperties = reinterpret_cast<PFN_vkEnumerateInstanceExtensionProperties>(
    ptr_vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkEnumerateInstanceExtensionProperties"));
    fp_vkEnumerateInstanceLayerProperties = reinterpret_cast<PFN_vkEnumerateInstanceLayerProperties>(
    ptr_vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkEnumerateInstanceLayerProperties"));
    fp_vkEnumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
    ptr_vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkEnumerateInstanceVersion"));
    fp_vkCreateInstance =
    reinterpret_cast<PFN_vkCreateInstance>(ptr_vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkCreateInstance"));
    }
    public:
    // Resolves an instance-level function using the stored VkInstance.
    template <typename T> void get_inst_proc_addr(T& out_ptr, const char* func_name) {
    out_ptr = reinterpret_cast<T>(ptr_vkGetInstanceProcAddr(instance, func_name));
    }
    // Resolves a device-level function for the given VkDevice.
    template <typename T> void get_device_proc_addr(VkDevice device, T& out_ptr, const char* func_name) {
    out_ptr = reinterpret_cast<T>(fp_vkGetDeviceProcAddr(device, func_name));
    }
    PFN_vkGetInstanceProcAddr ptr_vkGetInstanceProcAddr = nullptr;
    // Instance used by get_inst_proc_addr; set in init_instance_funcs().
    VkInstance instance = nullptr;
    // Global-level function pointers (valid after init_vulkan_funcs()).
    PFN_vkEnumerateInstanceExtensionProperties fp_vkEnumerateInstanceExtensionProperties = nullptr;
    PFN_vkEnumerateInstanceLayerProperties fp_vkEnumerateInstanceLayerProperties = nullptr;
    PFN_vkEnumerateInstanceVersion fp_vkEnumerateInstanceVersion = nullptr;
    PFN_vkCreateInstance fp_vkCreateInstance = nullptr;
    // Instance-level function pointers (valid after init_instance_funcs()).
    PFN_vkDestroyInstance fp_vkDestroyInstance = nullptr;
    PFN_vkEnumeratePhysicalDevices fp_vkEnumeratePhysicalDevices = nullptr;
    PFN_vkGetPhysicalDeviceFeatures fp_vkGetPhysicalDeviceFeatures = nullptr;
    PFN_vkGetPhysicalDeviceFeatures2 fp_vkGetPhysicalDeviceFeatures2 = nullptr;
    PFN_vkGetPhysicalDeviceFeatures2KHR fp_vkGetPhysicalDeviceFeatures2KHR = nullptr;
    PFN_vkGetPhysicalDeviceFormatProperties fp_vkGetPhysicalDeviceFormatProperties = nullptr;
    PFN_vkGetPhysicalDeviceImageFormatProperties fp_vkGetPhysicalDeviceImageFormatProperties = nullptr;
    PFN_vkGetPhysicalDeviceProperties fp_vkGetPhysicalDeviceProperties = nullptr;
    PFN_vkGetPhysicalDeviceProperties2 fp_vkGetPhysicalDeviceProperties2 = nullptr;
    PFN_vkGetPhysicalDeviceQueueFamilyProperties fp_vkGetPhysicalDeviceQueueFamilyProperties = nullptr;
    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 fp_vkGetPhysicalDeviceQueueFamilyProperties2 = nullptr;
    PFN_vkGetPhysicalDeviceMemoryProperties fp_vkGetPhysicalDeviceMemoryProperties = nullptr;
    PFN_vkGetPhysicalDeviceFormatProperties2 fp_vkGetPhysicalDeviceFormatProperties2 = nullptr;
    PFN_vkGetPhysicalDeviceMemoryProperties2 fp_vkGetPhysicalDeviceMemoryProperties2 = nullptr;
    PFN_vkGetDeviceProcAddr fp_vkGetDeviceProcAddr = nullptr;
    PFN_vkCreateDevice fp_vkCreateDevice = nullptr;
    PFN_vkEnumerateDeviceExtensionProperties fp_vkEnumerateDeviceExtensionProperties = nullptr;
    // Surface (VK_KHR_surface) function pointers.
    PFN_vkDestroySurfaceKHR fp_vkDestroySurfaceKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfaceSupportKHR fp_vkGetPhysicalDeviceSurfaceSupportKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR fp_vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR fp_vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr;
    // Thread-safe entry point: loads the loader (or adopts the given
    // vkGetInstanceProcAddr) and resolves the global-level functions.
    bool init_vulkan_funcs(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) {
    std::lock_guard<std::mutex> lg(init_mutex);
    if (!load_vulkan(fp_vkGetInstanceProcAddr)) return false;
    init_pre_instance_funcs();
    return true;
    }
    // Stores the created instance and resolves all instance-level functions.
    void init_instance_funcs(VkInstance inst) {
    instance = inst;
    get_inst_proc_addr(fp_vkDestroyInstance, "vkDestroyInstance");
    get_inst_proc_addr(fp_vkEnumeratePhysicalDevices, "vkEnumeratePhysicalDevices");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceFeatures, "vkGetPhysicalDeviceFeatures");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceFeatures2, "vkGetPhysicalDeviceFeatures2");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceFeatures2KHR, "vkGetPhysicalDeviceFeatures2KHR");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceFormatProperties, "vkGetPhysicalDeviceFormatProperties");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceImageFormatProperties, "vkGetPhysicalDeviceImageFormatProperties");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceProperties2, "vkGetPhysicalDeviceProperties2");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceQueueFamilyProperties, "vkGetPhysicalDeviceQueueFamilyProperties");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceQueueFamilyProperties2, "vkGetPhysicalDeviceQueueFamilyProperties2");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceFormatProperties2, "vkGetPhysicalDeviceFormatProperties2");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");
    get_inst_proc_addr(fp_vkGetDeviceProcAddr, "vkGetDeviceProcAddr");
    get_inst_proc_addr(fp_vkCreateDevice, "vkCreateDevice");
    get_inst_proc_addr(fp_vkEnumerateDeviceExtensionProperties, "vkEnumerateDeviceExtensionProperties");
    get_inst_proc_addr(fp_vkDestroySurfaceKHR, "vkDestroySurfaceKHR");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceSurfaceSupportKHR, "vkGetPhysicalDeviceSurfaceSupportKHR");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceSurfaceFormatsKHR, "vkGetPhysicalDeviceSurfaceFormatsKHR");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceSurfacePresentModesKHR, "vkGetPhysicalDeviceSurfacePresentModesKHR");
    get_inst_proc_addr(fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
    }
};
  172. VulkanFunctions& vulkan_functions() {
  173. static VulkanFunctions v;
  174. return v;
  175. }
  176. // Helper for robustly executing the two-call pattern
  177. template <typename T, typename F, typename... Ts> auto get_vector(std::vector<T>& out, F&& f, Ts&&... ts) -> VkResult {
  178. uint32_t count = 0;
  179. VkResult err;
  180. do {
  181. err = f(ts..., &count, nullptr);
  182. if (err != VK_SUCCESS) {
  183. return err;
  184. };
  185. out.resize(count);
  186. err = f(ts..., &count, out.data());
  187. out.resize(count);
  188. } while (err == VK_INCOMPLETE);
  189. return err;
  190. }
  191. template <typename T, typename F, typename... Ts> auto get_vector_noerror(F&& f, Ts&&... ts) -> std::vector<T> {
  192. uint32_t count = 0;
  193. std::vector<T> results;
  194. f(ts..., &count, nullptr);
  195. results.resize(count);
  196. f(ts..., &count, results.data());
  197. results.resize(count);
  198. return results;
  199. }
  200. } // namespace detail
  201. const char* to_string_message_severity(VkDebugUtilsMessageSeverityFlagBitsEXT s) {
  202. switch (s) {
  203. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
  204. return "VERBOSE";
  205. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
  206. return "ERROR";
  207. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
  208. return "WARNING";
  209. case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
  210. return "INFO";
  211. default:
  212. return "UNKNOWN";
  213. }
  214. }
  215. const char* to_string_message_type(VkDebugUtilsMessageTypeFlagsEXT s) {
  216. if (s == 7) return "General | Validation | Performance";
  217. if (s == 6) return "Validation | Performance";
  218. if (s == 5) return "General | Performance";
  219. if (s == 4 /*VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT*/) return "Performance";
  220. if (s == 3) return "General | Validation";
  221. if (s == 2 /*VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT*/) return "Validation";
  222. if (s == 1 /*VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT*/) return "General";
  223. return "Unknown";
  224. }
  225. VkResult create_debug_utils_messenger(VkInstance instance,
  226. PFN_vkDebugUtilsMessengerCallbackEXT debug_callback,
  227. VkDebugUtilsMessageSeverityFlagsEXT severity,
  228. VkDebugUtilsMessageTypeFlagsEXT type,
  229. void* user_data_pointer,
  230. VkDebugUtilsMessengerEXT* pDebugMessenger,
  231. VkAllocationCallbacks* allocation_callbacks) {
  232. if (debug_callback == nullptr) debug_callback = default_debug_callback;
  233. VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = {};
  234. messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
  235. messengerCreateInfo.pNext = nullptr;
  236. messengerCreateInfo.messageSeverity = severity;
  237. messengerCreateInfo.messageType = type;
  238. messengerCreateInfo.pfnUserCallback = debug_callback;
  239. messengerCreateInfo.pUserData = user_data_pointer;
  240. PFN_vkCreateDebugUtilsMessengerEXT createMessengerFunc;
  241. detail::vulkan_functions().get_inst_proc_addr(createMessengerFunc, "vkCreateDebugUtilsMessengerEXT");
  242. if (createMessengerFunc != nullptr) {
  243. return createMessengerFunc(instance, &messengerCreateInfo, allocation_callbacks, pDebugMessenger);
  244. } else {
  245. return VK_ERROR_EXTENSION_NOT_PRESENT;
  246. }
  247. }
  248. void destroy_debug_utils_messenger(
  249. VkInstance instance, VkDebugUtilsMessengerEXT debugMessenger, VkAllocationCallbacks* allocation_callbacks) {
  250. PFN_vkDestroyDebugUtilsMessengerEXT deleteMessengerFunc;
  251. detail::vulkan_functions().get_inst_proc_addr(deleteMessengerFunc, "vkDestroyDebugUtilsMessengerEXT");
  252. if (deleteMessengerFunc != nullptr) {
  253. deleteMessengerFunc(instance, debugMessenger, allocation_callbacks);
  254. }
  255. }
  256. namespace detail {
  257. bool check_layer_supported(std::vector<VkLayerProperties> const& available_layers, const char* layer_name) {
  258. if (!layer_name) return false;
  259. for (const auto& layer_properties : available_layers) {
  260. if (strcmp(layer_name, layer_properties.layerName) == 0) {
  261. return true;
  262. }
  263. }
  264. return false;
  265. }
  266. bool check_layers_supported(std::vector<VkLayerProperties> const& available_layers, std::vector<const char*> const& layer_names) {
  267. bool all_found = true;
  268. for (const auto& layer_name : layer_names) {
  269. bool found = check_layer_supported(available_layers, layer_name);
  270. if (!found) all_found = false;
  271. }
  272. return all_found;
  273. }
  274. bool check_extension_supported(std::vector<VkExtensionProperties> const& available_extensions, const char* extension_name) {
  275. if (!extension_name) return false;
  276. for (const auto& extension_properties : available_extensions) {
  277. if (strcmp(extension_name, extension_properties.extensionName) == 0) {
  278. return true;
  279. }
  280. }
  281. return false;
  282. }
  283. bool check_extensions_supported(
  284. std::vector<VkExtensionProperties> const& available_extensions, std::vector<const char*> const& extension_names) {
  285. bool all_found = true;
  286. for (const auto& extension_name : extension_names) {
  287. bool found = check_extension_supported(available_extensions, extension_name);
  288. if (!found) all_found = false;
  289. }
  290. return all_found;
  291. }
  292. template <typename T> void setup_pNext_chain(T& structure, std::vector<VkBaseOutStructure*> const& structs) {
  293. structure.pNext = nullptr;
  294. if (structs.size() <= 0) return;
  295. for (size_t i = 0; i < structs.size() - 1; i++) {
  296. structs.at(i)->pNext = structs.at(i + 1);
  297. }
  298. structure.pNext = structs.at(0);
  299. }
// Canonical name of the Khronos validation layer.
const char* validation_layer_name = "VK_LAYER_KHRONOS_validation";
// std::error_category for InstanceError, so vkb errors interoperate with
// std::error_code (see make_error_code below).
struct InstanceErrorCategory : std::error_category {
    const char* name() const noexcept override { return "vkb_instance"; }
    std::string message(int err) const override { return to_string(static_cast<InstanceError>(err)); }
};
// Singleton instance referenced by make_error_code(InstanceError).
const InstanceErrorCategory instance_error_category;
// std::error_category for PhysicalDeviceError codes.
struct PhysicalDeviceErrorCategory : std::error_category {
    const char* name() const noexcept override { return "vkb_physical_device"; }
    std::string message(int err) const override { return to_string(static_cast<PhysicalDeviceError>(err)); }
};
// Singleton instance referenced by make_error_code(PhysicalDeviceError).
const PhysicalDeviceErrorCategory physical_device_error_category;
// std::error_category for QueueError codes.
struct QueueErrorCategory : std::error_category {
    const char* name() const noexcept override { return "vkb_queue"; }
    std::string message(int err) const override { return to_string(static_cast<QueueError>(err)); }
};
// Singleton instance referenced by make_error_code(QueueError).
const QueueErrorCategory queue_error_category;
// std::error_category for DeviceError codes.
struct DeviceErrorCategory : std::error_category {
    const char* name() const noexcept override { return "vkb_device"; }
    std::string message(int err) const override { return to_string(static_cast<DeviceError>(err)); }
};
// Singleton instance referenced by make_error_code(DeviceError).
const DeviceErrorCategory device_error_category;
  321. struct SwapchainErrorCategory : std::error_category {
  322. const char* name() const noexcept override { return "vbk_swapchain"; }
  323. std::string message(int err) const override { return to_string(static_cast<SwapchainError>(err)); }
  324. };
  325. const SwapchainErrorCategory swapchain_error_category;
  326. } // namespace detail
// Wraps an InstanceError in a std::error_code using the vkb_instance category.
std::error_code make_error_code(InstanceError instance_error) {
    return { static_cast<int>(instance_error), detail::instance_error_category };
}
// Wraps a PhysicalDeviceError in a std::error_code using the vkb_physical_device category.
std::error_code make_error_code(PhysicalDeviceError physical_device_error) {
    return { static_cast<int>(physical_device_error), detail::physical_device_error_category };
}
// Wraps a QueueError in a std::error_code using the vkb_queue category.
std::error_code make_error_code(QueueError queue_error) {
    return { static_cast<int>(queue_error), detail::queue_error_category };
}
// Wraps a DeviceError in a std::error_code using the vkb_device category.
std::error_code make_error_code(DeviceError device_error) {
    return { static_cast<int>(device_error), detail::device_error_category };
}
// Wraps a SwapchainError in a std::error_code using the swapchain category.
std::error_code make_error_code(SwapchainError swapchain_error) {
    return { static_cast<int>(swapchain_error), detail::swapchain_error_category };
}
// Expands to a switch-case arm that returns the enumerator's own name as a
// string literal (via the preprocessor stringizing operator).
#define CASE_TO_STRING(CATEGORY, TYPE) \
case CATEGORY::TYPE: \
return #TYPE;
  345. const char* to_string(InstanceError err) {
  346. switch (err) {
  347. CASE_TO_STRING(InstanceError, vulkan_unavailable)
  348. CASE_TO_STRING(InstanceError, vulkan_version_unavailable)
  349. CASE_TO_STRING(InstanceError, vulkan_version_1_1_unavailable)
  350. CASE_TO_STRING(InstanceError, vulkan_version_1_2_unavailable)
  351. CASE_TO_STRING(InstanceError, failed_create_debug_messenger)
  352. CASE_TO_STRING(InstanceError, failed_create_instance)
  353. CASE_TO_STRING(InstanceError, requested_layers_not_present)
  354. CASE_TO_STRING(InstanceError, requested_extensions_not_present)
  355. CASE_TO_STRING(InstanceError, windowing_extensions_not_present)
  356. default:
  357. return "";
  358. }
  359. }
  360. const char* to_string(PhysicalDeviceError err) {
  361. switch (err) {
  362. CASE_TO_STRING(PhysicalDeviceError, no_surface_provided)
  363. CASE_TO_STRING(PhysicalDeviceError, failed_enumerate_physical_devices)
  364. CASE_TO_STRING(PhysicalDeviceError, no_physical_devices_found)
  365. CASE_TO_STRING(PhysicalDeviceError, no_suitable_device)
  366. default:
  367. return "";
  368. }
  369. }
  370. const char* to_string(QueueError err) {
  371. switch (err) {
  372. CASE_TO_STRING(QueueError, present_unavailable)
  373. CASE_TO_STRING(QueueError, graphics_unavailable)
  374. CASE_TO_STRING(QueueError, compute_unavailable)
  375. CASE_TO_STRING(QueueError, transfer_unavailable)
  376. CASE_TO_STRING(QueueError, queue_index_out_of_range)
  377. CASE_TO_STRING(QueueError, invalid_queue_family_index)
  378. default:
  379. return "";
  380. }
  381. }
  382. const char* to_string(DeviceError err) {
  383. switch (err) {
  384. CASE_TO_STRING(DeviceError, failed_create_device)
  385. default:
  386. return "";
  387. }
  388. }
  389. const char* to_string(SwapchainError err) {
  390. switch (err) {
  391. CASE_TO_STRING(SwapchainError, surface_handle_not_provided)
  392. CASE_TO_STRING(SwapchainError, failed_query_surface_support_details)
  393. CASE_TO_STRING(SwapchainError, failed_create_swapchain)
  394. CASE_TO_STRING(SwapchainError, failed_get_swapchain_images)
  395. CASE_TO_STRING(SwapchainError, failed_create_swapchain_image_views)
  396. CASE_TO_STRING(SwapchainError, required_min_image_count_too_low)
  397. CASE_TO_STRING(SwapchainError, required_usage_not_supported)
  398. default:
  399. return "";
  400. }
  401. }
  402. Result<SystemInfo> SystemInfo::get_system_info() {
  403. if (!detail::vulkan_functions().init_vulkan_funcs(nullptr)) {
  404. return make_error_code(InstanceError::vulkan_unavailable);
  405. }
  406. return SystemInfo();
  407. }
  408. Result<SystemInfo> SystemInfo::get_system_info(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) {
  409. // Using externally provided function pointers, assume the loader is available
  410. if (!detail::vulkan_functions().init_vulkan_funcs(fp_vkGetInstanceProcAddr)) {
  411. return make_error_code(InstanceError::vulkan_unavailable);
  412. }
  413. return SystemInfo();
  414. }
  415. SystemInfo::SystemInfo() {
  416. auto available_layers_ret = detail::get_vector<VkLayerProperties>(
  417. this->available_layers, detail::vulkan_functions().fp_vkEnumerateInstanceLayerProperties);
  418. if (available_layers_ret != VK_SUCCESS) {
  419. this->available_layers.clear();
  420. }
  421. for (auto& layer : this->available_layers)
  422. if (strcmp(layer.layerName, detail::validation_layer_name) == 0) validation_layers_available = true;
  423. auto available_extensions_ret = detail::get_vector<VkExtensionProperties>(
  424. this->available_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, nullptr);
  425. if (available_extensions_ret != VK_SUCCESS) {
  426. this->available_extensions.clear();
  427. }
  428. for (auto& ext : this->available_extensions) {
  429. if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) {
  430. debug_utils_available = true;
  431. }
  432. }
  433. for (auto& layer : this->available_layers) {
  434. std::vector<VkExtensionProperties> layer_extensions;
  435. auto layer_extensions_ret = detail::get_vector<VkExtensionProperties>(
  436. layer_extensions, detail::vulkan_functions().fp_vkEnumerateInstanceExtensionProperties, layer.layerName);
  437. if (layer_extensions_ret == VK_SUCCESS) {
  438. this->available_extensions.insert(
  439. this->available_extensions.end(), layer_extensions.begin(), layer_extensions.end());
  440. for (auto& ext : layer_extensions) {
  441. if (strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) {
  442. debug_utils_available = true;
  443. }
  444. }
  445. }
  446. }
  447. }
  448. bool SystemInfo::is_extension_available(const char* extension_name) const {
  449. if (!extension_name) return false;
  450. return detail::check_extension_supported(available_extensions, extension_name);
  451. }
  452. bool SystemInfo::is_layer_available(const char* layer_name) const {
  453. if (!layer_name) return false;
  454. return detail::check_layer_supported(available_layers, layer_name);
  455. }
  456. void destroy_surface(Instance instance, VkSurfaceKHR surface) {
  457. if (instance.instance != VK_NULL_HANDLE && surface != VK_NULL_HANDLE) {
  458. detail::vulkan_functions().fp_vkDestroySurfaceKHR(instance.instance, surface, instance.allocation_callbacks);
  459. }
  460. }
  461. void destroy_surface(VkInstance instance, VkSurfaceKHR surface, VkAllocationCallbacks* callbacks) {
  462. if (instance != VK_NULL_HANDLE && surface != VK_NULL_HANDLE) {
  463. detail::vulkan_functions().fp_vkDestroySurfaceKHR(instance, surface, callbacks);
  464. }
  465. }
  466. void destroy_instance(Instance instance) {
  467. if (instance.instance != VK_NULL_HANDLE) {
  468. if (instance.debug_messenger != VK_NULL_HANDLE)
  469. destroy_debug_utils_messenger(instance.instance, instance.debug_messenger, instance.allocation_callbacks);
  470. detail::vulkan_functions().fp_vkDestroyInstance(instance.instance, instance.allocation_callbacks);
  471. }
  472. }
// Implicit conversion so a vkb::Instance can be passed wherever a raw VkInstance is expected.
Instance::operator VkInstance() const { return this->instance; }
// Construct a builder that resolves Vulkan functions through the supplied
// vkGetInstanceProcAddr instead of loading the Vulkan library itself.
InstanceBuilder::InstanceBuilder(PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) {
    info.fp_vkGetInstanceProcAddr = fp_vkGetInstanceProcAddr;
}
// Default builder: the Vulkan loader library is located dynamically when build() runs.
InstanceBuilder::InstanceBuilder() {}
// Creates a VkInstance from the accumulated builder state.
//
// Order of operations: query system info, resolve the instance API version,
// gather and validate extensions/layers, assemble the pNext chain for debug
// and validation features, call vkCreateInstance, and finally (optionally)
// create the debug messenger. Returns the first error encountered, or a
// fully-populated Instance on success.
Result<Instance> InstanceBuilder::build() const {

    auto sys_info_ret = SystemInfo::get_system_info(info.fp_vkGetInstanceProcAddr);
    if (!sys_info_ret) return sys_info_ret.error();
    auto system = sys_info_ret.value();

    // Resolve the instance-level API version. vkEnumerateInstanceVersion only
    // exists on Vulkan 1.1+ loaders, so it is queried through a pointer that
    // may legitimately be null (which implies a 1.0-only loader).
    uint32_t instance_version = VKB_VK_API_VERSION_1_0;

    if (info.minimum_instance_version > VKB_VK_API_VERSION_1_0 || info.required_api_version > VKB_VK_API_VERSION_1_0 ||
        info.desired_api_version > VKB_VK_API_VERSION_1_0) {
        PFN_vkEnumerateInstanceVersion pfn_vkEnumerateInstanceVersion = detail::vulkan_functions().fp_vkEnumerateInstanceVersion;

        if (pfn_vkEnumerateInstanceVersion != nullptr) {
            VkResult res = pfn_vkEnumerateInstanceVersion(&instance_version);
            // Should always return VK_SUCCESS
            if (res != VK_SUCCESS && info.required_api_version > 0)
                return make_error_code(InstanceError::vulkan_version_unavailable);
        }
        // Fail when the loader cannot satisfy the minimum (or, when no minimum
        // was set, the required) version. The reported error names the closest
        // matching minor version for a clearer diagnostic.
        if (pfn_vkEnumerateInstanceVersion == nullptr || instance_version < info.minimum_instance_version ||
            (info.minimum_instance_version == 0 && instance_version < info.required_api_version)) {
            if (VK_VERSION_MINOR(info.required_api_version) == 2)
                return make_error_code(InstanceError::vulkan_version_1_2_unavailable);
            else if (VK_VERSION_MINOR(info.required_api_version))
                return make_error_code(InstanceError::vulkan_version_1_1_unavailable);
            else
                return make_error_code(InstanceError::vulkan_version_unavailable);
        }
    }

    // On a 1.0 loader the instance version caps the API version; otherwise the
    // required version is used, upgraded to the desired version if available.
    uint32_t api_version = instance_version < VKB_VK_API_VERSION_1_1 ? instance_version : info.required_api_version;

    if (info.desired_api_version > VKB_VK_API_VERSION_1_0 && instance_version >= info.desired_api_version) {
        instance_version = info.desired_api_version;
        api_version = info.desired_api_version;
    }

    VkApplicationInfo app_info = {};
    app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    app_info.pNext = nullptr;
    app_info.pApplicationName = info.app_name != nullptr ? info.app_name : "";
    app_info.applicationVersion = info.application_version;
    app_info.pEngineName = info.engine_name != nullptr ? info.engine_name : "";
    app_info.engineVersion = info.engine_version;
    app_info.apiVersion = api_version;

    // Gather the extension list: user-requested extensions, then the debug
    // utils extension (only if a messenger is wanted and it is available).
    std::vector<const char*> extensions;
    std::vector<const char*> layers;

    for (auto& ext : info.extensions)
        extensions.push_back(ext);
    if (info.debug_callback != nullptr && info.use_debug_messenger && system.debug_utils_available) {
        extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }
    // VK_KHR_get_physical_device_properties2 is only needed (and only makes
    // sense) below API 1.1, where its functionality is not core.
    bool properties2_ext_enabled =
        api_version < VKB_VK_API_VERSION_1_1 && detail::check_extension_supported(system.available_extensions,
                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    if (properties2_ext_enabled) {
        extensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }

#if defined(VK_KHR_portability_enumeration)
    // Needed to enumerate non-conformant (e.g. MoltenVK) implementations.
    bool portability_enumeration_support =
        detail::check_extension_supported(system.available_extensions, VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME);
    if (portability_enumeration_support) {
        extensions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME);
    }
#else
    bool portability_enumeration_support = false;
#endif
    // Non-headless builds must have VK_KHR_surface plus at least one
    // platform-specific surface extension.
    if (!info.headless_context) {
        auto check_add_window_ext = [&](const char* name) -> bool {
            if (!detail::check_extension_supported(system.available_extensions, name)) return false;
            extensions.push_back(name);
            return true;
        };
        bool khr_surface_added = check_add_window_ext("VK_KHR_surface");
#if defined(_WIN32)
        bool added_window_exts = check_add_window_ext("VK_KHR_win32_surface");
#elif defined(__ANDROID__)
        bool added_window_exts = check_add_window_ext("VK_KHR_android_surface");
#elif defined(_DIRECT2DISPLAY)
        bool added_window_exts = check_add_window_ext("VK_KHR_display");
#elif defined(__linux__)
        // make sure all three calls to check_add_window_ext, don't allow short circuiting
        bool added_window_exts = check_add_window_ext("VK_KHR_xcb_surface");
        added_window_exts = check_add_window_ext("VK_KHR_xlib_surface") || added_window_exts;
        added_window_exts = check_add_window_ext("VK_KHR_wayland_surface") || added_window_exts;
#elif defined(__APPLE__)
        bool added_window_exts = check_add_window_ext("VK_EXT_metal_surface");
#endif
        if (!khr_surface_added || !added_window_exts)
            return make_error_code(InstanceError::windowing_extensions_not_present);
    }
    bool all_extensions_supported = detail::check_extensions_supported(system.available_extensions, extensions);
    if (!all_extensions_supported) {
        return make_error_code(InstanceError::requested_extensions_not_present);
    }

    // Gather layers: user-requested layers, then the validation layer when it
    // was required (enable) or requested-and-available.
    for (auto& layer : info.layers)
        layers.push_back(layer);

    if (info.enable_validation_layers || (info.request_validation_layers && system.validation_layers_available)) {
        layers.push_back(detail::validation_layer_name);
    }
    bool all_layers_supported = detail::check_layers_supported(system.available_layers, layers);
    if (!all_layers_supported) {
        return make_error_code(InstanceError::requested_layers_not_present);
    }

    // Build the pNext chain for VkInstanceCreateInfo. The structs below live
    // on this stack frame; the chain is only read during vkCreateInstance.
    std::vector<VkBaseOutStructure*> pNext_chain;

    // Passing the messenger create info in the instance pNext chain enables
    // debug output during instance creation/destruction itself.
    VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = {};
    if (info.use_debug_messenger) {
        messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
        messengerCreateInfo.pNext = nullptr;
        messengerCreateInfo.messageSeverity = info.debug_message_severity;
        messengerCreateInfo.messageType = info.debug_message_type;
        messengerCreateInfo.pfnUserCallback = info.debug_callback;
        messengerCreateInfo.pUserData = info.debug_user_data_pointer;
        pNext_chain.push_back(reinterpret_cast<VkBaseOutStructure*>(&messengerCreateInfo));
    }

    VkValidationFeaturesEXT features{};
    if (info.enabled_validation_features.size() != 0 || info.disabled_validation_features.size()) {
        features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
        features.pNext = nullptr;
        features.enabledValidationFeatureCount = static_cast<uint32_t>(info.enabled_validation_features.size());
        features.pEnabledValidationFeatures = info.enabled_validation_features.data();
        features.disabledValidationFeatureCount = static_cast<uint32_t>(info.disabled_validation_features.size());
        features.pDisabledValidationFeatures = info.disabled_validation_features.data();
        pNext_chain.push_back(reinterpret_cast<VkBaseOutStructure*>(&features));
    }

    VkValidationFlagsEXT checks{};
    if (info.disabled_validation_checks.size() != 0) {
        checks.sType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT;
        checks.pNext = nullptr;
        checks.disabledValidationCheckCount = static_cast<uint32_t>(info.disabled_validation_checks.size());
        checks.pDisabledValidationChecks = info.disabled_validation_checks.data();
        pNext_chain.push_back(reinterpret_cast<VkBaseOutStructure*>(&checks));
    }

    VkInstanceCreateInfo instance_create_info = {};
    instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    detail::setup_pNext_chain(instance_create_info, pNext_chain);
#if !defined(NDEBUG)
    for (auto& node : pNext_chain) {
        assert(node->sType != VK_STRUCTURE_TYPE_APPLICATION_INFO);
    }
#endif
    instance_create_info.flags = info.flags;
    instance_create_info.pApplicationInfo = &app_info;
    instance_create_info.enabledExtensionCount = static_cast<uint32_t>(extensions.size());
    instance_create_info.ppEnabledExtensionNames = extensions.data();
    instance_create_info.enabledLayerCount = static_cast<uint32_t>(layers.size());
    instance_create_info.ppEnabledLayerNames = layers.data();
#if defined(VK_KHR_portability_enumeration)
    if (portability_enumeration_support) {
        instance_create_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
    }
#endif

    Instance instance;
    VkResult res =
        detail::vulkan_functions().fp_vkCreateInstance(&instance_create_info, info.allocation_callbacks, &instance.instance);
    if (res != VK_SUCCESS) return Result<Instance>(InstanceError::failed_create_instance, res);

    // Load instance-level function pointers before any further instance calls.
    detail::vulkan_functions().init_instance_funcs(instance.instance);

    if (info.use_debug_messenger) {
        res = create_debug_utils_messenger(instance.instance,
            info.debug_callback,
            info.debug_message_severity,
            info.debug_message_type,
            info.debug_user_data_pointer,
            &instance.debug_messenger,
            info.allocation_callbacks);
        if (res != VK_SUCCESS) {
            return Result<Instance>(InstanceError::failed_create_debug_messenger, res);
        }
    }

    // Record state the rest of the library (selector, dispatch tables) needs.
    instance.headless = info.headless_context;
    instance.properties2_ext_enabled = properties2_ext_enabled;
    instance.allocation_callbacks = info.allocation_callbacks;
    instance.instance_version = instance_version;
    instance.api_version = api_version;
    instance.fp_vkGetInstanceProcAddr = detail::vulkan_functions().ptr_vkGetInstanceProcAddr;
    instance.fp_vkGetDeviceProcAddr = detail::vulkan_functions().fp_vkGetDeviceProcAddr;
    return instance;
}
  648. InstanceBuilder& InstanceBuilder::set_app_name(const char* app_name) {
  649. if (!app_name) return *this;
  650. info.app_name = app_name;
  651. return *this;
  652. }
  653. InstanceBuilder& InstanceBuilder::set_engine_name(const char* engine_name) {
  654. if (!engine_name) return *this;
  655. info.engine_name = engine_name;
  656. return *this;
  657. }
// Sets the application version (a VK_MAKE_VERSION-style packed value).
InstanceBuilder& InstanceBuilder::set_app_version(uint32_t app_version) {
    info.application_version = app_version;
    return *this;
}
// Sets the application version from its major/minor/patch components.
InstanceBuilder& InstanceBuilder::set_app_version(uint32_t major, uint32_t minor, uint32_t patch) {
    info.application_version = VKB_MAKE_VK_VERSION(0, major, minor, patch);
    return *this;
}
// Sets the engine version (a VK_MAKE_VERSION-style packed value).
InstanceBuilder& InstanceBuilder::set_engine_version(uint32_t engine_version) {
    info.engine_version = engine_version;
    return *this;
}
// Sets the engine version from its major/minor/patch components.
InstanceBuilder& InstanceBuilder::set_engine_version(uint32_t major, uint32_t minor, uint32_t patch) {
    info.engine_version = VKB_MAKE_VK_VERSION(0, major, minor, patch);
    return *this;
}
// Requires at least this instance API version; build() fails if unavailable.
InstanceBuilder& InstanceBuilder::require_api_version(uint32_t required_api_version) {
    info.required_api_version = required_api_version;
    return *this;
}
// Requires at least this instance API version, given as major/minor/patch.
InstanceBuilder& InstanceBuilder::require_api_version(uint32_t major, uint32_t minor, uint32_t patch) {
    info.required_api_version = VKB_MAKE_VK_VERSION(0, major, minor, patch);
    return *this;
}
// Overrides the minimum loader (instance) version that build() will accept.
InstanceBuilder& InstanceBuilder::set_minimum_instance_version(uint32_t minimum_instance_version) {
    info.minimum_instance_version = minimum_instance_version;
    return *this;
}
// Overrides the minimum loader (instance) version, given as major/minor/patch.
InstanceBuilder& InstanceBuilder::set_minimum_instance_version(uint32_t major, uint32_t minor, uint32_t patch) {
    info.minimum_instance_version = VKB_MAKE_VK_VERSION(0, major, minor, patch);
    return *this;
}
// Prefers this API version when available, but does not fail without it.
InstanceBuilder& InstanceBuilder::desire_api_version(uint32_t preferred_vulkan_version) {
    info.desired_api_version = preferred_vulkan_version;
    return *this;
}
// Prefers this API version (major/minor/patch) when available; no failure otherwise.
InstanceBuilder& InstanceBuilder::desire_api_version(uint32_t major, uint32_t minor, uint32_t patch) {
    info.desired_api_version = VKB_MAKE_VK_VERSION(0, major, minor, patch);
    return *this;
}
  698. InstanceBuilder& InstanceBuilder::enable_layer(const char* layer_name) {
  699. if (!layer_name) return *this;
  700. info.layers.push_back(layer_name);
  701. return *this;
  702. }
  703. InstanceBuilder& InstanceBuilder::enable_extension(const char* extension_name) {
  704. if (!extension_name) return *this;
  705. info.extensions.push_back(extension_name);
  706. return *this;
  707. }
// Unconditionally enables the validation layer; build() fails if unavailable.
InstanceBuilder& InstanceBuilder::enable_validation_layers(bool enable_validation) {
    info.enable_validation_layers = enable_validation;
    return *this;
}
// Enables the validation layer only if the system reports it as available.
InstanceBuilder& InstanceBuilder::request_validation_layers(bool enable_validation) {
    info.request_validation_layers = enable_validation;
    return *this;
}
  716. InstanceBuilder& InstanceBuilder::use_default_debug_messenger() {
  717. info.use_debug_messenger = true;
  718. info.debug_callback = default_debug_callback;
  719. return *this;
  720. }
  721. InstanceBuilder& InstanceBuilder::set_debug_callback(PFN_vkDebugUtilsMessengerCallbackEXT callback) {
  722. info.use_debug_messenger = true;
  723. info.debug_callback = callback;
  724. return *this;
  725. }
// Sets the pUserData pointer handed to the debug callback on every message.
InstanceBuilder& InstanceBuilder::set_debug_callback_user_data_pointer(void* user_data_pointer) {
    info.debug_user_data_pointer = user_data_pointer;
    return *this;
}
// Headless instances skip VK_KHR_surface and the platform surface extensions.
InstanceBuilder& InstanceBuilder::set_headless(bool headless) {
    info.headless_context = headless;
    return *this;
}
// Replaces the debug messenger severity mask with `severity`.
InstanceBuilder& InstanceBuilder::set_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity) {
    info.debug_message_severity = severity;
    return *this;
}
  738. InstanceBuilder& InstanceBuilder::add_debug_messenger_severity(VkDebugUtilsMessageSeverityFlagsEXT severity) {
  739. info.debug_message_severity = info.debug_message_severity | severity;
  740. return *this;
  741. }
// Replaces the debug messenger message-type mask with `type`.
InstanceBuilder& InstanceBuilder::set_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type) {
    info.debug_message_type = type;
    return *this;
}
  746. InstanceBuilder& InstanceBuilder::add_debug_messenger_type(VkDebugUtilsMessageTypeFlagsEXT type) {
  747. info.debug_message_type = info.debug_message_type | type;
  748. return *this;
  749. }
// Disables an individual validation check (VK_EXT_validation_flags).
InstanceBuilder& InstanceBuilder::add_validation_disable(VkValidationCheckEXT check) {
    info.disabled_validation_checks.push_back(check);
    return *this;
}
// Enables an individual validation feature (VK_EXT_validation_features).
InstanceBuilder& InstanceBuilder::add_validation_feature_enable(VkValidationFeatureEnableEXT enable) {
    info.enabled_validation_features.push_back(enable);
    return *this;
}
// Disables an individual validation feature (VK_EXT_validation_features).
InstanceBuilder& InstanceBuilder::add_validation_feature_disable(VkValidationFeatureDisableEXT disable) {
    info.disabled_validation_features.push_back(disable);
    return *this;
}
// Sets the VkAllocationCallbacks used for instance creation/destruction.
// The pointer must outlive the built Instance.
InstanceBuilder& InstanceBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) {
    info.allocation_callbacks = callbacks;
    return *this;
}
  766. void destroy_debug_messenger(VkInstance const instance, VkDebugUtilsMessengerEXT const messenger);
  767. // ---- Physical Device ---- //
  768. namespace detail {
  769. std::vector<std::string> check_device_extension_support(
  770. std::vector<std::string> const& available_extensions, std::vector<std::string> const& desired_extensions) {
  771. std::vector<std::string> extensions_to_enable;
  772. for (const auto& avail_ext : available_extensions) {
  773. for (auto& req_ext : desired_extensions) {
  774. if (avail_ext == req_ext) {
  775. extensions_to_enable.push_back(req_ext);
  776. break;
  777. }
  778. }
  779. }
  780. return extensions_to_enable;
  781. }
  782. // clang-format off
  783. bool supports_features(VkPhysicalDeviceFeatures supported,
  784. VkPhysicalDeviceFeatures requested,
  785. std::vector<GenericFeaturesPNextNode> const& extension_supported,
  786. std::vector<GenericFeaturesPNextNode> const& extension_requested) {
  787. if (requested.robustBufferAccess && !supported.robustBufferAccess) return false;
  788. if (requested.fullDrawIndexUint32 && !supported.fullDrawIndexUint32) return false;
  789. if (requested.imageCubeArray && !supported.imageCubeArray) return false;
  790. if (requested.independentBlend && !supported.independentBlend) return false;
  791. if (requested.geometryShader && !supported.geometryShader) return false;
  792. if (requested.tessellationShader && !supported.tessellationShader) return false;
  793. if (requested.sampleRateShading && !supported.sampleRateShading) return false;
  794. if (requested.dualSrcBlend && !supported.dualSrcBlend) return false;
  795. if (requested.logicOp && !supported.logicOp) return false;
  796. if (requested.multiDrawIndirect && !supported.multiDrawIndirect) return false;
  797. if (requested.drawIndirectFirstInstance && !supported.drawIndirectFirstInstance) return false;
  798. if (requested.depthClamp && !supported.depthClamp) return false;
  799. if (requested.depthBiasClamp && !supported.depthBiasClamp) return false;
  800. if (requested.fillModeNonSolid && !supported.fillModeNonSolid) return false;
  801. if (requested.depthBounds && !supported.depthBounds) return false;
  802. if (requested.wideLines && !supported.wideLines) return false;
  803. if (requested.largePoints && !supported.largePoints) return false;
  804. if (requested.alphaToOne && !supported.alphaToOne) return false;
  805. if (requested.multiViewport && !supported.multiViewport) return false;
  806. if (requested.samplerAnisotropy && !supported.samplerAnisotropy) return false;
  807. if (requested.textureCompressionETC2 && !supported.textureCompressionETC2) return false;
  808. if (requested.textureCompressionASTC_LDR && !supported.textureCompressionASTC_LDR) return false;
  809. if (requested.textureCompressionBC && !supported.textureCompressionBC) return false;
  810. if (requested.occlusionQueryPrecise && !supported.occlusionQueryPrecise) return false;
  811. if (requested.pipelineStatisticsQuery && !supported.pipelineStatisticsQuery) return false;
  812. if (requested.vertexPipelineStoresAndAtomics && !supported.vertexPipelineStoresAndAtomics) return false;
  813. if (requested.fragmentStoresAndAtomics && !supported.fragmentStoresAndAtomics) return false;
  814. if (requested.shaderTessellationAndGeometryPointSize && !supported.shaderTessellationAndGeometryPointSize) return false;
  815. if (requested.shaderImageGatherExtended && !supported.shaderImageGatherExtended) return false;
  816. if (requested.shaderStorageImageExtendedFormats && !supported.shaderStorageImageExtendedFormats) return false;
  817. if (requested.shaderStorageImageMultisample && !supported.shaderStorageImageMultisample) return false;
  818. if (requested.shaderStorageImageReadWithoutFormat && !supported.shaderStorageImageReadWithoutFormat) return false;
  819. if (requested.shaderStorageImageWriteWithoutFormat && !supported.shaderStorageImageWriteWithoutFormat) return false;
  820. if (requested.shaderUniformBufferArrayDynamicIndexing && !supported.shaderUniformBufferArrayDynamicIndexing) return false;
  821. if (requested.shaderSampledImageArrayDynamicIndexing && !supported.shaderSampledImageArrayDynamicIndexing) return false;
  822. if (requested.shaderStorageBufferArrayDynamicIndexing && !supported.shaderStorageBufferArrayDynamicIndexing) return false;
  823. if (requested.shaderStorageImageArrayDynamicIndexing && !supported.shaderStorageImageArrayDynamicIndexing) return false;
  824. if (requested.shaderClipDistance && !supported.shaderClipDistance) return false;
  825. if (requested.shaderCullDistance && !supported.shaderCullDistance) return false;
  826. if (requested.shaderFloat64 && !supported.shaderFloat64) return false;
  827. if (requested.shaderInt64 && !supported.shaderInt64) return false;
  828. if (requested.shaderInt16 && !supported.shaderInt16) return false;
  829. if (requested.shaderResourceResidency && !supported.shaderResourceResidency) return false;
  830. if (requested.shaderResourceMinLod && !supported.shaderResourceMinLod) return false;
  831. if (requested.sparseBinding && !supported.sparseBinding) return false;
  832. if (requested.sparseResidencyBuffer && !supported.sparseResidencyBuffer) return false;
  833. if (requested.sparseResidencyImage2D && !supported.sparseResidencyImage2D) return false;
  834. if (requested.sparseResidencyImage3D && !supported.sparseResidencyImage3D) return false;
  835. if (requested.sparseResidency2Samples && !supported.sparseResidency2Samples) return false;
  836. if (requested.sparseResidency4Samples && !supported.sparseResidency4Samples) return false;
  837. if (requested.sparseResidency8Samples && !supported.sparseResidency8Samples) return false;
  838. if (requested.sparseResidency16Samples && !supported.sparseResidency16Samples) return false;
  839. if (requested.sparseResidencyAliased && !supported.sparseResidencyAliased) return false;
  840. if (requested.variableMultisampleRate && !supported.variableMultisampleRate) return false;
  841. if (requested.inheritedQueries && !supported.inheritedQueries) return false;
  842. for(size_t i = 0; i < extension_requested.size(); ++i) {
  843. auto res = GenericFeaturesPNextNode::match(extension_requested[i], extension_supported[i]);
  844. if(!res) return false;
  845. }
  846. return true;
  847. }
  848. // clang-format on
  849. // Finds the first queue which supports the desired operations. Returns QUEUE_INDEX_MAX_VALUE if none is found
  850. uint32_t get_first_queue_index(std::vector<VkQueueFamilyProperties> const& families, VkQueueFlags desired_flags) {
  851. for (uint32_t i = 0; i < static_cast<uint32_t>(families.size()); i++) {
  852. if ((families[i].queueFlags & desired_flags) == desired_flags) return i;
  853. }
  854. return QUEUE_INDEX_MAX_VALUE;
  855. }
  856. // Finds the queue which is separate from the graphics queue and has the desired flag and not the
  857. // undesired flag, but will select it if no better options are available compute support. Returns
  858. // QUEUE_INDEX_MAX_VALUE if none is found.
  859. uint32_t get_separate_queue_index(
  860. std::vector<VkQueueFamilyProperties> const& families, VkQueueFlags desired_flags, VkQueueFlags undesired_flags) {
  861. uint32_t index = QUEUE_INDEX_MAX_VALUE;
  862. for (uint32_t i = 0; i < static_cast<uint32_t>(families.size()); i++) {
  863. if ((families[i].queueFlags & desired_flags) == desired_flags && ((families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0)) {
  864. if ((families[i].queueFlags & undesired_flags) == 0) {
  865. return i;
  866. } else {
  867. index = i;
  868. }
  869. }
  870. }
  871. return index;
  872. }
  873. // finds the first queue which supports only the desired flag (not graphics or transfer). Returns QUEUE_INDEX_MAX_VALUE if none is found.
  874. uint32_t get_dedicated_queue_index(
  875. std::vector<VkQueueFamilyProperties> const& families, VkQueueFlags desired_flags, VkQueueFlags undesired_flags) {
  876. for (uint32_t i = 0; i < static_cast<uint32_t>(families.size()); i++) {
  877. if ((families[i].queueFlags & desired_flags) == desired_flags &&
  878. (families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0 && (families[i].queueFlags & undesired_flags) == 0)
  879. return i;
  880. }
  881. return QUEUE_INDEX_MAX_VALUE;
  882. }
  883. // finds the first queue which supports presenting. returns QUEUE_INDEX_MAX_VALUE if none is found
  884. uint32_t get_present_queue_index(
  885. VkPhysicalDevice const phys_device, VkSurfaceKHR const surface, std::vector<VkQueueFamilyProperties> const& families) {
  886. for (uint32_t i = 0; i < static_cast<uint32_t>(families.size()); i++) {
  887. VkBool32 presentSupport = false;
  888. if (surface != VK_NULL_HANDLE) {
  889. VkResult res = detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceSupportKHR(phys_device, i, surface, &presentSupport);
  890. if (res != VK_SUCCESS) return QUEUE_INDEX_MAX_VALUE; // TODO: determine if this should fail another way
  891. }
  892. if (presentSupport == VK_TRUE) return i;
  893. }
  894. return QUEUE_INDEX_MAX_VALUE;
  895. }
  896. } // namespace detail
// Builds a PhysicalDevice record for `vk_phys_device`: queue families, core
// properties/features/memory info, device extension names, and — when
// queryable — the extended feature structs in `src_extended_features_chain`
// filled in with the device's actual support.
PhysicalDevice PhysicalDeviceSelector::populate_device_details(VkPhysicalDevice vk_phys_device,
    std::vector<detail::GenericFeaturesPNextNode> const& src_extended_features_chain) const {
    PhysicalDevice physical_device{};
    physical_device.physical_device = vk_phys_device;
    physical_device.surface = instance_info.surface;
    physical_device.defer_surface_initialization = criteria.defer_surface_initialization;
    physical_device.instance_version = instance_info.version;
    auto queue_families = detail::get_vector_noerror<VkQueueFamilyProperties>(
        detail::vulkan_functions().fp_vkGetPhysicalDeviceQueueFamilyProperties, vk_phys_device);
    physical_device.queue_families = queue_families;

    detail::vulkan_functions().fp_vkGetPhysicalDeviceProperties(vk_phys_device, &physical_device.properties);
    detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures(vk_phys_device, &physical_device.features);
    detail::vulkan_functions().fp_vkGetPhysicalDeviceMemoryProperties(vk_phys_device, &physical_device.memory_properties);

    physical_device.name = physical_device.properties.deviceName;

    // On enumeration failure the record is returned without extension names
    // (and therefore without extended features) rather than reporting an error.
    std::vector<VkExtensionProperties> available_extensions;
    auto available_extensions_ret = detail::get_vector<VkExtensionProperties>(
        available_extensions, detail::vulkan_functions().fp_vkEnumerateDeviceExtensionProperties, vk_phys_device, nullptr);
    if (available_extensions_ret != VK_SUCCESS) return physical_device;
    for (const auto& ext : available_extensions) {
        physical_device.extensions.push_back(&ext.extensionName[0]);
    }

    physical_device.features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; // same value as the non-KHR version
    physical_device.properties2_ext_enabled = instance_info.properties2_ext_enabled;

    // Query extended feature support: the requested chain is copied locally,
    // its nodes linked via pNext, and passed to vkGetPhysicalDeviceFeatures2
    // so the driver fills in each struct's support booleans. Requires either
    // instance API 1.1+ (core entry point) or the properties2 extension (KHR).
    auto fill_chain = src_extended_features_chain;

    bool instance_is_1_1 = instance_info.version >= VKB_VK_API_VERSION_1_1;
    if (!fill_chain.empty() && (instance_is_1_1 || instance_info.properties2_ext_enabled)) {

        detail::GenericFeaturesPNextNode* prev = nullptr;
        for (auto& extension : fill_chain) {
            if (prev != nullptr) {
                prev->pNext = &extension;
            }
            prev = &extension;
        }

        VkPhysicalDeviceFeatures2 local_features{};
        local_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; // KHR is same as core here
        local_features.pNext = &fill_chain.front();
        // Use KHR function if not able to use the core function
        if (instance_is_1_1) {
            detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2(vk_phys_device, &local_features);
        } else {
            detail::vulkan_functions().fp_vkGetPhysicalDeviceFeatures2KHR(vk_phys_device, &local_features);
        }

        // NOTE(review): this copy carries pNext pointers that still point into
        // the local fill_chain vector, which dies with this function —
        // presumably only the nodes' field arrays are read afterwards (via
        // GenericFeaturesPNextNode::match); confirm before dereferencing pNext.
        physical_device.extended_features_chain = fill_chain;
    }

    return physical_device;
}
// Grades `pd` against the selection criteria: returns `no` for any hard
// failure, downgrades to `partial` for soft mismatches (desired version,
// desired extensions, device type, desired memory size), otherwise `yes`.
PhysicalDevice::Suitable PhysicalDeviceSelector::is_device_suitable(PhysicalDevice const& pd) const {
    PhysicalDevice::Suitable suitable = PhysicalDevice::Suitable::yes;

    // Exact-name filter (only when a name was requested) and version gates.
    if (criteria.name.size() > 0 && criteria.name != pd.properties.deviceName) return PhysicalDevice::Suitable::no;

    if (criteria.required_version > pd.properties.apiVersion) return PhysicalDevice::Suitable::no;
    if (criteria.desired_version > pd.properties.apiVersion) suitable = PhysicalDevice::Suitable::partial;

    // Probe all queue-capability arrangements once, then apply the
    // corresponding requirement flags.
    bool dedicated_compute = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) !=
                             detail::QUEUE_INDEX_MAX_VALUE;
    bool dedicated_transfer = detail::get_dedicated_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) !=
                              detail::QUEUE_INDEX_MAX_VALUE;
    bool separate_compute = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) !=
                            detail::QUEUE_INDEX_MAX_VALUE;
    bool separate_transfer = detail::get_separate_queue_index(pd.queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) !=
                             detail::QUEUE_INDEX_MAX_VALUE;

    bool present_queue = detail::get_present_queue_index(pd.physical_device, instance_info.surface, pd.queue_families) !=
                         detail::QUEUE_INDEX_MAX_VALUE;

    if (criteria.require_dedicated_compute_queue && !dedicated_compute) return PhysicalDevice::Suitable::no;
    if (criteria.require_dedicated_transfer_queue && !dedicated_transfer) return PhysicalDevice::Suitable::no;
    if (criteria.require_separate_compute_queue && !separate_compute) return PhysicalDevice::Suitable::no;
    if (criteria.require_separate_transfer_queue && !separate_transfer) return PhysicalDevice::Suitable::no;
    // Present support is not checked when surface initialization is deferred.
    if (criteria.require_present && !present_queue && !criteria.defer_surface_initialization)
        return PhysicalDevice::Suitable::no;

    // Required extensions are hard failures; desired ones only soften the grade.
    auto required_extensions_supported = detail::check_device_extension_support(pd.extensions, criteria.required_extensions);
    if (required_extensions_supported.size() != criteria.required_extensions.size())
        return PhysicalDevice::Suitable::no;

    auto desired_extensions_supported = detail::check_device_extension_support(pd.extensions, criteria.desired_extensions);
    if (desired_extensions_supported.size() != criteria.desired_extensions.size())
        suitable = PhysicalDevice::Suitable::partial;

    // A presentable device must expose at least one surface format and one
    // present mode for the selection surface.
    if (!criteria.defer_surface_initialization && criteria.require_present) {
        std::vector<VkSurfaceFormatKHR> formats;
        std::vector<VkPresentModeKHR> present_modes;

        auto formats_ret = detail::get_vector<VkSurfaceFormatKHR>(formats,
            detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceFormatsKHR,
            pd.physical_device,
            instance_info.surface);
        auto present_modes_ret = detail::get_vector<VkPresentModeKHR>(present_modes,
            detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfacePresentModesKHR,
            pd.physical_device,
            instance_info.surface);

        if (formats_ret != VK_SUCCESS || present_modes_ret != VK_SUCCESS || formats.empty() || present_modes.empty()) {
            return PhysicalDevice::Suitable::no;
        }
    }

    // Non-preferred device type is only a soft mismatch (unless any type is allowed).
    if (!criteria.allow_any_type && pd.properties.deviceType != static_cast<VkPhysicalDeviceType>(criteria.preferred_type)) {
        suitable = PhysicalDevice::Suitable::partial;
    }

    bool required_features_supported = detail::supports_features(
        pd.features, criteria.required_features, pd.extended_features_chain, criteria.extended_features_chain);
    if (!required_features_supported) return PhysicalDevice::Suitable::no;

    // Memory check only considers device-local heaps: required size is a hard
    // failure, desired size a soft one.
    for (uint32_t i = 0; i < pd.memory_properties.memoryHeapCount; i++) {
        if (pd.memory_properties.memoryHeaps[i].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) {
            if (pd.memory_properties.memoryHeaps[i].size < criteria.required_mem_size) {
                return PhysicalDevice::Suitable::no;
            } else if (pd.memory_properties.memoryHeaps[i].size < criteria.desired_mem_size) {
                suitable = PhysicalDevice::Suitable::partial;
            }
        }
    }

    return suitable;
}
  1002. // delegate construction to the one with an explicit surface parameter
// Surface-less selector; delegates with VK_NULL_HANDLE as the surface.
PhysicalDeviceSelector::PhysicalDeviceSelector(Instance const& instance)
: PhysicalDeviceSelector(instance, VK_NULL_HANDLE) {}
// Seeds the selector from the Instance: a headless instance disables the
// present requirement, and both required and desired device versions default
// to the instance's API version.
PhysicalDeviceSelector::PhysicalDeviceSelector(Instance const& instance, VkSurfaceKHR surface) {
    instance_info.instance = instance.instance;
    instance_info.version = instance.instance_version;
    instance_info.properties2_ext_enabled = instance.properties2_ext_enabled;
    instance_info.surface = surface;
    criteria.require_present = !instance.headless;
    criteria.required_version = instance.api_version;
    criteria.desired_version = instance.api_version;
}
// Core selection routine shared by select(), select_devices() and
// select_device_names(): enumerates every VkPhysicalDevice, populates its
// details, filters by the configured criteria, and returns the survivors with
// fully-suitable devices ordered before partially-suitable ones.
Result<std::vector<PhysicalDevice>> PhysicalDeviceSelector::select_impl(DeviceSelectionMode selection) const {
#if !defined(NDEBUG)
    // Validation
    for (const auto& node : criteria.extended_features_chain) {
        assert(node.sType != static_cast<VkStructureType>(0) &&
               "Features struct sType must be filled with the struct's "
               "corresponding VkStructureType enum");
        assert(node.sType != VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 &&
               "Do not pass VkPhysicalDeviceFeatures2 as a required extension feature structure. An "
               "instance of this is managed internally for selection criteria and device creation.");
    }
#endif
    // Presentation needs a surface unless the caller deferred surface creation.
    if (criteria.require_present && !criteria.defer_surface_initialization) {
        if (instance_info.surface == VK_NULL_HANDLE)
            return Result<std::vector<PhysicalDevice>>{ PhysicalDeviceError::no_surface_provided };
    }
    // Get the VkPhysicalDevice handles on the system
    std::vector<VkPhysicalDevice> vk_physical_devices;
    auto vk_physical_devices_ret = detail::get_vector<VkPhysicalDevice>(
        vk_physical_devices, detail::vulkan_functions().fp_vkEnumeratePhysicalDevices, instance_info.instance);
    if (vk_physical_devices_ret != VK_SUCCESS) {
        return Result<std::vector<PhysicalDevice>>{ PhysicalDeviceError::failed_enumerate_physical_devices, vk_physical_devices_ret };
    }
    if (vk_physical_devices.size() == 0) {
        return Result<std::vector<PhysicalDevice>>{ PhysicalDeviceError::no_physical_devices_found };
    }
    // Rewrites the device's feature/extension lists to exactly what will be
    // enabled at device creation: required features, required extensions, the
    // supported subset of desired extensions, and (when requested and
    // available) VK_KHR_portability_subset.
    auto fill_out_phys_dev_with_criteria = [&](PhysicalDevice& phys_dev) {
        phys_dev.features = criteria.required_features;
        phys_dev.extended_features_chain = criteria.extended_features_chain;
        bool portability_ext_available = false;
        for (const auto& ext : phys_dev.extensions)
            if (criteria.enable_portability_subset && ext == "VK_KHR_portability_subset")
                portability_ext_available = true;
        auto desired_extensions_supported = detail::check_device_extension_support(phys_dev.extensions, criteria.desired_extensions);
        phys_dev.extensions.clear();
        phys_dev.extensions.insert(
            phys_dev.extensions.end(), criteria.required_extensions.begin(), criteria.required_extensions.end());
        phys_dev.extensions.insert(
            phys_dev.extensions.end(), desired_extensions_supported.begin(), desired_extensions_supported.end());
        if (portability_ext_available) {
            phys_dev.extensions.push_back("VK_KHR_portability_subset");
        }
    };
    // if this option is set, always return only the first physical device found
    if (criteria.use_first_gpu_unconditionally && vk_physical_devices.size() > 0) {
        PhysicalDevice physical_device = populate_device_details(vk_physical_devices[0], criteria.extended_features_chain);
        fill_out_phys_dev_with_criteria(physical_device);
        return std::vector<PhysicalDevice>{ physical_device };
    }
    // Populate their details and check their suitability
    std::vector<PhysicalDevice> physical_devices;
    for (auto& vk_physical_device : vk_physical_devices) {
        PhysicalDevice phys_dev = populate_device_details(vk_physical_device, criteria.extended_features_chain);
        phys_dev.suitable = is_device_suitable(phys_dev);
        if (phys_dev.suitable != PhysicalDevice::Suitable::no) {
            physical_devices.push_back(phys_dev);
        }
    }
    // sort the list into fully and partially suitable devices. use stable_partition to maintain relative order
    const auto partition_index = std::stable_partition(physical_devices.begin(), physical_devices.end(), [](auto const& pd) {
        return pd.suitable == PhysicalDevice::Suitable::yes;
    });
    // Remove the partially suitable elements if they aren't desired
    if (selection == DeviceSelectionMode::only_fully_suitable) {
        physical_devices.erase(partition_index, physical_devices.end());
    }
    // Make the physical device ready to be used to create a Device from it
    for (auto& physical_device : physical_devices) {
        fill_out_phys_dev_with_criteria(physical_device);
    }
    return physical_devices;
}
  1086. Result<PhysicalDevice> PhysicalDeviceSelector::select(DeviceSelectionMode selection) const {
  1087. auto const selected_devices = select_impl(selection);
  1088. if (!selected_devices) return Result<PhysicalDevice>{ selected_devices.error() };
  1089. if (selected_devices.value().size() == 0) {
  1090. return Result<PhysicalDevice>{ PhysicalDeviceError::no_suitable_device };
  1091. }
  1092. return selected_devices.value().at(0);
  1093. }
  1094. // Return all devices which are considered suitable - intended for applications which want to let the user pick the physical device
  1095. Result<std::vector<PhysicalDevice>> PhysicalDeviceSelector::select_devices(DeviceSelectionMode selection) const {
  1096. auto const selected_devices = select_impl(selection);
  1097. if (!selected_devices) return Result<std::vector<PhysicalDevice>>{ selected_devices.error() };
  1098. if (selected_devices.value().size() == 0) {
  1099. return Result<std::vector<PhysicalDevice>>{ PhysicalDeviceError::no_suitable_device };
  1100. }
  1101. return selected_devices.value();
  1102. }
  1103. Result<std::vector<std::string>> PhysicalDeviceSelector::select_device_names(DeviceSelectionMode selection) const {
  1104. auto const selected_devices = select_impl(selection);
  1105. if (!selected_devices) return Result<std::vector<std::string>>{ selected_devices.error() };
  1106. if (selected_devices.value().size() == 0) {
  1107. return Result<std::vector<std::string>>{ PhysicalDeviceError::no_suitable_device };
  1108. }
  1109. std::vector<std::string> names;
  1110. for (const auto& pd : selected_devices.value()) {
  1111. names.push_back(pd.name);
  1112. }
  1113. return names;
  1114. }
// Surface used for presentation-support queries during selection.
PhysicalDeviceSelector& PhysicalDeviceSelector::set_surface(VkSurfaceKHR surface) {
    instance_info.surface = surface;
    return *this;
}
// Device name to select by.
PhysicalDeviceSelector& PhysicalDeviceSelector::set_name(std::string const& name) {
    criteria.name = name;
    return *this;
}
// Preferred device type. In is_device_suitable, a device whose type does not
// match is marked only partially suitable (unless allow_any_type is set).
PhysicalDeviceSelector& PhysicalDeviceSelector::prefer_gpu_device_type(PreferredDeviceType type) {
    criteria.preferred_type = type;
    return *this;
}
// When true, skips the preferred-type check entirely.
PhysicalDeviceSelector& PhysicalDeviceSelector::allow_any_gpu_device_type(bool allow_any_type) {
    criteria.allow_any_type = allow_any_type;
    return *this;
}
// Whether the device must support presenting to the surface.
PhysicalDeviceSelector& PhysicalDeviceSelector::require_present(bool require) {
    criteria.require_present = require;
    return *this;
}
// Require a dedicated transfer queue family (see detail::get_dedicated_queue_index).
PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_transfer_queue() {
    criteria.require_dedicated_transfer_queue = true;
    return *this;
}
// Require a dedicated compute queue family (see detail::get_dedicated_queue_index).
PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_compute_queue() {
    criteria.require_dedicated_compute_queue = true;
    return *this;
}
// Require a separate transfer queue family (see detail::get_separate_queue_index).
PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_transfer_queue() {
    criteria.require_separate_transfer_queue = true;
    return *this;
}
// Require a separate compute queue family (see detail::get_separate_queue_index).
PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_compute_queue() {
    criteria.require_separate_compute_queue = true;
    return *this;
}
// Device-local heap size below which a device is rejected outright.
PhysicalDeviceSelector& PhysicalDeviceSelector::required_device_memory_size(VkDeviceSize size) {
    criteria.required_mem_size = size;
    return *this;
}
// Device-local heap size below which a device is only partially suitable.
PhysicalDeviceSelector& PhysicalDeviceSelector::desired_device_memory_size(VkDeviceSize size) {
    criteria.desired_mem_size = size;
    return *this;
}
  1159. PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extension(const char* extension) {
  1160. criteria.required_extensions.push_back(extension);
  1161. return *this;
  1162. }
  1163. PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extensions(std::vector<const char*> extensions) {
  1164. for (const auto& ext : extensions) {
  1165. criteria.required_extensions.push_back(ext);
  1166. }
  1167. return *this;
  1168. }
  1169. PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extension(const char* extension) {
  1170. criteria.desired_extensions.push_back(extension);
  1171. return *this;
  1172. }
  1173. PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extensions(std::vector<const char*> extensions) {
  1174. for (const auto& ext : extensions) {
  1175. criteria.desired_extensions.push_back(ext);
  1176. }
  1177. return *this;
  1178. }
// Minimum Vulkan API version a device must support (variant and patch are zero).
PhysicalDeviceSelector& PhysicalDeviceSelector::set_minimum_version(uint32_t major, uint32_t minor) {
    criteria.required_version = VKB_MAKE_VK_VERSION(0, major, minor, 0);
    return *this;
}
// Vulkan API version the selector prefers but does not require.
PhysicalDeviceSelector& PhysicalDeviceSelector::set_desired_version(uint32_t major, uint32_t minor) {
    criteria.desired_version = VKB_MAKE_VK_VERSION(0, major, minor, 0);
    return *this;
}
// Opt out of automatically enabling VK_KHR_portability_subset when available.
PhysicalDeviceSelector& PhysicalDeviceSelector::disable_portability_subset() {
    criteria.enable_portability_subset = false;
    return *this;
}
// Core VkPhysicalDeviceFeatures the device must support; also carried into
// the selected PhysicalDevice for device creation.
PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features(VkPhysicalDeviceFeatures const& features) {
    criteria.required_features = features;
    return *this;
}
#if defined(VKB_VK_API_VERSION_1_2)
// Just calls add_required_features
// Requires the given Vulkan 1.1 feature set; the sType is filled in here so
// callers do not have to.
PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features_11(VkPhysicalDeviceVulkan11Features features_11) {
    features_11.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
    add_required_extension_features(features_11);
    return *this;
}
// Requires the given Vulkan 1.2 feature set; the sType is filled in here.
PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features_12(VkPhysicalDeviceVulkan12Features features_12) {
    features_12.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
    add_required_extension_features(features_12);
    return *this;
}
#endif
#if defined(VKB_VK_API_VERSION_1_3)
// Requires the given Vulkan 1.3 feature set; the sType is filled in here.
PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features_13(VkPhysicalDeviceVulkan13Features features_13) {
    features_13.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES;
    add_required_extension_features(features_13);
    return *this;
}
#endif
// Allow selection without a surface; device creation still enables the
// swapchain extension (see DeviceBuilder::build).
PhysicalDeviceSelector& PhysicalDeviceSelector::defer_surface_initialization() {
    criteria.defer_surface_initialization = true;
    return *this;
}
// When true, select_impl returns the first enumerated device, skipping the
// suitability filtering entirely.
PhysicalDeviceSelector& PhysicalDeviceSelector::select_first_device_unconditionally(bool unconditionally) {
    criteria.use_first_gpu_unconditionally = unconditionally;
    return *this;
}
// PhysicalDevice
// Each predicate below reports whether the corresponding queue-family lookup
// in detail:: succeeds (i.e. does not return QUEUE_INDEX_MAX_VALUE).
bool PhysicalDevice::has_dedicated_compute_queue() const {
    return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != detail::QUEUE_INDEX_MAX_VALUE;
}
bool PhysicalDevice::has_separate_compute_queue() const {
    return detail::get_separate_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT) != detail::QUEUE_INDEX_MAX_VALUE;
}
bool PhysicalDevice::has_dedicated_transfer_queue() const {
    return detail::get_dedicated_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != detail::QUEUE_INDEX_MAX_VALUE;
}
bool PhysicalDevice::has_separate_transfer_queue() const {
    return detail::get_separate_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT) != detail::QUEUE_INDEX_MAX_VALUE;
}
// Copies of the cached queue-family properties and extension-name list.
std::vector<VkQueueFamilyProperties> PhysicalDevice::get_queue_families() const { return queue_families; }
std::vector<std::string> PhysicalDevice::get_extensions() const { return extensions; }
// Implicit conversion to the underlying Vulkan handle.
PhysicalDevice::operator VkPhysicalDevice() const { return this->physical_device; }
// ---- Queues ---- //
// Returns the queue-family index for the requested queue type:
//  - present:  family able to present to this device's surface
//  - graphics: first family with VK_QUEUE_GRAPHICS_BIT
//  - compute / transfer: a family found by detail::get_separate_queue_index
// Returns the matching QueueError when no such family exists, or
// invalid_queue_family_index for an unknown type.
Result<uint32_t> Device::get_queue_index(QueueType type) const {
    uint32_t index = detail::QUEUE_INDEX_MAX_VALUE;
    switch (type) {
        case QueueType::present:
            // Present support is checked against the surface stored on this Device.
            index = detail::get_present_queue_index(physical_device.physical_device, surface, queue_families);
            if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result<uint32_t>{ QueueError::present_unavailable };
            break;
        case QueueType::graphics:
            index = detail::get_first_queue_index(queue_families, VK_QUEUE_GRAPHICS_BIT);
            if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result<uint32_t>{ QueueError::graphics_unavailable };
            break;
        case QueueType::compute:
            index = detail::get_separate_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT);
            if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result<uint32_t>{ QueueError::compute_unavailable };
            break;
        case QueueType::transfer:
            index = detail::get_separate_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT);
            if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result<uint32_t>{ QueueError::transfer_unavailable };
            break;
        default:
            return Result<uint32_t>{ QueueError::invalid_queue_family_index };
    }
    return index;
}
// Like get_queue_index, but only compute and transfer support a dedicated
// lookup (via detail::get_dedicated_queue_index); any other type yields
// invalid_queue_family_index.
Result<uint32_t> Device::get_dedicated_queue_index(QueueType type) const {
    uint32_t index = detail::QUEUE_INDEX_MAX_VALUE;
    switch (type) {
        case QueueType::compute:
            index = detail::get_dedicated_queue_index(queue_families, VK_QUEUE_COMPUTE_BIT, VK_QUEUE_TRANSFER_BIT);
            if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result<uint32_t>{ QueueError::compute_unavailable };
            break;
        case QueueType::transfer:
            index = detail::get_dedicated_queue_index(queue_families, VK_QUEUE_TRANSFER_BIT, VK_QUEUE_COMPUTE_BIT);
            if (index == detail::QUEUE_INDEX_MAX_VALUE) return Result<uint32_t>{ QueueError::transfer_unavailable };
            break;
        default:
            return Result<uint32_t>{ QueueError::invalid_queue_family_index };
    }
    return index;
}
  1280. Result<VkQueue> Device::get_queue(QueueType type) const {
  1281. auto index = get_queue_index(type);
  1282. if (!index.has_value()) return { index.error() };
  1283. VkQueue out_queue;
  1284. internal_table.fp_vkGetDeviceQueue(device, index.value(), 0, &out_queue);
  1285. return out_queue;
  1286. }
  1287. Result<VkQueue> Device::get_dedicated_queue(QueueType type) const {
  1288. auto index = get_dedicated_queue_index(type);
  1289. if (!index.has_value()) return { index.error() };
  1290. VkQueue out_queue;
  1291. internal_table.fp_vkGetDeviceQueue(device, index.value(), 0, &out_queue);
  1292. return out_queue;
  1293. }
// ---- Dispatch ---- //
// Builds a device-level dispatch table from this device and its vkGetDeviceProcAddr.
DispatchTable Device::make_table() const { return { device, fp_vkGetDeviceProcAddr }; }
// ---- Device ---- //
// Implicit conversion to the underlying Vulkan handle.
Device::operator VkDevice() const { return this->device; }
// Describes one queue-family request: family index, queue count, and one
// priority per queue (count must equal priorities.size()).
CustomQueueDescription::CustomQueueDescription(uint32_t index, uint32_t count, std::vector<float> priorities)
    : index(index), count(count), priorities(priorities) {
    assert(count == priorities.size());
}
// Destroys the VkDevice using the allocation callbacks stored on the Device.
void destroy_device(Device device) {
    device.internal_table.fp_vkDestroyDevice(device.device, device.allocation_callbacks);
}
// A DeviceBuilder starts from an already-selected PhysicalDevice.
DeviceBuilder::DeviceBuilder(PhysicalDevice phys_device) { physical_device = phys_device; }
// Creates the VkDevice: builds queue create-infos (one priority-1.0 queue per
// family unless custom descriptions were supplied), gathers the extension
// list, assembles the feature pNext chain, and loads the device-level
// function pointers the returned Device wrapper needs.
Result<Device> DeviceBuilder::build() const {
    std::vector<CustomQueueDescription> queue_descriptions;
    queue_descriptions.insert(queue_descriptions.end(), info.queue_descriptions.begin(), info.queue_descriptions.end());
    // Default: request a single queue at priority 1.0 from every family.
    if (queue_descriptions.size() == 0) {
        for (uint32_t i = 0; i < physical_device.queue_families.size(); i++) {
            queue_descriptions.push_back(CustomQueueDescription{ i, 1, std::vector<float>{ 1.0f } });
        }
    }
    std::vector<VkDeviceQueueCreateInfo> queueCreateInfos;
    for (auto& desc : queue_descriptions) {
        VkDeviceQueueCreateInfo queue_create_info = {};
        queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queue_create_info.queueFamilyIndex = desc.index;
        queue_create_info.queueCount = desc.count;
        queue_create_info.pQueuePriorities = desc.priorities.data();
        queueCreateInfos.push_back(queue_create_info);
    }
    std::vector<const char*> extensions;
    for (const auto& ext : physical_device.extensions) {
        extensions.push_back(ext.c_str());
    }
    // The swapchain extension is added whenever a surface is (or will be) used.
    if (physical_device.surface != VK_NULL_HANDLE || physical_device.defer_surface_initialization)
        extensions.push_back({ VK_KHR_SWAPCHAIN_EXTENSION_NAME });
    std::vector<VkBaseOutStructure*> final_pnext_chain;
    VkDeviceCreateInfo device_create_info = {};
    // Detect a caller-supplied VkPhysicalDeviceFeatures2 in the pNext chain;
    // mixing it with add_required_extension_features is rejected below.
    bool user_defined_phys_dev_features_2 = false;
    for (auto& pnext : info.pNext_chain) {
        if (pnext->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2) {
            user_defined_phys_dev_features_2 = true;
            break;
        }
    }
    if (user_defined_phys_dev_features_2 && physical_device.extended_features_chain.size() > 0) {
        return { DeviceError::VkPhysicalDeviceFeatures2_in_pNext_chain_while_using_add_required_extension_features };
    }
    // These objects must be alive during the call to vkCreateDevice
    auto physical_device_extension_features_copy = physical_device.extended_features_chain;
    VkPhysicalDeviceFeatures2 local_features2{};
    local_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    if (!user_defined_phys_dev_features_2) {
        if (physical_device.instance_version >= VKB_VK_API_VERSION_1_1 || physical_device.properties2_ext_enabled) {
            // Features go through VkPhysicalDeviceFeatures2 plus the extended
            // features chain when the features2 path is available.
            local_features2.features = physical_device.features;
            final_pnext_chain.push_back(reinterpret_cast<VkBaseOutStructure*>(&local_features2));
            for (auto& features_node : physical_device_extension_features_copy) {
                final_pnext_chain.push_back(reinterpret_cast<VkBaseOutStructure*>(&features_node));
            }
        } else {
            // Only set device_create_info.pEnabledFeatures when the pNext chain does not contain a VkPhysicalDeviceFeatures2 structure
            device_create_info.pEnabledFeatures = &physical_device.features;
        }
    }
    for (auto& pnext : info.pNext_chain) {
        final_pnext_chain.push_back(pnext);
    }
    detail::setup_pNext_chain(device_create_info, final_pnext_chain);
#if !defined(NDEBUG)
    for (auto& node : final_pnext_chain) {
        assert(node->sType != VK_STRUCTURE_TYPE_APPLICATION_INFO);
    }
#endif
    device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    device_create_info.flags = info.flags;
    device_create_info.queueCreateInfoCount = static_cast<uint32_t>(queueCreateInfos.size());
    device_create_info.pQueueCreateInfos = queueCreateInfos.data();
    device_create_info.enabledExtensionCount = static_cast<uint32_t>(extensions.size());
    device_create_info.ppEnabledExtensionNames = extensions.data();
    Device device;
    VkResult res = detail::vulkan_functions().fp_vkCreateDevice(
        physical_device.physical_device, &device_create_info, info.allocation_callbacks, &device.device);
    if (res != VK_SUCCESS) {
        return { DeviceError::failed_create_device, res };
    }
    device.physical_device = physical_device;
    device.surface = physical_device.surface;
    device.queue_families = physical_device.queue_families;
    device.allocation_callbacks = info.allocation_callbacks;
    device.fp_vkGetDeviceProcAddr = detail::vulkan_functions().fp_vkGetDeviceProcAddr;
    // Load the device-level entry points the Device wrapper itself uses.
    detail::vulkan_functions().get_device_proc_addr(device.device, device.internal_table.fp_vkGetDeviceQueue, "vkGetDeviceQueue");
    detail::vulkan_functions().get_device_proc_addr(device.device, device.internal_table.fp_vkDestroyDevice, "vkDestroyDevice");
    device.instance_version = physical_device.instance_version;
    return device;
}
// Replaces the default one-queue-per-family setup with custom descriptions.
DeviceBuilder& DeviceBuilder::custom_queue_setup(std::vector<CustomQueueDescription> queue_descriptions) {
    info.queue_descriptions = queue_descriptions;
    return *this;
}
// Allocation callbacks passed to vkCreateDevice and stored on the Device for
// use by destroy_device.
DeviceBuilder& DeviceBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) {
    info.allocation_callbacks = callbacks;
    return *this;
}
// ---- Swapchain ---- //
namespace detail {
// Everything needed to decide swapchain parameters for a device/surface pair.
struct SurfaceSupportDetails {
    VkSurfaceCapabilitiesKHR capabilities;
    std::vector<VkSurfaceFormatKHR> formats;
    std::vector<VkPresentModeKHR> present_modes;
};
// Errors produced while querying or interpreting surface support.
enum class SurfaceSupportError {
    surface_handle_null,
    failed_get_surface_capabilities,
    failed_enumerate_surface_formats,
    failed_enumerate_present_modes,
    no_suitable_desired_format
};
// std::error_category that maps SurfaceSupportError values to readable
// strings (via the CASE_TO_STRING helper macro).
struct SurfaceSupportErrorCategory : std::error_category {
    const char* name() const noexcept override { return "vbk_surface_support"; }
    std::string message(int err) const override {
        switch (static_cast<SurfaceSupportError>(err)) {
            CASE_TO_STRING(SurfaceSupportError, surface_handle_null)
            CASE_TO_STRING(SurfaceSupportError, failed_get_surface_capabilities)
            CASE_TO_STRING(SurfaceSupportError, failed_enumerate_surface_formats)
            CASE_TO_STRING(SurfaceSupportError, failed_enumerate_present_modes)
            CASE_TO_STRING(SurfaceSupportError, no_suitable_desired_format)
            default:
                return "";
        }
    }
};
// Single category instance referenced by the error codes created below.
const SurfaceSupportErrorCategory surface_support_error_category;
// Wraps a SurfaceSupportError value into a std::error_code.
std::error_code make_error_code(SurfaceSupportError surface_support_error) {
    return { static_cast<int>(surface_support_error), detail::surface_support_error_category };
}
// Queries surface capabilities, formats, and present modes for the given
// device/surface pair. Fails early on a null surface or any failing query.
Result<SurfaceSupportDetails> query_surface_support_details(VkPhysicalDevice phys_device, VkSurfaceKHR surface) {
    if (surface == VK_NULL_HANDLE) return make_error_code(SurfaceSupportError::surface_handle_null);
    VkSurfaceCapabilitiesKHR capabilities;
    VkResult res = detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(phys_device, surface, &capabilities);
    if (res != VK_SUCCESS) {
        return { make_error_code(SurfaceSupportError::failed_get_surface_capabilities), res };
    }
    std::vector<VkSurfaceFormatKHR> formats;
    std::vector<VkPresentModeKHR> present_modes;
    auto formats_ret = detail::get_vector<VkSurfaceFormatKHR>(
        formats, detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfaceFormatsKHR, phys_device, surface);
    if (formats_ret != VK_SUCCESS)
        return { make_error_code(SurfaceSupportError::failed_enumerate_surface_formats), formats_ret };
    auto present_modes_ret = detail::get_vector<VkPresentModeKHR>(
        present_modes, detail::vulkan_functions().fp_vkGetPhysicalDeviceSurfacePresentModesKHR, phys_device, surface);
    if (present_modes_ret != VK_SUCCESS)
        return { make_error_code(SurfaceSupportError::failed_enumerate_present_modes), present_modes_ret };
    return SurfaceSupportDetails{ capabilities, formats, present_modes };
}
  1447. Result<VkSurfaceFormatKHR> find_desired_surface_format(
  1448. std::vector<VkSurfaceFormatKHR> const& available_formats, std::vector<VkSurfaceFormatKHR> const& desired_formats) {
  1449. for (auto const& desired_format : desired_formats) {
  1450. for (auto const& available_format : available_formats) {
  1451. // finds the first format that is desired and available
  1452. if (desired_format.format == available_format.format && desired_format.colorSpace == available_format.colorSpace) {
  1453. return desired_format;
  1454. }
  1455. }
  1456. }
  1457. // if no desired format is available, we report that no format is suitable to the user request
  1458. return { make_error_code(SurfaceSupportError::no_suitable_desired_format) };
  1459. }
  1460. VkSurfaceFormatKHR find_best_surface_format(
  1461. std::vector<VkSurfaceFormatKHR> const& available_formats, std::vector<VkSurfaceFormatKHR> const& desired_formats) {
  1462. auto surface_format_ret = detail::find_desired_surface_format(available_formats, desired_formats);
  1463. if (surface_format_ret.has_value()) return surface_format_ret.value();
  1464. // use the first available format as a fallback if any desired formats aren't found
  1465. return available_formats[0];
  1466. }
  1467. VkPresentModeKHR find_present_mode(std::vector<VkPresentModeKHR> const& available_resent_modes,
  1468. std::vector<VkPresentModeKHR> const& desired_present_modes) {
  1469. for (auto const& desired_pm : desired_present_modes) {
  1470. for (auto const& available_pm : available_resent_modes) {
  1471. // finds the first present mode that is desired and available
  1472. if (desired_pm == available_pm) return desired_pm;
  1473. }
  1474. }
  1475. // only present mode required, use as a fallback
  1476. return VK_PRESENT_MODE_FIFO_KHR;
  1477. }
  1478. template <typename T> T minimum(T a, T b) { return a < b ? a : b; }
  1479. template <typename T> T maximum(T a, T b) { return a > b ? a : b; }
  1480. VkExtent2D find_extent(VkSurfaceCapabilitiesKHR const& capabilities, uint32_t desired_width, uint32_t desired_height) {
  1481. if (capabilities.currentExtent.width != UINT32_MAX) {
  1482. return capabilities.currentExtent;
  1483. } else {
  1484. VkExtent2D actualExtent = { desired_width, desired_height };
  1485. actualExtent.width =
  1486. maximum(capabilities.minImageExtent.width, minimum(capabilities.maxImageExtent.width, actualExtent.width));
  1487. actualExtent.height =
  1488. maximum(capabilities.minImageExtent.height, minimum(capabilities.maxImageExtent.height, actualExtent.height));
  1489. return actualExtent;
  1490. }
  1491. }
  1492. } // namespace detail
  1493. void destroy_swapchain(Swapchain const& swapchain) {
  1494. if (swapchain.device != VK_NULL_HANDLE && swapchain.swapchain != VK_NULL_HANDLE) {
  1495. swapchain.internal_table.fp_vkDestroySwapchainKHR(swapchain.device, swapchain.swapchain, swapchain.allocation_callbacks);
  1496. }
  1497. }
  1498. SwapchainBuilder::SwapchainBuilder(Device const& device) {
  1499. info.physical_device = device.physical_device.physical_device;
  1500. info.device = device.device;
  1501. info.surface = device.surface;
  1502. info.instance_version = device.instance_version;
  1503. auto present = device.get_queue_index(QueueType::present);
  1504. auto graphics = device.get_queue_index(QueueType::graphics);
  1505. assert(graphics.has_value() && present.has_value() && "Graphics and Present queue indexes must be valid");
  1506. info.graphics_queue_index = present.value();
  1507. info.present_queue_index = graphics.value();
  1508. info.allocation_callbacks = device.allocation_callbacks;
  1509. }
// Swapchain builder with an explicit surface that overrides the one stored on
// the Device. The queue lookups run on a temporary Device copy carrying the
// override surface, so the present-support query targets the right surface.
SwapchainBuilder::SwapchainBuilder(Device const& device, VkSurfaceKHR const surface) {
    info.physical_device = device.physical_device.physical_device;
    info.device = device.device;
    info.surface = surface;
    info.instance_version = device.instance_version;
    Device temp_device = device;
    temp_device.surface = surface;
    auto present = temp_device.get_queue_index(QueueType::present);
    auto graphics = temp_device.get_queue_index(QueueType::graphics);
    assert(graphics.has_value() && present.has_value() && "Graphics and Present queue indexes must be valid");
    info.graphics_queue_index = graphics.value();
    info.present_queue_index = present.value();
    info.allocation_callbacks = device.allocation_callbacks;
}
// Raw-handle swapchain builder. If either queue index is passed as
// QUEUE_INDEX_MAX_VALUE, the corresponding family is looked up from the
// physical device's queue family properties.
SwapchainBuilder::SwapchainBuilder(VkPhysicalDevice const physical_device,
    VkDevice const device,
    VkSurfaceKHR const surface,
    uint32_t graphics_queue_index,
    uint32_t present_queue_index) {
    info.physical_device = physical_device;
    info.device = device;
    info.surface = surface;
    info.graphics_queue_index = graphics_queue_index;
    info.present_queue_index = present_queue_index;
    if (graphics_queue_index == detail::QUEUE_INDEX_MAX_VALUE || present_queue_index == detail::QUEUE_INDEX_MAX_VALUE) {
        auto queue_families = detail::get_vector_noerror<VkQueueFamilyProperties>(
            detail::vulkan_functions().fp_vkGetPhysicalDeviceQueueFamilyProperties, physical_device);
        if (graphics_queue_index == detail::QUEUE_INDEX_MAX_VALUE)
            info.graphics_queue_index = detail::get_first_queue_index(queue_families, VK_QUEUE_GRAPHICS_BIT);
        if (present_queue_index == detail::QUEUE_INDEX_MAX_VALUE)
            info.present_queue_index = detail::get_present_queue_index(physical_device, surface, queue_families);
    }
}
  1543. Result<Swapchain> SwapchainBuilder::build() const {
  1544. if (info.surface == VK_NULL_HANDLE) {
  1545. return Error{ SwapchainError::surface_handle_not_provided };
  1546. }
  1547. auto desired_formats = info.desired_formats;
  1548. if (desired_formats.size() == 0) add_desired_formats(desired_formats);
  1549. auto desired_present_modes = info.desired_present_modes;
  1550. if (desired_present_modes.size() == 0) add_desired_present_modes(desired_present_modes);
  1551. auto surface_support_ret = detail::query_surface_support_details(info.physical_device, info.surface);
  1552. if (!surface_support_ret.has_value())
  1553. return Error{ SwapchainError::failed_query_surface_support_details, surface_support_ret.vk_result() };
  1554. auto surface_support = surface_support_ret.value();
  1555. uint32_t image_count = info.min_image_count;
  1556. if (info.required_min_image_count >= 1) {
  1557. if (info.required_min_image_count < surface_support.capabilities.minImageCount)
  1558. return make_error_code(SwapchainError::required_min_image_count_too_low);
  1559. image_count = info.required_min_image_count;
  1560. } else if (info.min_image_count == 0) {
  1561. // We intentionally use minImageCount + 1 to maintain existing behavior, even if it typically results in triple buffering on most systems.
  1562. image_count = surface_support.capabilities.minImageCount + 1;
  1563. } else {
  1564. image_count = info.min_image_count;
  1565. if (image_count < surface_support.capabilities.minImageCount)
  1566. image_count = surface_support.capabilities.minImageCount;
  1567. }
  1568. if (surface_support.capabilities.maxImageCount > 0 && image_count > surface_support.capabilities.maxImageCount) {
  1569. image_count = surface_support.capabilities.maxImageCount;
  1570. }
  1571. VkSurfaceFormatKHR surface_format = detail::find_best_surface_format(surface_support.formats, desired_formats);
  1572. VkExtent2D extent = detail::find_extent(surface_support.capabilities, info.desired_width, info.desired_height);
  1573. uint32_t image_array_layers = info.array_layer_count;
  1574. if (surface_support.capabilities.maxImageArrayLayers < info.array_layer_count)
  1575. image_array_layers = surface_support.capabilities.maxImageArrayLayers;
  1576. if (info.array_layer_count == 0) image_array_layers = 1;
  1577. uint32_t queue_family_indices[] = { info.graphics_queue_index, info.present_queue_index };
  1578. VkPresentModeKHR present_mode = detail::find_present_mode(surface_support.present_modes, desired_present_modes);
// VkSurfaceCapabilitiesKHR::supportedUsageFlags is only valid for some present modes. For shared present modes, we should also check VkSharedPresentSurfaceCapabilitiesKHR::sharedPresentSupportedUsageFlags.
  1580. auto is_unextended_present_mode = [](VkPresentModeKHR present_mode) {
  1581. return (present_mode == VK_PRESENT_MODE_IMMEDIATE_KHR) || (present_mode == VK_PRESENT_MODE_MAILBOX_KHR) ||
  1582. (present_mode == VK_PRESENT_MODE_FIFO_KHR) || (present_mode == VK_PRESENT_MODE_FIFO_RELAXED_KHR);
  1583. };
  1584. if (is_unextended_present_mode(present_mode) &&
  1585. (info.image_usage_flags & surface_support.capabilities.supportedUsageFlags) != info.image_usage_flags) {
  1586. return Error{ SwapchainError::required_usage_not_supported };
  1587. }
  1588. VkSurfaceTransformFlagBitsKHR pre_transform = info.pre_transform;
  1589. if (info.pre_transform == static_cast<VkSurfaceTransformFlagBitsKHR>(0))
  1590. pre_transform = surface_support.capabilities.currentTransform;
  1591. VkSwapchainCreateInfoKHR swapchain_create_info = {};
  1592. swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
  1593. detail::setup_pNext_chain(swapchain_create_info, info.pNext_chain);
  1594. #if !defined(NDEBUG)
  1595. for (auto& node : info.pNext_chain) {
  1596. assert(node->sType != VK_STRUCTURE_TYPE_APPLICATION_INFO);
  1597. }
  1598. #endif
  1599. swapchain_create_info.flags = info.create_flags;
  1600. swapchain_create_info.surface = info.surface;
  1601. swapchain_create_info.minImageCount = image_count;
  1602. swapchain_create_info.imageFormat = surface_format.format;
  1603. swapchain_create_info.imageColorSpace = surface_format.colorSpace;
  1604. swapchain_create_info.imageExtent = extent;
  1605. swapchain_create_info.imageArrayLayers = image_array_layers;
  1606. swapchain_create_info.imageUsage = info.image_usage_flags;
  1607. if (info.graphics_queue_index != info.present_queue_index) {
  1608. swapchain_create_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
  1609. swapchain_create_info.queueFamilyIndexCount = 2;
  1610. swapchain_create_info.pQueueFamilyIndices = queue_family_indices;
  1611. } else {
  1612. swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
  1613. }
  1614. swapchain_create_info.preTransform = pre_transform;
  1615. swapchain_create_info.compositeAlpha = info.composite_alpha;
  1616. swapchain_create_info.presentMode = present_mode;
  1617. swapchain_create_info.clipped = info.clipped;
  1618. swapchain_create_info.oldSwapchain = info.old_swapchain;
  1619. Swapchain swapchain{};
  1620. PFN_vkCreateSwapchainKHR swapchain_create_proc;
  1621. detail::vulkan_functions().get_device_proc_addr(info.device, swapchain_create_proc, "vkCreateSwapchainKHR");
  1622. auto res = swapchain_create_proc(info.device, &swapchain_create_info, info.allocation_callbacks, &swapchain.swapchain);
  1623. if (res != VK_SUCCESS) {
  1624. return Error{ SwapchainError::failed_create_swapchain, res };
  1625. }
  1626. swapchain.device = info.device;
  1627. swapchain.image_format = surface_format.format;
  1628. swapchain.color_space = surface_format.colorSpace;
  1629. swapchain.image_usage_flags = info.image_usage_flags;
  1630. swapchain.extent = extent;
  1631. detail::vulkan_functions().get_device_proc_addr(
  1632. info.device, swapchain.internal_table.fp_vkGetSwapchainImagesKHR, "vkGetSwapchainImagesKHR");
  1633. detail::vulkan_functions().get_device_proc_addr(info.device, swapchain.internal_table.fp_vkCreateImageView, "vkCreateImageView");
  1634. detail::vulkan_functions().get_device_proc_addr(info.device, swapchain.internal_table.fp_vkDestroyImageView, "vkDestroyImageView");
  1635. detail::vulkan_functions().get_device_proc_addr(
  1636. info.device, swapchain.internal_table.fp_vkDestroySwapchainKHR, "vkDestroySwapchainKHR");
  1637. auto images = swapchain.get_images();
  1638. if (!images) {
  1639. return Error{ SwapchainError::failed_get_swapchain_images };
  1640. }
  1641. swapchain.requested_min_image_count = image_count;
  1642. swapchain.present_mode = present_mode;
  1643. swapchain.image_count = static_cast<uint32_t>(images.value().size());
  1644. swapchain.instance_version = info.instance_version;
  1645. swapchain.allocation_callbacks = info.allocation_callbacks;
  1646. return swapchain;
  1647. }
  1648. Result<std::vector<VkImage>> Swapchain::get_images() {
  1649. std::vector<VkImage> swapchain_images;
  1650. auto swapchain_images_ret =
  1651. detail::get_vector<VkImage>(swapchain_images, internal_table.fp_vkGetSwapchainImagesKHR, device, swapchain);
  1652. if (swapchain_images_ret != VK_SUCCESS) {
  1653. return Error{ SwapchainError::failed_get_swapchain_images, swapchain_images_ret };
  1654. }
  1655. return swapchain_images;
  1656. }
  1657. Result<std::vector<VkImageView>> Swapchain::get_image_views() { return get_image_views(nullptr); }
  1658. Result<std::vector<VkImageView>> Swapchain::get_image_views(const void* pNext) {
  1659. const auto swapchain_images_ret = get_images();
  1660. if (!swapchain_images_ret) return swapchain_images_ret.error();
  1661. const auto swapchain_images = swapchain_images_ret.value();
  1662. bool already_contains_image_view_usage = false;
  1663. while (pNext) {
  1664. if (reinterpret_cast<const VkBaseInStructure*>(pNext)->sType == VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO) {
  1665. already_contains_image_view_usage = true;
  1666. break;
  1667. }
  1668. pNext = reinterpret_cast<const VkBaseInStructure*>(pNext)->pNext;
  1669. }
  1670. VkImageViewUsageCreateInfo desired_flags{};
  1671. desired_flags.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO;
  1672. desired_flags.pNext = pNext;
  1673. desired_flags.usage = image_usage_flags;
  1674. std::vector<VkImageView> views(swapchain_images.size());
  1675. for (size_t i = 0; i < swapchain_images.size(); i++) {
  1676. VkImageViewCreateInfo createInfo = {};
  1677. createInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  1678. if (instance_version >= VKB_VK_API_VERSION_1_1 && !already_contains_image_view_usage) {
  1679. createInfo.pNext = &desired_flags;
  1680. } else {
  1681. createInfo.pNext = pNext;
  1682. }
  1683. createInfo.image = swapchain_images[i];
  1684. createInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
  1685. createInfo.format = image_format;
  1686. createInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
  1687. createInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
  1688. createInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
  1689. createInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
  1690. createInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  1691. createInfo.subresourceRange.baseMipLevel = 0;
  1692. createInfo.subresourceRange.levelCount = 1;
  1693. createInfo.subresourceRange.baseArrayLayer = 0;
  1694. createInfo.subresourceRange.layerCount = 1;
  1695. VkResult res = internal_table.fp_vkCreateImageView(device, &createInfo, allocation_callbacks, &views[i]);
  1696. if (res != VK_SUCCESS) return Error{ SwapchainError::failed_create_swapchain_image_views, res };
  1697. }
  1698. return views;
  1699. }
  1700. void Swapchain::destroy_image_views(std::vector<VkImageView> const& image_views) {
  1701. for (auto& image_view : image_views) {
  1702. internal_table.fp_vkDestroyImageView(device, image_view, allocation_callbacks);
  1703. }
  1704. }
  1705. Swapchain::operator VkSwapchainKHR() const { return this->swapchain; }
  1706. SwapchainBuilder& SwapchainBuilder::set_old_swapchain(VkSwapchainKHR old_swapchain) {
  1707. info.old_swapchain = old_swapchain;
  1708. return *this;
  1709. }
  1710. SwapchainBuilder& SwapchainBuilder::set_old_swapchain(Swapchain const& swapchain) {
  1711. info.old_swapchain = swapchain.swapchain;
  1712. return *this;
  1713. }
  1714. SwapchainBuilder& SwapchainBuilder::set_desired_extent(uint32_t width, uint32_t height) {
  1715. info.desired_width = width;
  1716. info.desired_height = height;
  1717. return *this;
  1718. }
  1719. SwapchainBuilder& SwapchainBuilder::set_desired_format(VkSurfaceFormatKHR format) {
  1720. info.desired_formats.insert(info.desired_formats.begin(), format);
  1721. return *this;
  1722. }
  1723. SwapchainBuilder& SwapchainBuilder::add_fallback_format(VkSurfaceFormatKHR format) {
  1724. info.desired_formats.push_back(format);
  1725. return *this;
  1726. }
  1727. SwapchainBuilder& SwapchainBuilder::use_default_format_selection() {
  1728. info.desired_formats.clear();
  1729. add_desired_formats(info.desired_formats);
  1730. return *this;
  1731. }
  1732. SwapchainBuilder& SwapchainBuilder::set_desired_present_mode(VkPresentModeKHR present_mode) {
  1733. info.desired_present_modes.insert(info.desired_present_modes.begin(), present_mode);
  1734. return *this;
  1735. }
  1736. SwapchainBuilder& SwapchainBuilder::add_fallback_present_mode(VkPresentModeKHR present_mode) {
  1737. info.desired_present_modes.push_back(present_mode);
  1738. return *this;
  1739. }
  1740. SwapchainBuilder& SwapchainBuilder::use_default_present_mode_selection() {
  1741. info.desired_present_modes.clear();
  1742. add_desired_present_modes(info.desired_present_modes);
  1743. return *this;
  1744. }
  1745. SwapchainBuilder& SwapchainBuilder::set_allocation_callbacks(VkAllocationCallbacks* callbacks) {
  1746. info.allocation_callbacks = callbacks;
  1747. return *this;
  1748. }
  1749. SwapchainBuilder& SwapchainBuilder::set_image_usage_flags(VkImageUsageFlags usage_flags) {
  1750. info.image_usage_flags = usage_flags;
  1751. return *this;
  1752. }
  1753. SwapchainBuilder& SwapchainBuilder::add_image_usage_flags(VkImageUsageFlags usage_flags) {
  1754. info.image_usage_flags = info.image_usage_flags | usage_flags;
  1755. return *this;
  1756. }
  1757. SwapchainBuilder& SwapchainBuilder::use_default_image_usage_flags() {
  1758. info.image_usage_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  1759. return *this;
  1760. }
  1761. SwapchainBuilder& SwapchainBuilder::set_image_array_layer_count(uint32_t array_layer_count) {
  1762. info.array_layer_count = array_layer_count;
  1763. return *this;
  1764. }
  1765. SwapchainBuilder& SwapchainBuilder::set_desired_min_image_count(uint32_t min_image_count) {
  1766. info.min_image_count = min_image_count;
  1767. return *this;
  1768. }
  1769. SwapchainBuilder& SwapchainBuilder::set_required_min_image_count(uint32_t required_min_image_count) {
  1770. info.required_min_image_count = required_min_image_count;
  1771. return *this;
  1772. }
  1773. SwapchainBuilder& SwapchainBuilder::set_clipped(bool clipped) {
  1774. info.clipped = clipped;
  1775. return *this;
  1776. }
  1777. SwapchainBuilder& SwapchainBuilder::set_create_flags(VkSwapchainCreateFlagBitsKHR create_flags) {
  1778. info.create_flags = create_flags;
  1779. return *this;
  1780. }
  1781. SwapchainBuilder& SwapchainBuilder::set_pre_transform_flags(VkSurfaceTransformFlagBitsKHR pre_transform_flags) {
  1782. info.pre_transform = pre_transform_flags;
  1783. return *this;
  1784. }
  1785. SwapchainBuilder& SwapchainBuilder::set_composite_alpha_flags(VkCompositeAlphaFlagBitsKHR composite_alpha_flags) {
  1786. info.composite_alpha = composite_alpha_flags;
  1787. return *this;
  1788. }
  1789. void SwapchainBuilder::add_desired_formats(std::vector<VkSurfaceFormatKHR>& formats) const {
  1790. formats.push_back({ VK_FORMAT_B8G8R8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR });
  1791. formats.push_back({ VK_FORMAT_R8G8B8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR });
  1792. }
  1793. void SwapchainBuilder::add_desired_present_modes(std::vector<VkPresentModeKHR>& modes) const {
  1794. modes.push_back(VK_PRESENT_MODE_MAILBOX_KHR);
  1795. modes.push_back(VK_PRESENT_MODE_FIFO_KHR);
  1796. }
  1797. } // namespace vkb