trampoline.c
/*
 *
 * Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (C) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Courtney Goeltzenleuchter <[email protected]>
 * Author: Jon Ashburn <[email protected]>
 * Author: Tony Barbour <[email protected]>
 * Author: Chia-I Wu <[email protected]>
 */

#ifndef _GNU_SOURCE
#define _GNU_SOURCE
#endif

#include <stdlib.h>
#include <string.h>

#include "vk_loader_platform.h"
#include "loader.h"
#include "debug_utils.h"
#include "wsi.h"
#include "vk_loader_extensions.h"
#include "gpa_helper.h"

// Trampoline entrypoints are in this file for core Vulkan commands.

// Get an instance-level or global-level entry point address.
// @param instance
// @param pName
// @return
// If instance == NULL, returns global-level functions only.
// If instance is valid, returns a trampoline entry point for all dispatchable Vulkan
// functions, both core and extensions.
LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *pName) {
    void *addr;

    addr = globalGetProcAddr(pName);
    if (instance == VK_NULL_HANDLE || addr != NULL) {
        return addr;
    }

    struct loader_instance *ptr_instance = loader_get_instance(instance);
    if (ptr_instance == NULL) return NULL;

    // Return trampoline code for non-global entrypoints including any extensions.
    // Device extensions are returned if a layer or ICD supports the extension.
    // Instance extensions are returned if the extension is enabled and the
    // loader or someone else supports the extension.
    return trampolineGetProcAddr(ptr_instance, pName);
}

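// Illustrative sketch (not part of the loader): how an application typically resolves
// entry points through vkGetInstanceProcAddr. With a NULL instance only global-level
// commands resolve; with a valid instance a trampoline for core and enabled-extension
// commands is returned. The helper name below is hypothetical and the block is
// compiled out.
#if 0
static void example_resolve_instance_entrypoints(VkInstance instance) {
    // Global-level query: allowed before any instance exists.
    PFN_vkCreateInstance create_instance =
        (PFN_vkCreateInstance)vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkCreateInstance");

    // Instance-level query: returns a loader trampoline that dispatches through the
    // instance chain (layers, then ICDs).
    PFN_vkEnumeratePhysicalDevices enum_phys_devs =
        (PFN_vkEnumeratePhysicalDevices)vkGetInstanceProcAddr(instance, "vkEnumeratePhysicalDevices");

    (void)create_instance;
    (void)enum_phys_devs;
}
#endif
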
// Get a device-level or global-level entry point address.
// @param device
// @param pName
// @return
// If device is valid, returns a device-relative entry point for device-level
// entry points, both core and extensions.
// Device-relative means call down the device chain.
LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char *pName) {
    void *addr;

    // For entrypoints that the loader must handle (i.e. non-dispatchable or create object),
    // make sure the loader entrypoint is returned.
    addr = loader_non_passthrough_gdpa(pName);
    if (addr) {
        return addr;
    }

    // Although vkCreateDevice is on the device chain, its dispatchable object isn't
    // a VkDevice or child of VkDevice, so return NULL.
    if (!strcmp(pName, "vkCreateDevice")) return NULL;

    // Return the dispatch table entrypoint for the fastest case.
    const VkLayerDispatchTable *disp_table = *(VkLayerDispatchTable **)device;
    if (disp_table == NULL) return NULL;

    addr = loader_lookup_device_dispatch_table(disp_table, pName);
    if (addr) return addr;

    if (disp_table->GetDeviceProcAddr == NULL) return NULL;
    return disp_table->GetDeviceProcAddr(device, pName);
}

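// Illustrative sketch (not part of the loader): an application can query device-level
// entry points through vkGetDeviceProcAddr so calls go straight into the device chain
// and skip the instance-level trampoline. The helper name is hypothetical and the
// block is compiled out.
#if 0
static void example_resolve_device_entrypoint(VkDevice device) {
    PFN_vkQueueSubmit queue_submit =
        (PFN_vkQueueSubmit)vkGetDeviceProcAddr(device, "vkQueueSubmit");
    if (queue_submit != NULL) {
        // queue_submit now calls the top of this device's dispatch chain directly.
    }
    (void)queue_submit;
}
#endif
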
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName,
                                                                                    uint32_t *pPropertyCount,
                                                                                    VkExtensionProperties *pProperties) {
    tls_instance = NULL;
    LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);

    // We know we need to call at least the terminator
    VkResult res = VK_SUCCESS;
    VkEnumerateInstanceExtensionPropertiesChain chain_tail = {
        .header =
            {
                .type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES,
                .version = VK_CURRENT_CHAIN_VERSION,
                .size = sizeof(chain_tail),
            },
        .pfnNextLayer = &terminator_EnumerateInstanceExtensionProperties,
        .pNextLink = NULL,
    };
    VkEnumerateInstanceExtensionPropertiesChain *chain_head = &chain_tail;

    // Get the implicit layers
    struct loader_layer_list layers;
    memset(&layers, 0, sizeof(layers));
    loaderScanForImplicitLayers(NULL, &layers);

    // We'll need to save the dl handles so we can close them later
    loader_platform_dl_handle *libs = malloc(sizeof(loader_platform_dl_handle) * layers.count);
    if (libs == NULL) {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
    size_t lib_count = 0;

    // Prepend layers onto the chain if they implement this entry point
    for (uint32_t i = 0; i < layers.count; ++i) {
        if (!loaderImplicitLayerIsEnabled(NULL, layers.list + i) ||
            layers.list[i].pre_instance_functions.enumerate_instance_extension_properties[0] == '\0') {
            continue;
        }

        loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
        if (layer_lib == NULL) {
            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "%s: Unable to load implicit layer library \"%s\"", __FUNCTION__,
                       layers.list[i].lib_name);
            continue;
        }
        libs[lib_count++] = layer_lib;

        void *pfn = loader_platform_get_proc_address(layer_lib,
                                                     layers.list[i].pre_instance_functions.enumerate_instance_extension_properties);
        if (pfn == NULL) {
            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                       "%s: Unable to resolve symbol \"%s\" in implicit layer library \"%s\"", __FUNCTION__,
                       layers.list[i].pre_instance_functions.enumerate_instance_extension_properties, layers.list[i].lib_name);
            continue;
        }

        VkEnumerateInstanceExtensionPropertiesChain *chain_link = malloc(sizeof(VkEnumerateInstanceExtensionPropertiesChain));
        if (chain_link == NULL) {
            res = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }

        chain_link->header.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES;
        chain_link->header.version = VK_CURRENT_CHAIN_VERSION;
        chain_link->header.size = sizeof(*chain_link);
        chain_link->pfnNextLayer = pfn;
        chain_link->pNextLink = chain_head;

        chain_head = chain_link;
    }

    // Call down the chain
    if (res == VK_SUCCESS) {
        res = chain_head->pfnNextLayer(chain_head->pNextLink, pLayerName, pPropertyCount, pProperties);
    }

    // Free up the layers
    loaderDeleteLayerListAndProperties(NULL, &layers);

    // Tear down the chain
    while (chain_head != &chain_tail) {
        VkEnumerateInstanceExtensionPropertiesChain *holder = chain_head;
        chain_head = (VkEnumerateInstanceExtensionPropertiesChain *)chain_head->pNextLink;
        free(holder);
    }

    // Close the dl handles
    for (size_t i = 0; i < lib_count; ++i) {
        loader_platform_close_library(libs[i]);
    }
    free(libs);

    return res;
}

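// Illustrative sketch (not part of the loader): the usual two-call pattern against
// vkEnumerateInstanceExtensionProperties, which exercises the pre-instance chain built
// above (implicit layers first, then the loader terminator). The helper name is
// hypothetical and the block is compiled out.
#if 0
static VkResult example_list_instance_extensions(void) {
    uint32_t count = 0;
    VkResult result = vkEnumerateInstanceExtensionProperties(NULL, &count, NULL);
    if (result != VK_SUCCESS || count == 0) {
        return result;
    }
    VkExtensionProperties *props = malloc(count * sizeof(VkExtensionProperties));
    if (props == NULL) {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
    result = vkEnumerateInstanceExtensionProperties(NULL, &count, props);
    // props[0..count-1] now holds the merged extension list reported by the chain.
    free(props);
    return result;
}
#endif
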
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
                                                                                VkLayerProperties *pProperties) {
    tls_instance = NULL;
    LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);

    // We know we need to call at least the terminator
    VkResult res = VK_SUCCESS;
    VkEnumerateInstanceLayerPropertiesChain chain_tail = {
        .header =
            {
                .type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES,
                .version = VK_CURRENT_CHAIN_VERSION,
                .size = sizeof(chain_tail),
            },
        .pfnNextLayer = &terminator_EnumerateInstanceLayerProperties,
        .pNextLink = NULL,
    };
    VkEnumerateInstanceLayerPropertiesChain *chain_head = &chain_tail;

    // Get the implicit layers
    struct loader_layer_list layers;
    memset(&layers, 0, sizeof(layers));
    loaderScanForImplicitLayers(NULL, &layers);

    // We'll need to save the dl handles so we can close them later
    loader_platform_dl_handle *libs = malloc(sizeof(loader_platform_dl_handle) * layers.count);
    if (libs == NULL) {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
    size_t lib_count = 0;

    // Prepend layers onto the chain if they implement this entry point
    for (uint32_t i = 0; i < layers.count; ++i) {
        if (!loaderImplicitLayerIsEnabled(NULL, layers.list + i) ||
            layers.list[i].pre_instance_functions.enumerate_instance_layer_properties[0] == '\0') {
            continue;
        }

        loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
        if (layer_lib == NULL) {
            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "%s: Unable to load implicit layer library \"%s\"", __FUNCTION__,
                       layers.list[i].lib_name);
            continue;
        }
        libs[lib_count++] = layer_lib;

        void *pfn =
            loader_platform_get_proc_address(layer_lib, layers.list[i].pre_instance_functions.enumerate_instance_layer_properties);
        if (pfn == NULL) {
            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                       "%s: Unable to resolve symbol \"%s\" in implicit layer library \"%s\"", __FUNCTION__,
                       layers.list[i].pre_instance_functions.enumerate_instance_layer_properties, layers.list[i].lib_name);
            continue;
        }

        VkEnumerateInstanceLayerPropertiesChain *chain_link = malloc(sizeof(VkEnumerateInstanceLayerPropertiesChain));
        if (chain_link == NULL) {
            res = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }

        chain_link->header.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES;
        chain_link->header.version = VK_CURRENT_CHAIN_VERSION;
        chain_link->header.size = sizeof(*chain_link);
        chain_link->pfnNextLayer = pfn;
        chain_link->pNextLink = chain_head;

        chain_head = chain_link;
    }

    // Call down the chain
    if (res == VK_SUCCESS) {
        res = chain_head->pfnNextLayer(chain_head->pNextLink, pPropertyCount, pProperties);
    }

    // Free up the layers
    loaderDeleteLayerListAndProperties(NULL, &layers);

    // Tear down the chain
    while (chain_head != &chain_tail) {
        VkEnumerateInstanceLayerPropertiesChain *holder = chain_head;
        chain_head = (VkEnumerateInstanceLayerPropertiesChain *)chain_head->pNextLink;
        free(holder);
    }

    // Close the dl handles
    for (size_t i = 0; i < lib_count; ++i) {
        loader_platform_close_library(libs[i]);
    }
    free(libs);

    return res;
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t *pApiVersion) {
    tls_instance = NULL;
    LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);

    // We know we need to call at least the terminator
    VkResult res = VK_SUCCESS;
    VkEnumerateInstanceVersionChain chain_tail = {
        .header =
            {
                .type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION,
                .version = VK_CURRENT_CHAIN_VERSION,
                .size = sizeof(chain_tail),
            },
        .pfnNextLayer = &terminator_EnumerateInstanceVersion,
        .pNextLink = NULL,
    };
    VkEnumerateInstanceVersionChain *chain_head = &chain_tail;

    // Get the implicit layers
    struct loader_layer_list layers;
    memset(&layers, 0, sizeof(layers));
    loaderScanForImplicitLayers(NULL, &layers);

    // We'll need to save the dl handles so we can close them later
    loader_platform_dl_handle *libs = malloc(sizeof(loader_platform_dl_handle) * layers.count);
    if (libs == NULL) {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
    size_t lib_count = 0;

    // Prepend layers onto the chain if they implement this entry point
    for (uint32_t i = 0; i < layers.count; ++i) {
        if (!loaderImplicitLayerIsEnabled(NULL, layers.list + i) ||
            layers.list[i].pre_instance_functions.enumerate_instance_version[0] == '\0') {
            continue;
        }

        loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
        if (layer_lib == NULL) {
            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "%s: Unable to load implicit layer library \"%s\"", __FUNCTION__,
                       layers.list[i].lib_name);
            continue;
        }
        libs[lib_count++] = layer_lib;

        void *pfn = loader_platform_get_proc_address(layer_lib,
                                                     layers.list[i].pre_instance_functions.enumerate_instance_version);
        if (pfn == NULL) {
            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                       "%s: Unable to resolve symbol \"%s\" in implicit layer library \"%s\"", __FUNCTION__,
                       layers.list[i].pre_instance_functions.enumerate_instance_version, layers.list[i].lib_name);
            continue;
        }

        VkEnumerateInstanceVersionChain *chain_link = malloc(sizeof(VkEnumerateInstanceVersionChain));
        if (chain_link == NULL) {
            res = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }

        chain_link->header.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION;
        chain_link->header.version = VK_CURRENT_CHAIN_VERSION;
        chain_link->header.size = sizeof(*chain_link);
        chain_link->pfnNextLayer = pfn;
        chain_link->pNextLink = chain_head;

        chain_head = chain_link;
    }

    // Call down the chain
    if (res == VK_SUCCESS) {
        res = chain_head->pfnNextLayer(chain_head->pNextLink, pApiVersion);
    }

    // Free up the layers
    loaderDeleteLayerListAndProperties(NULL, &layers);

    // Tear down the chain
    while (chain_head != &chain_tail) {
        VkEnumerateInstanceVersionChain *holder = chain_head;
        chain_head = (VkEnumerateInstanceVersionChain *)chain_head->pNextLink;
        free(holder);
    }

    // Close the dl handles
    for (size_t i = 0; i < lib_count; ++i) {
        loader_platform_close_library(libs[i]);
    }
    free(libs);

    return res;
}

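// Illustrative sketch (not part of the loader): how an application targeting Vulkan 1.1+
// typically queries the instance-level API version before calling vkCreateInstance. The
// symbol is looked up dynamically because a 1.0 loader may not export it. The helper
// name is hypothetical and the block is compiled out.
#if 0
static void example_query_instance_version(void) {
    uint32_t api_version = VK_API_VERSION_1_0;
    PFN_vkEnumerateInstanceVersion enumerate_version =
        (PFN_vkEnumerateInstanceVersion)vkGetInstanceProcAddr(VK_NULL_HANDLE, "vkEnumerateInstanceVersion");
    if (enumerate_version != NULL) {
        enumerate_version(&api_version);
    }
    (void)api_version;
}
#endif
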
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator, VkInstance *pInstance) {
    struct loader_instance *ptr_instance = NULL;
    VkInstance created_instance = VK_NULL_HANDLE;
    bool loaderLocked = false;
    VkResult res = VK_ERROR_INITIALIZATION_FAILED;

    LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);

#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
    {
#else
    if (pAllocator) {
        ptr_instance = (struct loader_instance *)pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(struct loader_instance),
                                                                           sizeof(int *), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
    } else {
#endif
        ptr_instance = (struct loader_instance *)malloc(sizeof(struct loader_instance));
    }

    VkInstanceCreateInfo ici = *pCreateInfo;

    if (ptr_instance == NULL) {
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    }

    tls_instance = ptr_instance;
    loader_platform_thread_lock_mutex(&loader_lock);
    loaderLocked = true;
    memset(ptr_instance, 0, sizeof(struct loader_instance));
    if (pAllocator) {
        ptr_instance->alloc_callbacks = *pAllocator;
    }

    // Save the application version
    if (NULL == pCreateInfo || NULL == pCreateInfo->pApplicationInfo || 0 == pCreateInfo->pApplicationInfo->apiVersion) {
        ptr_instance->app_api_major_version = 1;
        ptr_instance->app_api_minor_version = 0;
    } else {
        ptr_instance->app_api_major_version = VK_VERSION_MAJOR(pCreateInfo->pApplicationInfo->apiVersion);
        ptr_instance->app_api_minor_version = VK_VERSION_MINOR(pCreateInfo->pApplicationInfo->apiVersion);
    }

    // Look for one or more VK_EXT_debug_report or VK_EXT_debug_utils create info structures
    // and set up a callback for each one found.
    ptr_instance->num_tmp_report_callbacks = 0;
    ptr_instance->tmp_report_create_infos = NULL;
    ptr_instance->tmp_report_callbacks = NULL;
    ptr_instance->num_tmp_messengers = 0;
    ptr_instance->tmp_messenger_create_infos = NULL;
    ptr_instance->tmp_messengers = NULL;

    // Handle cases of VK_EXT_debug_utils
    if (util_CopyDebugUtilsMessengerCreateInfos(pCreateInfo->pNext, pAllocator, &ptr_instance->num_tmp_messengers,
                                                &ptr_instance->tmp_messenger_create_infos, &ptr_instance->tmp_messengers)) {
        // One or more were found, but allocation failed. Therefore, clean up and fail this function:
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    } else if (ptr_instance->num_tmp_messengers > 0) {
        // Set up the temporary messenger(s) here to catch early issues:
        if (util_CreateDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers,
                                            ptr_instance->tmp_messenger_create_infos, ptr_instance->tmp_messengers)) {
            // Failed to set up one or more of the messengers. Therefore, clean up and fail this function:
            res = VK_ERROR_OUT_OF_HOST_MEMORY;
            goto out;
        }
    }

    // Handle cases of VK_EXT_debug_report
    if (util_CopyDebugReportCreateInfos(pCreateInfo->pNext, pAllocator, &ptr_instance->num_tmp_report_callbacks,
                                        &ptr_instance->tmp_report_create_infos, &ptr_instance->tmp_report_callbacks)) {
        // One or more were found, but allocation failed. Therefore, clean up and fail this function:
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    } else if (ptr_instance->num_tmp_report_callbacks > 0) {
        // Set up the temporary callback(s) here to catch early issues:
        if (util_CreateDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
                                            ptr_instance->tmp_report_create_infos, ptr_instance->tmp_report_callbacks)) {
            // Failed to set up one or more of the callbacks. Therefore, clean up and fail this function:
            res = VK_ERROR_OUT_OF_HOST_MEMORY;
            goto out;
        }
    }

    // Due to implicit layers we need to get the layer list even if
    // enabledLayerCount == 0 and VK_INSTANCE_LAYERS is unset. For now, always
    // get the layer list via loaderScanForLayers().
    memset(&ptr_instance->instance_layer_list, 0, sizeof(ptr_instance->instance_layer_list));
    loaderScanForLayers(ptr_instance, &ptr_instance->instance_layer_list);

    // Validate the app-requested layers to be enabled
    if (pCreateInfo->enabledLayerCount > 0) {
        res = loaderValidateLayers(ptr_instance, pCreateInfo->enabledLayerCount, pCreateInfo->ppEnabledLayerNames,
                                   &ptr_instance->instance_layer_list);
        if (res != VK_SUCCESS) {
            goto out;
        }
    }

    // Scan/discover all ICD libraries
    memset(&ptr_instance->icd_tramp_list, 0, sizeof(ptr_instance->icd_tramp_list));
    res = loader_icd_scan(ptr_instance, &ptr_instance->icd_tramp_list);
    if (res != VK_SUCCESS) {
        goto out;
    }

    // Get extensions from all ICDs, merge so there are no duplicates, then validate
    res = loader_get_icd_loader_instance_extensions(ptr_instance, &ptr_instance->icd_tramp_list, &ptr_instance->ext_list);
    if (res != VK_SUCCESS) {
        goto out;
    }
    res = loader_validate_instance_extensions(ptr_instance, &ptr_instance->ext_list, &ptr_instance->instance_layer_list, &ici);
    if (res != VK_SUCCESS) {
        goto out;
    }

    ptr_instance->disp = loader_instance_heap_alloc(ptr_instance, sizeof(struct loader_instance_dispatch_table),
                                                    VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
    if (ptr_instance->disp == NULL) {
        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                   "vkCreateInstance: Failed to allocate Loader's full Instance dispatch table.");
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    }
    memcpy(&ptr_instance->disp->layer_inst_disp, &instance_disp, sizeof(instance_disp));

    ptr_instance->next = loader.instances;
    loader.instances = ptr_instance;

    // Activate any layers on the instance chain
    res = loaderEnableInstanceLayers(ptr_instance, &ici, &ptr_instance->instance_layer_list);
    if (res != VK_SUCCESS) {
        goto out;
    }

    created_instance = (VkInstance)ptr_instance;
    res = loader_create_instance_chain(&ici, pAllocator, ptr_instance, &created_instance);

    if (res == VK_SUCCESS) {
        memset(ptr_instance->enabled_known_extensions.padding, 0, sizeof(uint64_t) * 4);
        wsi_create_instance(ptr_instance, &ici);
        debug_utils_CreateInstance(ptr_instance, &ici);
        extensions_create_instance(ptr_instance, &ici);

        *pInstance = created_instance;

        // Finally, the layers are in place and everyone has seen
        // the CreateInstance command go by. This allows the layers'
        // GetInstanceProcAddr functions to return valid extension functions
        // if enabled.
        loaderActivateInstanceLayerExtensions(ptr_instance, *pInstance);
    }

out:

    if (NULL != ptr_instance) {
        if (res != VK_SUCCESS) {
            if (loader.instances == ptr_instance) {
                loader.instances = ptr_instance->next;
            }
            if (NULL != ptr_instance->disp) {
                loader_instance_heap_free(ptr_instance, ptr_instance->disp);
            }
            if (ptr_instance->num_tmp_report_callbacks > 0) {
                // Remove temporary VK_EXT_debug_report items
                util_DestroyDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
                                                 ptr_instance->tmp_report_callbacks);
                util_FreeDebugReportCreateInfos(pAllocator, ptr_instance->tmp_report_create_infos,
                                                ptr_instance->tmp_report_callbacks);
            }
            if (ptr_instance->num_tmp_messengers > 0) {
                // Remove temporary VK_EXT_debug_utils items
                util_DestroyDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers,
                                                 ptr_instance->tmp_messengers);
                util_FreeDebugUtilsMessengerCreateInfos(pAllocator, ptr_instance->tmp_messenger_create_infos,
                                                        ptr_instance->tmp_messengers);
            }

            if (NULL != ptr_instance->expanded_activated_layer_list.list) {
                loaderDeactivateLayers(ptr_instance, NULL, &ptr_instance->expanded_activated_layer_list);
            }
            if (NULL != ptr_instance->app_activated_layer_list.list) {
                loaderDestroyLayerList(ptr_instance, NULL, &ptr_instance->app_activated_layer_list);
            }

            loaderDeleteLayerListAndProperties(ptr_instance, &ptr_instance->instance_layer_list);
            loader_scanned_icd_clear(ptr_instance, &ptr_instance->icd_tramp_list);
            loader_destroy_generic_list(ptr_instance, (struct loader_generic_list *)&ptr_instance->ext_list);

            loader_instance_heap_free(ptr_instance, ptr_instance);
        } else {
            // Remove temporary VK_EXT_debug_report or VK_EXT_debug_utils items
            util_DestroyDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers,
                                             ptr_instance->tmp_messengers);
            util_DestroyDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
                                             ptr_instance->tmp_report_callbacks);
        }

        if (loaderLocked) {
            loader_platform_thread_unlock_mutex(&loader_lock);
        }
    }

    return res;
}

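// Illustrative sketch (not part of the loader): a minimal application-side
// vkCreateInstance call that drives the path above. The values are hypothetical and
// the block is compiled out; zeroed layer/extension fields mean only implicit layers
// and no instance extensions are enabled.
#if 0
static VkResult example_create_instance(VkInstance *out_instance) {
    VkApplicationInfo app_info = {
        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pApplicationName = "example",
        .apiVersion = VK_API_VERSION_1_1,
    };
    VkInstanceCreateInfo create_info = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pApplicationInfo = &app_info,
    };
    return vkCreateInstance(&create_info, NULL, out_instance);
}
#endif
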
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    const VkLayerInstanceDispatchTable *disp;
    struct loader_instance *ptr_instance = NULL;
    bool callback_setup = false;
    bool messenger_setup = false;

    if (instance == VK_NULL_HANDLE) {
        return;
    }

    disp = loader_get_instance_layer_dispatch(instance);

    loader_platform_thread_lock_mutex(&loader_lock);

    ptr_instance = loader_get_instance(instance);

    if (pAllocator) {
        ptr_instance->alloc_callbacks = *pAllocator;
    }

    if (ptr_instance->num_tmp_messengers > 0) {
        // Set up the temporary VK_EXT_debug_utils messenger(s) here to catch cleanup issues:
        if (!util_CreateDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers,
                                             ptr_instance->tmp_messenger_create_infos, ptr_instance->tmp_messengers)) {
            messenger_setup = true;
        }
    }

    if (ptr_instance->num_tmp_report_callbacks > 0) {
        // Set up the temporary VK_EXT_debug_report callback(s) here to catch cleanup issues:
        if (!util_CreateDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
                                             ptr_instance->tmp_report_create_infos, ptr_instance->tmp_report_callbacks)) {
            callback_setup = true;
        }
    }

    disp->DestroyInstance(instance, pAllocator);

    if (NULL != ptr_instance->expanded_activated_layer_list.list) {
        loaderDeactivateLayers(ptr_instance, NULL, &ptr_instance->expanded_activated_layer_list);
    }
    if (NULL != ptr_instance->app_activated_layer_list.list) {
        loaderDestroyLayerList(ptr_instance, NULL, &ptr_instance->app_activated_layer_list);
    }

    if (ptr_instance->phys_devs_tramp) {
        for (uint32_t i = 0; i < ptr_instance->phys_dev_count_tramp; i++) {
            loader_instance_heap_free(ptr_instance, ptr_instance->phys_devs_tramp[i]);
        }
        loader_instance_heap_free(ptr_instance, ptr_instance->phys_devs_tramp);
    }

    if (ptr_instance->phys_dev_groups_tramp) {
        for (uint32_t i = 0; i < ptr_instance->phys_dev_group_count_tramp; i++) {
            loader_instance_heap_free(ptr_instance, ptr_instance->phys_dev_groups_tramp[i]);
        }
        loader_instance_heap_free(ptr_instance, ptr_instance->phys_dev_groups_tramp);
    }

    if (messenger_setup) {
        util_DestroyDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers, ptr_instance->tmp_messengers);
        util_FreeDebugUtilsMessengerCreateInfos(pAllocator, ptr_instance->tmp_messenger_create_infos, ptr_instance->tmp_messengers);
    }

    if (callback_setup) {
        util_DestroyDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
                                         ptr_instance->tmp_report_callbacks);
        util_FreeDebugReportCreateInfos(pAllocator, ptr_instance->tmp_report_create_infos, ptr_instance->tmp_report_callbacks);
    }

    loader_instance_heap_free(ptr_instance, ptr_instance->disp);
    loader_instance_heap_free(ptr_instance, ptr_instance);
    loader_platform_thread_unlock_mutex(&loader_lock);

    // Unload the preloaded ICDs, so that if vkEnumerateInstanceExtensionProperties or vkCreateInstance is called again,
    // the ICDs are up to date
    loader_unload_preloaded_icds();
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                                        VkPhysicalDevice *pPhysicalDevices) {
    VkResult res = VK_SUCCESS;
    uint32_t count;
    uint32_t i;
    struct loader_instance *inst;

    loader_platform_thread_lock_mutex(&loader_lock);

    inst = loader_get_instance(instance);
    if (NULL == inst) {
        res = VK_ERROR_INITIALIZATION_FAILED;
        goto out;
    }

    if (NULL == pPhysicalDeviceCount) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                   "vkEnumeratePhysicalDevices: Received NULL pointer for physical device count return value.");
        res = VK_ERROR_INITIALIZATION_FAILED;
        goto out;
    }

    // Setup the trampoline loader physical devices. This will actually
    // call down and setup the terminator loader physical devices during the
    // process.
    VkResult setup_res = setupLoaderTrampPhysDevs(instance);
    if (setup_res != VK_SUCCESS && setup_res != VK_INCOMPLETE) {
        res = setup_res;
        goto out;
    }

    count = inst->phys_dev_count_tramp;

    // Wrap the PhysDev object for loader usage, return wrapped objects
    if (NULL != pPhysicalDevices) {
        if (inst->phys_dev_count_tramp > *pPhysicalDeviceCount) {
            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
                       "vkEnumeratePhysicalDevices: Trimming device count down"
                       " by application request from %d to %d physical devices",
                       inst->phys_dev_count_tramp, *pPhysicalDeviceCount);
            count = *pPhysicalDeviceCount;
            res = VK_INCOMPLETE;
        }
        for (i = 0; i < count; i++) {
            pPhysicalDevices[i] = (VkPhysicalDevice)inst->phys_devs_tramp[i];
        }
    }

    *pPhysicalDeviceCount = count;

out:

    loader_platform_thread_unlock_mutex(&loader_lock);
    return res;
}

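// Illustrative sketch (not part of the loader): the two-call pattern for
// vkEnumeratePhysicalDevices; the returned handles are the wrapped trampoline physical
// devices set up above. The helper name is hypothetical and the block is compiled out.
#if 0
static VkResult example_enumerate_gpus(VkInstance instance) {
    uint32_t count = 0;
    VkResult result = vkEnumeratePhysicalDevices(instance, &count, NULL);
    if (result != VK_SUCCESS || count == 0) {
        return result;
    }
    VkPhysicalDevice *devices = malloc(count * sizeof(VkPhysicalDevice));
    if (devices == NULL) {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
    result = vkEnumeratePhysicalDevices(instance, &count, devices);
    free(devices);
    return result;
}
#endif
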
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                                                                     VkPhysicalDeviceFeatures *pFeatures) {
    const VkLayerInstanceDispatchTable *disp;
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    disp = loader_get_instance_layer_dispatch(physicalDevice);
    disp->GetPhysicalDeviceFeatures(unwrapped_phys_dev, pFeatures);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                                             VkFormatProperties *pFormatInfo) {
    const VkLayerInstanceDispatchTable *disp;
    VkPhysicalDevice unwrapped_pd = loader_unwrap_physical_device(physicalDevice);
    disp = loader_get_instance_layer_dispatch(physicalDevice);
    disp->GetPhysicalDeviceFormatProperties(unwrapped_pd, format, pFormatInfo);
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(
    VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage,
    VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties) {
    const VkLayerInstanceDispatchTable *disp;
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    disp = loader_get_instance_layer_dispatch(physicalDevice);
    return disp->GetPhysicalDeviceImageFormatProperties(unwrapped_phys_dev, format, type, tiling, usage, flags,
                                                        pImageFormatProperties);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
                                                                       VkPhysicalDeviceProperties *pProperties) {
    const VkLayerInstanceDispatchTable *disp;
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    disp = loader_get_instance_layer_dispatch(physicalDevice);
    disp->GetPhysicalDeviceProperties(unwrapped_phys_dev, pProperties);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                                  VkQueueFamilyProperties *pQueueProperties) {
    const VkLayerInstanceDispatchTable *disp;
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    disp = loader_get_instance_layer_dispatch(physicalDevice);
    disp->GetPhysicalDeviceQueueFamilyProperties(unwrapped_phys_dev, pQueueFamilyPropertyCount, pQueueProperties);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
                                                                             VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
    const VkLayerInstanceDispatchTable *disp;
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    disp = loader_get_instance_layer_dispatch(physicalDevice);
    disp->GetPhysicalDeviceMemoryProperties(unwrapped_phys_dev, pMemoryProperties);
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    loader_platform_thread_lock_mutex(&loader_lock);
    VkResult res = loader_layer_create_device(NULL, physicalDevice, pCreateInfo, pAllocator, pDevice, NULL, NULL);
    loader_platform_thread_unlock_mutex(&loader_lock);
    return res;
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;

    if (device == VK_NULL_HANDLE) {
        return;
    }

    disp = loader_get_dispatch(device);

    loader_platform_thread_lock_mutex(&loader_lock);
    loader_layer_destroy_device(device, pAllocator, disp->DestroyDevice);
    loader_platform_thread_unlock_mutex(&loader_lock);
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                                                                  const char *pLayerName, uint32_t *pPropertyCount,
                                                                                  VkExtensionProperties *pProperties) {
    VkResult res = VK_SUCCESS;
    struct loader_physical_device_tramp *phys_dev;
    const VkLayerInstanceDispatchTable *disp;
    phys_dev = (struct loader_physical_device_tramp *)physicalDevice;

    loader_platform_thread_lock_mutex(&loader_lock);

    // Always pass this call down the instance chain, which will terminate
    // in the ICD. This allows layers to filter the extensions coming back
    // up the chain. In the terminator we look up layer extensions from the
    // manifest file if they weren't provided by the layer itself.
    disp = loader_get_instance_layer_dispatch(physicalDevice);
    res = disp->EnumerateDeviceExtensionProperties(phys_dev->phys_dev, pLayerName, pPropertyCount, pProperties);

    loader_platform_thread_unlock_mutex(&loader_lock);
    return res;
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
                                                                              uint32_t *pPropertyCount,
                                                                              VkLayerProperties *pProperties) {
    uint32_t copy_size;
    struct loader_physical_device_tramp *phys_dev;
    struct loader_layer_list *enabled_layers, layers_list;
    memset(&layers_list, 0, sizeof(layers_list));

    loader_platform_thread_lock_mutex(&loader_lock);

    // Don't dispatch this call down the instance chain; we want all device layers
    // enumerated, and the instance chain may not contain all device layers.
    // TODO: re-evaluate the above statement; we may be able to start calling
    // down the chain.
    phys_dev = (struct loader_physical_device_tramp *)physicalDevice;
    const struct loader_instance *inst = phys_dev->this_instance;

    uint32_t count = inst->app_activated_layer_list.count;
    if (count == 0 || pProperties == NULL) {
        *pPropertyCount = count;
        loader_platform_thread_unlock_mutex(&loader_lock);
        return VK_SUCCESS;
    }
    enabled_layers = (struct loader_layer_list *)&inst->app_activated_layer_list;

    copy_size = (*pPropertyCount < count) ? *pPropertyCount : count;
    for (uint32_t i = 0; i < copy_size; i++) {
        memcpy(&pProperties[i], &(enabled_layers->list[i].info), sizeof(VkLayerProperties));
    }
    *pPropertyCount = copy_size;

    if (copy_size < count) {
        loader_platform_thread_unlock_mutex(&loader_lock);
        return VK_INCOMPLETE;
    }

    loader_platform_thread_unlock_mutex(&loader_lock);
    return VK_SUCCESS;
}

  722. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueNodeIndex, uint32_t queueIndex,
  723. VkQueue *pQueue) {
  724. const VkLayerDispatchTable *disp;
  725. disp = loader_get_dispatch(device);
  726. disp->GetDeviceQueue(device, queueNodeIndex, queueIndex, pQueue);
  727. if (pQueue != NULL) {
  728. loader_set_dispatch(*pQueue, disp);
  729. }
  730. }
  731. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
  732. VkFence fence) {
  733. const VkLayerDispatchTable *disp;
  734. disp = loader_get_dispatch(queue);
  735. return disp->QueueSubmit(queue, submitCount, pSubmits, fence);
  736. }
  737. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue) {
  738. const VkLayerDispatchTable *disp;
  739. disp = loader_get_dispatch(queue);
  740. return disp->QueueWaitIdle(queue);
  741. }
  742. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device) {
  743. const VkLayerDispatchTable *disp;
  744. disp = loader_get_dispatch(device);
  745. return disp->DeviceWaitIdle(device);
  746. }
  747. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
  748. const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
  749. const VkLayerDispatchTable *disp;
  750. disp = loader_get_dispatch(device);
  751. return disp->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
  752. }
  753. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkFreeMemory(VkDevice device, VkDeviceMemory mem,
  754. const VkAllocationCallbacks *pAllocator) {
  755. const VkLayerDispatchTable *disp;
  756. disp = loader_get_dispatch(device);
  757. disp->FreeMemory(device, mem, pAllocator);
  758. }
  759. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset,
  760. VkDeviceSize size, VkFlags flags, void **ppData) {
  761. const VkLayerDispatchTable *disp;
  762. disp = loader_get_dispatch(device);
  763. return disp->MapMemory(device, mem, offset, size, flags, ppData);
  764. }
  765. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(VkDevice device, VkDeviceMemory mem) {
  766. const VkLayerDispatchTable *disp;
  767. disp = loader_get_dispatch(device);
  768. disp->UnmapMemory(device, mem);
  769. }
  770. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
  771. const VkMappedMemoryRange *pMemoryRanges) {
  772. const VkLayerDispatchTable *disp;
  773. disp = loader_get_dispatch(device);
  774. return disp->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
  775. }
  776. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
  777. const VkMappedMemoryRange *pMemoryRanges) {
  778. const VkLayerDispatchTable *disp;
  779. disp = loader_get_dispatch(device);
  780. return disp->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
  781. }
  782. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
  783. VkDeviceSize *pCommittedMemoryInBytes) {
  784. const VkLayerDispatchTable *disp;
  785. disp = loader_get_dispatch(device);
  786. disp->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
  787. }
  788. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
  789. VkDeviceSize offset) {
  790. const VkLayerDispatchTable *disp;
  791. disp = loader_get_dispatch(device);
  792. return disp->BindBufferMemory(device, buffer, mem, offset);
  793. }
  794. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
  795. VkDeviceSize offset) {
  796. const VkLayerDispatchTable *disp;
  797. disp = loader_get_dispatch(device);
  798. return disp->BindImageMemory(device, image, mem, offset);
  799. }
  800. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
  801. VkMemoryRequirements *pMemoryRequirements) {
  802. const VkLayerDispatchTable *disp;
  803. disp = loader_get_dispatch(device);
  804. disp->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
  805. }
  806. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(VkDevice device, VkImage image,
  807. VkMemoryRequirements *pMemoryRequirements) {
  808. const VkLayerDispatchTable *disp;
  809. disp = loader_get_dispatch(device);
  810. disp->GetImageMemoryRequirements(device, image, pMemoryRequirements);
  811. }
  812. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
  813. vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
  814. VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
  815. const VkLayerDispatchTable *disp;
  816. disp = loader_get_dispatch(device);
  817. disp->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
  818. }
  819. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(
  820. VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage,
  821. VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties) {
  822. const VkLayerInstanceDispatchTable *disp;
  823. VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
  824. disp = loader_get_instance_layer_dispatch(physicalDevice);
  825. disp->GetPhysicalDeviceSparseImageFormatProperties(unwrapped_phys_dev, format, type, samples, usage, tiling, pPropertyCount,
  826. pProperties);
  827. }
  828. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
  829. const VkBindSparseInfo *pBindInfo, VkFence fence) {
  830. const VkLayerDispatchTable *disp;
  831. disp = loader_get_dispatch(queue);
  832. return disp->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
  833. }
  834. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
  835. const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
  836. const VkLayerDispatchTable *disp;
  837. disp = loader_get_dispatch(device);
  838. return disp->CreateFence(device, pCreateInfo, pAllocator, pFence);
  839. }
  840. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
  841. const VkLayerDispatchTable *disp;
  842. disp = loader_get_dispatch(device);
  843. disp->DestroyFence(device, fence, pAllocator);
  844. }
  845. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
  846. const VkLayerDispatchTable *disp;
  847. disp = loader_get_dispatch(device);
  848. return disp->ResetFences(device, fenceCount, pFences);
  849. }
  850. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence) {
  851. const VkLayerDispatchTable *disp;
  852. disp = loader_get_dispatch(device);
  853. return disp->GetFenceStatus(device, fence);
  854. }
  855. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
  856. VkBool32 waitAll, uint64_t timeout) {
  857. const VkLayerDispatchTable *disp;
  858. disp = loader_get_dispatch(device);
  859. return disp->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
  860. }
  861. LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
  862. const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) {
  863. const VkLayerDispatchTable *disp;
  864. disp = loader_get_dispatch(device);
  865. return disp->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
  866. }
  867. LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore,
  868. const VkAllocationCallbacks *pAllocator) {
  869. const VkLayerDispatchTable *disp;
  870. disp = loader_get_dispatch(device);
  871. disp->DestroySemaphore(device, semaphore, pAllocator);
  872. }
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyEvent(device, event, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(VkDevice device, VkEvent event) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->GetEventStatus(device, event);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->SetEvent(device, event);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(VkDevice device, VkEvent event) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->ResetEvent(device, event);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyQueryPool(device, queryPool, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
        uint32_t queryCount, size_t dataSize, void *pData,
        VkDeviceSize stride, VkQueryResultFlags flags) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyBuffer(device, buffer, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateBufferView(device, pCreateInfo, pAllocator, pView);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyBufferView(device, bufferView, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateImage(device, pCreateInfo, pAllocator, pImage);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyImage(device, image, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(VkDevice device, VkImage image,
        const VkImageSubresource *pSubresource,
        VkSubresourceLayout *pLayout) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateImageView(device, pCreateInfo, pAllocator, pView);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyImageView(device, imageView, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkShaderModule *pShader) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateShaderModule(device, pCreateInfo, pAllocator, pShader);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyShaderModule(device, shaderModule, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkPipelineCache *pPipelineCache) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyPipelineCache(device, pipelineCache, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache,
        size_t *pDataSize, void *pData) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache,
        uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache,
        uint32_t createInfoCount,
        const VkGraphicsPipelineCreateInfo *pCreateInfos,
        const VkAllocationCallbacks *pAllocator,
        VkPipeline *pPipelines) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache,
        uint32_t createInfoCount,
        const VkComputePipelineCreateInfo *pCreateInfos,
        const VkAllocationCallbacks *pAllocator,
        VkPipeline *pPipelines) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyPipeline(device, pipeline, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkPipelineLayout *pPipelineLayout) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroySampler(device, sampler, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device,
        const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkDescriptorSetLayout *pSetLayout) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkDescriptorPool *pDescriptorPool) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyDescriptorPool(device, descriptorPool, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
        VkDescriptorPoolResetFlags flags) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->ResetDescriptorPool(device, descriptorPool, flags);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device,
        const VkDescriptorSetAllocateInfo *pAllocateInfo,
        VkDescriptorSet *pDescriptorSets) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
        uint32_t descriptorSetCount,
        const VkDescriptorSet *pDescriptorSets) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
        const VkWriteDescriptorSet *pDescriptorWrites,
        uint32_t descriptorCopyCount,
        const VkCopyDescriptorSet *pDescriptorCopies) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkFramebuffer *pFramebuffer) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyFramebuffer(device, framebuffer, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkRenderPass *pRenderPass) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyRenderPass(device, renderPass, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass,
        VkExtent2D *pGranularity) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->GetRenderAreaGranularity(device, renderPass, pGranularity);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkCommandPool *pCommandPool) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
        const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->DestroyCommandPool(device, commandPool, pAllocator);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool,
        VkCommandPoolResetFlags flags) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    return disp->ResetCommandPool(device, commandPool, flags);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device,
        const VkCommandBufferAllocateInfo *pAllocateInfo,
        VkCommandBuffer *pCommandBuffers) {
    const VkLayerDispatchTable *disp;
    VkResult res;
    disp = loader_get_dispatch(device);
    res = disp->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
    if (res == VK_SUCCESS) {
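        // Command buffers are dispatchable objects: stamp each new handle with
        // the device's dispatch table so later vkCmd* trampolines can route it
        // through loader_get_dispatch().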
        for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
            if (pCommandBuffers[i]) {
                loader_init_dispatch(pCommandBuffers[i], disp);
            }
        }
    }
    return res;
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
        uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(device);
    disp->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer,
        const VkCommandBufferBeginInfo *pBeginInfo) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    return disp->BeginCommandBuffer(commandBuffer, pBeginInfo);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    return disp->EndCommandBuffer(commandBuffer);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    return disp->ResetCommandBuffer(commandBuffer, flags);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
        VkPipeline pipeline) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
        uint32_t viewportCount, const VkViewport *pViewports) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
        uint32_t scissorCount, const VkRect2D *pScissors) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetLineWidth(commandBuffer, lineWidth);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
        float depthBiasClamp, float depthBiasSlopeFactor) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetBlendConstants(commandBuffer, blendConstants);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
        float maxDepthBounds) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
        uint32_t compareMask) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
        uint32_t writeMask) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
        uint32_t reference) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetStencilReference(commandBuffer, faceMask, reference);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
        VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
        uint32_t firstSet, uint32_t descriptorSetCount,
        const VkDescriptorSet *pDescriptorSets,
        uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets,
            dynamicOffsetCount, pDynamicOffsets);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
        VkIndexType indexType) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
        uint32_t bindingCount, const VkBuffer *pBuffers,
        const VkDeviceSize *pOffsets) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
        uint32_t firstVertex, uint32_t firstInstance) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
        uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
        uint32_t firstInstance) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
        uint32_t drawCount, uint32_t stride) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
        VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdDispatch(commandBuffer, x, y, z);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
        VkDeviceSize offset) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdDispatchIndirect(commandBuffer, buffer, offset);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
        uint32_t regionCount, const VkBufferCopy *pRegions) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
        VkImageLayout srcImageLayout, VkImage dstImage,
        VkImageLayout dstImageLayout, uint32_t regionCount,
        const VkImageCopy *pRegions) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
        VkImageLayout srcImageLayout, VkImage dstImage,
        VkImageLayout dstImageLayout, uint32_t regionCount,
        const VkImageBlit *pRegions, VkFilter filter) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
        VkImageLayout dstImageLayout, uint32_t regionCount,
        const VkBufferImageCopy *pRegions) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
        VkImageLayout srcImageLayout, VkBuffer dstBuffer,
        uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
        VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
        VkDeviceSize size, uint32_t data) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
        VkImageLayout imageLayout, const VkClearColorValue *pColor,
        uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
        VkImageLayout imageLayout,
        const VkClearDepthStencilValue *pDepthStencil,
        uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
        const VkClearAttachment *pAttachments, uint32_t rectCount,
        const VkClearRect *pRects) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
        VkImageLayout srcImageLayout, VkImage dstImage,
        VkImageLayout dstImageLayout, uint32_t regionCount,
        const VkImageResolve *pRegions) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
        VkPipelineStageFlags stageMask) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetEvent(commandBuffer, event, stageMask);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
        VkPipelineStageFlags stageMask) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdResetEvent(commandBuffer, event, stageMask);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
        uint32_t bufferMemoryBarrierCount,
        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
        uint32_t imageMemoryBarrierCount,
        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdWaitEvents(commandBuffer, eventCount, pEvents, sourceStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
            bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
        VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
        uint32_t bufferMemoryBarrierCount,
        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
        uint32_t imageMemoryBarrierCount,
        const VkImageMemoryBarrier *pImageMemoryBarriers) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
            bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
        VkFlags flags) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdBeginQuery(commandBuffer, queryPool, slot, flags);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdEndQuery(commandBuffer, queryPool, slot);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
        uint32_t firstQuery, uint32_t queryCount) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
        VkQueryPool queryPool, uint32_t slot) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
        uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
        VkDeviceSize dstOffset, VkDeviceSize stride, VkFlags flags) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
        VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
        const void *pValues) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer,
        const VkRenderPassBeginInfo *pRenderPassBegin,
        VkSubpassContents contents) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdNextSubpass(commandBuffer, contents);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdEndRenderPass(commandBuffer);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
        const VkCommandBuffer *pCommandBuffers) {
    const VkLayerDispatchTable *disp;
    disp = loader_get_dispatch(commandBuffer);
    disp->CmdExecuteCommands(commandBuffer, commandBuffersCount, pCommandBuffers);
}

// ---- Vulkan core 1.1 trampolines
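// Enumerate the physical device groups reported by the lower layers/ICDs and
// replace every raw VkPhysicalDevice handle in them with the loader's
// trampoline physical-device object, then cache the resulting group list on
// the instance (inst->phys_dev_groups_tramp).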
VkResult setupLoaderTrampPhysDevGroups(VkInstance instance) {
    VkResult res = VK_SUCCESS;
    struct loader_instance *inst;
    uint32_t total_count = 0;
    VkPhysicalDeviceGroupPropertiesKHR **new_phys_dev_groups = NULL;
    VkPhysicalDeviceGroupPropertiesKHR *local_phys_dev_groups = NULL;
    PFN_vkEnumeratePhysicalDeviceGroups fpEnumeratePhysicalDeviceGroups = NULL;
    inst = loader_get_instance(instance);
    if (NULL == inst) {
        res = VK_ERROR_INITIALIZATION_FAILED;
        goto out;
    }
    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
    if (inst->enabled_known_extensions.khr_device_group_creation) {
        fpEnumeratePhysicalDeviceGroups = inst->disp->layer_inst_disp.EnumeratePhysicalDeviceGroupsKHR;
    } else {
        fpEnumeratePhysicalDeviceGroups = inst->disp->layer_inst_disp.EnumeratePhysicalDeviceGroups;
    }
    // Setup the trampoline loader physical devices. This will actually
    // call down and setup the terminator loader physical devices during the
    // process.
    VkResult setup_res = setupLoaderTrampPhysDevs(instance);
    if (setup_res != VK_SUCCESS && setup_res != VK_INCOMPLETE) {
        res = setup_res;
        goto out;
    }
    // Query how many physical device groups there are
    res = fpEnumeratePhysicalDeviceGroups(instance, &total_count, NULL);
    if (res != VK_SUCCESS) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                "setupLoaderTrampPhysDevGroups: Failed during dispatch call of "
                "\'EnumeratePhysicalDeviceGroupsKHR\' to lower layers or "
                "loader to get count.");
        goto out;
    }
    // Create an array for the new physical device groups, which will be stored
    // in the instance for the trampoline code.
    new_phys_dev_groups = (VkPhysicalDeviceGroupPropertiesKHR **)loader_instance_heap_alloc(
            inst, total_count * sizeof(VkPhysicalDeviceGroupPropertiesKHR *), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
    if (NULL == new_phys_dev_groups) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                "setupLoaderTrampPhysDevGroups: Failed to allocate new physical device"
                " group array of size %d",
                total_count);
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    }
    memset(new_phys_dev_groups, 0, total_count * sizeof(VkPhysicalDeviceGroupPropertiesKHR *));
    // Create a temporary array (on the stack) to keep track of the
    // returned VkPhysicalDeviceGroupProperties values.
    local_phys_dev_groups = loader_stack_alloc(sizeof(VkPhysicalDeviceGroupPropertiesKHR) * total_count);
    if (NULL == local_phys_dev_groups) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                "setupLoaderTrampPhysDevGroups: Failed to allocate local "
                "physical device group array of size %d",
                total_count);
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    }
    // Initialize the memory to something valid
    memset(local_phys_dev_groups, 0, sizeof(VkPhysicalDeviceGroupPropertiesKHR) * total_count);
    for (uint32_t group = 0; group < total_count; group++) {
        local_phys_dev_groups[group].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR;
        local_phys_dev_groups[group].pNext = NULL;
        local_phys_dev_groups[group].subsetAllocation = false;
    }
    // Call down and get the content
    res = fpEnumeratePhysicalDeviceGroups(instance, &total_count, local_phys_dev_groups);
    if (VK_SUCCESS != res) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                "setupLoaderTrampPhysDevGroups: Failed during dispatch call of "
                "\'EnumeratePhysicalDeviceGroupsKHR\' to lower layers or "
                "loader to get content.");
        goto out;
    }
    // Replace all the physical device IDs with the proper loader values
    for (uint32_t group = 0; group < total_count; group++) {
        for (uint32_t group_gpu = 0; group_gpu < local_phys_dev_groups[group].physicalDeviceCount; group_gpu++) {
            bool found = false;
            for (uint32_t tramp_gpu = 0; tramp_gpu < inst->phys_dev_count_tramp; tramp_gpu++) {
                if (local_phys_dev_groups[group].physicalDevices[group_gpu] == inst->phys_devs_tramp[tramp_gpu]->phys_dev) {
                    local_phys_dev_groups[group].physicalDevices[group_gpu] = (VkPhysicalDevice)inst->phys_devs_tramp[tramp_gpu];
                    found = true;
                    break;
                }
            }
            if (!found) {
                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                        "setupLoaderTrampPhysDevGroups: Failed to find GPU %d in group %d"
                        " returned by \'EnumeratePhysicalDeviceGroupsKHR\' in list returned"
                        " by \'EnumeratePhysicalDevices\'", group_gpu, group);
                res = VK_ERROR_INITIALIZATION_FAILED;
                goto out;
            }
        }
    }
    // Copy or create everything to fill the new array of physical device groups
    for (uint32_t new_idx = 0; new_idx < total_count; new_idx++) {
        // Check if this physical device group with the same contents is already in the old buffer
        for (uint32_t old_idx = 0; old_idx < inst->phys_dev_group_count_tramp; old_idx++) {
            if (local_phys_dev_groups[new_idx].physicalDeviceCount == inst->phys_dev_groups_tramp[old_idx]->physicalDeviceCount) {
                bool found_all_gpus = true;
                for (uint32_t old_gpu = 0; old_gpu < inst->phys_dev_groups_tramp[old_idx]->physicalDeviceCount; old_gpu++) {
                    bool found_gpu = false;
                    for (uint32_t new_gpu = 0; new_gpu < local_phys_dev_groups[new_idx].physicalDeviceCount; new_gpu++) {
                        if (local_phys_dev_groups[new_idx].physicalDevices[new_gpu] ==
                                inst->phys_dev_groups_tramp[old_idx]->physicalDevices[old_gpu]) {
                            found_gpu = true;
                            break;
                        }
                    }
                    if (!found_gpu) {
                        found_all_gpus = false;
                        break;
                    }
                }
                if (!found_all_gpus) {
                    continue;
                } else {
                    new_phys_dev_groups[new_idx] = inst->phys_dev_groups_tramp[old_idx];
                    break;
                }
            }
        }
        // If this physical device group isn't in the old buffer, create it
        if (NULL == new_phys_dev_groups[new_idx]) {
            new_phys_dev_groups[new_idx] = (VkPhysicalDeviceGroupPropertiesKHR *)loader_instance_heap_alloc(
                    inst, sizeof(VkPhysicalDeviceGroupPropertiesKHR), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
            if (NULL == new_phys_dev_groups[new_idx]) {
                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                        "setupLoaderTrampPhysDevGroups: Failed to allocate "
                        "physical device group trampoline object %d",
                        new_idx);
                total_count = new_idx;
                res = VK_ERROR_OUT_OF_HOST_MEMORY;
                goto out;
            }
            memcpy(new_phys_dev_groups[new_idx], &local_phys_dev_groups[new_idx],
                    sizeof(VkPhysicalDeviceGroupPropertiesKHR));
        }
    }
out:
    if (VK_SUCCESS != res) {
        if (NULL != new_phys_dev_groups) {
            for (uint32_t i = 0; i < total_count; i++) {
                loader_instance_heap_free(inst, new_phys_dev_groups[i]);
            }
            loader_instance_heap_free(inst, new_phys_dev_groups);
        }
        total_count = 0;
    } else {
        // Free everything that didn't carry over to the new array of
        // physical device groups
        if (NULL != inst->phys_dev_groups_tramp) {
            for (uint32_t i = 0; i < inst->phys_dev_group_count_tramp; i++) {
                bool found = false;
                for (uint32_t j = 0; j < total_count; j++) {
                    if (inst->phys_dev_groups_tramp[i] == new_phys_dev_groups[j]) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    loader_instance_heap_free(inst, inst->phys_dev_groups_tramp[i]);
                }
            }
            loader_instance_heap_free(inst, inst->phys_dev_groups_tramp);
        }
        // Swap in the new physical device group list
        inst->phys_dev_group_count_tramp = total_count;
        inst->phys_dev_groups_tramp = new_phys_dev_groups;
    }
    return res;
}
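
// Illustrative only: applications typically drive the exported entry point
// below with the usual Vulkan two-call idiom -- query the count first, then
// fill a caller-allocated array whose sType/pNext members have been set:
//
//     uint32_t count = 0;
//     vkEnumeratePhysicalDeviceGroups(instance, &count, NULL);
//     VkPhysicalDeviceGroupProperties *groups =
//         malloc(count * sizeof(VkPhysicalDeviceGroupProperties));
//     for (uint32_t i = 0; i < count; ++i) {
//         groups[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
//         groups[i].pNext = NULL;
//     }
//     vkEnumeratePhysicalDeviceGroups(instance, &count, groups);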
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(
        VkInstance instance, uint32_t *pPhysicalDeviceGroupCount,
        VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
    VkResult res = VK_SUCCESS;
    uint32_t count;
    uint32_t i;
    struct loader_instance *inst = NULL;
    loader_platform_thread_lock_mutex(&loader_lock);
    inst = loader_get_instance(instance);
    if (NULL == inst) {
        res = VK_ERROR_INITIALIZATION_FAILED;
        goto out;
    }
    if (NULL == pPhysicalDeviceGroupCount) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                "vkEnumeratePhysicalDeviceGroupsKHR: Received NULL pointer for physical "
                "device group count return value.");
        res = VK_ERROR_INITIALIZATION_FAILED;
        goto out;
    }
    VkResult setup_res = setupLoaderTrampPhysDevGroups(instance);
    if (VK_SUCCESS != setup_res) {
        res = setup_res;
        goto out;
    }
    count = inst->phys_dev_group_count_tramp;
    // Wrap the PhysDev object for loader usage, return wrapped objects
    if (NULL != pPhysicalDeviceGroupProperties) {
        if (inst->phys_dev_group_count_tramp > *pPhysicalDeviceGroupCount) {
            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
                    "vkEnumeratePhysicalDeviceGroupsKHR: Trimming device group count down"
                    " by application request from %d to %d physical device groups",
                    inst->phys_dev_group_count_tramp, *pPhysicalDeviceGroupCount);
            count = *pPhysicalDeviceGroupCount;
            res = VK_INCOMPLETE;
        }
        for (i = 0; i < count; i++) {
            memcpy(&pPhysicalDeviceGroupProperties[i], inst->phys_dev_groups_tramp[i],
                    sizeof(VkPhysicalDeviceGroupPropertiesKHR));
        }
    }
    *pPhysicalDeviceGroupCount = count;
out:
    loader_platform_thread_unlock_mutex(&loader_lock);
    return res;
}
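
// The vkGetPhysicalDevice*2 trampolines below share one pattern: unwrap the
// trampoline physical-device handle, then call the KHR-suffixed entry point
// when the corresponding instance extension (VK_KHR_get_physical_device_properties2
// or the matching *_capabilities extension) was enabled, and the core
// Vulkan 1.1 entry point otherwise.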
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
        VkPhysicalDeviceFeatures2 *pFeatures) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
        disp->GetPhysicalDeviceFeatures2KHR(unwrapped_phys_dev, pFeatures);
    } else {
        disp->GetPhysicalDeviceFeatures2(unwrapped_phys_dev, pFeatures);
    }
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
        VkPhysicalDeviceProperties2 *pProperties) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
        disp->GetPhysicalDeviceProperties2KHR(unwrapped_phys_dev, pProperties);
    } else {
        disp->GetPhysicalDeviceProperties2(unwrapped_phys_dev, pProperties);
    }
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format,
        VkFormatProperties2 *pFormatProperties) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
        disp->GetPhysicalDeviceFormatProperties2KHR(unwrapped_phys_dev, format, pFormatProperties);
    } else {
        disp->GetPhysicalDeviceFormatProperties2(unwrapped_phys_dev, format, pFormatProperties);
    }
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(
        VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
        VkImageFormatProperties2 *pImageFormatProperties) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
        return disp->GetPhysicalDeviceImageFormatProperties2KHR(unwrapped_phys_dev, pImageFormatInfo, pImageFormatProperties);
    } else {
        return disp->GetPhysicalDeviceImageFormatProperties2(unwrapped_phys_dev, pImageFormatInfo, pImageFormatProperties);
    }
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
        uint32_t *pQueueFamilyPropertyCount,
        VkQueueFamilyProperties2 *pQueueFamilyProperties) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
        disp->GetPhysicalDeviceQueueFamilyProperties2KHR(unwrapped_phys_dev, pQueueFamilyPropertyCount, pQueueFamilyProperties);
    } else {
        disp->GetPhysicalDeviceQueueFamilyProperties2(unwrapped_phys_dev, pQueueFamilyPropertyCount, pQueueFamilyProperties);
    }
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
        VkPhysicalDeviceMemoryProperties2 *pMemoryProperties) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
        disp->GetPhysicalDeviceMemoryProperties2KHR(unwrapped_phys_dev, pMemoryProperties);
    } else {
        disp->GetPhysicalDeviceMemoryProperties2(unwrapped_phys_dev, pMemoryProperties);
    }
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(
        VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount,
        VkSparseImageFormatProperties2 *pProperties) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
        disp->GetPhysicalDeviceSparseImageFormatProperties2KHR(unwrapped_phys_dev, pFormatInfo, pPropertyCount, pProperties);
    } else {
        disp->GetPhysicalDeviceSparseImageFormatProperties2(unwrapped_phys_dev, pFormatInfo, pPropertyCount, pProperties);
    }
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(
        VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
        VkExternalBufferProperties *pExternalBufferProperties) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_external_memory_capabilities) {
        disp->GetPhysicalDeviceExternalBufferPropertiesKHR(unwrapped_phys_dev, pExternalBufferInfo, pExternalBufferProperties);
    } else {
        disp->GetPhysicalDeviceExternalBufferProperties(unwrapped_phys_dev, pExternalBufferInfo, pExternalBufferProperties);
    }
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(
        VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR *pExternalSemaphoreInfo,
        VkExternalSemaphoreProperties *pExternalSemaphoreProperties) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_external_semaphore_capabilities) {
        disp->GetPhysicalDeviceExternalSemaphorePropertiesKHR(unwrapped_phys_dev, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
    } else {
        disp->GetPhysicalDeviceExternalSemaphoreProperties(unwrapped_phys_dev, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
    }
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(
        VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
        VkExternalFenceProperties *pExternalFenceProperties) {
    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
    const struct loader_instance *inst = ((struct loader_physical_device_tramp *)physicalDevice)->this_instance;
    if (inst != NULL && inst->enabled_known_extensions.khr_external_fence_capabilities) {
        disp->GetPhysicalDeviceExternalFencePropertiesKHR(unwrapped_phys_dev, pExternalFenceInfo, pExternalFenceProperties);
    } else {
        disp->GetPhysicalDeviceExternalFenceProperties(unwrapped_phys_dev, pExternalFenceInfo, pExternalFenceProperties);
    }
}
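
// The device-level core 1.1 trampolines that follow are plain pass-throughs to
// the device's dispatch table.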
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
        const VkBindBufferMemoryInfo *pBindInfos) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->BindBufferMemory2(device, bindInfoCount, pBindInfos);
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
        const VkBindImageMemoryInfo *pBindInfos) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->BindImageMemory2(device, bindInfoCount, pBindInfos);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex,
        uint32_t localDeviceIndex, uint32_t remoteDeviceIndex,
        VkPeerMemoryFeatureFlags *pPeerMemoryFeatures) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->GetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
    disp->CmdSetDeviceMask(commandBuffer, deviceMask);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY,
        uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY,
        uint32_t groupCountZ) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
    disp->CmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
        VkMemoryRequirements2 *pMemoryRequirements) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->GetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(VkDevice device,
        const VkBufferMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->GetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(VkDevice device,
        const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount,
        VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->GetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(VkDevice device, VkCommandPool commandPool,
        VkCommandPoolTrimFlags flags) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->TrimCommandPool(device, commandPool, flags);
}
LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->GetDeviceQueue2(device, pQueueInfo, pQueue);
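    // Queues are dispatchable objects, so write the device's dispatch table
    // into any handle the ICD returned before handing it back to the app.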
  1901. if (pQueue != NULL && *pQueue != NULL) {
  1902. loader_set_dispatch(*pQueue, disp);
  1903. }
  1904. }
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(
    VkDevice device,
    const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkSamplerYcbcrConversion* pYcbcrConversion) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->CreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(
    VkDevice device,
    VkSamplerYcbcrConversion ycbcrConversion,
    const VkAllocationCallbacks* pAllocator) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->DestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(
    VkDevice device,
    const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
    VkDescriptorSetLayoutSupport* pSupport) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->GetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
vkCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
                                 const VkAllocationCallbacks *pAllocator, VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->CreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(VkDevice device,
                                                                           VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                           const VkAllocationCallbacks *pAllocator) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                                                           VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                           const void *pData) {
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
}

// ---- Vulkan core 1.2 trampolines
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo,
                                                                 const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->CreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                                                               const VkRenderPassBeginInfo* pRenderPassBegin,
                                                               const VkSubpassBeginInfo* pSubpassBeginInfo)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
    disp->CmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(VkCommandBuffer commandBuffer,
                                                           const VkSubpassBeginInfo* pSubpassBeginInfo,
                                                           const VkSubpassEndInfo* pSubpassEndInfo)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
    disp->CmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
    disp->CmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                                uint32_t maxDrawCount, uint32_t stride)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
    disp->CmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                                                       VkDeviceSize offset, VkBuffer countBuffer,
                                                                       VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                                       uint32_t stride)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
    disp->CmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t* pValue)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->GetSemaphoreCounterValue(device, semaphore, pValue);
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo,
                                                              uint64_t timeout)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->WaitSemaphores(device, pWaitInfo, timeout);
}

LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->SignalSemaphore(device, pSignalInfo);
}

LOADER_EXPORT VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress(VkDevice device,
                                                                             const VkBufferDeviceAddressInfo* pInfo)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->GetBufferDeviceAddress(device, pInfo);
}

LOADER_EXPORT VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(VkDevice device,
                                                                             const VkBufferDeviceAddressInfo* pInfo)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->GetBufferOpaqueCaptureAddress(device, pInfo);
}

LOADER_EXPORT VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(VkDevice device,
                                                                                   const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    return disp->GetDeviceMemoryOpaqueCaptureAddress(device, pInfo);
}

LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                          uint32_t queryCount)
{
    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
    disp->ResetQueryPool(device, queryPool, firstQuery, queryCount);
}