VkGrManager.cpp 62 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904
  1. // Copyright (C) 2009-2023, Panagiotis Christopoulos Charitos and contributors.
  2. // All rights reserved.
  3. // Code licensed under the BSD License.
  4. // http://www.anki3d.org/LICENSE
  5. #include <AnKi/Gr/Vulkan/VkGrManager.h>
  6. #include <AnKi/Util/StringList.h>
  7. #include <AnKi/Core/App.h>
  8. #include <AnKi/Gr/Vulkan/VkBuffer.h>
  9. #include <AnKi/Gr/Vulkan/VkTexture.h>
  10. #include <AnKi/Gr/Vulkan/VkTextureView.h>
  11. #include <AnKi/Gr/Vulkan/VkSampler.h>
  12. #include <AnKi/Gr/Vulkan/VkShader.h>
  13. #include <AnKi/Gr/Vulkan/VkShaderProgram.h>
  14. #include <AnKi/Gr/Vulkan/VkCommandBuffer.h>
  15. #include <AnKi/Gr/Vulkan/VkFramebuffer.h>
  16. #include <AnKi/Gr/Vulkan/VkOcclusionQuery.h>
  17. #include <AnKi/Gr/Vulkan/VkTimestampQuery.h>
  18. #include <AnKi/Gr/Vulkan/VkPipelineQuery.h>
  19. #include <AnKi/Gr/RenderGraph.h>
  20. #include <AnKi/Gr/Vulkan/VkAccelerationStructure.h>
  21. #include <AnKi/Gr/Vulkan/VkGrUpscaler.h>
  22. #include <AnKi/Gr/Vulkan/VkFence.h>
  23. #include <AnKi/Window/NativeWindow.h>
  24. #if ANKI_WINDOWING_SYSTEM_SDL
  25. # include <AnKi/Window/NativeWindowSdl.h>
  26. # include <SDL_syswm.h>
  27. # include <SDL_vulkan.h>
  28. #elif ANKI_WINDOWING_SYSTEM_ANDROID
  29. # include <AnKi/Window/NativeWindowAndroid.h>
  30. #elif ANKI_WINDOWING_SYSTEM_HEADLESS
  31. // Nothing extra
  32. #else
  33. # error "Unsupported"
  34. #endif
  35. namespace anki {
  36. BoolCVar g_validationCVar(CVarSubsystem::kGr, "Validation", false, "Enable or not validation");
  37. static BoolCVar g_gpuValidationCVar(CVarSubsystem::kGr, "GpuValidation", false, "Enable or not GPU validation");
  38. static BoolCVar g_debugPrintfCVar(CVarSubsystem::kGr, "DebugPrintf", false, "Enable or not debug printf");
  39. BoolCVar g_debugMarkersCVar(CVarSubsystem::kGr, "DebugMarkers", false, "Enable or not debug markers");
  40. BoolCVar g_vsyncCVar(CVarSubsystem::kGr, "Vsync", false, "Enable or not vsync");
  41. static NumericCVar<U8> g_deviceCVar(CVarSubsystem::kGr, "Device", 0, 0, 16, "Choose an available device. Devices are sorted by performance");
  42. static BoolCVar g_rayTracingCVar(CVarSubsystem::kGr, "RayTracing", false, "Try enabling ray tracing");
  43. static BoolCVar g_64bitAtomicsCVar(CVarSubsystem::kGr, "64bitAtomics", true, "Enable or not 64bit atomics");
  44. static BoolCVar g_samplerFilterMinMaxCVar(CVarSubsystem::kGr, "SamplerFilterMinMax", true, "Enable or not min/max sample filtering");
  45. static BoolCVar g_vrsCVar(CVarSubsystem::kGr, "Vrs", false, "Enable or not VRS");
  46. BoolCVar g_meshShadersCVar(CVarSubsystem::kGr, "MeshShaders", false, "Enable or not mesh shaders");
  47. static BoolCVar g_asyncComputeCVar(CVarSubsystem::kGr, "AsyncCompute", true, "Enable or not async compute");
  48. static NumericCVar<U8> g_vkMinorCVar(CVarSubsystem::kGr, "VkMinor", 1, 1, 1, "Vulkan minor version");
  49. static NumericCVar<U8> g_vkMajorCVar(CVarSubsystem::kGr, "VkMajor", 1, 1, 1, "Vulkan major version");
  50. static StringCVar g_vkLayers(CVarSubsystem::kGr, "VkLayers", "", "VK layers to enable. Seperated by :");
  51. // DLSS related
  52. #define ANKI_VK_NVX_BINARY_IMPORT "VK_NVX_binary_import"
// Explicit specialization that allocates the GrManager singleton. The concrete type is the
// Vulkan backend (GrManagerImpl), held through the base-class pointer m_global.
template<>
template<>
GrManager& MakeSingletonPtr<GrManager>::allocateSingleton<>()
{
	ANKI_ASSERT(m_global == nullptr);
	m_global = new GrManagerImpl;
#if ANKI_ASSERTIONS_ENABLED
	// Leak tracking: counts live singletons so shutdown code can assert all were freed.
	++g_singletonsAllocated;
#endif
	return *m_global;
}
  64. template<>
  65. void MakeSingletonPtr<GrManager>::freeSingleton()
  66. {
  67. if(m_global)
  68. {
  69. delete static_cast<GrManagerImpl*>(m_global);
  70. m_global = nullptr;
  71. #if ANKI_ASSERTIONS_ENABLED
  72. --g_singletonsAllocated;
  73. #endif
  74. }
  75. }
GrManager::GrManager()
{
}

GrManager::~GrManager()
{
}

// The public GrManager methods below are thin forwarders to the Vulkan implementation.
// ANKI_VK_SELF binds `self` to *this viewed as the named implementation type.
Error GrManager::init(GrManagerInitInfo& inf)
{
	ANKI_VK_SELF(GrManagerImpl);
	return self.init(inf);
}

TexturePtr GrManager::acquireNextPresentableTexture()
{
	ANKI_VK_SELF(GrManagerImpl);
	return self.acquireNextPresentableTexture();
}

void GrManager::swapBuffers()
{
	ANKI_VK_SELF(GrManagerImpl);
	// The backend calls this "endFrame": it presents and advances the frame ring.
	self.endFrame();
}

void GrManager::finish()
{
	ANKI_VK_SELF(GrManagerImpl);
	self.finish();
}
// Generates GrManager::new<Type>(init): creates a <Type> through its factory and fatally
// logs (ANKI_VK_LOGF) if creation failed. Comments live outside the macro because the
// backslash-continued body cannot carry them.
#define ANKI_NEW_GR_OBJECT(type) \
	type##Ptr GrManager::new##type(const type##InitInfo& init) \
	{ \
		type##Ptr ptr(type::newInstance(init)); \
		if(!ptr.isCreated()) [[unlikely]] \
		{ \
			ANKI_VK_LOGF("Failed to create a " ANKI_STRINGIZE(type) " object"); \
		} \
		return ptr; \
	}

// Same as ANKI_NEW_GR_OBJECT but for object types whose factory takes no init-info struct.
#define ANKI_NEW_GR_OBJECT_NO_INIT_INFO(type) \
	type##Ptr GrManager::new##type() \
	{ \
		type##Ptr ptr(type::newInstance()); \
		if(!ptr.isCreated()) [[unlikely]] \
		{ \
			ANKI_VK_LOGF("Failed to create a " ANKI_STRINGIZE(type) " object"); \
		} \
		return ptr; \
	}

// Instantiate the factory methods for every GrObject type exposed by GrManager.
ANKI_NEW_GR_OBJECT(Buffer)
ANKI_NEW_GR_OBJECT(Texture)
ANKI_NEW_GR_OBJECT(TextureView)
ANKI_NEW_GR_OBJECT(Sampler)
ANKI_NEW_GR_OBJECT(Shader)
ANKI_NEW_GR_OBJECT(ShaderProgram)
ANKI_NEW_GR_OBJECT(CommandBuffer)
ANKI_NEW_GR_OBJECT(Framebuffer)
ANKI_NEW_GR_OBJECT_NO_INIT_INFO(OcclusionQuery)
ANKI_NEW_GR_OBJECT_NO_INIT_INFO(TimestampQuery)
ANKI_NEW_GR_OBJECT(PipelineQuery)
ANKI_NEW_GR_OBJECT_NO_INIT_INFO(RenderGraph)
ANKI_NEW_GR_OBJECT(AccelerationStructure)
ANKI_NEW_GR_OBJECT(GrUpscaler)

#undef ANKI_NEW_GR_OBJECT
#undef ANKI_NEW_GR_OBJECT_NO_INIT_INFO
  138. void GrManager::submit(WeakArray<CommandBuffer*> cmdbs, WeakArray<Fence*> waitFences, FencePtr* signalFence)
  139. {
  140. Bool renderedToDefaultFb = false;
  141. Array<MicroCommandBuffer*, 16> mcmdbs;
  142. for(U32 i = 0; i < cmdbs.getSize(); ++i)
  143. {
  144. CommandBufferImpl& cmdb = *static_cast<CommandBufferImpl*>(cmdbs[i]);
  145. ANKI_ASSERT(cmdb.isFinalized());
  146. mcmdbs[i] = cmdb.getMicroCommandBuffer().get();
  147. renderedToDefaultFb = renderedToDefaultFb || cmdb.renderedToDefaultFramebuffer();
  148. #if ANKI_ASSERTIONS_ENABLED
  149. cmdb.setSubmitted();
  150. #endif
  151. }
  152. Array<MicroSemaphore*, 8> waitSemaphores;
  153. for(U32 i = 0; i < waitFences.getSize(); ++i)
  154. {
  155. waitSemaphores[i] = static_cast<const FenceImpl&>(*waitFences[i]).m_semaphore.get();
  156. }
  157. MicroSemaphorePtr signalSemaphore;
  158. getGrManagerImpl().flushCommandBuffers({mcmdbs.getBegin(), cmdbs.getSize()}, renderedToDefaultFb,
  159. {waitSemaphores.getBegin(), waitFences.getSize()}, (signalFence) ? &signalSemaphore : nullptr, false);
  160. if(signalFence)
  161. {
  162. FenceImpl* fenceImpl = anki::newInstance<FenceImpl>(GrMemoryPool::getSingleton(), "SignalFence");
  163. fenceImpl->m_semaphore = signalSemaphore;
  164. signalFence->reset(fenceImpl);
  165. }
  166. }
// Tears down the Vulkan backend. The destruction order below is sequence-critical: GPU work
// must drain first, then objects that reference GrObjects, then the factories/pools, and
// finally the device/surface/instance themselves.
GrManagerImpl::~GrManagerImpl()
{
	ANKI_VK_LOGI("Destroying Vulkan backend");

	// 1st THING: wait for the present fences because I don't know if waiting on queue will cover this
	for(PerFrame& frame : m_perFrame)
	{
		if(frame.m_presentFence.isCreated())
		{
			frame.m_presentFence->wait();
		}
	}

	// 2nd THING: wait for the GPU to go idle on every queue, then null the handles so nothing
	// submits afterwards. The lock serializes against any in-flight submissions.
	for(VkQueue& queue : m_queues)
	{
		LockGuard<Mutex> lock(m_globalMtx);
		if(queue)
		{
			vkQueueWaitIdle(queue);
			queue = VK_NULL_HANDLE;
		}
	}

	// 3rd THING: destroy everything that holds references to GrObjects.
	m_cmdbFactory.destroy();
	for(PerFrame& frame : m_perFrame)
	{
		frame.m_presentFence.reset(nullptr);
		frame.m_acquireSemaphore.reset(nullptr);
		frame.m_renderSemaphore.reset(nullptr);
	}
	m_crntSwapchain.reset(nullptr);

	// 4th THING: Continue with the rest
	m_barrierFactory.destroy(); // Destroy before fences
	m_semaphoreFactory.destroy(); // Destroy before fences
	m_swapchainFactory.destroy(); // Destroy before fences
	m_frameGarbageCollector.destroy();
	m_gpuMemManager.destroy();
	PipelineLayoutFactory::freeSingleton();
	DSLayoutFactory::freeSingleton();
	DSBindless::freeSingleton();
	m_pplineCache.destroy();
	m_fenceFactory.destroy();
	m_samplerFactory.destroy();

	// Raw Vulkan handles last: device, then surface/messenger, then the instance that owns them.
	if(m_device)
	{
		vkDestroyDevice(m_device, nullptr);
	}
	if(m_surface)
	{
		vkDestroySurfaceKHR(m_instance, m_surface, nullptr);
	}
	if(m_debugUtilsMessager)
	{
		vkDestroyDebugUtilsMessengerEXT(m_instance, m_debugUtilsMessager, nullptr);
	}
	if(m_instance)
	{
		// Must mirror the allocation callbacks used at vkCreateInstance time.
#if ANKI_GR_MANAGER_DEBUG_MEMMORY
		VkAllocationCallbacks* pallocCbs = &m_debugAllocCbs;
#else
		VkAllocationCallbacks* pallocCbs = nullptr;
#endif
		vkDestroyInstance(m_instance, pallocCbs);
	}
#if ANKI_PLATFORM_MOBILE
	anki::deleteInstance(GrMemoryPool::getSingleton(), m_globalCreatePipelineMtx);
#endif
	m_cacheDir.destroy();
	// The pool goes last since everything above may free through it.
	GrMemoryPool::freeSingleton();
}
  236. Error GrManagerImpl::init(const GrManagerInitInfo& init)
  237. {
  238. const Error err = initInternal(init);
  239. if(err)
  240. {
  241. ANKI_VK_LOGE("Vulkan initialization failed");
  242. return Error::kFunctionFailed;
  243. }
  244. return Error::kNone;
  245. }
// Performs the full backend bring-up. Order matters: memory pool -> instance -> surface ->
// device -> queues -> swapchain -> pipeline cache -> memory -> command buffers -> queries.
Error GrManagerImpl::initInternal(const GrManagerInitInfo& init)
{
	ANKI_VK_LOGI("Initializing Vulkan backend");

	// The GR memory pool must exist before anything else allocates.
	GrMemoryPool::allocateSingleton(init.m_allocCallback, init.m_allocCallbackUserData);

	m_cacheDir = init.m_cacheDirectory;

	ANKI_CHECK(initInstance());
	ANKI_CHECK(initSurface());
	ANKI_CHECK(initDevice());

	// Fetch one queue per family that initDevice() selected; kMaxU32 marks "family absent".
	for(VulkanQueueType qtype : EnumIterable<VulkanQueueType>())
	{
		if(m_queueFamilyIndices[qtype] != kMaxU32)
		{
			vkGetDeviceQueue(m_device, m_queueFamilyIndices[qtype], 0, &m_queues[qtype]);
		}
		else
		{
			m_queues[qtype] = VK_NULL_HANDLE;
		}
	}

	m_swapchainFactory.init(g_vsyncCVar.get());
	m_crntSwapchain = m_swapchainFactory.newInstance();

	ANKI_CHECK(m_pplineCache.init(init.m_cacheDirectory));
	ANKI_CHECK(initMemory());

	m_cmdbFactory.init(m_queueFamilyIndices);

	for(PerFrame& f : m_perFrame)
	{
		resetFrame(f);
	}

	m_occlusionQueryFactory.init(VK_QUERY_TYPE_OCCLUSION);
	m_timestampQueryFactory.init(VK_QUERY_TYPE_TIMESTAMP);
	if(m_capabilities.m_pipelineQuery)
	{
		m_pipelineQueryFactories[PipelineQueryType::kPrimitivesPassedClipping].init(VK_QUERY_TYPE_PIPELINE_STATISTICS,
																				   VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT);
	}

	// See if unaligned (3-component, non-power-of-two bpp) formats are supported. Any one
	// unsupported format clears the capability flag.
	{
		m_capabilities.m_unalignedBbpTextureFormats = true;

		VkImageFormatProperties props = {};
		VkResult res = vkGetPhysicalDeviceImageFormatProperties(m_physicalDevice, VK_FORMAT_R8G8B8_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
																VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, 0, &props);
		if(res == VK_ERROR_FORMAT_NOT_SUPPORTED)
		{
			m_capabilities.m_unalignedBbpTextureFormats = false;
		}

		res = vkGetPhysicalDeviceImageFormatProperties(m_physicalDevice, VK_FORMAT_R16G16B16_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
													   VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, 0, &props);
		if(res == VK_ERROR_FORMAT_NOT_SUPPORTED)
		{
			m_capabilities.m_unalignedBbpTextureFormats = false;
		}

		res = vkGetPhysicalDeviceImageFormatProperties(m_physicalDevice, VK_FORMAT_R32G32B32_SFLOAT, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
													   VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, 0, &props);
		if(res == VK_ERROR_FORMAT_NOT_SUPPORTED)
		{
			m_capabilities.m_unalignedBbpTextureFormats = false;
		}

		if(!m_capabilities.m_unalignedBbpTextureFormats)
		{
			ANKI_VK_LOGV("R8G8B8, R16G16B16 and R32G32B32 image formats are not supported");
		}
	}

	// Bindless descriptors and layout factories.
	DSBindless::allocateSingleton();
	ANKI_CHECK(DSBindless::getSingleton().init(kMaxBindlessTextures, kMaxBindlessReadonlyTextureBuffers));
	DSLayoutFactory::allocateSingleton();
	PipelineLayoutFactory::allocateSingleton();

	m_frameGarbageCollector.init();

	return Error::kNone;
}
  315. Error GrManagerImpl::initInstance()
  316. {
  317. // Init VOLK
  318. //
  319. ANKI_VK_CHECK(volkInitialize());
  320. // Create the instance
  321. //
  322. const U8 vulkanMinor = g_vkMinorCVar.get();
  323. const U8 vulkanMajor = g_vkMajorCVar.get();
  324. VkApplicationInfo app = {};
  325. app.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
  326. app.pApplicationName = "unamed";
  327. app.applicationVersion = 1;
  328. app.pEngineName = "AnKi 3D Engine";
  329. app.engineVersion = (ANKI_VERSION_MAJOR << 16) | ANKI_VERSION_MINOR;
  330. app.apiVersion = VK_MAKE_VERSION(vulkanMajor, vulkanMinor, 0);
  331. VkInstanceCreateInfo ci = {};
  332. ci.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
  333. ci.pApplicationInfo = &app;
  334. // Instance layers
  335. GrDynamicArray<const char*> layersToEnable;
  336. GrList<GrString> layersToEnableStrings;
  337. {
  338. U32 layerCount;
  339. vkEnumerateInstanceLayerProperties(&layerCount, nullptr);
  340. if(layerCount)
  341. {
  342. GrDynamicArray<VkLayerProperties> layerProps;
  343. layerProps.resize(layerCount);
  344. vkEnumerateInstanceLayerProperties(&layerCount, &layerProps[0]);
  345. ANKI_VK_LOGV("Found the following instance layers:");
  346. for(const VkLayerProperties& layer : layerProps)
  347. {
  348. ANKI_VK_LOGV("\t%s", layer.layerName);
  349. CString layerName = layer.layerName;
  350. Bool enableLayer =
  351. (g_validationCVar.get() || g_debugMarkersCVar.get() || g_debugPrintfCVar.get()) && layerName == "VK_LAYER_KHRONOS_validation";
  352. enableLayer = enableLayer || (!g_vkLayers.get().isEmpty() && g_vkLayers.get().find(layerName) != CString::kNpos);
  353. if(enableLayer)
  354. {
  355. layersToEnableStrings.emplaceBack(layer.layerName);
  356. layersToEnable.emplaceBack(layersToEnableStrings.getBack().cstr());
  357. }
  358. }
  359. }
  360. if(layersToEnable.getSize())
  361. {
  362. ANKI_VK_LOGI("Will enable the following instance layers:");
  363. for(const char* name : layersToEnable)
  364. {
  365. ANKI_VK_LOGI("\t%s", name);
  366. }
  367. ci.enabledLayerCount = layersToEnable.getSize();
  368. ci.ppEnabledLayerNames = &layersToEnable[0];
  369. }
  370. }
  371. // Validation features
  372. GrDynamicArray<VkValidationFeatureEnableEXT> enabledValidationFeatures;
  373. GrDynamicArray<VkValidationFeatureDisableEXT> disabledValidationFeatures;
  374. if(g_debugPrintfCVar.get())
  375. {
  376. enabledValidationFeatures.emplaceBack(VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT);
  377. }
  378. if((g_debugPrintfCVar.get() || g_debugMarkersCVar.get()) && !g_validationCVar.get())
  379. {
  380. disabledValidationFeatures.emplaceBack(VK_VALIDATION_FEATURE_DISABLE_ALL_EXT);
  381. }
  382. if(g_validationCVar.get() && g_gpuValidationCVar.get())
  383. {
  384. enabledValidationFeatures.emplaceBack(VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT);
  385. }
  386. VkValidationFeaturesEXT validationFeatures = {};
  387. if(enabledValidationFeatures.getSize() || disabledValidationFeatures.getSize())
  388. {
  389. validationFeatures.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
  390. validationFeatures.disabledValidationFeatureCount = disabledValidationFeatures.getSize();
  391. validationFeatures.enabledValidationFeatureCount = enabledValidationFeatures.getSize();
  392. validationFeatures.pDisabledValidationFeatures = disabledValidationFeatures.getBegin();
  393. validationFeatures.pEnabledValidationFeatures = enabledValidationFeatures.getBegin();
  394. validationFeatures.pNext = ci.pNext;
  395. ci.pNext = &validationFeatures;
  396. }
  397. // Extensions
  398. GrDynamicArray<const char*> instExtensions;
  399. GrDynamicArray<VkExtensionProperties> instExtensionInf;
  400. U32 extCount = 0;
  401. vkEnumerateInstanceExtensionProperties(nullptr, &extCount, nullptr);
  402. if(extCount)
  403. {
  404. instExtensions.resize(extCount);
  405. instExtensionInf.resize(extCount);
  406. vkEnumerateInstanceExtensionProperties(nullptr, &extCount, &instExtensionInf[0]);
  407. ANKI_VK_LOGV("Found the following instance extensions:");
  408. for(U32 i = 0; i < extCount; ++i)
  409. {
  410. ANKI_VK_LOGV("\t%s", instExtensionInf[i].extensionName);
  411. }
  412. U32 instExtensionCount = 0;
  413. for(U32 i = 0; i < extCount; ++i)
  414. {
  415. const CString extensionName = instExtensionInf[i].extensionName;
  416. #if ANKI_WINDOWING_SYSTEM_HEADLESS
  417. if(extensionName == VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME)
  418. {
  419. m_extensions |= VulkanExtensions::kEXT_headless_surface;
  420. instExtensions[instExtensionCount++] = VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME;
  421. }
  422. #elif ANKI_OS_LINUX
  423. if(extensionName == VK_KHR_XCB_SURFACE_EXTENSION_NAME)
  424. {
  425. m_extensions |= VulkanExtensions::kKHR_xcb_surface;
  426. instExtensions[instExtensionCount++] = VK_KHR_XCB_SURFACE_EXTENSION_NAME;
  427. }
  428. else if(extensionName == VK_KHR_XLIB_SURFACE_EXTENSION_NAME)
  429. {
  430. m_extensions |= VulkanExtensions::kKHR_xlib_surface;
  431. instExtensions[instExtensionCount++] = VK_KHR_XLIB_SURFACE_EXTENSION_NAME;
  432. }
  433. #elif ANKI_OS_WINDOWS
  434. if(extensionName == VK_KHR_WIN32_SURFACE_EXTENSION_NAME)
  435. {
  436. m_extensions |= VulkanExtensions::kKHR_win32_surface;
  437. instExtensions[instExtensionCount++] = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
  438. }
  439. #elif ANKI_OS_ANDROID
  440. if(extensionName == VK_KHR_ANDROID_SURFACE_EXTENSION_NAME)
  441. {
  442. m_extensions |= VulkanExtensions::kKHR_android_surface;
  443. instExtensions[instExtensionCount++] = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
  444. }
  445. #else
  446. # error Not implemented
  447. #endif
  448. else if(extensionName == VK_KHR_SURFACE_EXTENSION_NAME)
  449. {
  450. m_extensions |= VulkanExtensions::kKHR_surface;
  451. instExtensions[instExtensionCount++] = VK_KHR_SURFACE_EXTENSION_NAME;
  452. }
  453. else if(extensionName == VK_EXT_DEBUG_UTILS_EXTENSION_NAME
  454. && (g_debugMarkersCVar.get() || g_validationCVar.get() || g_debugPrintfCVar.get()))
  455. {
  456. m_extensions |= VulkanExtensions::kEXT_debug_utils;
  457. instExtensions[instExtensionCount++] = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
  458. }
  459. }
  460. if(!(m_extensions
  461. & (VulkanExtensions::kEXT_headless_surface | VulkanExtensions::kKHR_xcb_surface | VulkanExtensions::kKHR_xlib_surface
  462. | VulkanExtensions::kKHR_win32_surface | VulkanExtensions::kKHR_android_surface)))
  463. {
  464. ANKI_VK_LOGE("Couldn't find suitable surface extension");
  465. return Error::kFunctionFailed;
  466. }
  467. if(instExtensionCount)
  468. {
  469. ANKI_VK_LOGI("Will enable the following instance extensions:");
  470. for(U32 i = 0; i < instExtensionCount; ++i)
  471. {
  472. ANKI_VK_LOGI("\t%s", instExtensions[i]);
  473. }
  474. ci.enabledExtensionCount = instExtensionCount;
  475. ci.ppEnabledExtensionNames = &instExtensions[0];
  476. }
  477. }
  478. #if ANKI_GR_MANAGER_DEBUG_MEMMORY
  479. m_debugAllocCbs = {};
  480. m_debugAllocCbs.pUserData = this;
  481. m_debugAllocCbs.pfnAllocation = allocateCallback;
  482. m_debugAllocCbs.pfnReallocation = reallocateCallback;
  483. m_debugAllocCbs.pfnFree = freeCallback;
  484. VkAllocationCallbacks* pallocCbs = &m_debugAllocCbs;
  485. #else
  486. VkAllocationCallbacks* pallocCbs = nullptr;
  487. #endif
  488. ANKI_VK_CHECK(vkCreateInstance(&ci, pallocCbs, &m_instance));
  489. // Get symbolx
  490. //
  491. volkLoadInstance(m_instance);
  492. // Set debug callbacks
  493. if(!!(m_extensions & VulkanExtensions::kEXT_debug_utils))
  494. {
  495. VkDebugUtilsMessengerCreateInfoEXT info = {};
  496. info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
  497. info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT
  498. | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
  499. info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
  500. | VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT;
  501. info.pfnUserCallback = debugReportCallbackEXT;
  502. info.pUserData = this;
  503. vkCreateDebugUtilsMessengerEXT(m_instance, &info, nullptr, &m_debugUtilsMessager);
  504. }
  505. // Create the physical device
  506. //
  507. {
  508. uint32_t count = 0;
  509. ANKI_VK_CHECK(vkEnumeratePhysicalDevices(m_instance, &count, nullptr));
  510. if(count < 1)
  511. {
  512. ANKI_VK_LOGE("Wrong number of physical devices");
  513. return Error::kFunctionFailed;
  514. }
  515. GrDynamicArray<VkPhysicalDevice> physicalDevices;
  516. physicalDevices.resize(count);
  517. ANKI_VK_CHECK(vkEnumeratePhysicalDevices(m_instance, &count, &physicalDevices[0]));
  518. class Dev
  519. {
  520. public:
  521. VkPhysicalDevice m_pdev;
  522. VkPhysicalDeviceProperties2 m_vkProps;
  523. };
  524. GrDynamicArray<Dev> devs;
  525. devs.resize(count);
  526. for(U32 devIdx = 0; devIdx < count; ++devIdx)
  527. {
  528. devs[devIdx].m_pdev = physicalDevices[devIdx];
  529. devs[devIdx].m_vkProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  530. vkGetPhysicalDeviceProperties2(physicalDevices[devIdx], &devs[devIdx].m_vkProps);
  531. }
  532. // Sort the devices with the most powerful first
  533. std::sort(devs.getBegin(), devs.getEnd(), [](const Dev& a, const Dev& b) {
  534. if(a.m_vkProps.properties.deviceType != b.m_vkProps.properties.deviceType)
  535. {
  536. auto findDeviceTypeWeight = [](VkPhysicalDeviceType type) {
  537. switch(type)
  538. {
  539. case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
  540. return 1.0;
  541. case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
  542. return 2.0;
  543. default:
  544. return 0.0;
  545. }
  546. };
  547. // Put descrete GPUs first
  548. return findDeviceTypeWeight(a.m_vkProps.properties.deviceType) > findDeviceTypeWeight(b.m_vkProps.properties.deviceType);
  549. }
  550. else
  551. {
  552. return a.m_vkProps.properties.apiVersion >= b.m_vkProps.properties.apiVersion;
  553. }
  554. });
  555. const U32 chosenPhysDevIdx = min<U32>(g_deviceCVar.get(), devs.getSize() - 1);
  556. ANKI_VK_LOGI("Physical devices:");
  557. for(U32 devIdx = 0; devIdx < count; ++devIdx)
  558. {
  559. ANKI_VK_LOGI((devIdx == chosenPhysDevIdx) ? "\t(Selected) %s" : "\t%s", devs[devIdx].m_vkProps.properties.deviceName);
  560. }
  561. m_capabilities.m_discreteGpu = devs[chosenPhysDevIdx].m_vkProps.properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;
  562. m_physicalDevice = devs[chosenPhysDevIdx].m_pdev;
  563. }
  564. m_rtPipelineProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR;
  565. m_accelerationStructureProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR;
  566. m_devProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  567. m_devProps.pNext = &m_rtPipelineProps;
  568. m_rtPipelineProps.pNext = &m_accelerationStructureProps;
  569. vkGetPhysicalDeviceProperties2(m_physicalDevice, &m_devProps);
  570. // Find vendor
  571. switch(m_devProps.properties.vendorID)
  572. {
  573. case 0x13B5:
  574. m_capabilities.m_gpuVendor = GpuVendor::kArm;
  575. m_capabilities.m_minSubgroupSize = 16;
  576. m_capabilities.m_maxSubgroupSize = 16;
  577. break;
  578. case 0x10DE:
  579. m_capabilities.m_gpuVendor = GpuVendor::kNvidia;
  580. m_capabilities.m_minSubgroupSize = 32;
  581. m_capabilities.m_maxSubgroupSize = 32;
  582. break;
  583. case 0x1002:
  584. case 0x1022:
  585. m_capabilities.m_gpuVendor = GpuVendor::kAMD;
  586. m_capabilities.m_minSubgroupSize = 32;
  587. m_capabilities.m_maxSubgroupSize = 64;
  588. break;
  589. case 0x8086:
  590. m_capabilities.m_gpuVendor = GpuVendor::kIntel;
  591. m_capabilities.m_minSubgroupSize = 8;
  592. m_capabilities.m_maxSubgroupSize = 32;
  593. break;
  594. case 0x5143:
  595. m_capabilities.m_gpuVendor = GpuVendor::kQualcomm;
  596. m_capabilities.m_minSubgroupSize = 64;
  597. m_capabilities.m_maxSubgroupSize = 128;
  598. break;
  599. default:
  600. m_capabilities.m_gpuVendor = GpuVendor::kUnknown;
  601. // Choose something really low
  602. m_capabilities.m_minSubgroupSize = 8;
  603. m_capabilities.m_maxSubgroupSize = 8;
  604. }
  605. ANKI_VK_LOGI("GPU is %s. Vendor identified as %s", m_devProps.properties.deviceName, &kGPUVendorStrings[m_capabilities.m_gpuVendor][0]);
  606. // Set limits
  607. m_capabilities.m_constantBufferBindOffsetAlignment =
  608. max<U32>(ANKI_SAFE_ALIGNMENT, U32(m_devProps.properties.limits.minUniformBufferOffsetAlignment));
  609. m_capabilities.m_constantBufferMaxRange = m_devProps.properties.limits.maxUniformBufferRange;
  610. m_capabilities.m_uavBufferBindOffsetAlignment = max<U32>(ANKI_SAFE_ALIGNMENT, U32(m_devProps.properties.limits.minStorageBufferOffsetAlignment));
  611. m_capabilities.m_uavBufferMaxRange = m_devProps.properties.limits.maxStorageBufferRange;
  612. m_capabilities.m_textureBufferBindOffsetAlignment =
  613. max<U32>(ANKI_SAFE_ALIGNMENT, U32(m_devProps.properties.limits.minTexelBufferOffsetAlignment));
  614. m_capabilities.m_textureBufferMaxRange = kMaxU32;
  615. m_capabilities.m_computeSharedMemorySize = m_devProps.properties.limits.maxComputeSharedMemorySize;
  616. m_capabilities.m_maxDrawIndirectCount = m_devProps.properties.limits.maxDrawIndirectCount;
  617. m_capabilities.m_majorApiVersion = vulkanMajor;
  618. m_capabilities.m_minorApiVersion = vulkanMinor;
  619. m_capabilities.m_shaderGroupHandleSize = m_rtPipelineProps.shaderGroupHandleSize;
  620. m_capabilities.m_sbtRecordAlignment = m_rtPipelineProps.shaderGroupBaseAlignment;
  621. #if ANKI_PLATFORM_MOBILE
  622. if(m_capabilities.m_gpuVendor == GpuVendor::kQualcomm)
  623. {
  624. // Calling vkCreateGraphicsPipeline from multiple threads crashes qualcomm's compiler
  625. ANKI_VK_LOGI("Enabling workaround for vkCreateGraphicsPipeline crashing when called from multiple threads");
  626. m_globalCreatePipelineMtx = anki::newInstance<Mutex>(GrMemoryPool::getSingleton());
  627. }
  628. #endif
  629. // DLSS checks
  630. m_capabilities.m_dlss = ANKI_DLSS && m_capabilities.m_gpuVendor == GpuVendor::kNvidia;
  631. return Error::kNone;
  632. }
/// Creates the logical device:
/// 1. Picks the queue families (one general graphics+compute+present family, optionally an async compute family).
/// 2. Enumerates the device extensions and selects the ones the engine knows about (some gated by CVars).
/// 3. Queries the feature structs the engine needs, validates the mandatory ones, and threads the optional
///    ones into the VkDeviceCreateInfo::pNext chain.
/// 4. Calls vkCreateDevice.
/// Returns Error::kFunctionFailed when a required extension or feature is missing.
/// NOTE: all feature structs are stack locals; they only need to stay alive until the vkCreateDevice call,
/// which happens inside this function, so the pattern is safe.
Error GrManagerImpl::initDevice()
{
	uint32_t count = 0;
	vkGetPhysicalDeviceQueueFamilyProperties(m_physicalDevice, &count, nullptr);
	ANKI_VK_LOGI("Number of queue families: %u", count);
	GrDynamicArray<VkQueueFamilyProperties> queueInfos;
	queueInfos.resize(count);
	vkGetPhysicalDeviceQueueFamilyProperties(m_physicalDevice, &count, &queueInfos[0]);

	// The "general" family must expose graphics+compute; present support is checked separately below
	const VkQueueFlags GENERAL_QUEUE_FLAGS = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
	for(U32 i = 0; i < count; ++i)
	{
		VkBool32 supportsPresent = false;
		ANKI_VK_CHECK(vkGetPhysicalDeviceSurfaceSupportKHR(m_physicalDevice, i, m_surface, &supportsPresent));
		if(supportsPresent)
		{
			if((queueInfos[i].queueFlags & GENERAL_QUEUE_FLAGS) == GENERAL_QUEUE_FLAGS)
			{
				m_queueFamilyIndices[VulkanQueueType::kGeneral] = i;
			}
			else if((queueInfos[i].queueFlags & VK_QUEUE_COMPUTE_BIT) && !(queueInfos[i].queueFlags & VK_QUEUE_GRAPHICS_BIT))
			{
				// This must be the async compute
				m_queueFamilyIndices[VulkanQueueType::kCompute] = i;
			}
		}
	}

	if(m_queueFamilyIndices[VulkanQueueType::kGeneral] == kMaxU32)
	{
		ANKI_VK_LOGE("Couldn't find a queue family with graphics+compute+transfer+present. "
					 "Something is wrong");
		return Error::kFunctionFailed;
	}

	// The CVar can force async compute off even if a suitable family exists
	if(!g_asyncComputeCVar.get())
	{
		m_queueFamilyIndices[VulkanQueueType::kCompute] = kMaxU32;
	}

	if(m_queueFamilyIndices[VulkanQueueType::kCompute] == kMaxU32)
	{
		ANKI_VK_LOGW("Couldn't find an async compute queue. Will try to use the general queue instead");
	}
	else
	{
		ANKI_VK_LOGI("Async compute is enabled");
	}

	// One queue per enabled family, all at the same priority. `priority` outlives vkCreateDevice (same scope)
	const F32 priority = 1.0f;
	Array<VkDeviceQueueCreateInfo, U32(VulkanQueueType::kCount)> q = {};
	VkDeviceCreateInfo ci = {};
	ci.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
	ci.pQueueCreateInfos = &q[0];
	for(VulkanQueueType qtype : EnumIterable<VulkanQueueType>())
	{
		if(m_queueFamilyIndices[qtype] != kMaxU32)
		{
			q[qtype].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
			q[qtype].queueFamilyIndex = m_queueFamilyIndices[qtype];
			q[qtype].queueCount = 1;
			q[qtype].pQueuePriorities = &priority;
			++ci.queueCreateInfoCount;
		}
	}

	// Extensions
	U32 extCount = 0;
	vkEnumerateDeviceExtensionProperties(m_physicalDevice, nullptr, &extCount, nullptr);
	GrDynamicArray<VkExtensionProperties> extensionInfos; // Keep it alive in the stack
	GrDynamicArray<const char*> extensionsToEnable;
	if(extCount)
	{
		extensionInfos.resize(extCount);
		// extCount is an upper bound on how many extensions can be enabled
		extensionsToEnable.resize(extCount);
		U32 extensionsToEnableCount = 0;
		vkEnumerateDeviceExtensionProperties(m_physicalDevice, nullptr, &extCount, &extensionInfos[0]);

		ANKI_VK_LOGV("Found the following device extensions:");
		for(U32 i = 0; i < extCount; ++i)
		{
			ANKI_VK_LOGV("\t%s", extensionInfos[i].extensionName);
		}

		// Walk the list backwards, consuming extCount. The enabled name pointers point into extensionInfos,
		// which stays alive until vkCreateDevice
		while(extCount-- != 0)
		{
			const CString extensionName(&extensionInfos[extCount].extensionName[0]);

			if(extensionName == VK_KHR_SWAPCHAIN_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_swapchain;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kAMD_rasterization_order;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME && g_rayTracingCVar.get())
			{
				m_extensions |= VulkanExtensions::kKHR_ray_tracing;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
				m_capabilities.m_rayTracingEnabled = true;
			}
			else if(extensionName == VK_KHR_RAY_QUERY_EXTENSION_NAME && g_rayTracingCVar.get())
			{
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME && g_rayTracingCVar.get())
			{
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME && g_rayTracingCVar.get())
			{
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME && g_rayTracingCVar.get())
			{
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME && g_displayStatsCVar.get() > 1)
			{
				m_extensions |= VulkanExtensions::kKHR_pipeline_executable_properties;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME && g_debugPrintfCVar.get())
			{
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kEXT_descriptor_indexing;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_buffer_device_address;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kEXT_scalar_block_layout;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_timeline_semaphore;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_shader_float16_int8;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME && g_64bitAtomicsCVar.get())
			{
				m_extensions |= VulkanExtensions::kKHR_shader_atomic_int64;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_SPIRV_1_4_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_spirv_1_4;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_shader_float_controls;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME && g_samplerFilterMinMaxCVar.get())
			{
				m_extensions |= VulkanExtensions::kKHR_sampler_filter_min_max;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_create_renderpass_2;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME && g_vrsCVar.get())
			{
				m_extensions |= VulkanExtensions::kKHR_fragment_shading_rate;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kEXT_astc_decode_mode;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kEXT_texture_compression_astc_hdr;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			// The next three are only needed by the DLSS integration
			else if(m_capabilities.m_dlss && extensionName == VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_push_descriptor;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(m_capabilities.m_dlss && extensionName == ANKI_VK_NVX_BINARY_IMPORT)
			{
				m_extensions |= VulkanExtensions::kNVX_binary_import;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(m_capabilities.m_dlss && extensionName == VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kNVX_image_view_handle;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_MAINTENANCE_4_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_maintenance_4;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_draw_indirect_count;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_EXT_MESH_SHADER_EXTENSION_NAME && g_meshShadersCVar.get())
			{
				m_extensions |= VulkanExtensions::kEXT_mesh_shader;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kEXT_host_query_reset;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
			else if(extensionName == VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME)
			{
				m_extensions |= VulkanExtensions::kKHR_fragment_shader_barycentric;
				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
			}
		}

		ANKI_VK_LOGI("Will enable the following device extensions:");
		for(U32 i = 0; i < extensionsToEnableCount; ++i)
		{
			ANKI_VK_LOGI("\t%s", extensionsToEnable[i]);
		}

		ci.enabledExtensionCount = extensionsToEnableCount;
		ci.ppEnabledExtensionNames = &extensionsToEnable[0];
	}

	// Enable/disable generic features
	VkPhysicalDeviceFeatures devFeatures = {};
	{
		VkPhysicalDeviceFeatures2 devFeatures2 = {};
		devFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &devFeatures2);
		devFeatures = devFeatures2.features;
		// Robust buffer access only when validation is on (it costs performance) and the device supports it
		devFeatures.robustBufferAccess = (g_validationCVar.get() && devFeatures.robustBufferAccess) ? true : false;
		ANKI_VK_LOGI("Robust buffer access is %s", (devFeatures.robustBufferAccess) ? "enabled" : "disabled");

		if(devFeatures.pipelineStatisticsQuery)
		{
			m_capabilities.m_pipelineQuery = true;
			ANKI_VK_LOGV("GPU supports pipeline statistics queries");
		}

		ci.pEnabledFeatures = &devFeatures;
	}

#if ANKI_PLATFORM_MOBILE
	// ASTC HDR is mandatory on mobile because the engine's compressed textures rely on it
	if(!(m_extensions & VulkanExtensions::kEXT_texture_compression_astc_hdr))
	{
		ANKI_VK_LOGE(VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME " is not supported");
		return Error::kFunctionFailed;
	}
#endif

	if(!(m_extensions & VulkanExtensions::kKHR_create_renderpass_2))
	{
		ANKI_VK_LOGE(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME " is not supported");
		return Error::kFunctionFailed;
	}

	// min/max sampler filtering is optional
	if(!!(m_extensions & VulkanExtensions::kKHR_sampler_filter_min_max))
	{
		m_capabilities.m_samplingFilterMinMax = true;
	}
	else
	{
		m_capabilities.m_samplingFilterMinMax = false;
		ANKI_VK_LOGI(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME " is not supported or disabled");
	}

	// Descriptor indexing (mandatory: bindless needs non-uniform indexing + update-after-bind)
	VkPhysicalDeviceDescriptorIndexingFeatures descriptorIndexingFeatures = {};
	if(!(m_extensions & VulkanExtensions::kEXT_descriptor_indexing))
	{
		ANKI_VK_LOGE(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME " is not supported");
		return Error::kFunctionFailed;
	}
	else
	{
		descriptorIndexingFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT;
		getPhysicalDevicaFeatures2(descriptorIndexingFeatures);

		if(!descriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing
		   || !descriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing)
		{
			ANKI_VK_LOGE("Non uniform indexing is not supported by the device");
			return Error::kFunctionFailed;
		}

		if(!descriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind
		   || !descriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind)
		{
			ANKI_VK_LOGE("Update descriptors after bind is not supported by the device");
			return Error::kFunctionFailed;
		}

		if(!descriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending)
		{
			ANKI_VK_LOGE("Update descriptors while cmd buffer is pending is not supported by the device");
			return Error::kFunctionFailed;
		}

		// Push onto the pNext chain
		descriptorIndexingFeatures.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &descriptorIndexingFeatures;
	}

	// Buffer address (optional)
	VkPhysicalDeviceBufferDeviceAddressFeaturesKHR deviceBufferFeatures = {};
	if(!(m_extensions & VulkanExtensions::kKHR_buffer_device_address))
	{
		ANKI_VK_LOGW(VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME " is not supported");
	}
	else
	{
		deviceBufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR;
		getPhysicalDevicaFeatures2(deviceBufferFeatures);
		// Capture/replay only makes sense when debug tooling is active
		deviceBufferFeatures.bufferDeviceAddressCaptureReplay = deviceBufferFeatures.bufferDeviceAddressCaptureReplay && g_debugMarkersCVar.get();
		deviceBufferFeatures.bufferDeviceAddressMultiDevice = false;

		deviceBufferFeatures.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &deviceBufferFeatures;
	}

	// Scalar block layout (mandatory)
	VkPhysicalDeviceScalarBlockLayoutFeaturesEXT scalarBlockLayoutFeatures = {};
	if(!(m_extensions & VulkanExtensions::kEXT_scalar_block_layout))
	{
		ANKI_VK_LOGE(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME " is not supported");
		return Error::kFunctionFailed;
	}
	else
	{
		scalarBlockLayoutFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT;
		getPhysicalDevicaFeatures2(scalarBlockLayoutFeatures);

		if(!scalarBlockLayoutFeatures.scalarBlockLayout)
		{
			ANKI_VK_LOGE("Scalar block layout is not supported by the device");
			return Error::kFunctionFailed;
		}

		scalarBlockLayoutFeatures.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &scalarBlockLayoutFeatures;
	}

	// Timeline semaphore (mandatory)
	VkPhysicalDeviceTimelineSemaphoreFeaturesKHR timelineSemaphoreFeatures = {};
	if(!(m_extensions & VulkanExtensions::kKHR_timeline_semaphore))
	{
		ANKI_VK_LOGE(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME " is not supported");
		return Error::kFunctionFailed;
	}
	else
	{
		timelineSemaphoreFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR;
		getPhysicalDevicaFeatures2(timelineSemaphoreFeatures);

		if(!timelineSemaphoreFeatures.timelineSemaphore)
		{
			ANKI_VK_LOGE("Timeline semaphores are not supported by the device");
			return Error::kFunctionFailed;
		}

		timelineSemaphoreFeatures.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &timelineSemaphoreFeatures;
	}

	// Set RT features. The three structs are queried as one chain and spliced into ci.pNext as one unit
	VkPhysicalDeviceRayTracingPipelineFeaturesKHR rtPipelineFeatures = {};
	VkPhysicalDeviceRayQueryFeaturesKHR rayQueryFeatures = {};
	VkPhysicalDeviceAccelerationStructureFeaturesKHR accelerationStructureFeatures = {};
	if(!!(m_extensions & VulkanExtensions::kKHR_ray_tracing))
	{
		rtPipelineFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR;
		rayQueryFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR;
		accelerationStructureFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR;

		VkPhysicalDeviceFeatures2 features = {};
		features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
		features.pNext = &rtPipelineFeatures;
		rtPipelineFeatures.pNext = &rayQueryFeatures;
		rayQueryFeatures.pNext = &accelerationStructureFeatures;
		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features);

		if(!rtPipelineFeatures.rayTracingPipeline || !rayQueryFeatures.rayQuery || !accelerationStructureFeatures.accelerationStructure)
		{
			ANKI_VK_LOGE("Ray tracing and ray query are both required");
			return Error::kFunctionFailed;
		}

		// Only enable what's necessary
		rtPipelineFeatures.rayTracingPipelineShaderGroupHandleCaptureReplay = false;
		rtPipelineFeatures.rayTracingPipelineShaderGroupHandleCaptureReplayMixed = false;
		rtPipelineFeatures.rayTraversalPrimitiveCulling = false;
		accelerationStructureFeatures.accelerationStructureCaptureReplay = false;
		accelerationStructureFeatures.accelerationStructureHostCommands = false;
		accelerationStructureFeatures.descriptorBindingAccelerationStructureUpdateAfterBind = false;

		// Splice the rt->rayQuery->accel sub-chain in front of the existing chain
		ANKI_ASSERT(accelerationStructureFeatures.pNext == nullptr);
		accelerationStructureFeatures.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &rtPipelineFeatures;

		// Get some more stuff
		VkPhysicalDeviceAccelerationStructurePropertiesKHR props = {};
		props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR;
		getPhysicalDeviceProperties2(props);
		m_capabilities.m_accelerationStructureBuildScratchOffsetAlignment = props.minAccelerationStructureScratchOffsetAlignment;
	}

	// Pipeline features (only when stat display requested them during extension selection)
	VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR pplineExecutablePropertiesFeatures = {};
	if(!!(m_extensions & VulkanExtensions::kKHR_pipeline_executable_properties))
	{
		pplineExecutablePropertiesFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR;
		pplineExecutablePropertiesFeatures.pipelineExecutableInfo = true;

		pplineExecutablePropertiesFeatures.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &pplineExecutablePropertiesFeatures;
	}

	// F16 I8 (mandatory)
	VkPhysicalDeviceShaderFloat16Int8FeaturesKHR float16Int8Features = {};
	if(!(m_extensions & VulkanExtensions::kKHR_shader_float16_int8))
	{
		ANKI_VK_LOGE(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME " is not supported");
		return Error::kFunctionFailed;
	}
	else
	{
		float16Int8Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR;
		getPhysicalDevicaFeatures2(float16Int8Features);

		float16Int8Features.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &float16Int8Features;
	}

	// 64bit atomics (optional, capability flag recorded)
	VkPhysicalDeviceShaderAtomicInt64FeaturesKHR atomicInt64Features = {};
	if(!(m_extensions & VulkanExtensions::kKHR_shader_atomic_int64))
	{
		ANKI_VK_LOGW(VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME " is not supported or disabled");
		m_capabilities.m_64bitAtomics = false;
	}
	else
	{
		m_capabilities.m_64bitAtomics = true;
		atomicInt64Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR;
		getPhysicalDevicaFeatures2(atomicInt64Features);

		atomicInt64Features.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &atomicInt64Features;
	}

	// VRS (optional; disabled again if the attachment texel size constraints aren't met)
	VkPhysicalDeviceFragmentShadingRateFeaturesKHR fragmentShadingRateFeatures = {};
	if(!(m_extensions & VulkanExtensions::kKHR_fragment_shading_rate))
	{
		ANKI_VK_LOGI(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME " is not supported or disabled");
		m_capabilities.m_vrs = false;
	}
	else
	{
		m_capabilities.m_vrs = true;
		fragmentShadingRateFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR;
		getPhysicalDevicaFeatures2(fragmentShadingRateFeatures);

		// Some checks
		if(!fragmentShadingRateFeatures.attachmentFragmentShadingRate || !fragmentShadingRateFeatures.pipelineFragmentShadingRate)
		{
			ANKI_VK_LOGW(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME " doesn't support attachment and/or pipeline rates. Will disable VRS");
			m_capabilities.m_vrs = false;
		}
		else
		{
			// Disable some things
			fragmentShadingRateFeatures.primitiveFragmentShadingRate = false;
		}

		if(m_capabilities.m_vrs)
		{
			// The engine uses 8x8..16x16 shading rate attachment texels; bail if the device can't cover that range
			VkPhysicalDeviceFragmentShadingRatePropertiesKHR fragmentShadingRateProperties = {};
			fragmentShadingRateProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR;
			getPhysicalDeviceProperties2(fragmentShadingRateProperties);

			if(fragmentShadingRateProperties.minFragmentShadingRateAttachmentTexelSize.width > 16
			   || fragmentShadingRateProperties.minFragmentShadingRateAttachmentTexelSize.height > 16
			   || fragmentShadingRateProperties.maxFragmentShadingRateAttachmentTexelSize.width < 8
			   || fragmentShadingRateProperties.maxFragmentShadingRateAttachmentTexelSize.height < 8)
			{
				ANKI_VK_LOGW(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME
							 " doesn't support 8x8 or 16x16 shading rate attachment texel size. Will disable VRS");
				m_capabilities.m_vrs = false;
			}
			else
			{
				m_capabilities.m_minShadingRateImageTexelSize = max(fragmentShadingRateProperties.minFragmentShadingRateAttachmentTexelSize.width,
																	fragmentShadingRateProperties.minFragmentShadingRateAttachmentTexelSize.height);
			}
		}

		if(m_capabilities.m_vrs)
		{
			fragmentShadingRateFeatures.pNext = const_cast<void*>(ci.pNext);
			ci.pNext = &fragmentShadingRateFeatures;
		}
	}

	// Mesh shaders (optional, but task shader support is required when the extension is enabled)
	VkPhysicalDeviceMeshShaderFeaturesEXT meshShadersFeatures = {};
	if(!!(m_extensions & VulkanExtensions::kEXT_mesh_shader))
	{
		m_capabilities.m_meshShaders = true;
		meshShadersFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT;
		getPhysicalDevicaFeatures2(meshShadersFeatures);

		if(meshShadersFeatures.taskShader == false)
		{
			ANKI_LOGE(VK_EXT_MESH_SHADER_EXTENSION_NAME " doesn't support task shaders");
			return Error::kFunctionFailed;
		}

		meshShadersFeatures.multiviewMeshShader = false;
		meshShadersFeatures.primitiveFragmentShadingRateMeshShader = false;

		meshShadersFeatures.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &meshShadersFeatures;

		ANKI_VK_LOGI(VK_EXT_MESH_SHADER_EXTENSION_NAME " is supported and enabled");
	}
	else
	{
		ANKI_VK_LOGI(VK_EXT_MESH_SHADER_EXTENSION_NAME " is not supported or disabled ");
	}

	// Host query reset (mandatory)
	VkPhysicalDeviceHostQueryResetFeaturesEXT hostQueryResetFeatures = {};
	if(!!(m_extensions & VulkanExtensions::kEXT_host_query_reset))
	{
		hostQueryResetFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES;
		getPhysicalDevicaFeatures2(hostQueryResetFeatures);

		if(hostQueryResetFeatures.hostQueryReset == false)
		{
			ANKI_VK_LOGE("VkPhysicalDeviceHostQueryResetFeaturesEXT::hostQueryReset is false");
			return Error::kFunctionFailed;
		}

		hostQueryResetFeatures.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &hostQueryResetFeatures;
	}
	else
	{
		ANKI_VK_LOGE(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME " is not supported");
		return Error::kFunctionFailed;
	}

	// Barycentrics (optional)
	VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR baryFeatures = {};
	if(!!(m_extensions & VulkanExtensions::kKHR_fragment_shader_barycentric))
	{
		baryFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR;
		getPhysicalDevicaFeatures2(baryFeatures);

		if(baryFeatures.fragmentShaderBarycentric == false)
		{
			ANKI_VK_LOGE("VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR::fragmentShaderBarycentric is false");
			return Error::kFunctionFailed;
		}

		baryFeatures.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &baryFeatures;

		m_capabilities.m_barycentrics = true;
	}

	VkPhysicalDeviceMaintenance4FeaturesKHR maintenance4Features = {};
	if(!!(m_extensions & VulkanExtensions::kKHR_maintenance_4))
	{
		maintenance4Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR;
		maintenance4Features.maintenance4 = true;

		maintenance4Features.pNext = const_cast<void*>(ci.pNext);
		ci.pNext = &maintenance4Features;
	}

	// The engine requires the full U32 range for maxDrawIndirectCount, anything less is an error
	if(!(m_extensions & VulkanExtensions::kKHR_draw_indirect_count) || m_capabilities.m_maxDrawIndirectCount < kMaxU32)
	{
		ANKI_VK_LOGE(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME " not supported or too small maxDrawIndirectCount");
		return Error::kFunctionFailed;
	}

	ANKI_VK_CHECK(vkCreateDevice(m_physicalDevice, &ci, nullptr, &m_device));

	// NOTE(review): these two checks run after vkCreateDevice; failing here returns with m_device already
	// created — presumably the owner's error path destroys it. Verify against the teardown code
	if(!(m_extensions & VulkanExtensions::kKHR_spirv_1_4))
	{
		ANKI_VK_LOGE(VK_KHR_SPIRV_1_4_EXTENSION_NAME " is not supported");
		return Error::kFunctionFailed;
	}

	if(!(m_extensions & VulkanExtensions::kKHR_shader_float_controls))
	{
		ANKI_VK_LOGE(VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME " is not supported");
		return Error::kFunctionFailed;
	}

	return Error::kNone;
}
  1193. Error GrManagerImpl::initMemory()
  1194. {
  1195. vkGetPhysicalDeviceMemoryProperties(m_physicalDevice, &m_memoryProperties);
  1196. // Print some info
  1197. ANKI_VK_LOGV("Vulkan memory info:");
  1198. for(U32 i = 0; i < m_memoryProperties.memoryHeapCount; ++i)
  1199. {
  1200. ANKI_VK_LOGV("\tHeap %u size %zu", i, m_memoryProperties.memoryHeaps[i].size);
  1201. }
  1202. for(U32 i = 0; i < m_memoryProperties.memoryTypeCount; ++i)
  1203. {
  1204. ANKI_VK_LOGV("\tMem type %u points to heap %u, flags %" ANKI_PRIb32, i, m_memoryProperties.memoryTypes[i].heapIndex,
  1205. ANKI_FORMAT_U32(m_memoryProperties.memoryTypes[i].propertyFlags));
  1206. }
  1207. m_gpuMemManager.init(!!(m_extensions & VulkanExtensions::kKHR_buffer_device_address));
  1208. return Error::kNone;
  1209. }
  1210. #if ANKI_GR_MANAGER_DEBUG_MEMMORY
  1211. void* GrManagerImpl::allocateCallback(void* userData, size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
  1212. {
  1213. if(size == 0) [[unlikely]]
  1214. {
  1215. return nullptr;
  1216. }
  1217. ANKI_ASSERT(userData);
  1218. ANKI_ASSERT(size);
  1219. ANKI_ASSERT(isPowerOfTwo(alignment));
  1220. ANKI_ASSERT(alignment <= MAX_ALLOC_ALIGNMENT);
  1221. auto alloc = static_cast<GrManagerImpl*>(userData)->getAllocator();
  1222. PtrSize newSize = size + sizeof(AllocHeader);
  1223. AllocHeader* header = static_cast<AllocHeader*>(alloc.getMemoryPool().allocate(newSize, MAX_ALLOC_ALIGNMENT));
  1224. header->m_sig = ALLOC_SIG;
  1225. header->m_size = size;
  1226. ++header;
  1227. return static_cast<AllocHeader*>(header);
  1228. }
  1229. void* GrManagerImpl::reallocateCallback(void* userData, void* original, size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
  1230. {
  1231. if(original && size == 0)
  1232. {
  1233. freeCallback(userData, original);
  1234. return nullptr;
  1235. }
  1236. void* mem = allocateCallback(userData, size, alignment, allocationScope);
  1237. if(original)
  1238. {
  1239. // Move the data
  1240. AllocHeader* header = static_cast<AllocHeader*>(original);
  1241. --header;
  1242. ANKI_ASSERT(header->m_sig == ALLOC_SIG);
  1243. memcpy(mem, original, header->m_size);
  1244. }
  1245. return mem;
  1246. }
  1247. void GrManagerImpl::freeCallback(void* userData, void* ptr)
  1248. {
  1249. if(ptr)
  1250. {
  1251. ANKI_ASSERT(userData);
  1252. auto alloc = static_cast<GrManagerImpl*>(userData)->getAllocator();
  1253. AllocHeader* header = static_cast<AllocHeader*>(ptr);
  1254. --header;
  1255. ANKI_ASSERT(header->m_sig == ALLOC_SIG);
  1256. alloc.getMemoryPool().free(header);
  1257. }
  1258. }
  1259. #endif
/// Acquires the next swapchain image for the current frame and returns its texture.
/// Creates a fresh acquire semaphore (backed by a new fence) for the frame, calls
/// vkAcquireNextImageKHR and, on VK_ERROR_OUT_OF_DATE_KHR, drains the queues, recreates the
/// swapchain and retries once. Any other failure is fatal (ANKI_VK_CHECKF).
/// Thread-safe: serialized on m_globalMtx.
TexturePtr GrManagerImpl::acquireNextPresentableTexture()
{
	ANKI_TRACE_SCOPED_EVENT(VkAcquireImage);

	LockGuard<Mutex> lock(m_globalMtx);

	PerFrame& frame = m_perFrame[m_frame % kMaxFramesInFlight];

	// Create sync objects
	MicroFencePtr fence = m_fenceFactory.newInstance();
	frame.m_acquireSemaphore = m_semaphoreFactory.newInstance(fence, false);

	// Get new image
	uint32_t imageIdx;
	VkResult res = vkAcquireNextImageKHR(m_device, m_crntSwapchain->m_swapchain, UINT64_MAX, frame.m_acquireSemaphore->getHandle(),
										 fence->getHandle(), &imageIdx);

	if(res == VK_ERROR_OUT_OF_DATE_KHR)
	{
		ANKI_VK_LOGW("Swapchain is out of date. Will wait for the queue and create a new one");

		// Drain all in-flight work before tearing the swapchain down
		for(VkQueue queue : m_queues)
		{
			if(queue)
			{
				vkQueueWaitIdle(queue);
			}
		}

		// Release the old swapchain before creating the replacement
		m_crntSwapchain.reset(nullptr);
		m_crntSwapchain = m_swapchainFactory.newInstance();

		// Can't fail a second time
		ANKI_VK_CHECKF(vkAcquireNextImageKHR(m_device, m_crntSwapchain->m_swapchain, UINT64_MAX, frame.m_acquireSemaphore->getHandle(),
											 fence->getHandle(), &imageIdx));
	}
	else
	{
		ANKI_VK_CHECKF(res);
	}

	// Remember which image was acquired; endFrame() presents it
	m_acquiredImageIdx = U8(imageIdx);
	return m_crntSwapchain->m_textures[imageIdx];
}
/// Ends the current frame: throttles the CPU against the GPU and presents the acquired swapchain image.
/// Frame pacing: waits on the present fence of the frame that is kMaxFramesInFlight-1 frames behind (the slot that
/// (m_frame + 1) % kMaxFramesInFlight indexes), then recycles that slot's sync objects before presenting this one.
/// On VK_ERROR_OUT_OF_DATE_KHR from present the device is idled and the swapchain recreated; any other failure is
/// fatal (ANKI_VK_CHECKF). Increments m_frame on exit.
void GrManagerImpl::endFrame()
{
	ANKI_TRACE_SCOPED_EVENT(VkPresent);
	LockGuard<Mutex> lock(m_globalMtx);

	PerFrame& frame = m_perFrame[m_frame % kMaxFramesInFlight];

	// Wait for the fence of N-2 frame
	const U waitFrameIdx = (m_frame + 1) % kMaxFramesInFlight;
	PerFrame& waitFrame = m_perFrame[waitFrameIdx];
	if(waitFrame.m_presentFence)
	{
		waitFrame.m_presentFence->wait();
	}

	resetFrame(waitFrame);

	// m_renderSemaphore is only created by flushCommandBuffers() when a submission rendered to the swapchain.
	if(!frame.m_renderSemaphore)
	{
		ANKI_VK_LOGW("Nobody draw to the default framebuffer");
	}

	// Present
	VkResult res;
	VkPresentInfoKHR present = {};
	present.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
	present.waitSemaphoreCount = (frame.m_renderSemaphore) ? 1 : 0;
	// NOTE(review): taking the address of getHandle()'s return — assumes it returns a reference to stored state, not a
	// temporary (which would dangle); confirm against MicroSemaphore.
	present.pWaitSemaphores = (frame.m_renderSemaphore) ? &frame.m_renderSemaphore->getHandle() : nullptr;
	present.swapchainCount = 1;
	present.pSwapchains = &m_crntSwapchain->m_swapchain;
	const U32 idx = m_acquiredImageIdx;
	present.pImageIndices = &idx;
	// pResults receives the per-swapchain result; checked below alongside the aggregate return value.
	present.pResults = &res;

	// Present on the queue that actually wrote to the swapchain image (recorded by flushCommandBuffers()).
	const VkResult res1 = vkQueuePresentKHR(m_queues[frame.m_queueWroteToSwapchainImage], &present);
	if(res1 == VK_ERROR_OUT_OF_DATE_KHR)
	{
		ANKI_VK_LOGW("Swapchain is out of date. Will wait for the queues and create a new one");
		// Idle everything so the old swapchain is no longer referenced before recreating it.
		for(VkQueue queue : m_queues)
		{
			if(queue)
			{
				vkQueueWaitIdle(queue);
			}
		}
		vkDeviceWaitIdle(m_device);

		m_crntSwapchain.reset(nullptr);
		m_crntSwapchain = m_swapchainFactory.newInstance();
	}
	else
	{
		ANKI_VK_CHECKF(res1);
		ANKI_VK_CHECKF(res);
	}

	m_gpuMemManager.updateStats();

	// Finalize
	++m_frame;
}
  1347. void GrManagerImpl::resetFrame(PerFrame& frame)
  1348. {
  1349. frame.m_presentFence.reset(nullptr);
  1350. frame.m_acquireSemaphore.reset(nullptr);
  1351. frame.m_renderSemaphore.reset(nullptr);
  1352. }
  1353. void GrManagerImpl::flushCommandBuffers(WeakArray<MicroCommandBuffer*> cmdbs, Bool cmdbRenderedToSwapchain,
  1354. WeakArray<MicroSemaphore*> userWaitSemaphores, MicroSemaphorePtr* userSignalSemaphore, Bool wait)
  1355. {
  1356. constexpr U32 maxSemaphores = 8;
  1357. VkSubmitInfo submit = {};
  1358. submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  1359. Array<VkSemaphore, maxSemaphores> waitSemaphores;
  1360. submit.pWaitSemaphores = &waitSemaphores[0];
  1361. Array<VkSemaphore, maxSemaphores> signalSemaphores;
  1362. submit.pSignalSemaphores = &signalSemaphores[0];
  1363. Array<VkPipelineStageFlags, maxSemaphores> waitStages;
  1364. submit.pWaitDstStageMask = &waitStages[0];
  1365. // First thing, create a fence
  1366. MicroFencePtr fence = m_fenceFactory.newInstance();
  1367. // Command buffers
  1368. Array<VkCommandBuffer, 16> handles;
  1369. submit.pCommandBuffers = handles.getBegin();
  1370. VulkanQueueType queueType = cmdbs[0]->getVulkanQueueType();
  1371. for(MicroCommandBuffer* cmdb : cmdbs)
  1372. {
  1373. handles[submit.commandBufferCount] = cmdb->getHandle();
  1374. cmdb->setFence(fence.get());
  1375. ++submit.commandBufferCount;
  1376. ANKI_ASSERT(cmdb->getVulkanQueueType() == queueType);
  1377. queueType = cmdb->getVulkanQueueType();
  1378. }
  1379. // Handle user semaphores
  1380. Array<U64, maxSemaphores> waitTimelineValues;
  1381. Array<U64, maxSemaphores> signalTimelineValues;
  1382. VkTimelineSemaphoreSubmitInfo timelineInfo = {};
  1383. timelineInfo.sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO;
  1384. timelineInfo.waitSemaphoreValueCount = userWaitSemaphores.getSize();
  1385. timelineInfo.pWaitSemaphoreValues = &waitTimelineValues[0];
  1386. timelineInfo.pSignalSemaphoreValues = &signalTimelineValues[0];
  1387. submit.pNext = &timelineInfo;
  1388. for(MicroSemaphore* userWaitSemaphore : userWaitSemaphores)
  1389. {
  1390. ANKI_ASSERT(userWaitSemaphore->isTimeline());
  1391. waitSemaphores[submit.waitSemaphoreCount] = userWaitSemaphore->getHandle();
  1392. waitTimelineValues[submit.waitSemaphoreCount] = userWaitSemaphore->getSemaphoreValue();
  1393. // Be a bit conservative
  1394. waitStages[submit.waitSemaphoreCount] = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
  1395. ++submit.waitSemaphoreCount;
  1396. // Refresh the fence because the semaphore can't be recycled until the current submission is done
  1397. userWaitSemaphore->setFence(fence.get());
  1398. }
  1399. if(userSignalSemaphore)
  1400. {
  1401. *userSignalSemaphore = m_semaphoreFactory.newInstance(fence, true);
  1402. signalSemaphores[submit.signalSemaphoreCount++] = (*userSignalSemaphore)->getHandle();
  1403. signalTimelineValues[timelineInfo.signalSemaphoreValueCount++] = (*userSignalSemaphore)->getNextSemaphoreValue();
  1404. }
  1405. // Submit
  1406. {
  1407. // Protect the class, the queue and other stuff
  1408. LockGuard<Mutex> lock(m_globalMtx);
  1409. // Do some special stuff for the last command buffer
  1410. PerFrame& frame = m_perFrame[m_frame % kMaxFramesInFlight];
  1411. if(cmdbRenderedToSwapchain)
  1412. {
  1413. // Wait semaphore
  1414. waitSemaphores[submit.waitSemaphoreCount] = frame.m_acquireSemaphore->getHandle();
  1415. // That depends on how we use the swapchain img. Be a bit conservative
  1416. waitStages[submit.waitSemaphoreCount] = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  1417. ++submit.waitSemaphoreCount;
  1418. // Refresh the fence because the semaphore can't be recycled until the current submission is done
  1419. frame.m_acquireSemaphore->setFence(fence.get());
  1420. // Create the semaphore to signal
  1421. ANKI_ASSERT(!frame.m_renderSemaphore && "Only one begin/end render pass is allowed with the default fb");
  1422. frame.m_renderSemaphore = m_semaphoreFactory.newInstance(fence, false);
  1423. signalSemaphores[submit.signalSemaphoreCount++] = frame.m_renderSemaphore->getHandle();
  1424. // Increment the timeline values as well because the spec wants a dummy value even for non-timeline semaphores
  1425. signalTimelineValues[timelineInfo.signalSemaphoreValueCount++] = 0;
  1426. // Update the frame fence
  1427. frame.m_presentFence = fence;
  1428. // Update the swapchain's fence
  1429. m_crntSwapchain->setFence(fence.get());
  1430. frame.m_queueWroteToSwapchainImage = queueType;
  1431. }
  1432. // Submit
  1433. ANKI_TRACE_SCOPED_EVENT(VkQueueSubmit);
  1434. ANKI_VK_CHECKF(vkQueueSubmit(m_queues[queueType], 1, &submit, fence->getHandle()));
  1435. if(wait)
  1436. {
  1437. vkQueueWaitIdle(m_queues[queueType]);
  1438. }
  1439. }
  1440. // Garbage work
  1441. if(cmdbRenderedToSwapchain)
  1442. {
  1443. m_frameGarbageCollector.setNewFrame(fence);
  1444. }
  1445. }
  1446. void GrManagerImpl::finish()
  1447. {
  1448. LockGuard<Mutex> lock(m_globalMtx);
  1449. for(VkQueue queue : m_queues)
  1450. {
  1451. if(queue)
  1452. {
  1453. vkQueueWaitIdle(queue);
  1454. }
  1455. }
  1456. }
  1457. void GrManagerImpl::trySetVulkanHandleName(CString name, VkObjectType type, U64 handle) const
  1458. {
  1459. if(name && name.getLength() && !!(m_extensions & VulkanExtensions::kEXT_debug_utils))
  1460. {
  1461. VkDebugUtilsObjectNameInfoEXT info = {};
  1462. info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  1463. info.objectHandle = handle;
  1464. info.objectType = type;
  1465. info.pObjectName = name.cstr();
  1466. vkSetDebugUtilsObjectNameEXT(m_device, &info);
  1467. }
  1468. }
  1469. VkBool32 GrManagerImpl::debugReportCallbackEXT(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
  1470. [[maybe_unused]] VkDebugUtilsMessageTypeFlagsEXT messageTypes,
  1471. const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, [[maybe_unused]] void* pUserData)
  1472. {
  1473. #if ANKI_PLATFORM_MOBILE
  1474. if(pCallbackData->messageIdNumber == 101294395)
  1475. {
  1476. // Interface mismatch error. Eg vert shader is writing to varying that is not consumed by frag. Ignore this
  1477. // stupid error because I'm not going to create more shader variants to fix it. Especially when mobile drivers
  1478. // do linking anyway. On desktop just enable the maintenance4 extension
  1479. return false;
  1480. }
  1481. #endif
  1482. if(pCallbackData->messageIdNumber == 1944932341 || pCallbackData->messageIdNumber == 1303270965)
  1483. {
  1484. // Not sure why I'm getting that
  1485. return false;
  1486. }
  1487. // Get all names of affected objects
  1488. GrString objectNames;
  1489. if(pCallbackData->objectCount)
  1490. {
  1491. for(U32 i = 0; i < pCallbackData->objectCount; ++i)
  1492. {
  1493. const Char* name = pCallbackData->pObjects[i].pObjectName;
  1494. objectNames += (name) ? name : "?";
  1495. if(i < pCallbackData->objectCount - 1)
  1496. {
  1497. objectNames += ", ";
  1498. }
  1499. }
  1500. }
  1501. else
  1502. {
  1503. objectNames = "N/A";
  1504. }
  1505. if(messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT)
  1506. {
  1507. ANKI_VK_LOGE("VK debug report: %s. Affected objects: %s", pCallbackData->pMessage, objectNames.cstr());
  1508. }
  1509. else if(messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT)
  1510. {
  1511. ANKI_VK_LOGW("VK debug report: %s. Affected objects: %s", pCallbackData->pMessage, objectNames.cstr());
  1512. }
  1513. else
  1514. {
  1515. ANKI_VK_LOGI("VK debug report: %s. Affected objects: %s", pCallbackData->pMessage, objectNames.cstr());
  1516. }
  1517. return false;
  1518. }
  1519. void GrManagerImpl::printPipelineShaderInfo(VkPipeline ppline, CString name, U64 hash) const
  1520. {
  1521. if(printPipelineShaderInfoInternal(ppline, name, hash))
  1522. {
  1523. ANKI_VK_LOGE("Ignoring previous errors");
  1524. }
  1525. }
/// Logs (at verbose level) the driver-reported executable properties and statistics of a pipeline, using
/// VK_KHR_pipeline_executable_properties. Does nothing if the extension wasn't enabled.
/// @param ppline The pipeline to inspect.
/// @param name Human-readable pipeline name for the log line.
/// @param hash Pipeline hash, printed alongside the name.
/// @return An error if any of the checked Vulkan property queries fails.
Error GrManagerImpl::printPipelineShaderInfoInternal(VkPipeline ppline, CString name, U64 hash) const
{
	if(!!(m_extensions & VulkanExtensions::kKHR_pipeline_executable_properties))
	{
		GrStringList log;

		VkPipelineInfoKHR pplineInf = {};
		pplineInf.sType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
		pplineInf.pipeline = ppline;

		// First call gets the count, second fills the array (standard Vulkan two-call pattern)
		U32 executableCount = 0;
		ANKI_VK_CHECK(vkGetPipelineExecutablePropertiesKHR(m_device, &pplineInf, &executableCount, nullptr));
		GrDynamicArray<VkPipelineExecutablePropertiesKHR> executableProps;
		executableProps.resize(executableCount);
		for(VkPipelineExecutablePropertiesKHR& prop : executableProps)
		{
			prop = {};
			prop.sType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR;
		}
		// NOTE(review): &executableProps[0] with executableCount == 0 indexes an empty array — presumably the driver
		// always reports at least one executable here; confirm GrDynamicArray tolerates this.
		ANKI_VK_CHECK(vkGetPipelineExecutablePropertiesKHR(m_device, &pplineInf, &executableCount, &executableProps[0]));

		log.pushBackSprintf("Pipeline info \"%s\" (0x%016" PRIx64 "): ", name.cstr(), hash);
		for(U32 i = 0; i < executableCount; ++i)
		{
			const VkPipelineExecutablePropertiesKHR& p = executableProps[i];
			log.pushBackSprintf("%s: ", p.description);

			// Get stats (return values deliberately ignored; this is best-effort logging)
			VkPipelineExecutableInfoKHR exeInf = {};
			exeInf.sType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR;
			exeInf.executableIndex = i;
			exeInf.pipeline = ppline;
			U32 statCount = 0;
			vkGetPipelineExecutableStatisticsKHR(m_device, &exeInf, &statCount, nullptr);
			GrDynamicArray<VkPipelineExecutableStatisticKHR> stats;
			stats.resize(statCount);
			for(VkPipelineExecutableStatisticKHR& s : stats)
			{
				s = {};
				s.sType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR;
			}
			// NOTE(review): same &stats[0] concern as above when statCount == 0 — confirm.
			vkGetPipelineExecutableStatisticsKHR(m_device, &exeInf, &statCount, &stats[0]);

			// Print each statistic with the format the driver declared for it
			for(U32 s = 0; s < statCount; ++s)
			{
				const VkPipelineExecutableStatisticKHR& ss = stats[s];
				switch(ss.format)
				{
				case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR:
					log.pushBackSprintf("%s: %u, ", ss.name, ss.value.b32);
					break;
				case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR:
					log.pushBackSprintf("%s: %" PRId64 ", ", ss.name, ss.value.i64);
					break;
				case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR:
					log.pushBackSprintf("%s: %" PRIu64 ", ", ss.name, ss.value.u64);
					break;
				case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR:
					log.pushBackSprintf("%s: %f, ", ss.name, ss.value.f64);
					break;
				default:
					ANKI_ASSERT(0);
				}
			}

			log.pushBackSprintf("Subgroup size: %u", p.subgroupSize);

			// Separator between executables, but not after the last one
			if(i < executableCount - 1)
			{
				log.pushBack(", ");
			}
		}

		// Emit everything as a single verbose log line
		GrString finalLog;
		log.join("", finalLog);
		ANKI_VK_LOGV("%s", finalLog.cstr());
	}

	return Error::kNone;
}
/// Creates the VkSurfaceKHR for the platform's windowing system (selected at compile time).
/// SDL: delegates to SDL_Vulkan_CreateSurface. Android: wraps the native window in a VkAndroidSurfaceCreateInfoKHR.
/// Headless: creates a VK_EXT_headless_surface and caches the window dimensions (the other paths presumably get them
/// elsewhere — not visible here).
/// @return Error::kFunctionFailed / a Vulkan error on surface creation failure, Error::kNone otherwise.
Error GrManagerImpl::initSurface()
{
#if ANKI_WINDOWING_SYSTEM_SDL
	if(!SDL_Vulkan_CreateSurface(static_cast<NativeWindowSdl&>(NativeWindow::getSingleton()).m_sdlWindow, m_instance, &m_surface))
	{
		ANKI_VK_LOGE("SDL_Vulkan_CreateSurface() failed: %s", SDL_GetError());
		return Error::kFunctionFailed;
	}
#elif ANKI_WINDOWING_SYSTEM_ANDROID
	VkAndroidSurfaceCreateInfoKHR createInfo = {};
	createInfo.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
	createInfo.window = static_cast<NativeWindowAndroid&>(NativeWindow::getSingleton()).m_nativeWindowAndroid;
	ANKI_VK_CHECK(vkCreateAndroidSurfaceKHR(m_instance, &createInfo, nullptr, &m_surface));
#elif ANKI_WINDOWING_SYSTEM_HEADLESS
	VkHeadlessSurfaceCreateInfoEXT createInfo = {};
	createInfo.sType = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT;
	ANKI_VK_CHECK(vkCreateHeadlessSurfaceEXT(m_instance, &createInfo, nullptr, &m_surface));
	// No real window exists, so remember the logical dimensions for swapchain creation
	m_nativeWindowWidth = NativeWindow::getSingleton().getWidth();
	m_nativeWindowHeight = NativeWindow::getSingleton().getHeight();
#else
#	error Unsupported
#endif

	return Error::kNone;
}
  1621. } // end namespace anki