// Copyright (C) 2009-2022, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <AnKi/Gr/Vulkan/DescriptorSet.h>
#include <AnKi/Gr/Buffer.h>
#include <AnKi/Gr/Vulkan/BufferImpl.h>
#include <AnKi/Util/List.h>
#include <AnKi/Util/HashMap.h>
#include <AnKi/Util/Tracer.h>
#include <algorithm>

namespace anki {

thread_local DescriptorSetFactory::ThreadLocal* DescriptorSetFactory::m_threadLocal = nullptr;

/// Wraps a global descriptor set that is used to store the bindless textures and images.
class DescriptorSetFactory::BindlessDescriptorSet
{
public:
    ~BindlessDescriptorSet();

    Error init(const GrAllocator<U8>& alloc, VkDevice dev, U32 bindlessTextureCount, U32 bindlessImageCount);

    /// Bind a sampled image.
    /// @note It's thread-safe.
    U32 bindTexture(const VkImageView view, const VkImageLayout layout);

    /// Bind a storage image.
    /// @note It's thread-safe.
    U32 bindImage(const VkImageView view);

    /// @note It's thread-safe.
    void unbindTexture(U32 idx)
    {
        unbindCommon(idx, m_freeTexIndices, m_freeTexIndexCount);
    }

    /// @note It's thread-safe.
    void unbindImage(U32 idx)
    {
        unbindCommon(idx, m_freeImgIndices, m_freeImgIndexCount);
    }

    DescriptorSet getDescriptorSet() const
    {
        ANKI_ASSERT(m_dset);
        DescriptorSet out;
        out.m_handle = m_dset;
        return out;
    }

    VkDescriptorSetLayout getDescriptorSetLayout() const
    {
        ANKI_ASSERT(m_layout);
        return m_layout;
    }

private:
    GrAllocator<U8> m_alloc;
    VkDevice m_dev = VK_NULL_HANDLE;
    VkDescriptorSetLayout m_layout = VK_NULL_HANDLE;
    VkDescriptorPool m_pool = VK_NULL_HANDLE;
    VkDescriptorSet m_dset = VK_NULL_HANDLE;
    Mutex m_mtx;
    DynamicArray<U16> m_freeTexIndices; ///< LIFO stack of free slots in binding 0.
    DynamicArray<U16> m_freeImgIndices; ///< LIFO stack of free slots in binding 1.
    U16 m_freeTexIndexCount ANKI_DEBUG_CODE(= MAX_U16);
    U16 m_freeImgIndexCount ANKI_DEBUG_CODE(= MAX_U16);

    void unbindCommon(U32 idx, DynamicArray<U16>& freeIndices, U16& freeIndexCount);
};

/// Descriptor set internal class.
class DS : public IntrusiveListEnabled<DS>
{
public:
    VkDescriptorSet m_handle = {};
    U64 m_lastFrameUsed = MAX_U64;
    U64 m_hash;
};

/// Per-thread allocator.
class DescriptorSetFactory::DSAllocator
{
public:
    DSAllocator(const DSAllocator&) = delete; // Non-copyable
    DSAllocator& operator=(const DSAllocator&) = delete; // Non-copyable

    DSAllocator(const DSLayoutCacheEntry* layout)
        : m_layoutEntry(layout)
    {
        ANKI_ASSERT(m_layoutEntry);
    }

    ~DSAllocator();

    Error init();

    Error createNewPool();

    Error getOrCreateSet(U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
        StackAllocator<U8>& tmpAlloc, const DS*& out)
    {
        out = tryFindSet(hash);
        if(out == nullptr)
        {
            ANKI_CHECK(newSet(hash, bindings, tmpAlloc, out));
        }

        return Error::NONE;
    }

private:
    const DSLayoutCacheEntry* m_layoutEntry; ///< Know your father.
    DynamicArray<VkDescriptorPool> m_pools;
    U32 m_lastPoolDSCount = 0;
    U32 m_lastPoolFreeDSCount = 0;
    IntrusiveList<DS> m_list; ///< The least recently used sets are at the front of the list.
    HashMap<U64, DS*> m_hashmap;

    [[nodiscard]] const DS* tryFindSet(U64 hash);

    Error newSet(U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
        StackAllocator<U8>& tmpAlloc, const DS*& out);

    void writeSet(const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS& set,
        StackAllocator<U8>& tmpAlloc);
};

class alignas(ANKI_CACHE_LINE_SIZE) DescriptorSetFactory::ThreadLocal
{
public:
    DynamicArray<DSAllocator*> m_allocators;
};

/// Cache entry. It's built around a specific descriptor set layout.
class DSLayoutCacheEntry
{
public:
    DescriptorSetFactory* m_factory;

    U64 m_hash = 0; ///< Layout hash.
    VkDescriptorSetLayout m_layoutHandle = {};

    BitSet<MAX_BINDINGS_PER_DESCRIPTOR_SET, U32> m_activeBindings = {false};
    Array<U32, MAX_BINDINGS_PER_DESCRIPTOR_SET> m_bindingArraySize = {};
    Array<DescriptorType, MAX_BINDINGS_PER_DESCRIPTOR_SET> m_bindingType = {};
    U32 m_minBinding = MAX_U32;
    U32 m_maxBinding = 0;

    U32 m_index = 0; ///< Index in DescriptorSetFactory::m_caches.

    // Cache the pool create info
    Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> m_poolSizesCreateInf = {};
    VkDescriptorPoolCreateInfo m_poolCreateInf = {};

    DSLayoutCacheEntry(DescriptorSetFactory* factory, U32 index)
        : m_factory(factory)
        , m_index(index)
    {
    }

    ~DSLayoutCacheEntry();

    Error init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash);

    /// @note Thread-safe.
    Error getOrCreateDSAllocator(DescriptorSetFactory::DSAllocator*& alloc);
};

DescriptorSetFactory::BindlessDescriptorSet::~BindlessDescriptorSet()
{
    ANKI_ASSERT(m_freeTexIndexCount == m_freeTexIndices.getSize() && "Forgot to unbind some textures");
    ANKI_ASSERT(m_freeImgIndexCount == m_freeImgIndices.getSize() && "Forgot to unbind some images");

    if(m_pool)
    {
        vkDestroyDescriptorPool(m_dev, m_pool, nullptr);
        m_pool = VK_NULL_HANDLE;
        m_dset = VK_NULL_HANDLE; // The pool owns the set, no separate free needed
    }

    if(m_layout)
    {
        vkDestroyDescriptorSetLayout(m_dev, m_layout, nullptr);
        m_layout = VK_NULL_HANDLE;
    }

    m_freeImgIndices.destroy(m_alloc);
    m_freeTexIndices.destroy(m_alloc);
}

Error DescriptorSetFactory::BindlessDescriptorSet::init(const GrAllocator<U8>& alloc, VkDevice dev,
    U32 bindlessTextureCount, U32 bindlessImageCount)
{
    ANKI_ASSERT(dev);
    m_alloc = alloc;
    m_dev = dev;

    // Create the layout
    {
        Array<VkDescriptorSetLayoutBinding, 2> bindings = {};
        bindings[0].binding = 0;
        bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
        bindings[0].descriptorCount = bindlessTextureCount;
        bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;

        bindings[1].binding = 1;
        bindings[1].stageFlags = VK_SHADER_STAGE_ALL;
        bindings[1].descriptorCount = bindlessImageCount;
        bindings[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;

        Array<VkDescriptorBindingFlagsEXT, 2> bindingFlags = {};
        bindingFlags[0] = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT
            | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT
            | VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
        bindingFlags[1] = bindingFlags[0];

        VkDescriptorSetLayoutBindingFlagsCreateInfoEXT extraInfos = {};
        extraInfos.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
        extraInfos.bindingCount = bindingFlags.getSize();
        extraInfos.pBindingFlags = &bindingFlags[0];

        VkDescriptorSetLayoutCreateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
        ci.bindingCount = bindings.getSize();
        ci.pBindings = &bindings[0];
        ci.pNext = &extraInfos;

        ANKI_VK_CHECK(vkCreateDescriptorSetLayout(m_dev, &ci, nullptr, &m_layout));
    }

    // Create the pool
    {
        Array<VkDescriptorPoolSize, 2> sizes = {};
        sizes[0].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        sizes[0].descriptorCount = bindlessTextureCount;
        sizes[1].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        sizes[1].descriptorCount = bindlessImageCount;

        VkDescriptorPoolCreateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
        ci.maxSets = 1;
        ci.poolSizeCount = sizes.getSize();
        ci.pPoolSizes = &sizes[0];
        ci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;

        ANKI_VK_CHECK(vkCreateDescriptorPool(m_dev, &ci, nullptr, &m_pool));
    }

    // Create the descriptor set
    {
        VkDescriptorSetAllocateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
        ci.descriptorPool = m_pool;
        ci.descriptorSetCount = 1;
        ci.pSetLayouts = &m_layout;

        ANKI_VK_CHECK(vkAllocateDescriptorSets(m_dev, &ci, &m_dset));
    }

    // Init the free index stacks. Fill them in descending order so that popping from the back hands out the lowest
    // free index first
    {
        m_freeTexIndices.create(m_alloc, bindlessTextureCount);
        m_freeTexIndexCount = U16(m_freeTexIndices.getSize());
        for(U32 i = 0; i < m_freeTexIndices.getSize(); ++i)
        {
            m_freeTexIndices[i] = U16(m_freeTexIndices.getSize() - i - 1);
        }

        m_freeImgIndices.create(m_alloc, bindlessImageCount);
        m_freeImgIndexCount = U16(m_freeImgIndices.getSize());
        for(U32 i = 0; i < m_freeImgIndices.getSize(); ++i)
        {
            m_freeImgIndices[i] = U16(m_freeImgIndices.getSize() - i - 1);
        }
    }

    return Error::NONE;
}
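
// A tiny worked example of the free-index stacks above: with bindlessTextureCount == 4 the stack is initialized to
// {3, 2, 1, 0} and m_freeTexIndexCount == 4. The first bindTexture() pops index 0, the next pops 1, and so on, so the
// bindless array fills up from slot 0 and stays dense as long as nothing is unbound.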

U32 DescriptorSetFactory::BindlessDescriptorSet::bindTexture(const VkImageView view, const VkImageLayout layout)
{
    ANKI_ASSERT(layout == VK_IMAGE_LAYOUT_GENERAL || layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
    ANKI_ASSERT(view);
    LockGuard<Mutex> lock(m_mtx);
    ANKI_ASSERT(m_freeTexIndexCount > 0 && "Out of indices");

    // Pop the index
    --m_freeTexIndexCount;
    const U16 idx = m_freeTexIndices[m_freeTexIndexCount];
    ANKI_ASSERT(idx < m_freeTexIndices.getSize());

    // Update the set
    VkDescriptorImageInfo imageInf = {};
    imageInf.imageView = view;
    imageInf.imageLayout = layout;

    VkWriteDescriptorSet write = {};
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.pNext = nullptr;
    write.dstSet = m_dset;
    write.dstBinding = 0;
    write.descriptorCount = 1;
    write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
    write.dstArrayElement = idx;
    write.pImageInfo = &imageInf;

    vkUpdateDescriptorSets(m_dev, 1, &write, 0, nullptr);

    return idx;
}
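
// Usage sketch for the bind/unbind lifecycle (illustrative only; `factory`, `view` and the shader-side indexing are
// assumptions, not code from this file):
//
//     const U32 idx = factory.bindBindlessTexture(view, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
//     // ... shaders now address the texture through the bindless array at element `idx` ...
//     factory.unbindBindlessTexture(idx); // Return the slot once no in-flight work references it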

U32 DescriptorSetFactory::BindlessDescriptorSet::bindImage(const VkImageView view)
{
    ANKI_ASSERT(view);
    LockGuard<Mutex> lock(m_mtx);
    ANKI_ASSERT(m_freeImgIndexCount > 0 && "Out of indices");

    // Pop the index
    --m_freeImgIndexCount;
    const U32 idx = m_freeImgIndices[m_freeImgIndexCount];
    ANKI_ASSERT(idx < m_freeImgIndices.getSize());

    // Update the set
    VkDescriptorImageInfo imageInf = {};
    imageInf.imageView = view;
    imageInf.imageLayout = VK_IMAGE_LAYOUT_GENERAL; // Storage images are always in general layout

    VkWriteDescriptorSet write = {};
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.pNext = nullptr;
    write.dstSet = m_dset;
    write.dstBinding = 1;
    write.descriptorCount = 1;
    write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
    write.dstArrayElement = idx;
    write.pImageInfo = &imageInf;

    vkUpdateDescriptorSets(m_dev, 1, &write, 0, nullptr);

    return idx;
}

void DescriptorSetFactory::BindlessDescriptorSet::unbindCommon(U32 idx, DynamicArray<U16>& freeIndices,
    U16& freeIndexCount)
{
    LockGuard<Mutex> lock(m_mtx);

    ANKI_ASSERT(idx < freeIndices.getSize());
    ANKI_ASSERT(freeIndexCount < freeIndices.getSize());

    freeIndices[freeIndexCount] = U16(idx);
    ++freeIndexCount;

    // Sort the free indices in descending order to minimize fragmentation
    std::sort(&freeIndices[0], &freeIndices[0] + freeIndexCount, std::greater<U16>());

    // Make sure there are no duplicates
    for(U32 i = 1; i < freeIndexCount; ++i)
    {
        ANKI_ASSERT(freeIndices[i] != freeIndices[i - 1]);
    }
}
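
// Why the descending sort: bindTexture()/bindImage() pop from the back of the stack, so keeping the free list sorted
// in descending order means the smallest free index is always handed out next. For example, if slots {3, 1} are free
// and slot 5 is unbound, the stack becomes {5, 3, 1} and the next bind reuses slot 1, keeping the live range compact.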

DescriptorSetFactory::DSAllocator::~DSAllocator()
{
    auto alloc = m_layoutEntry->m_factory->m_alloc;

    while(!m_list.isEmpty())
    {
        DS* ds = &m_list.getFront();
        m_list.popFront();
        alloc.deleteInstance(ds);
    }

    for(VkDescriptorPool pool : m_pools)
    {
        vkDestroyDescriptorPool(m_layoutEntry->m_factory->m_dev, pool, nullptr);
    }

    m_pools.destroy(alloc);
    m_hashmap.destroy(alloc);
}

Error DescriptorSetFactory::DSAllocator::init()
{
    ANKI_CHECK(createNewPool());
    return Error::NONE;
}

Error DescriptorSetFactory::DSAllocator::createNewPool()
{
    m_lastPoolDSCount = (m_lastPoolDSCount != 0) ? U32(F32(m_lastPoolDSCount) * DESCRIPTOR_POOL_SIZE_SCALE)
                                                 : DESCRIPTOR_POOL_INITIAL_SIZE;
    m_lastPoolFreeDSCount = m_lastPoolDSCount;

    // Set the create info
    Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> poolSizes;
    memcpy(&poolSizes[0], &m_layoutEntry->m_poolSizesCreateInf[0],
        sizeof(poolSizes[0]) * m_layoutEntry->m_poolCreateInf.poolSizeCount);

    for(U i = 0; i < m_layoutEntry->m_poolCreateInf.poolSizeCount; ++i)
    {
        poolSizes[i].descriptorCount *= m_lastPoolDSCount;
        ANKI_ASSERT(poolSizes[i].descriptorCount > 0);
    }

    VkDescriptorPoolCreateInfo ci = m_layoutEntry->m_poolCreateInf;
    ci.pPoolSizes = &poolSizes[0];
    ci.maxSets = m_lastPoolDSCount;

    // Create
    VkDescriptorPool pool;
    ANKI_VK_CHECK(vkCreateDescriptorPool(m_layoutEntry->m_factory->m_dev, &ci, nullptr, &pool));
    ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_POOL_CREATE, 1);

    // Push back
    m_pools.resize(m_layoutEntry->m_factory->m_alloc, m_pools.getSize() + 1);
    m_pools[m_pools.getSize() - 1] = pool;

    return Error::NONE;
}
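
// Pool growth is geometric: the first pool holds DESCRIPTOR_POOL_INITIAL_SIZE sets and every subsequent pool is
// DESCRIPTOR_POOL_SIZE_SCALE times larger than the previous one. As an illustration, with an initial size of 64 and a
// scale of 2.0 (assumed values; the real constants are defined elsewhere) the pools would hold 64, 128, 256, ... sets,
// so a thread that keeps allocating converges to a few large pools instead of many small ones.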

const DS* DescriptorSetFactory::DSAllocator::tryFindSet(U64 hash)
{
    ANKI_ASSERT(hash > 0);

    auto it = m_hashmap.find(hash);
    if(it == m_hashmap.getEnd())
    {
        return nullptr;
    }
    else
    {
        DS* ds = *it;

        // Move the set to the back of the list, where the most recently used sets live
        m_list.erase(ds);
        m_list.pushBack(ds);
        ds->m_lastFrameUsed = m_layoutEntry->m_factory->m_frameCount;

        return ds;
    }
}
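
// The intrusive list plus the hashmap form a classic LRU structure: the hashmap gives O(1) lookup by binding hash,
// while the list keeps the sets ordered by last use with the least recently used ones at the front, which is exactly
// where newSet() below starts scanning for recycling candidates.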

Error DescriptorSetFactory::DSAllocator::newSet(
    U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, StackAllocator<U8>& tmpAlloc,
    const DS*& out_)
{
    DS* out = nullptr;

    // First, try to find an unused set to recycle
    const U64 crntFrame = m_layoutEntry->m_factory->m_frameCount;
    auto it = m_list.getBegin();
    const auto end = m_list.getEnd();
    while(it != end)
    {
        DS* set = &(*it);
        const U64 frameDiff = crntFrame - set->m_lastFrameUsed;
        if(frameDiff > DESCRIPTOR_FRAME_BUFFERING)
        {
            // Found something, recycle it
            auto it2 = m_hashmap.find(set->m_hash);
            ANKI_ASSERT(it2 != m_hashmap.getEnd());
            m_hashmap.erase(m_layoutEntry->m_factory->m_alloc, it2);
            m_list.erase(set);
            m_list.pushBack(set);
            m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, set);
            out = set;
            break;
        }
        ++it;
    }

    if(out == nullptr)
    {
        // Need to allocate a new one
        if(m_lastPoolFreeDSCount == 0)
        {
            // Can't allocate from the current pool, create a new one
            ANKI_CHECK(createNewPool());
        }

        --m_lastPoolFreeDSCount;

        VkDescriptorSetAllocateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
        ci.descriptorPool = m_pools.getBack();
        ci.pSetLayouts = &m_layoutEntry->m_layoutHandle;
        ci.descriptorSetCount = 1;

        VkDescriptorSet handle;
        [[maybe_unused]] const VkResult rez = vkAllocateDescriptorSets(m_layoutEntry->m_factory->m_dev, &ci, &handle);
        ANKI_ASSERT(rez == VK_SUCCESS && "This allocation can't fail");
        ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_SET_CREATE, 1);

        out = m_layoutEntry->m_factory->m_alloc.newInstance<DS>();
        out->m_handle = handle;

        m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, out);
        m_list.pushBack(out);
    }

    ANKI_ASSERT(out);
    out->m_lastFrameUsed = crntFrame;
    out->m_hash = hash;

    // Finally, write it
    writeSet(bindings, *out, tmpAlloc);

    out_ = out;
    return Error::NONE;
}
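
// Recycling leans on frame buffering: a set whose m_lastFrameUsed is more than DESCRIPTOR_FRAME_BUFFERING frames
// behind the current frame is assumed to no longer be referenced by any in-flight command buffer, so it can be
// rewritten in place instead of paying for a fresh vkAllocateDescriptorSets call.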

void DescriptorSetFactory::DSAllocator::writeSet(
    const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS& set,
    StackAllocator<U8>& tmpAlloc)
{
    DynamicArrayAuto<VkWriteDescriptorSet> writeInfos(tmpAlloc);
    DynamicArrayAuto<VkDescriptorImageInfo> texInfos(tmpAlloc);
    DynamicArrayAuto<VkDescriptorBufferInfo> buffInfos(tmpAlloc);
    DynamicArrayAuto<VkWriteDescriptorSetAccelerationStructureKHR> asInfos(tmpAlloc);
    DynamicArrayAuto<VkBufferView> bufferViews(tmpAlloc);

    // First pass: Populate the VkDescriptorImageInfo and VkDescriptorBufferInfo
    for(U bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
    {
        if(m_layoutEntry->m_activeBindings.get(bindingIdx))
        {
            for(U arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
            {
                ANKI_ASSERT(bindings[bindingIdx].m_arraySize >= m_layoutEntry->m_bindingArraySize[bindingIdx]);
                const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single
                                                                              : bindings[bindingIdx].m_array[arrIdx];

                switch(b.m_type)
                {
                case DescriptorType::COMBINED_TEXTURE_SAMPLER:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = b.m_texAndSampler.m_samplerHandle;
                    info.imageView = b.m_texAndSampler.m_imgViewHandle;
                    info.imageLayout = b.m_texAndSampler.m_layout;
                    break;
                }
                case DescriptorType::TEXTURE:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = VK_NULL_HANDLE;
                    info.imageView = b.m_tex.m_imgViewHandle;
                    info.imageLayout = b.m_tex.m_layout;
                    break;
                }
                case DescriptorType::SAMPLER:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = b.m_sampler.m_samplerHandle;
                    info.imageView = VK_NULL_HANDLE;
                    info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
                    break;
                }
                case DescriptorType::UNIFORM_BUFFER:
                case DescriptorType::STORAGE_BUFFER:
                {
                    VkDescriptorBufferInfo& info = *buffInfos.emplaceBack();
                    info.buffer = b.m_buff.m_buffHandle;
                    info.offset = 0;
                    info.range = (b.m_buff.m_range == MAX_PTR_SIZE) ? VK_WHOLE_SIZE : b.m_buff.m_range;
                    break;
                }
                case DescriptorType::READ_TEXTURE_BUFFER:
                case DescriptorType::READ_WRITE_TEXTURE_BUFFER:
                {
                    VkBufferView& view = *bufferViews.emplaceBack();
                    view = b.m_textureBuffer.m_buffView;
                    break;
                }
                case DescriptorType::IMAGE:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = VK_NULL_HANDLE;
                    info.imageView = b.m_image.m_imgViewHandle;
                    info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
                    break;
                }
                case DescriptorType::ACCELERATION_STRUCTURE:
                {
                    VkWriteDescriptorSetAccelerationStructureKHR& info = *asInfos.emplaceBack();
                    info.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR;
                    info.pNext = nullptr;
                    info.accelerationStructureCount = 1;
                    info.pAccelerationStructures = &b.m_accelerationStructure.m_accelerationStructureHandle;
                    break;
                }
                default:
                    ANKI_ASSERT(0);
                }
            }
        }
    }

    // Second pass: Populate the VkWriteDescriptorSet with VkDescriptorImageInfo and VkDescriptorBufferInfo
    U32 texCounter = 0;
    U32 buffCounter = 0;
    U32 asCounter = 0;
    U32 buffViewsCounter = 0;

    VkWriteDescriptorSet writeTemplate = {};
    writeTemplate.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    writeTemplate.pNext = nullptr;
    writeTemplate.dstSet = set.m_handle;
    writeTemplate.descriptorCount = 1;

    for(U32 bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
    {
        if(m_layoutEntry->m_activeBindings.get(bindingIdx))
        {
            for(U32 arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
            {
                const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single
                                                                              : bindings[bindingIdx].m_array[arrIdx];

                VkWriteDescriptorSet& writeInfo = *writeInfos.emplaceBack(writeTemplate);
                writeInfo.descriptorType = convertDescriptorType(b.m_type);
                writeInfo.dstArrayElement = arrIdx;
                writeInfo.dstBinding = bindingIdx;

                switch(b.m_type)
                {
                case DescriptorType::COMBINED_TEXTURE_SAMPLER:
                case DescriptorType::TEXTURE:
                case DescriptorType::SAMPLER:
                case DescriptorType::IMAGE:
                    writeInfo.pImageInfo = &texInfos[texCounter++];
                    break;
                case DescriptorType::UNIFORM_BUFFER:
                case DescriptorType::STORAGE_BUFFER:
                    writeInfo.pBufferInfo = &buffInfos[buffCounter++];
                    break;
                case DescriptorType::READ_TEXTURE_BUFFER:
                case DescriptorType::READ_WRITE_TEXTURE_BUFFER:
                    writeInfo.pTexelBufferView = &bufferViews[buffViewsCounter++];
                    break;
                case DescriptorType::ACCELERATION_STRUCTURE:
                    writeInfo.pNext = &asInfos[asCounter++];
                    break;
                default:
                    ANKI_ASSERT(0);
                }
            }
        }
    }

    // Write
    vkUpdateDescriptorSets(m_layoutEntry->m_factory->m_dev, writeInfos.getSize(),
        (writeInfos.getSize() > 0) ? &writeInfos[0] : nullptr, 0, nullptr);
}
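
// The two-pass split above is deliberate: VkWriteDescriptorSet stores raw pointers (pImageInfo, pBufferInfo,
// pTexelBufferView) into the info arrays, and the DynamicArrayAuto containers may reallocate as they grow. Filling
// all the info arrays first and only taking pointers in the second pass guarantees those pointers stay valid for the
// single vkUpdateDescriptorSets call at the end.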

DSLayoutCacheEntry::~DSLayoutCacheEntry()
{
    if(m_layoutHandle)
    {
        vkDestroyDescriptorSetLayout(m_factory->m_dev, m_layoutHandle, nullptr);
    }
}

Error DSLayoutCacheEntry::init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash)
{
    ANKI_ASSERT(bindings);
    ANKI_ASSERT(hash > 0);
    m_hash = hash;

    // Create the VK layout
    Array<VkDescriptorSetLayoutBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> vkBindings;
    VkDescriptorSetLayoutCreateInfo ci = {};
    ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;

    for(U i = 0; i < bindingCount; ++i)
    {
        VkDescriptorSetLayoutBinding& vk = vkBindings[i];
        const DescriptorBinding& ak = bindings[i];

        vk.binding = ak.m_binding;
        vk.descriptorCount = ak.m_arraySize;
        vk.descriptorType = convertDescriptorType(ak.m_type);
        vk.pImmutableSamplers = nullptr;
        vk.stageFlags = convertShaderTypeBit(ak.m_stageMask);

        ANKI_ASSERT(m_activeBindings.get(ak.m_binding) == false);
        m_activeBindings.set(ak.m_binding);
        m_bindingType[ak.m_binding] = ak.m_type;
        m_bindingArraySize[ak.m_binding] = ak.m_arraySize;
        m_minBinding = min<U32>(m_minBinding, ak.m_binding);
        m_maxBinding = max<U32>(m_maxBinding, ak.m_binding);
    }

    ci.bindingCount = bindingCount;
    ci.pBindings = &vkBindings[0];

    ANKI_VK_CHECK(vkCreateDescriptorSetLayout(m_factory->m_dev, &ci, nullptr, &m_layoutHandle));

    // Create the pool info: accumulate the descriptor counts per descriptor type
    U32 poolSizeCount = 0;
    for(U i = 0; i < bindingCount; ++i)
    {
        U j;
        for(j = 0; j < poolSizeCount; ++j)
        {
            if(m_poolSizesCreateInf[j].type == convertDescriptorType(bindings[i].m_type))
            {
                m_poolSizesCreateInf[j].descriptorCount += bindings[i].m_arraySize;
                break;
            }
        }

        if(j == poolSizeCount)
        {
            m_poolSizesCreateInf[poolSizeCount].type = convertDescriptorType(bindings[i].m_type);
            m_poolSizesCreateInf[poolSizeCount].descriptorCount = bindings[i].m_arraySize;
            ++poolSizeCount;
        }
    }

    if(poolSizeCount == 0)
    {
        // A zero poolSizeCount means that the DS layout has 0 descriptors. Since the pool sizes can't be zero, put
        // something in them
        m_poolSizesCreateInf[0].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        m_poolSizesCreateInf[0].descriptorCount = 1;
        ++poolSizeCount;
    }

    ANKI_ASSERT(poolSizeCount > 0);

    m_poolCreateInf.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    m_poolCreateInf.poolSizeCount = poolSizeCount;

    return Error::NONE;
}
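
// Example of the pool-size accumulation above: a layout with two UNIFORM_BUFFER bindings of array sizes 1 and 3 plus
// one TEXTURE binding yields two VkDescriptorPoolSize entries, {uniform-buffer type, 4} and {the type that
// convertDescriptorType() maps TEXTURE to, 1}. createNewPool() later multiplies these per-set counts by the number of
// sets the pool should hold.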

Error DSLayoutCacheEntry::getOrCreateDSAllocator(DescriptorSetFactory::DSAllocator*& alloc)
{
    alloc = nullptr;

    // Get or create thread-local
    DescriptorSetFactory::ThreadLocal* threadLocal = DescriptorSetFactory::m_threadLocal;
    if(ANKI_UNLIKELY(threadLocal == nullptr))
    {
        threadLocal = m_factory->m_alloc.newInstance<DescriptorSetFactory::ThreadLocal>();
        DescriptorSetFactory::m_threadLocal = threadLocal;

        LockGuard<Mutex> lock(m_factory->m_allThreadLocalsMtx);
        m_factory->m_allThreadLocals.emplaceBack(m_factory->m_alloc, threadLocal);
    }

    // Get or create the allocator
    if(ANKI_UNLIKELY(m_index >= threadLocal->m_allocators.getSize()))
    {
        threadLocal->m_allocators.resize(m_factory->m_alloc, m_index + 1, nullptr);
        alloc = m_factory->m_alloc.newInstance<DescriptorSetFactory::DSAllocator>(this);
        ANKI_CHECK(alloc->init());
        threadLocal->m_allocators[m_index] = alloc;
    }
    else if(ANKI_UNLIKELY(threadLocal->m_allocators[m_index] == nullptr))
    {
        alloc = m_factory->m_alloc.newInstance<DescriptorSetFactory::DSAllocator>(this);
        ANKI_CHECK(alloc->init());
        threadLocal->m_allocators[m_index] = alloc;
    }
    else
    {
        alloc = threadLocal->m_allocators[m_index];
    }

    ANKI_ASSERT(alloc);
    return Error::NONE;
}

void DescriptorSetState::flush(U64& hash, Array<PtrSize, MAX_BINDINGS_PER_DESCRIPTOR_SET>& dynamicOffsets,
    U32& dynamicOffsetCount, Bool& bindlessDSet)
{
    // Set some defaults
    hash = 0;
    dynamicOffsetCount = 0;
    bindlessDSet = false;

    // There is a chance that the bindless set is bound but the actual shaders have an empty DS layout (maybe because
    // dead-code elimination removed the bindless set). In that case we can't bind the bindless DS. We have to treat
    // it as a regular set
    ANKI_ASSERT(!(m_layout.m_entry == nullptr && !m_bindlessDSetBound)
        && "DS layout points to bindless but no bindless is bound");
    const Bool reallyBindless = m_bindlessDSetBound && m_layout.m_entry == nullptr;

    if(!reallyBindless)
    {
        // Get the cache entry
        ANKI_ASSERT(m_layout.m_entry);
        const DSLayoutCacheEntry& entry = *m_layout.m_entry;

        // Early out if nothing happened
        const Bool anyActiveBindingDirty = !!(entry.m_activeBindings & m_dirtyBindings);
        if(!anyActiveBindingDirty && !m_layoutDirty)
        {
            return;
        }

        Bool dynamicOffsetsDirty = false;

        // Compute the hash
        Array<U64, MAX_BINDINGS_PER_DESCRIPTOR_SET * 2 * 2> toHash;
        U toHashCount = 0;

        const U minBinding = entry.m_minBinding;
        const U maxBinding = entry.m_maxBinding;
        for(U i = minBinding; i <= maxBinding; ++i)
        {
            if(entry.m_activeBindings.get(i))
            {
                ANKI_ASSERT(m_bindingSet.get(i) && "Forgot to bind");
                ANKI_ASSERT(m_bindings[i].m_arraySize >= entry.m_bindingArraySize[i] && "Bound less");

                const Bool crntBindingDirty = m_dirtyBindings.get(i);
                m_dirtyBindings.unset(i);

                for(U arrIdx = 0; arrIdx < entry.m_bindingArraySize[i]; ++arrIdx)
                {
                    ANKI_ASSERT(arrIdx < m_bindings[i].m_arraySize);
                    if(arrIdx > 0)
                    {
                        ANKI_ASSERT(m_bindings[i].m_array[arrIdx].m_type == m_bindings[i].m_array[arrIdx - 1].m_type);
                    }

                    const AnyBinding& anyBinding =
                        (m_bindings[i].m_arraySize == 1) ? m_bindings[i].m_single : m_bindings[i].m_array[arrIdx];
                    ANKI_ASSERT(anyBinding.m_uuids[0] != 0 && "Forgot to bind");

                    toHash[toHashCount++] = anyBinding.m_uuids[0];

                    switch(entry.m_bindingType[i])
                    {
                    case DescriptorType::COMBINED_TEXTURE_SAMPLER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::COMBINED_TEXTURE_SAMPLER
                            && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_uuids[1];
                        toHash[toHashCount++] = U64(anyBinding.m_texAndSampler.m_layout);
                        break;
                    case DescriptorType::TEXTURE:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::TEXTURE && "Have bound the wrong type");
                        toHash[toHashCount++] = U64(anyBinding.m_tex.m_layout);
                        break;
                    case DescriptorType::SAMPLER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::SAMPLER && "Have bound the wrong type");
                        break;
                    case DescriptorType::UNIFORM_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::UNIFORM_BUFFER
                            && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_buff.m_range;
                        dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
                        dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
                        break;
                    case DescriptorType::STORAGE_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::STORAGE_BUFFER
                            && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_buff.m_range;
                        dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
                        dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
                        break;
                    case DescriptorType::READ_TEXTURE_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::READ_TEXTURE_BUFFER
                            && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_uuids[1];
                        break;
                    case DescriptorType::READ_WRITE_TEXTURE_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::READ_WRITE_TEXTURE_BUFFER
                            && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_uuids[1];
                        break;
                    case DescriptorType::IMAGE:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::IMAGE && "Have bound the wrong type");
                        break;
                    case DescriptorType::ACCELERATION_STRUCTURE:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::ACCELERATION_STRUCTURE
                            && "Have bound the wrong type");
                        break;
                    default:
                        ANKI_ASSERT(0);
                    }
                }
            }
        }

        const U64 newHash = computeHash(&toHash[0], toHashCount * sizeof(U64));
        if(newHash != m_lastHash || dynamicOffsetsDirty || m_layoutDirty)
        {
            // DS needs a rebind
            m_lastHash = newHash;
            hash = newHash;
        }
        else
        {
            // All clean, keep the hash equal to 0
        }

        m_layoutDirty = false;
    }
    else
    {
        // Bindless set
        if(!m_bindlessDSetDirty && !m_layoutDirty)
        {
            return;
        }

        bindlessDSet = true;
        hash = 1;
        m_bindlessDSetDirty = false;
        m_layoutDirty = false;
    }
}
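
// Contract of flush(): hash == 0 means "nothing changed, keep the currently bound DS"; any non-zero value means the
// caller must rebind. The bindless path reports hash == 1 purely as a "dirty" marker, and newDescriptorSet() below
// distinguishes the two cases through the bindlessDSet flag rather than the hash value itself.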

DescriptorSetFactory::~DescriptorSetFactory()
{
}

Error DescriptorSetFactory::init(const GrAllocator<U8>& alloc, VkDevice dev, U32 bindlessTextureCount,
    U32 bindlessImageCount)
{
    m_alloc = alloc;
    m_dev = dev;

    m_bindless = m_alloc.newInstance<BindlessDescriptorSet>();
    ANKI_CHECK(m_bindless->init(alloc, dev, bindlessTextureCount, bindlessImageCount));

    m_bindlessTextureCount = bindlessTextureCount;
    m_bindlessImageCount = bindlessImageCount;

    return Error::NONE;
}

void DescriptorSetFactory::destroy()
{
    for(ThreadLocal* threadLocal : m_allThreadLocals)
    {
        for(DSAllocator* alloc : threadLocal->m_allocators)
        {
            m_alloc.deleteInstance(alloc);
        }
        threadLocal->m_allocators.destroy(m_alloc);
        m_alloc.deleteInstance(threadLocal);
    }
    m_allThreadLocals.destroy(m_alloc);

    for(DSLayoutCacheEntry* l : m_caches)
    {
        m_alloc.deleteInstance(l);
    }
    m_caches.destroy(m_alloc);

    if(m_bindless)
    {
        m_alloc.deleteInstance(m_bindless);
    }
}

Error DescriptorSetFactory::newDescriptorSetLayout(const DescriptorSetLayoutInitInfo& init, DescriptorSetLayout& layout)
{
    // Compute the hash for the layout
    Array<DescriptorBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> bindings;
    const U32 bindingCount = init.m_bindings.getSize();
    U64 hash;

    if(init.m_bindings.getSize() > 0)
    {
        memcpy(bindings.getBegin(), init.m_bindings.getBegin(), init.m_bindings.getSizeInBytes());
        std::sort(bindings.getBegin(), bindings.getBegin() + bindingCount,
            [](const DescriptorBinding& a, const DescriptorBinding& b) {
                return a.m_binding < b.m_binding;
            });

        hash = computeHash(&bindings[0], init.m_bindings.getSizeInBytes());
        ANKI_ASSERT(hash != 1);
    }
    else
    {
        hash = 1;
    }

    // Identify if the DS is the bindless one. It is only if every binding matches the bindless criteria
    Bool isBindless = false;
    if(bindingCount > 0)
    {
        isBindless = true;
        for(U32 i = 0; i < bindingCount; ++i)
        {
            const DescriptorBinding& binding = bindings[i];
            if(binding.m_binding == 0 && binding.m_type == DescriptorType::TEXTURE
                && binding.m_arraySize == m_bindlessTextureCount)
            {
                // All good
            }
            else if(binding.m_binding == 1 && binding.m_type == DescriptorType::IMAGE
                && binding.m_arraySize == m_bindlessImageCount)
            {
                // All good
            }
            else
            {
                isBindless = false;
            }
        }
    }

    // Find or create the cache entry
    if(isBindless)
    {
        layout.m_handle = m_bindless->getDescriptorSetLayout();
        layout.m_entry = nullptr;
    }
    else
    {
        LockGuard<SpinLock> lock(m_cachesMtx);

        DSLayoutCacheEntry* cache = nullptr;
        for(DSLayoutCacheEntry* it : m_caches)
        {
            if(it->m_hash == hash)
            {
                cache = it;
                break;
            }
        }

        if(cache == nullptr)
        {
            cache = m_alloc.newInstance<DSLayoutCacheEntry>(this, m_caches.getSize());
            ANKI_CHECK(cache->init(bindings.getBegin(), bindingCount, hash));
            m_caches.emplaceBack(m_alloc, cache);
        }

        // Set the layout
        layout.m_handle = cache->m_layoutHandle;
        layout.m_entry = cache;
    }

    return Error::NONE;
}

Error DescriptorSetFactory::newDescriptorSet(StackAllocator<U8>& tmpAlloc, DescriptorSetState& state,
    DescriptorSet& set, Bool& dirty, Array<PtrSize, MAX_BINDINGS_PER_DESCRIPTOR_SET>& dynamicOffsets,
    U32& dynamicOffsetCount)
{
    ANKI_TRACE_SCOPED_EVENT(VK_DESCRIPTOR_SET_GET_OR_CREATE);

    U64 hash;
    Bool bindlessDSet;
    state.flush(hash, dynamicOffsets, dynamicOffsetCount, bindlessDSet);

    if(hash == 0)
    {
        dirty = false;
        return Error::NONE;
    }
    else
    {
        dirty = true;

        if(!bindlessDSet)
        {
            DescriptorSetLayout layout = state.m_layout;
            DSLayoutCacheEntry& entry = *layout.m_entry;

            // Get thread allocator
            DSAllocator* alloc;
            ANKI_CHECK(entry.getOrCreateDSAllocator(alloc));

            // Finally, allocate
            const DS* s;
            ANKI_CHECK(alloc->getOrCreateSet(hash, state.m_bindings, tmpAlloc, s));
            set.m_handle = s->m_handle;
            ANKI_ASSERT(set.m_handle != VK_NULL_HANDLE);
        }
        else
        {
            set = m_bindless->getDescriptorSet();
        }
    }

    return Error::NONE;
}
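
// End-to-end flow per draw/dispatch, as a rough sketch (the caller-side objects are assumptions, not code from this
// file): the command buffer keeps one DescriptorSetState per set index, calls newDescriptorSet() before recording the
// draw, and only when `dirty` comes back true does it issue vkCmdBindDescriptorSets with the returned handle and the
// dynamic offsets filled in by flush().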

U32 DescriptorSetFactory::bindBindlessTexture(const VkImageView view, const VkImageLayout layout)
{
    ANKI_ASSERT(m_bindless);
    return m_bindless->bindTexture(view, layout);
}

U32 DescriptorSetFactory::bindBindlessImage(const VkImageView view)
{
    ANKI_ASSERT(m_bindless);
    return m_bindless->bindImage(view);
}

void DescriptorSetFactory::unbindBindlessTexture(U32 idx)
{
    ANKI_ASSERT(m_bindless);
    m_bindless->unbindTexture(idx);
}

void DescriptorSetFactory::unbindBindlessImage(U32 idx)
{
    ANKI_ASSERT(m_bindless);
    m_bindless->unbindImage(idx);
}

} // end namespace anki