DescriptorSet.cpp

// Copyright (C) 2009-2022, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <AnKi/Gr/Vulkan/DescriptorSet.h>
#include <AnKi/Gr/Buffer.h>
#include <AnKi/Gr/Vulkan/BufferImpl.h>
#include <AnKi/Util/List.h>
#include <AnKi/Util/HashMap.h>
#include <AnKi/Util/Tracer.h>
#include <algorithm>

namespace anki {

thread_local DescriptorSetFactory::ThreadLocal* DescriptorSetFactory::m_threadLocal = nullptr;

/// Wraps a global descriptor set that is used to store bindless textures.
class DescriptorSetFactory::BindlessDescriptorSet
{
public:
    ~BindlessDescriptorSet();

    Error init(const GrAllocator<U8>& alloc, VkDevice dev, U32 bindlessTextureCount, U32 bindlessTextureBuffers);

    /// Bind a sampled image.
    /// @note It's thread-safe.
    U32 bindTexture(const VkImageView view, const VkImageLayout layout);

    /// Bind a uniform texel buffer.
    /// @note It's thread-safe.
    U32 bindUniformTexelBuffer(VkBufferView view);

    /// @note It's thread-safe.
    void unbindTexture(U32 idx)
    {
        unbindCommon(idx, m_freeTexIndices, m_freeTexIndexCount);
    }

    /// @note It's thread-safe.
    void unbindUniformTexelBuffer(U32 idx)
    {
        unbindCommon(idx, m_freeTexelBufferIndices, m_freeTexelBufferIndexCount);
    }

    DescriptorSet getDescriptorSet() const
    {
        ANKI_ASSERT(m_dset);
        DescriptorSet out;
        out.m_handle = m_dset;
        return out;
    }

    VkDescriptorSetLayout getDescriptorSetLayout() const
    {
        ANKI_ASSERT(m_layout);
        return m_layout;
    }

private:
    GrAllocator<U8> m_alloc;
    VkDevice m_dev = VK_NULL_HANDLE;
    VkDescriptorSetLayout m_layout = VK_NULL_HANDLE;
    VkDescriptorPool m_pool = VK_NULL_HANDLE;
    VkDescriptorSet m_dset = VK_NULL_HANDLE;
    Mutex m_mtx;
    DynamicArray<U16> m_freeTexIndices;
    DynamicArray<U16> m_freeTexelBufferIndices;
    U16 m_freeTexIndexCount = MAX_U16;
    U16 m_freeTexelBufferIndexCount = MAX_U16;

    void unbindCommon(U32 idx, DynamicArray<U16>& freeIndices, U16& freeIndexCount);
};
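
// Typical lifetime of a bindless index, as a sketch. "factory" and "view" are hypothetical
// caller-side names, not identifiers from this file:
//
//   const U32 idx = factory.bindBindlessTexture(view, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
//   // ... shaders index the global texture array with "idx" ...
//   factory.unbindBindlessTexture(idx); // Return the slot to the free list
//
// Every bind pops an index off a free list and writes the view into the single global
// descriptor set; every unbind pushes the index back.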

/// Descriptor set internal class.
class DS : public IntrusiveListEnabled<DS>
{
public:
    VkDescriptorSet m_handle = {};
    U64 m_lastFrameUsed = MAX_U64;
    U64 m_hash;
};

/// Per thread allocator.
class DescriptorSetFactory::DSAllocator
{
public:
    DSAllocator(const DSAllocator&) = delete; // Non-copyable
    DSAllocator& operator=(const DSAllocator&) = delete; // Non-copyable

    DSAllocator(const DSLayoutCacheEntry* layout)
        : m_layoutEntry(layout)
    {
        ANKI_ASSERT(m_layoutEntry);
    }

    ~DSAllocator();

    Error init();
    Error createNewPool();

    Error getOrCreateSet(U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
                         StackAllocator<U8>& tmpAlloc, const DS*& out)
    {
        out = tryFindSet(hash);
        if(out == nullptr)
        {
            ANKI_CHECK(newSet(hash, bindings, tmpAlloc, out));
        }
        return Error::NONE;
    }

private:
    const DSLayoutCacheEntry* m_layoutEntry; ///< Know your father.
    DynamicArray<VkDescriptorPool> m_pools;
    U32 m_lastPoolDSCount = 0;
    U32 m_lastPoolFreeDSCount = 0;
    IntrusiveList<DS> m_list; ///< The least-recently-used sets sit at the front of the list.
    HashMap<U64, DS*> m_hashmap;

    [[nodiscard]] const DS* tryFindSet(U64 hash);

    Error newSet(U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
                 StackAllocator<U8>& tmpAlloc, const DS*& out);

    void writeSet(const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS& set,
                  StackAllocator<U8>& tmpAlloc);
};
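
// Design note: every thread gets its own DSAllocator per layout (see ThreadLocal below), so
// descriptor set allocation and recycling never take a lock on the hot path. The intrusive
// list doubles as an LRU queue: tryFindSet() moves cache hits to the back and newSet()
// recycles from the front once a set has been idle long enough.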

class alignas(ANKI_CACHE_LINE_SIZE) DescriptorSetFactory::ThreadLocal
{
public:
    DynamicArray<DSAllocator*> m_allocators;
};

/// Cache entry. It's built around a specific descriptor set layout.
class DSLayoutCacheEntry
{
public:
    DescriptorSetFactory* m_factory;
    U64 m_hash = 0; ///< Layout hash.
    VkDescriptorSetLayout m_layoutHandle = {};
    BitSet<MAX_BINDINGS_PER_DESCRIPTOR_SET, U32> m_activeBindings = {false};
    Array<U32, MAX_BINDINGS_PER_DESCRIPTOR_SET> m_bindingArraySize = {};
    Array<DescriptorType, MAX_BINDINGS_PER_DESCRIPTOR_SET> m_bindingType = {};
    U32 m_minBinding = MAX_U32;
    U32 m_maxBinding = 0;
    U32 m_index = 0; ///< Index in DescriptorSetFactory::m_caches

    // Cache the create info
    Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> m_poolSizesCreateInf = {};
    VkDescriptorPoolCreateInfo m_poolCreateInf = {};

    DSLayoutCacheEntry(DescriptorSetFactory* factory, U32 index)
        : m_factory(factory)
        , m_index(index)
    {
    }

    ~DSLayoutCacheEntry();

    Error init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash);

    /// @note Thread-safe.
    Error getOrCreateDSAllocator(DescriptorSetFactory::DSAllocator*& alloc);
};

DescriptorSetFactory::BindlessDescriptorSet::~BindlessDescriptorSet()
{
    ANKI_ASSERT(m_freeTexIndexCount == m_freeTexIndices.getSize() && "Forgot to unbind some textures");
    ANKI_ASSERT(m_freeTexelBufferIndexCount == m_freeTexelBufferIndices.getSize()
                && "Forgot to unbind some texel buffers");

    if(m_pool)
    {
        vkDestroyDescriptorPool(m_dev, m_pool, nullptr);
        m_pool = VK_NULL_HANDLE;
        m_dset = VK_NULL_HANDLE;
    }

    if(m_layout)
    {
        vkDestroyDescriptorSetLayout(m_dev, m_layout, nullptr);
        m_layout = VK_NULL_HANDLE;
    }

    m_freeTexIndices.destroy(m_alloc);
    m_freeTexelBufferIndices.destroy(m_alloc);
}

Error DescriptorSetFactory::BindlessDescriptorSet::init(const GrAllocator<U8>& alloc, VkDevice dev,
                                                        U32 bindlessTextureCount, U32 bindlessTextureBuffers)
{
    ANKI_ASSERT(dev);
    m_alloc = alloc;
    m_dev = dev;

    // Create the layout
    {
        Array<VkDescriptorSetLayoutBinding, 2> bindings = {};
        bindings[0].binding = 0;
        bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
        bindings[0].descriptorCount = bindlessTextureCount;
        bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        bindings[1].binding = 1;
        bindings[1].stageFlags = VK_SHADER_STAGE_ALL;
        bindings[1].descriptorCount = bindlessTextureBuffers;
        bindings[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;

        Array<VkDescriptorBindingFlagsEXT, 2> bindingFlags = {};
        bindingFlags[0] = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT
                          | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT
                          | VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
        bindingFlags[1] = bindingFlags[0];

        VkDescriptorSetLayoutBindingFlagsCreateInfoEXT extraInfos = {};
        extraInfos.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
        extraInfos.bindingCount = bindingFlags.getSize();
        extraInfos.pBindingFlags = &bindingFlags[0];

        VkDescriptorSetLayoutCreateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
        ci.bindingCount = bindings.getSize();
        ci.pBindings = &bindings[0];
        ci.pNext = &extraInfos;

        ANKI_VK_CHECK(vkCreateDescriptorSetLayout(m_dev, &ci, nullptr, &m_layout));
    }

    // Create the pool
    {
        Array<VkDescriptorPoolSize, 2> sizes = {};
        sizes[0].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        sizes[0].descriptorCount = bindlessTextureCount;
        sizes[1].type = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
        sizes[1].descriptorCount = bindlessTextureBuffers;

        VkDescriptorPoolCreateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
        ci.maxSets = 1;
        ci.poolSizeCount = sizes.getSize();
        ci.pPoolSizes = &sizes[0];
        ci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;

        ANKI_VK_CHECK(vkCreateDescriptorPool(m_dev, &ci, nullptr, &m_pool));
    }

    // Create the descriptor set
    {
        VkDescriptorSetAllocateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
        ci.descriptorPool = m_pool;
        ci.descriptorSetCount = 1;
        ci.pSetLayouts = &m_layout;

        ANKI_VK_CHECK(vkAllocateDescriptorSets(m_dev, &ci, &m_dset));
    }

    // Init the free arrays
    {
        m_freeTexIndices.create(m_alloc, bindlessTextureCount);
        m_freeTexIndexCount = U16(m_freeTexIndices.getSize());
        for(U32 i = 0; i < m_freeTexIndices.getSize(); ++i)
        {
            m_freeTexIndices[i] = U16(m_freeTexIndices.getSize() - i - 1);
        }

        m_freeTexelBufferIndices.create(m_alloc, bindlessTextureBuffers);
        m_freeTexelBufferIndexCount = U16(m_freeTexelBufferIndices.getSize());
        for(U32 i = 0; i < m_freeTexelBufferIndices.getSize(); ++i)
        {
            m_freeTexelBufferIndices[i] = U16(m_freeTexelBufferIndices.getSize() - i - 1);
        }
    }

    return Error::NONE;
}
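
// The free arrays behave like stacks of indices stored in descending order. For example, with
// bindlessTextureCount == 4 the loop above produces {3, 2, 1, 0}, so the first bindTexture()
// pops index 0, the next pops 1, and so on: low indices are handed out first.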

U32 DescriptorSetFactory::BindlessDescriptorSet::bindTexture(const VkImageView view, const VkImageLayout layout)
{
    ANKI_ASSERT(layout == VK_IMAGE_LAYOUT_GENERAL || layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
    ANKI_ASSERT(view);
    LockGuard<Mutex> lock(m_mtx);
    ANKI_ASSERT(m_freeTexIndexCount > 0 && "Out of indices");

    // Pop the index
    --m_freeTexIndexCount;
    const U16 idx = m_freeTexIndices[m_freeTexIndexCount];
    ANKI_ASSERT(idx < m_freeTexIndices.getSize());

    // Update the set
    VkDescriptorImageInfo imageInf = {};
    imageInf.imageView = view;
    imageInf.imageLayout = layout;

    VkWriteDescriptorSet write = {};
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.pNext = nullptr;
    write.dstSet = m_dset;
    write.dstBinding = 0;
    write.descriptorCount = 1;
    write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
    write.dstArrayElement = idx;
    write.pImageInfo = &imageInf;

    vkUpdateDescriptorSets(m_dev, 1, &write, 0, nullptr);

    return idx;
}

U32 DescriptorSetFactory::BindlessDescriptorSet::bindUniformTexelBuffer(VkBufferView view)
{
    ANKI_ASSERT(view);
    LockGuard<Mutex> lock(m_mtx);
    ANKI_ASSERT(m_freeTexelBufferIndexCount > 0 && "Out of indices");

    // Pop the index
    --m_freeTexelBufferIndexCount;
    const U16 idx = m_freeTexelBufferIndices[m_freeTexelBufferIndexCount];
    ANKI_ASSERT(idx < m_freeTexelBufferIndices.getSize());

    // Update the set
    VkWriteDescriptorSet write = {};
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.pNext = nullptr;
    write.dstSet = m_dset;
    write.dstBinding = 1;
    write.descriptorCount = 1;
    write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
    write.dstArrayElement = idx;
    write.pTexelBufferView = &view;

    vkUpdateDescriptorSets(m_dev, 1, &write, 0, nullptr);

    return idx;
}

void DescriptorSetFactory::BindlessDescriptorSet::unbindCommon(U32 idx, DynamicArray<U16>& freeIndices,
                                                               U16& freeIndexCount)
{
    LockGuard<Mutex> lock(m_mtx);
    ANKI_ASSERT(idx < freeIndices.getSize());
    ANKI_ASSERT(freeIndexCount < freeIndices.getSize());

    freeIndices[freeIndexCount] = U16(idx);
    ++freeIndexCount;

    // Sort the free indices to minimize fragmentation
    std::sort(&freeIndices[0], &freeIndices[0] + freeIndexCount, std::greater<U16>());

    // Make sure there are no duplicates
    for(U32 i = 1; i < freeIndexCount; ++i)
    {
        ANKI_ASSERT(freeIndices[i] != freeIndices[i - 1]);
    }
}
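
// Note: the descending sort keeps the smallest returned index at the top of the free stack,
// so the next bind reuses the lowest available slot and the live index range stays compact.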

DescriptorSetFactory::DSAllocator::~DSAllocator()
{
    auto alloc = m_layoutEntry->m_factory->m_alloc;

    while(!m_list.isEmpty())
    {
        DS* ds = &m_list.getFront();
        m_list.popFront();
        alloc.deleteInstance(ds);
    }

    for(VkDescriptorPool pool : m_pools)
    {
        vkDestroyDescriptorPool(m_layoutEntry->m_factory->m_dev, pool, nullptr);
    }

    m_pools.destroy(alloc);
    m_hashmap.destroy(alloc);
}

Error DescriptorSetFactory::DSAllocator::init()
{
    ANKI_CHECK(createNewPool());
    return Error::NONE;
}

Error DescriptorSetFactory::DSAllocator::createNewPool()
{
    m_lastPoolDSCount = (m_lastPoolDSCount != 0) ? U32(F32(m_lastPoolDSCount) * DESCRIPTOR_POOL_SIZE_SCALE)
                                                 : DESCRIPTOR_POOL_INITIAL_SIZE;
    m_lastPoolFreeDSCount = m_lastPoolDSCount;

    // Set the create info
    Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> poolSizes;
    memcpy(&poolSizes[0], &m_layoutEntry->m_poolSizesCreateInf[0],
           sizeof(poolSizes[0]) * m_layoutEntry->m_poolCreateInf.poolSizeCount);

    for(U i = 0; i < m_layoutEntry->m_poolCreateInf.poolSizeCount; ++i)
    {
        poolSizes[i].descriptorCount *= m_lastPoolDSCount;
        ANKI_ASSERT(poolSizes[i].descriptorCount > 0);
    }

    VkDescriptorPoolCreateInfo ci = m_layoutEntry->m_poolCreateInf;
    ci.pPoolSizes = &poolSizes[0];
    ci.maxSets = m_lastPoolDSCount;

    // Create
    VkDescriptorPool pool;
    ANKI_VK_CHECK(vkCreateDescriptorPool(m_layoutEntry->m_factory->m_dev, &ci, nullptr, &pool));
    ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_POOL_CREATE, 1);

    // Push back
    m_pools.resize(m_layoutEntry->m_factory->m_alloc, m_pools.getSize() + 1);
    m_pools[m_pools.getSize() - 1] = pool;

    return Error::NONE;
}
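
// Pools grow geometrically: the first pool holds DESCRIPTOR_POOL_INITIAL_SIZE sets and every
// later pool is DESCRIPTOR_POOL_SIZE_SCALE times larger than the previous one. For example,
// assuming an initial size of 64 and a scale of 2.0, successive pools would hold 64, 128,
// 256, ... sets (the actual constants are defined in the header).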

const DS* DescriptorSetFactory::DSAllocator::tryFindSet(U64 hash)
{
    ANKI_ASSERT(hash > 0);

    auto it = m_hashmap.find(hash);
    if(it == m_hashmap.getEnd())
    {
        return nullptr;
    }
    else
    {
        DS* ds = *it;

        // Remove from the list and place at the end of the list
        m_list.erase(ds);
        m_list.pushBack(ds);
        ds->m_lastFrameUsed = m_layoutEntry->m_factory->m_frameCount;

        return ds;
    }
}

Error DescriptorSetFactory::DSAllocator::newSet(
    U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, StackAllocator<U8>& tmpAlloc,
    const DS*& out_)
{
    DS* out = nullptr;

    // First, see if there are any unused sets to recycle
    const U64 crntFrame = m_layoutEntry->m_factory->m_frameCount;
    auto it = m_list.getBegin();
    const auto end = m_list.getEnd();
    while(it != end)
    {
        DS* set = &(*it);
        const U64 frameDiff = crntFrame - set->m_lastFrameUsed;
        if(frameDiff > DESCRIPTOR_FRAME_BUFFERING)
        {
            // Found something, recycle
            auto it2 = m_hashmap.find(set->m_hash);
            ANKI_ASSERT(it2 != m_hashmap.getEnd());
            m_hashmap.erase(m_layoutEntry->m_factory->m_alloc, it2);
            m_list.erase(set);
            m_list.pushBack(set);
            m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, set);
            out = set;
            break;
        }
        ++it;
    }

    if(out == nullptr)
    {
        // Need to allocate one
        if(m_lastPoolFreeDSCount == 0)
        {
            // Can't allocate one from the current pool, create a new pool
            ANKI_CHECK(createNewPool());
        }

        --m_lastPoolFreeDSCount;

        VkDescriptorSetAllocateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
        ci.descriptorPool = m_pools.getBack();
        ci.pSetLayouts = &m_layoutEntry->m_layoutHandle;
        ci.descriptorSetCount = 1;

        VkDescriptorSet handle;
        [[maybe_unused]] const VkResult rez = vkAllocateDescriptorSets(m_layoutEntry->m_factory->m_dev, &ci, &handle);
        ANKI_ASSERT(rez == VK_SUCCESS && "That allocation can't fail");
        ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_SET_CREATE, 1);

        out = m_layoutEntry->m_factory->m_alloc.newInstance<DS>();
        out->m_handle = handle;

        m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, out);
        m_list.pushBack(out);
    }

    ANKI_ASSERT(out);
    out->m_lastFrameUsed = crntFrame;
    out->m_hash = hash;

    // Finally, write it
    writeSet(bindings, *out, tmpAlloc);

    out_ = out;
    return Error::NONE;
}
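
// Recycling note: a set is only reused once it has been idle for more than
// DESCRIPTOR_FRAME_BUFFERING frames, which guarantees the GPU cannot still be consuming it
// when vkUpdateDescriptorSets() overwrites it inside writeSet().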

void DescriptorSetFactory::DSAllocator::writeSet(
    const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS& set,
    StackAllocator<U8>& tmpAlloc)
{
    DynamicArrayAuto<VkWriteDescriptorSet> writeInfos(tmpAlloc);
    DynamicArrayAuto<VkDescriptorImageInfo> texInfos(tmpAlloc);
    DynamicArrayAuto<VkDescriptorBufferInfo> buffInfos(tmpAlloc);
    DynamicArrayAuto<VkWriteDescriptorSetAccelerationStructureKHR> asInfos(tmpAlloc);
    DynamicArrayAuto<VkBufferView> bufferViews(tmpAlloc);

    // First pass: Populate the VkDescriptorImageInfo and VkDescriptorBufferInfo
    for(U bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
    {
        if(m_layoutEntry->m_activeBindings.get(bindingIdx))
        {
            for(U arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
            {
                ANKI_ASSERT(bindings[bindingIdx].m_arraySize >= m_layoutEntry->m_bindingArraySize[bindingIdx]);
                const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single
                                                                              : bindings[bindingIdx].m_array[arrIdx];

                switch(b.m_type)
                {
                case DescriptorType::COMBINED_TEXTURE_SAMPLER:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = b.m_texAndSampler.m_samplerHandle;
                    info.imageView = b.m_texAndSampler.m_imgViewHandle;
                    info.imageLayout = b.m_texAndSampler.m_layout;
                    break;
                }
                case DescriptorType::TEXTURE:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = VK_NULL_HANDLE;
                    info.imageView = b.m_tex.m_imgViewHandle;
                    info.imageLayout = b.m_tex.m_layout;
                    break;
                }
                case DescriptorType::SAMPLER:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = b.m_sampler.m_samplerHandle;
                    info.imageView = VK_NULL_HANDLE;
                    info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
                    break;
                }
                case DescriptorType::UNIFORM_BUFFER:
                case DescriptorType::STORAGE_BUFFER:
                {
                    VkDescriptorBufferInfo& info = *buffInfos.emplaceBack();
                    info.buffer = b.m_buff.m_buffHandle;
                    info.offset = 0;
                    info.range = (b.m_buff.m_range == MAX_PTR_SIZE) ? VK_WHOLE_SIZE : b.m_buff.m_range;
                    break;
                }
                case DescriptorType::READ_TEXTURE_BUFFER:
                case DescriptorType::READ_WRITE_TEXTURE_BUFFER:
                {
                    VkBufferView& view = *bufferViews.emplaceBack();
                    view = b.m_textureBuffer.m_buffView;
                    break;
                }
                case DescriptorType::IMAGE:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = VK_NULL_HANDLE;
                    info.imageView = b.m_image.m_imgViewHandle;
                    info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
                    break;
                }
                case DescriptorType::ACCELERATION_STRUCTURE:
                {
                    VkWriteDescriptorSetAccelerationStructureKHR& info = *asInfos.emplaceBack();
                    info.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR;
                    info.pNext = nullptr;
                    info.accelerationStructureCount = 1;
                    info.pAccelerationStructures = &b.m_accelerationStructure.m_accelerationStructureHandle;
                    break;
                }
                default:
                    ANKI_ASSERT(0);
                }
            }
        }
    }

    // Second pass: Populate the VkWriteDescriptorSet with VkDescriptorImageInfo and VkDescriptorBufferInfo
    U32 texCounter = 0;
    U32 buffCounter = 0;
    U32 asCounter = 0;
    U32 buffViewsCounter = 0;

    VkWriteDescriptorSet writeTemplate = {};
    writeTemplate.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    writeTemplate.pNext = nullptr;
    writeTemplate.dstSet = set.m_handle;
    writeTemplate.descriptorCount = 1;

    for(U32 bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
    {
        if(m_layoutEntry->m_activeBindings.get(bindingIdx))
        {
            for(U32 arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
            {
                const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single
                                                                              : bindings[bindingIdx].m_array[arrIdx];

                VkWriteDescriptorSet& writeInfo = *writeInfos.emplaceBack(writeTemplate);
                writeInfo.descriptorType = convertDescriptorType(b.m_type);
                writeInfo.dstArrayElement = arrIdx;
                writeInfo.dstBinding = bindingIdx;

                switch(b.m_type)
                {
                case DescriptorType::COMBINED_TEXTURE_SAMPLER:
                case DescriptorType::TEXTURE:
                case DescriptorType::SAMPLER:
                case DescriptorType::IMAGE:
                    writeInfo.pImageInfo = &texInfos[texCounter++];
                    break;
                case DescriptorType::UNIFORM_BUFFER:
                case DescriptorType::STORAGE_BUFFER:
                    writeInfo.pBufferInfo = &buffInfos[buffCounter++];
                    break;
                case DescriptorType::READ_TEXTURE_BUFFER:
                case DescriptorType::READ_WRITE_TEXTURE_BUFFER:
                    writeInfo.pTexelBufferView = &bufferViews[buffViewsCounter++];
                    break;
                case DescriptorType::ACCELERATION_STRUCTURE:
                    writeInfo.pNext = &asInfos[asCounter++];
                    break;
                default:
                    ANKI_ASSERT(0);
                }
            }
        }
    }

    // Write
    vkUpdateDescriptorSets(m_layoutEntry->m_factory->m_dev, writeInfos.getSize(),
                           (writeInfos.getSize() > 0) ? &writeInfos[0] : nullptr, 0, nullptr);
}
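
// Why two passes: the first pass may reallocate texInfos/buffInfos/bufferViews/asInfos as
// they grow, so pointers into them are only taken in the second pass, after the sizes are
// final. The VkWriteDescriptorSet entries then reference stable memory until the single
// vkUpdateDescriptorSets() call above.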

DSLayoutCacheEntry::~DSLayoutCacheEntry()
{
    if(m_layoutHandle)
    {
        vkDestroyDescriptorSetLayout(m_factory->m_dev, m_layoutHandle, nullptr);
    }
}

Error DSLayoutCacheEntry::init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash)
{
    ANKI_ASSERT(bindings);
    ANKI_ASSERT(hash > 0);
    m_hash = hash;

    // Create the VK layout
    Array<VkDescriptorSetLayoutBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> vkBindings;
    VkDescriptorSetLayoutCreateInfo ci = {};
    ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;

    for(U i = 0; i < bindingCount; ++i)
    {
        VkDescriptorSetLayoutBinding& vk = vkBindings[i];
        const DescriptorBinding& ak = bindings[i];

        vk.binding = ak.m_binding;
        vk.descriptorCount = ak.m_arraySize;
        vk.descriptorType = convertDescriptorType(ak.m_type);
        vk.pImmutableSamplers = nullptr;
        vk.stageFlags = convertShaderTypeBit(ak.m_stageMask);

        ANKI_ASSERT(m_activeBindings.get(ak.m_binding) == false);
        m_activeBindings.set(ak.m_binding);
        m_bindingType[ak.m_binding] = ak.m_type;
        m_bindingArraySize[ak.m_binding] = ak.m_arraySize;
        m_minBinding = min<U32>(m_minBinding, ak.m_binding);
        m_maxBinding = max<U32>(m_maxBinding, ak.m_binding);
    }

    ci.bindingCount = bindingCount;
    ci.pBindings = &vkBindings[0];

    ANKI_VK_CHECK(vkCreateDescriptorSetLayout(m_factory->m_dev, &ci, nullptr, &m_layoutHandle));

    // Create the pool info
    U32 poolSizeCount = 0;
    for(U i = 0; i < bindingCount; ++i)
    {
        U j;
        for(j = 0; j < poolSizeCount; ++j)
        {
            if(m_poolSizesCreateInf[j].type == convertDescriptorType(bindings[i].m_type))
            {
                m_poolSizesCreateInf[j].descriptorCount += bindings[i].m_arraySize;
                break;
            }
        }

        if(j == poolSizeCount)
        {
            m_poolSizesCreateInf[poolSizeCount].type = convertDescriptorType(bindings[i].m_type);
            m_poolSizesCreateInf[poolSizeCount].descriptorCount = bindings[i].m_arraySize;
            ++poolSizeCount;
        }
    }

    if(poolSizeCount == 0)
    {
        // If poolSizeCount is zero it means that the DS layout has 0 descriptors. Since the pool sizes can't be
        // zero, put something in them
        m_poolSizesCreateInf[0].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        m_poolSizesCreateInf[0].descriptorCount = 1;
        ++poolSizeCount;
    }

    ANKI_ASSERT(poolSizeCount > 0);
    m_poolCreateInf.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    m_poolCreateInf.poolSizeCount = poolSizeCount;

    return Error::NONE;
}
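
// Aggregation example (hypothetical layout): two uniform buffer bindings with array sizes 1
// and 3 plus one texture binding collapse into two m_poolSizesCreateInf entries, with
// descriptorCount 4 and 1 respectively (the exact VkDescriptorType values depend on
// convertDescriptorType()). DSAllocator::createNewPool() later multiplies these per-set
// counts by the number of sets the new pool must hold.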

Error DSLayoutCacheEntry::getOrCreateDSAllocator(DescriptorSetFactory::DSAllocator*& alloc)
{
    alloc = nullptr;

    // Get or create thread-local
    DescriptorSetFactory::ThreadLocal* threadLocal = DescriptorSetFactory::m_threadLocal;
    if(ANKI_UNLIKELY(threadLocal == nullptr))
    {
        threadLocal = m_factory->m_alloc.newInstance<DescriptorSetFactory::ThreadLocal>();
        DescriptorSetFactory::m_threadLocal = threadLocal;

        LockGuard<Mutex> lock(m_factory->m_allThreadLocalsMtx);
        m_factory->m_allThreadLocals.emplaceBack(m_factory->m_alloc, threadLocal);
    }

    // Get or create the allocator
    if(ANKI_UNLIKELY(m_index >= threadLocal->m_allocators.getSize()))
    {
        threadLocal->m_allocators.resize(m_factory->m_alloc, m_index + 1, nullptr);
        alloc = m_factory->m_alloc.newInstance<DescriptorSetFactory::DSAllocator>(this);
        ANKI_CHECK(alloc->init());
        threadLocal->m_allocators[m_index] = alloc;
    }
    else if(ANKI_UNLIKELY(threadLocal->m_allocators[m_index] == nullptr))
    {
        alloc = m_factory->m_alloc.newInstance<DescriptorSetFactory::DSAllocator>(this);
        ANKI_CHECK(alloc->init());
        threadLocal->m_allocators[m_index] = alloc;
    }
    else
    {
        alloc = threadLocal->m_allocators[m_index];
    }

    ANKI_ASSERT(alloc);
    return Error::NONE;
}
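
// Note: m_threadLocal is a thread_local raw pointer (declared at the top of the file), so the
// fast path above is a couple of unsynchronized loads. m_allThreadLocalsMtx is only taken the
// first time a thread shows up, to register its ThreadLocal so that destroy() can free it.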

AnyBinding& DescriptorSetState::getBindingToPopulate(U32 bindingIdx, U32 arrayIdx)
{
    ANKI_ASSERT(bindingIdx < MAX_BINDINGS_PER_DESCRIPTOR_SET);
    AnyBindingExtended& extended = m_bindings[bindingIdx];
    AnyBinding* out;

    const Bool bindingIsSet = m_bindingSet.get(bindingIdx);
    m_bindingSet.set(bindingIdx);
    extended.m_arraySize = (!bindingIsSet) ? 0 : extended.m_arraySize;

    if(ANKI_LIKELY(arrayIdx == 0 && extended.m_arraySize <= 1))
    {
        // Array idx is zero, most common case
        out = &extended.m_single;
        extended.m_arraySize = 1;
    }
    else if(arrayIdx < extended.m_arraySize)
    {
        // It's (or was) an array and there is enough space in that array
        out = &extended.m_array[arrayIdx];
    }
    else
    {
        // Need to grow
        const U32 newSize = max(extended.m_arraySize * 2, arrayIdx + 1);
        AnyBinding* newArr = m_alloc.newArray<AnyBinding>(newSize);

        if(extended.m_arraySize == 1)
        {
            newArr[0] = extended.m_single;
        }
        else if(extended.m_arraySize > 1)
        {
            // Copy old to new.
            memcpy(newArr, extended.m_array, sizeof(AnyBinding) * extended.m_arraySize);
        }

        // Zero the rest
        memset(newArr + extended.m_arraySize, 0, sizeof(AnyBinding) * (newSize - extended.m_arraySize));

        extended.m_arraySize = newSize;
        extended.m_array = newArr;

        // Return
        out = &extended.m_array[arrayIdx];
    }

    ANKI_ASSERT(out);
    return *out;
}
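
// Growth example: a binding that was written with m_arraySize == 1 and is then populated at
// arrayIdx == 3 grows to max(1 * 2, 3 + 1) == 4 slots; m_single is copied into slot 0 and
// slots 1..3 are zeroed until the caller fills them in.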

void DescriptorSetState::flush(U64& hash, Array<PtrSize, MAX_BINDINGS_PER_DESCRIPTOR_SET>& dynamicOffsets,
                               U32& dynamicOffsetCount, Bool& bindlessDSet)
{
    // Set some values
    hash = 0;
    dynamicOffsetCount = 0;
    bindlessDSet = false;

    // There is a chance that the bindless set is bound but the actual shaders have an empty DS layout (maybe
    // because dead code elimination eliminated the bindless set). In that case we can't bind the bindless DS and
    // have to treat it as a regular set
    ANKI_ASSERT(!(m_layout.m_entry == nullptr && !m_bindlessDSetBound)
                && "DS layout points to bindless but no bindless is bound");
    const Bool reallyBindless = m_bindlessDSetBound && m_layout.m_entry == nullptr;

    if(!reallyBindless)
    {
        // Get cache entry
        ANKI_ASSERT(m_layout.m_entry);
        const DSLayoutCacheEntry& entry = *m_layout.m_entry;

        // Early out if nothing happened
        const Bool anyActiveBindingDirty = !!(entry.m_activeBindings & m_dirtyBindings);
        if(!anyActiveBindingDirty && !m_layoutDirty)
        {
            return;
        }

        Bool dynamicOffsetsDirty = false;

        // Compute the hash
        Array<U64, MAX_BINDINGS_PER_DESCRIPTOR_SET * 2 * 2> toHash;
        U toHashCount = 0;

        const U minBinding = entry.m_minBinding;
        const U maxBinding = entry.m_maxBinding;
        for(U i = minBinding; i <= maxBinding; ++i)
        {
            if(entry.m_activeBindings.get(i))
            {
                ANKI_ASSERT(m_bindingSet.get(i) && "Forgot to bind");
                ANKI_ASSERT(m_bindings[i].m_arraySize >= entry.m_bindingArraySize[i] && "Bound less");

                const Bool crntBindingDirty = m_dirtyBindings.get(i);
                m_dirtyBindings.unset(i);

                for(U arrIdx = 0; arrIdx < entry.m_bindingArraySize[i]; ++arrIdx)
                {
                    ANKI_ASSERT(arrIdx < m_bindings[i].m_arraySize);
                    if(arrIdx > 0)
                    {
                        ANKI_ASSERT(m_bindings[i].m_array[arrIdx].m_type == m_bindings[i].m_array[arrIdx - 1].m_type);
                    }

                    const AnyBinding& anyBinding =
                        (m_bindings[i].m_arraySize == 1) ? m_bindings[i].m_single : m_bindings[i].m_array[arrIdx];
                    ANKI_ASSERT(anyBinding.m_uuids[0] != 0 && "Forgot to bind");

                    toHash[toHashCount++] = anyBinding.m_uuids[0];

                    switch(entry.m_bindingType[i])
                    {
                    case DescriptorType::COMBINED_TEXTURE_SAMPLER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::COMBINED_TEXTURE_SAMPLER
                                    && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_uuids[1];
                        toHash[toHashCount++] = U64(anyBinding.m_texAndSampler.m_layout);
                        break;
                    case DescriptorType::TEXTURE:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::TEXTURE && "Have bound the wrong type");
                        toHash[toHashCount++] = U64(anyBinding.m_tex.m_layout);
                        break;
                    case DescriptorType::SAMPLER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::SAMPLER && "Have bound the wrong type");
                        break;
                    case DescriptorType::UNIFORM_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::UNIFORM_BUFFER && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_buff.m_range;
                        dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
                        dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
                        break;
                    case DescriptorType::STORAGE_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::STORAGE_BUFFER && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_buff.m_range;
                        dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
                        dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
                        break;
                    case DescriptorType::READ_TEXTURE_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::READ_TEXTURE_BUFFER
                                    && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_uuids[1];
                        break;
                    case DescriptorType::READ_WRITE_TEXTURE_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::READ_WRITE_TEXTURE_BUFFER
                                    && "Have bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_uuids[1];
                        break;
                    case DescriptorType::IMAGE:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::IMAGE && "Have bound the wrong type");
                        break;
                    case DescriptorType::ACCELERATION_STRUCTURE:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::ACCELERATION_STRUCTURE
                                    && "Have bound the wrong type");
                        break;
                    default:
                        ANKI_ASSERT(0);
                    }
                }
            }
        }

        const U64 newHash = computeHash(&toHash[0], toHashCount * sizeof(U64));
        if(newHash != m_lastHash || dynamicOffsetsDirty || m_layoutDirty)
        {
            // DS needs rebind
            m_lastHash = newHash;
            hash = newHash;
        }
        else
        {
            // All clean, keep hash equal to 0
        }

        m_layoutDirty = false;
    }
    else
    {
        // Bindless set
        if(!m_bindlessDSetDirty && !m_layoutDirty)
        {
            return;
        }

        bindlessDSet = true;
        hash = 1;
        m_bindlessDSetDirty = false;
        m_layoutDirty = false;
    }
}
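
// Contract: flush() leaves "hash" at 0 when nothing changed, which newDescriptorSet() below
// maps to dirty == false so the caller can skip vkCmdBindDescriptorSets(). Any non-zero hash
// forces a rebind (the bindless path reports the reserved value 1).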

DescriptorSetFactory::~DescriptorSetFactory()
{
}

Error DescriptorSetFactory::init(const GrAllocator<U8>& alloc, VkDevice dev, U32 bindlessTextureCount,
                                 U32 bindlessTextureBuffers)
{
    m_alloc = alloc;
    m_dev = dev;

    m_bindless = m_alloc.newInstance<BindlessDescriptorSet>();
    ANKI_CHECK(m_bindless->init(alloc, dev, bindlessTextureCount, bindlessTextureBuffers));

    m_bindlessTextureCount = bindlessTextureCount;
    m_bindlessUniformTexelBufferCount = bindlessTextureBuffers;

    return Error::NONE;
}

void DescriptorSetFactory::destroy()
{
    for(ThreadLocal* threadLocal : m_allThreadLocals)
    {
        for(DSAllocator* alloc : threadLocal->m_allocators)
        {
            m_alloc.deleteInstance(alloc);
        }
        threadLocal->m_allocators.destroy(m_alloc);
        m_alloc.deleteInstance(threadLocal);
    }
    m_allThreadLocals.destroy(m_alloc);

    for(DSLayoutCacheEntry* l : m_caches)
    {
        m_alloc.deleteInstance(l);
    }
    m_caches.destroy(m_alloc);

    if(m_bindless)
    {
        m_alloc.deleteInstance(m_bindless);
    }
}

Error DescriptorSetFactory::newDescriptorSetLayout(const DescriptorSetLayoutInitInfo& init, DescriptorSetLayout& layout)
{
    // Compute the hash for the layout
    Array<DescriptorBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> bindings;
    const U32 bindingCount = init.m_bindings.getSize();
    U64 hash;

    if(init.m_bindings.getSize() > 0)
    {
        memcpy(bindings.getBegin(), init.m_bindings.getBegin(), init.m_bindings.getSizeInBytes());
        std::sort(bindings.getBegin(), bindings.getBegin() + bindingCount,
                  [](const DescriptorBinding& a, const DescriptorBinding& b) {
                      return a.m_binding < b.m_binding;
                  });

        hash = computeHash(&bindings[0], init.m_bindings.getSizeInBytes());
        ANKI_ASSERT(hash != 1);
    }
    else
    {
        hash = 1;
    }

    // Identify if the DS is the bindless one. It is only if every binding matches the bindless layout
    Bool isBindless = false;
    if(bindingCount > 0)
    {
        isBindless = true;
        for(U32 i = 0; i < bindingCount; ++i)
        {
            const DescriptorBinding& binding = bindings[i];
            if(binding.m_binding == 0 && binding.m_type == DescriptorType::TEXTURE
               && binding.m_arraySize == m_bindlessTextureCount)
            {
                // All good
            }
            else if(binding.m_binding == 1 && binding.m_type == DescriptorType::READ_TEXTURE_BUFFER
                    && binding.m_arraySize == m_bindlessUniformTexelBufferCount)
            {
                // All good
            }
            else
            {
                isBindless = false;
            }
        }
    }

    // Find or create the cache entry
    if(isBindless)
    {
        layout.m_handle = m_bindless->getDescriptorSetLayout();
        layout.m_entry = nullptr;
    }
    else
    {
        LockGuard<SpinLock> lock(m_cachesMtx);

        DSLayoutCacheEntry* cache = nullptr;
        for(DSLayoutCacheEntry* it : m_caches)
        {
            if(it->m_hash == hash)
            {
                cache = it;
                break;
            }
        }

        if(cache == nullptr)
        {
            cache = m_alloc.newInstance<DSLayoutCacheEntry>(this, m_caches.getSize());
            ANKI_CHECK(cache->init(bindings.getBegin(), bindingCount, hash));
            m_caches.emplaceBack(m_alloc, cache);
        }

        // Set the layout
        layout.m_handle = cache->m_layoutHandle;
        layout.m_entry = cache;
    }

    return Error::NONE;
}

Error DescriptorSetFactory::newDescriptorSet(StackAllocator<U8>& tmpAlloc, DescriptorSetState& state,
                                             DescriptorSet& set, Bool& dirty,
                                             Array<PtrSize, MAX_BINDINGS_PER_DESCRIPTOR_SET>& dynamicOffsets,
                                             U32& dynamicOffsetCount)
{
    ANKI_TRACE_SCOPED_EVENT(VK_DESCRIPTOR_SET_GET_OR_CREATE);

    U64 hash;
    Bool bindlessDSet;
    state.flush(hash, dynamicOffsets, dynamicOffsetCount, bindlessDSet);

    if(hash == 0)
    {
        dirty = false;
        return Error::NONE;
    }
    else
    {
        dirty = true;

        if(!bindlessDSet)
        {
            DescriptorSetLayout layout = state.m_layout;
            DSLayoutCacheEntry& entry = *layout.m_entry;

            // Get thread allocator
            DSAllocator* alloc;
            ANKI_CHECK(entry.getOrCreateDSAllocator(alloc));

            // Finally, allocate
            const DS* s;
            ANKI_CHECK(alloc->getOrCreateSet(hash, state.m_bindings, tmpAlloc, s));
            set.m_handle = s->m_handle;
            ANKI_ASSERT(set.m_handle != VK_NULL_HANDLE);
        }
        else
        {
            set = m_bindless->getDescriptorSet();
        }
    }

    return Error::NONE;
}
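
// End-to-end flow, as a sketch. "factory", "initInfo", "state", "tmpAlloc", "offsets" and
// "offsetCount" are hypothetical caller-side names, not part of this file:
//
//   DescriptorSetLayout layout;
//   ANKI_CHECK(factory.newDescriptorSetLayout(initInfo, layout)); // Cached by layout hash
//   // ... DescriptorSetState accumulates bindings and dirty bits ...
//   DescriptorSet set;
//   Bool dirty;
//   ANKI_CHECK(factory.newDescriptorSet(tmpAlloc, state, set, dirty, offsets, offsetCount));
//   if(dirty)
//   {
//       // Bind set.m_handle with vkCmdBindDescriptorSets(), passing the dynamic offsets
//   }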

U32 DescriptorSetFactory::bindBindlessTexture(const VkImageView view, const VkImageLayout layout)
{
    ANKI_ASSERT(m_bindless);
    return m_bindless->bindTexture(view, layout);
}

U32 DescriptorSetFactory::bindBindlessUniformTexelBuffer(const VkBufferView view)
{
    ANKI_ASSERT(m_bindless);
    return m_bindless->bindUniformTexelBuffer(view);
}

void DescriptorSetFactory::unbindBindlessTexture(U32 idx)
{
    ANKI_ASSERT(m_bindless);
    m_bindless->unbindTexture(idx);
}

void DescriptorSetFactory::unbindBindlessUniformTexelBuffer(U32 idx)
{
    ANKI_ASSERT(m_bindless);
    m_bindless->unbindUniformTexelBuffer(idx);
}

} // end namespace anki