DescriptorSet.cpp 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134
  1. // Copyright (C) 2009-2023, Panagiotis Christopoulos Charitos and contributors.
  2. // All rights reserved.
  3. // Code licensed under the BSD License.
  4. // http://www.anki3d.org/LICENSE
  5. #include <AnKi/Gr/Vulkan/DescriptorSet.h>
  6. #include <AnKi/Gr/Buffer.h>
  7. #include <AnKi/Gr/Vulkan/BufferImpl.h>
  8. #include <AnKi/Util/List.h>
  9. #include <AnKi/Util/HashMap.h>
  10. #include <AnKi/Util/Tracer.h>
  11. #include <algorithm>
  12. namespace anki {
// Out-of-class definition of the per-thread cache pointer declared in DescriptorSetFactory.
thread_local DescriptorSetFactory::ThreadLocal* DescriptorSetFactory::m_threadLocal = nullptr;
  14. /// Wraps a global descriptor set that is used to store bindless textures.
  15. class DescriptorSetFactory::BindlessDescriptorSet
  16. {
  17. public:
  18. ~BindlessDescriptorSet();
  19. Error init(const U32 bindlessTextureCount, U32 bindlessImageCount);
  20. /// Bind a sampled image.
  21. /// @note It's thread-safe.
  22. U32 bindTexture(const VkImageView view, const VkImageLayout layout);
  23. /// Bind a uniform texel buffer.
  24. /// @note It's thread-safe.
  25. U32 bindUniformTexelBuffer(VkBufferView view);
  26. /// @note It's thread-safe.
  27. void unbindTexture(U32 idx)
  28. {
  29. unbindCommon(idx, m_freeTexIndices, m_freeTexIndexCount);
  30. }
  31. /// @note It's thread-safe.
  32. void unbindUniformTexelBuffer(U32 idx)
  33. {
  34. unbindCommon(idx, m_freeTexelBufferIndices, m_freeTexelBufferIndexCount);
  35. }
  36. DescriptorSet getDescriptorSet() const
  37. {
  38. ANKI_ASSERT(m_dset);
  39. DescriptorSet out;
  40. out.m_handle = m_dset;
  41. return out;
  42. }
  43. VkDescriptorSetLayout getDescriptorSetLayout() const
  44. {
  45. ANKI_ASSERT(m_layout);
  46. return m_layout;
  47. }
  48. private:
  49. VkDescriptorSetLayout m_layout = VK_NULL_HANDLE;
  50. VkDescriptorPool m_dsPool = VK_NULL_HANDLE;
  51. VkDescriptorSet m_dset = VK_NULL_HANDLE;
  52. Mutex m_mtx;
  53. GrDynamicArray<U16> m_freeTexIndices;
  54. GrDynamicArray<U16> m_freeTexelBufferIndices;
  55. U16 m_freeTexIndexCount = kMaxU16;
  56. U16 m_freeTexelBufferIndexCount = kMaxU16;
  57. void unbindCommon(U32 idx, GrDynamicArray<U16>& freeIndices, U16& freeIndexCount);
  58. };
/// Descriptor set internal class.
class DS : public IntrusiveListEnabled<DS>
{
public:
	VkDescriptorSet m_handle = {};
	U64 m_lastFrameUsed = kMaxU64; ///< Frame of last use; drives the recycling decision in newSet().
	U64 m_hash; ///< Hash of the bindings this set was last written with.
};
/// Per thread allocator.
class DescriptorSetFactory::DSAllocator
{
public:
	DSAllocator(const DSAllocator&) = delete; // Non-copyable
	DSAllocator& operator=(const DSAllocator&) = delete; // Non-copyable
	DSAllocator(const DSLayoutCacheEntry* layout)
		: m_layoutEntry(layout)
	{
		ANKI_ASSERT(m_layoutEntry);
	}
	~DSAllocator();
	Error init();
	/// Create a new VkDescriptorPool and append it to m_pools.
	Error createNewPool();
	/// Return a cached set matching the bindings hash, or create and write a new one.
	Error getOrCreateSet(U64 hash, const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings, StackMemoryPool& tmpPool, const DS*& out)
	{
		out = tryFindSet(hash);
		if(out == nullptr)
		{
			ANKI_CHECK(newSet(hash, bindings, tmpPool, out));
		}
		return Error::kNone;
	}
private:
	const DSLayoutCacheEntry* m_layoutEntry; ///< Know your father.
	GrDynamicArray<VkDescriptorPool> m_pools;
	U32 m_lastPoolDSCount = 0; ///< maxSets of the most recently created pool.
	U32 m_lastPoolFreeDSCount = 0; ///< Sets still allocatable from the last pool.
	IntrusiveList<DS> m_list; ///< At the left of the list are the least used sets.
	GrHashMap<U64, DS*> m_hashmap; ///< Maps bindings hash to a live DS.
	[[nodiscard]] const DS* tryFindSet(U64 hash);
	Error newSet(U64 hash, const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings, StackMemoryPool& tmpPool, const DS*& out);
	void writeSet(const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings, const DS& set, StackMemoryPool& tmpPool);
};
/// Per-thread state. Cache-line aligned to avoid false sharing between threads.
class alignas(ANKI_CACHE_LINE_SIZE) DescriptorSetFactory::ThreadLocal
{
public:
	GrDynamicArray<DSAllocator*> m_allocators; ///< Indexed by DSLayoutCacheEntry::m_index; slots may be nullptr.
};
/// Cache entry. It's built around a specific descriptor set layout.
class DSLayoutCacheEntry
{
public:
	DescriptorSetFactory* m_factory;
	U64 m_hash = 0; ///< Layout hash.
	VkDescriptorSetLayout m_layoutHandle = {};
	BitSet<kMaxBindingsPerDescriptorSet, U32> m_activeBindings = {false}; ///< Which binding slots the layout uses.
	Array<U32, kMaxBindingsPerDescriptorSet> m_bindingArraySize = {}; ///< Array size per binding slot.
	Array<DescriptorType, kMaxBindingsPerDescriptorSet> m_bindingType = {}; ///< Descriptor type per binding slot.
	U32 m_minBinding = kMaxU32;
	U32 m_maxBinding = 0;
	U32 m_index = 0; ///< Index in DescriptorSetFactory::m_caches
	// Cache the create info so per-thread allocators can create pools without re-deriving it
	Array<VkDescriptorPoolSize, U(DescriptorType::kCount)> m_poolSizesCreateInf = {};
	VkDescriptorPoolCreateInfo m_poolCreateInf = {};
	DSLayoutCacheEntry(DescriptorSetFactory* factory, U32 index)
		: m_factory(factory)
		, m_index(index)
	{
	}
	~DSLayoutCacheEntry();
	Error init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash);
	/// Get or create the calling thread's allocator for this layout.
	/// @note Thread-safe.
	Error getOrCreateDSAllocator(DescriptorSetFactory::DSAllocator*& alloc);
};
  132. DescriptorSetFactory::BindlessDescriptorSet::~BindlessDescriptorSet()
  133. {
  134. ANKI_ASSERT(m_freeTexIndexCount == m_freeTexIndices.getSize() && "Forgot to unbind some textures");
  135. ANKI_ASSERT(m_freeTexelBufferIndexCount == m_freeTexelBufferIndices.getSize() && "Forgot to unbind some texel buffers");
  136. if(m_dsPool)
  137. {
  138. vkDestroyDescriptorPool(getVkDevice(), m_dsPool, nullptr);
  139. m_dsPool = VK_NULL_HANDLE;
  140. m_dset = VK_NULL_HANDLE;
  141. }
  142. if(m_layout)
  143. {
  144. vkDestroyDescriptorSetLayout(getVkDevice(), m_layout, nullptr);
  145. m_layout = VK_NULL_HANDLE;
  146. }
  147. }
Error DescriptorSetFactory::BindlessDescriptorSet::init(U32 bindlessTextureCount, U32 bindlessTextureBuffers)
{
	// Create the layout: binding 0 holds the sampled images, binding 1 the uniform texel buffers
	{
		Array<VkDescriptorSetLayoutBinding, 2> bindings = {};
		bindings[0].binding = 0;
		bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
		bindings[0].descriptorCount = bindlessTextureCount;
		bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
		bindings[1].binding = 1;
		bindings[1].stageFlags = VK_SHADER_STAGE_ALL;
		bindings[1].descriptorCount = bindlessTextureBuffers;
		bindings[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
		// Descriptor-indexing flags: allow updating slots after the set is bound and
		// leaving unused slots unwritten (partially bound)
		Array<VkDescriptorBindingFlagsEXT, 2> bindingFlags = {};
		bindingFlags[0] = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT
						  | VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
		bindingFlags[1] = bindingFlags[0];
		VkDescriptorSetLayoutBindingFlagsCreateInfoEXT extraInfos = {};
		extraInfos.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
		extraInfos.bindingCount = bindingFlags.getSize();
		extraInfos.pBindingFlags = &bindingFlags[0];
		VkDescriptorSetLayoutCreateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
		ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
		ci.bindingCount = bindings.getSize();
		ci.pBindings = &bindings[0];
		ci.pNext = &extraInfos; // Chain the binding flags; extraInfos must outlive the create call
		ANKI_VK_CHECK(vkCreateDescriptorSetLayout(getVkDevice(), &ci, nullptr, &m_layout));
	}
	// Create the pool, sized to hold exactly this one set
	{
		Array<VkDescriptorPoolSize, 2> sizes = {};
		sizes[0].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
		sizes[0].descriptorCount = bindlessTextureCount;
		sizes[1].type = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
		sizes[1].descriptorCount = bindlessTextureBuffers;
		VkDescriptorPoolCreateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
		ci.maxSets = 1;
		ci.poolSizeCount = sizes.getSize();
		ci.pPoolSizes = &sizes[0];
		// Must match the UPDATE_AFTER_BIND flag of the layout
		ci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;
		ANKI_VK_CHECK(vkCreateDescriptorPool(getVkDevice(), &ci, nullptr, &m_dsPool));
	}
	// Create the descriptor set
	{
		VkDescriptorSetAllocateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		ci.descriptorPool = m_dsPool;
		ci.descriptorSetCount = 1;
		ci.pSetLayouts = &m_layout;
		ANKI_VK_CHECK(vkAllocateDescriptorSets(getVkDevice(), &ci, &m_dset));
	}
	// Init the free arrays as descending stacks so that low indices are handed out first
	{
		m_freeTexIndices.resize(bindlessTextureCount);
		m_freeTexIndexCount = U16(m_freeTexIndices.getSize());
		for(U32 i = 0; i < m_freeTexIndices.getSize(); ++i)
		{
			m_freeTexIndices[i] = U16(m_freeTexIndices.getSize() - i - 1);
		}
		m_freeTexelBufferIndices.resize(bindlessTextureBuffers);
		m_freeTexelBufferIndexCount = U16(m_freeTexelBufferIndices.getSize());
		for(U32 i = 0; i < m_freeTexelBufferIndices.getSize(); ++i)
		{
			m_freeTexelBufferIndices[i] = U16(m_freeTexelBufferIndices.getSize() - i - 1);
		}
	}
	return Error::kNone;
}
  218. U32 DescriptorSetFactory::BindlessDescriptorSet::bindTexture(const VkImageView view, const VkImageLayout layout)
  219. {
  220. ANKI_ASSERT(layout == VK_IMAGE_LAYOUT_GENERAL || layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
  221. ANKI_ASSERT(view);
  222. LockGuard<Mutex> lock(m_mtx);
  223. ANKI_ASSERT(m_freeTexIndexCount > 0 && "Out of indices");
  224. // Pop the index
  225. --m_freeTexIndexCount;
  226. const U16 idx = m_freeTexIndices[m_freeTexIndexCount];
  227. ANKI_ASSERT(idx < m_freeTexIndices.getSize());
  228. // Update the set
  229. VkDescriptorImageInfo imageInf = {};
  230. imageInf.imageView = view;
  231. imageInf.imageLayout = layout;
  232. VkWriteDescriptorSet write = {};
  233. write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  234. write.pNext = nullptr;
  235. write.dstSet = m_dset;
  236. write.dstBinding = 0;
  237. write.descriptorCount = 1;
  238. write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
  239. write.dstArrayElement = idx;
  240. write.pImageInfo = &imageInf;
  241. vkUpdateDescriptorSets(getVkDevice(), 1, &write, 0, nullptr);
  242. return idx;
  243. }
  244. U32 DescriptorSetFactory::BindlessDescriptorSet::bindUniformTexelBuffer(VkBufferView view)
  245. {
  246. ANKI_ASSERT(view);
  247. LockGuard<Mutex> lock(m_mtx);
  248. ANKI_ASSERT(m_freeTexelBufferIndexCount > 0 && "Out of indices");
  249. // Pop the index
  250. --m_freeTexelBufferIndexCount;
  251. const U16 idx = m_freeTexelBufferIndices[m_freeTexelBufferIndexCount];
  252. ANKI_ASSERT(idx < m_freeTexelBufferIndices.getSize());
  253. // Update the set
  254. VkWriteDescriptorSet write = {};
  255. write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  256. write.pNext = nullptr;
  257. write.dstSet = m_dset;
  258. write.dstBinding = 1;
  259. write.descriptorCount = 1;
  260. write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
  261. write.dstArrayElement = idx;
  262. write.pTexelBufferView = &view;
  263. vkUpdateDescriptorSets(getVkDevice(), 1, &write, 0, nullptr);
  264. return idx;
  265. }
  266. void DescriptorSetFactory::BindlessDescriptorSet::unbindCommon(U32 idx, GrDynamicArray<U16>& freeIndices, U16& freeIndexCount)
  267. {
  268. LockGuard<Mutex> lock(m_mtx);
  269. ANKI_ASSERT(idx < freeIndices.getSize());
  270. ANKI_ASSERT(freeIndexCount < freeIndices.getSize());
  271. freeIndices[freeIndexCount] = U16(idx);
  272. ++freeIndexCount;
  273. // Sort the free indices to minimize fragmentation
  274. std::sort(&freeIndices[0], &freeIndices[0] + freeIndexCount, std::greater<U16>());
  275. // Make sure there are no duplicates
  276. for(U32 i = 1; i < freeIndexCount; ++i)
  277. {
  278. ANKI_ASSERT(freeIndices[i] != freeIndices[i - 1]);
  279. }
  280. }
  281. DescriptorSetFactory::DSAllocator::~DSAllocator()
  282. {
  283. while(!m_list.isEmpty())
  284. {
  285. DS* ds = &m_list.getFront();
  286. m_list.popFront();
  287. deleteInstance(GrMemoryPool::getSingleton(), ds);
  288. }
  289. for(VkDescriptorPool pool : m_pools)
  290. {
  291. vkDestroyDescriptorPool(getVkDevice(), pool, nullptr);
  292. }
  293. }
Error DescriptorSetFactory::DSAllocator::init()
{
	// Create the first pool eagerly so newSet() always has a pool to allocate from
	ANKI_CHECK(createNewPool());
	return Error::kNone;
}
  299. Error DescriptorSetFactory::DSAllocator::createNewPool()
  300. {
  301. m_lastPoolDSCount = (m_lastPoolDSCount != 0) ? U32(F32(m_lastPoolDSCount) * kDescriptorPoolSizeScale) : kDescriptorPoolInitialSize;
  302. m_lastPoolFreeDSCount = m_lastPoolDSCount;
  303. // Set the create info
  304. Array<VkDescriptorPoolSize, U(DescriptorType::kCount)> poolSizes;
  305. memcpy(&poolSizes[0], &m_layoutEntry->m_poolSizesCreateInf[0], sizeof(poolSizes[0]) * m_layoutEntry->m_poolCreateInf.poolSizeCount);
  306. for(U i = 0; i < m_layoutEntry->m_poolCreateInf.poolSizeCount; ++i)
  307. {
  308. poolSizes[i].descriptorCount *= m_lastPoolDSCount;
  309. ANKI_ASSERT(poolSizes[i].descriptorCount > 0);
  310. }
  311. VkDescriptorPoolCreateInfo ci = m_layoutEntry->m_poolCreateInf;
  312. ci.pPoolSizes = &poolSizes[0];
  313. ci.maxSets = m_lastPoolDSCount;
  314. // Create
  315. VkDescriptorPool pool;
  316. ANKI_VK_CHECK(vkCreateDescriptorPool(getVkDevice(), &ci, nullptr, &pool));
  317. ANKI_TRACE_INC_COUNTER(VkDescriptorPoolCreate, 1);
  318. // Push back
  319. m_pools.resize(m_pools.getSize() + 1);
  320. m_pools[m_pools.getSize() - 1] = pool;
  321. return Error::kNone;
  322. }
  323. const DS* DescriptorSetFactory::DSAllocator::tryFindSet(U64 hash)
  324. {
  325. ANKI_ASSERT(hash > 0);
  326. auto it = m_hashmap.find(hash);
  327. if(it == m_hashmap.getEnd())
  328. {
  329. return nullptr;
  330. }
  331. else
  332. {
  333. DS* ds = *it;
  334. // Remove from the list and place at the end of the list
  335. m_list.erase(ds);
  336. m_list.pushBack(ds);
  337. ds->m_lastFrameUsed = m_layoutEntry->m_factory->m_frameCount;
  338. return ds;
  339. }
  340. }
Error DescriptorSetFactory::DSAllocator::newSet(U64 hash, const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings,
												StackMemoryPool& tmpPool, const DS*& out_)
{
	DS* out = nullptr;
	// First try to see if there are unused to recycle. Scan from the front (least recently used) and
	// recycle the first set that hasn't been touched for more than kDescriptorBufferedFrameCount
	// frames, meaning the GPU can no longer be reading it.
	const U64 crntFrame = m_layoutEntry->m_factory->m_frameCount;
	auto it = m_list.getBegin();
	const auto end = m_list.getEnd();
	while(it != end)
	{
		DS* set = &(*it);
		U64 frameDiff = crntFrame - set->m_lastFrameUsed;
		if(frameDiff > kDescriptorBufferedFrameCount)
		{
			// Found something, recycle: re-key it in the hashmap and move it to the MRU end of the list
			auto it2 = m_hashmap.find(set->m_hash);
			ANKI_ASSERT(it2 != m_hashmap.getEnd());
			m_hashmap.erase(it2);
			m_list.erase(set);
			m_list.pushBack(set);
			m_hashmap.emplace(hash, set);
			out = set;
			break;
		}
		++it;
	}
	if(out == nullptr)
	{
		// Need to allocate one
		if(m_lastPoolFreeDSCount == 0)
		{
			// Can't allocate one from the current pool, create new
			ANKI_CHECK(createNewPool());
		}
		--m_lastPoolFreeDSCount;
		VkDescriptorSetAllocateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		ci.descriptorPool = m_pools.getBack();
		ci.pSetLayouts = &m_layoutEntry->m_layoutHandle;
		ci.descriptorSetCount = 1;
		VkDescriptorSet handle;
		// The free-count bookkeeping above guarantees the pool has room, so failure would be a bug
		[[maybe_unused]] VkResult rez = vkAllocateDescriptorSets(getVkDevice(), &ci, &handle);
		ANKI_ASSERT(rez == VK_SUCCESS && "That allocation can't fail");
		ANKI_TRACE_INC_COUNTER(VkDescriptorSetCreate, 1);
		out = newInstance<DS>(GrMemoryPool::getSingleton());
		out->m_handle = handle;
		m_hashmap.emplace(hash, out);
		m_list.pushBack(out);
	}
	ANKI_ASSERT(out);
	out->m_lastFrameUsed = crntFrame;
	out->m_hash = hash;
	// Finally, write it
	writeSet(bindings, *out, tmpPool);
	out_ = out;
	return Error::kNone;
}
void DescriptorSetFactory::DSAllocator::writeSet(const Array<AnyBindingExtended, kMaxBindingsPerDescriptorSet>& bindings, const DS& set,
												 StackMemoryPool& tmpPool)
{
	// Two-pass scheme: the info arrays are fully populated in pass 1 and only read (by address) in
	// pass 2, so the pointers stored into the VkWriteDescriptorSets can't be invalidated by growth.
	DynamicArray<VkWriteDescriptorSet, MemoryPoolPtrWrapper<StackMemoryPool>> writeInfos(&tmpPool);
	DynamicArray<VkDescriptorImageInfo, MemoryPoolPtrWrapper<StackMemoryPool>> texInfos(&tmpPool);
	DynamicArray<VkDescriptorBufferInfo, MemoryPoolPtrWrapper<StackMemoryPool>> buffInfos(&tmpPool);
	DynamicArray<VkWriteDescriptorSetAccelerationStructureKHR, MemoryPoolPtrWrapper<StackMemoryPool>> asInfos(&tmpPool);
	DynamicArray<VkBufferView, MemoryPoolPtrWrapper<StackMemoryPool>> bufferViews(&tmpPool);
	// First pass: Populate the VkDescriptorImageInfo and VkDescriptorBufferInfo
	for(U bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
	{
		if(m_layoutEntry->m_activeBindings.get(bindingIdx))
		{
			for(U arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
			{
				ANKI_ASSERT(bindings[bindingIdx].m_arraySize >= m_layoutEntry->m_bindingArraySize[bindingIdx]);
				// Non-array bindings store their data inline in m_single, arrays in m_array
				const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single : bindings[bindingIdx].m_array[arrIdx];
				switch(b.m_type)
				{
				case DescriptorType::kCombinedTextureSampler:
				{
					VkDescriptorImageInfo& info = *texInfos.emplaceBack();
					info.sampler = b.m_texAndSampler.m_samplerHandle;
					info.imageView = b.m_texAndSampler.m_imgViewHandle;
					info.imageLayout = b.m_texAndSampler.m_layout;
					break;
				}
				case DescriptorType::kTexture:
				{
					VkDescriptorImageInfo& info = *texInfos.emplaceBack();
					info.sampler = VK_NULL_HANDLE;
					info.imageView = b.m_tex.m_imgViewHandle;
					info.imageLayout = b.m_tex.m_layout;
					break;
				}
				case DescriptorType::kSampler:
				{
					VkDescriptorImageInfo& info = *texInfos.emplaceBack();
					info.sampler = b.m_sampler.m_samplerHandle;
					info.imageView = VK_NULL_HANDLE;
					info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
					break;
				}
				case DescriptorType::kUniformBuffer:
				case DescriptorType::kStorageBuffer:
				{
					VkDescriptorBufferInfo& info = *buffInfos.emplaceBack();
					info.buffer = b.m_buff.m_buffHandle;
					// Offset is 0 here because these are bound with dynamic offsets at bind time
					info.offset = 0;
					info.range = (b.m_buff.m_range == kMaxPtrSize) ? VK_WHOLE_SIZE : b.m_buff.m_range;
					break;
				}
				case DescriptorType::kReadTextureBuffer:
				case DescriptorType::kReadWriteTextureBuffer:
				{
					VkBufferView& view = *bufferViews.emplaceBack();
					view = b.m_textureBuffer.m_buffView;
					break;
				}
				case DescriptorType::kImage:
				{
					VkDescriptorImageInfo& info = *texInfos.emplaceBack();
					info.sampler = VK_NULL_HANDLE;
					info.imageView = b.m_image.m_imgViewHandle;
					info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
					break;
				}
				case DescriptorType::kAccelerationStructure:
				{
					VkWriteDescriptorSetAccelerationStructureKHR& info = *asInfos.emplaceBack();
					info.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR;
					info.pNext = nullptr;
					info.accelerationStructureCount = 1;
					// Points into `bindings`, which outlives this function call
					info.pAccelerationStructures = &b.m_accelerationStructure.m_accelerationStructureHandle;
					break;
				}
				default:
					ANKI_ASSERT(0);
				}
			}
		}
	}
	// Second pass: Populate the VkWriteDescriptorSet with VkDescriptorImageInfo and VkDescriptorBufferInfo.
	// The counters consume the info arrays in the exact order pass 1 produced them.
	U32 texCounter = 0;
	U32 buffCounter = 0;
	U32 asCounter = 0;
	U32 buffViewsCounter = 0;
	VkWriteDescriptorSet writeTemplate = {};
	writeTemplate.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
	writeTemplate.pNext = nullptr;
	writeTemplate.dstSet = set.m_handle;
	writeTemplate.descriptorCount = 1;
	for(U32 bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
	{
		if(m_layoutEntry->m_activeBindings.get(bindingIdx))
		{
			for(U32 arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
			{
				const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single : bindings[bindingIdx].m_array[arrIdx];
				VkWriteDescriptorSet& writeInfo = *writeInfos.emplaceBack(writeTemplate);
				writeInfo.descriptorType = convertDescriptorType(b.m_type);
				writeInfo.dstArrayElement = arrIdx;
				writeInfo.dstBinding = bindingIdx;
				switch(b.m_type)
				{
				case DescriptorType::kCombinedTextureSampler:
				case DescriptorType::kTexture:
				case DescriptorType::kSampler:
				case DescriptorType::kImage:
					writeInfo.pImageInfo = &texInfos[texCounter++];
					break;
				case DescriptorType::kUniformBuffer:
				case DescriptorType::kStorageBuffer:
					writeInfo.pBufferInfo = &buffInfos[buffCounter++];
					break;
				case DescriptorType::kReadTextureBuffer:
				case DescriptorType::kReadWriteTextureBuffer:
					writeInfo.pTexelBufferView = &bufferViews[buffViewsCounter++];
					break;
				case DescriptorType::kAccelerationStructure:
					// AS writes carry their payload via the pNext chain, not a p*Info member
					writeInfo.pNext = &asInfos[asCounter++];
					break;
				default:
					ANKI_ASSERT(0);
				}
			}
		}
	}
	// Write
	vkUpdateDescriptorSets(getVkDevice(), writeInfos.getSize(), (writeInfos.getSize() > 0) ? &writeInfos[0] : nullptr, 0, nullptr);
}
  529. DSLayoutCacheEntry::~DSLayoutCacheEntry()
  530. {
  531. if(m_layoutHandle)
  532. {
  533. vkDestroyDescriptorSetLayout(getVkDevice(), m_layoutHandle, nullptr);
  534. }
  535. }
Error DSLayoutCacheEntry::init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash)
{
	ANKI_ASSERT(bindings);
	ANKI_ASSERT(hash > 0);
	m_hash = hash;
	// Create the VK layout and, while walking the bindings, record the per-slot metadata
	// (active bits, types, array sizes, min/max) used later for hashing and writing sets
	Array<VkDescriptorSetLayoutBinding, kMaxBindingsPerDescriptorSet> vkBindings;
	VkDescriptorSetLayoutCreateInfo ci = {};
	ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
	for(U i = 0; i < bindingCount; ++i)
	{
		VkDescriptorSetLayoutBinding& vk = vkBindings[i];
		const DescriptorBinding& ak = bindings[i];
		vk.binding = ak.m_binding;
		vk.descriptorCount = ak.m_arraySize;
		vk.descriptorType = convertDescriptorType(ak.m_type);
		vk.pImmutableSamplers = nullptr;
		vk.stageFlags = convertShaderTypeBit(ak.m_stageMask);
		ANKI_ASSERT(m_activeBindings.get(ak.m_binding) == false); // No duplicate binding slots
		m_activeBindings.set(ak.m_binding);
		m_bindingType[ak.m_binding] = ak.m_type;
		m_bindingArraySize[ak.m_binding] = ak.m_arraySize;
		m_minBinding = min<U32>(m_minBinding, ak.m_binding);
		m_maxBinding = max<U32>(m_maxBinding, ak.m_binding);
	}
	ci.bindingCount = bindingCount;
	ci.pBindings = &vkBindings[0];
	ANKI_VK_CHECK(vkCreateDescriptorSetLayout(getVkDevice(), &ci, nullptr, &m_layoutHandle));
	// Create the pool info: accumulate descriptor counts per type (linear scan is fine for the
	// small number of descriptor types)
	U32 poolSizeCount = 0;
	for(U i = 0; i < bindingCount; ++i)
	{
		U j;
		for(j = 0; j < poolSizeCount; ++j)
		{
			if(m_poolSizesCreateInf[j].type == convertDescriptorType(bindings[i].m_type))
			{
				m_poolSizesCreateInf[j].descriptorCount += bindings[i].m_arraySize;
				break;
			}
		}
		if(j == poolSizeCount)
		{
			m_poolSizesCreateInf[poolSizeCount].type = convertDescriptorType(bindings[i].m_type);
			m_poolSizesCreateInf[poolSizeCount].descriptorCount = bindings[i].m_arraySize;
			++poolSizeCount;
		}
	}
	if(poolSizeCount == 0)
	{
		// If poolSizeCount is zero it means that the DS layout has 0 descriptors. Since the pool
		// sizes can't be zero put something in them
		m_poolSizesCreateInf[0].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
		m_poolSizesCreateInf[0].descriptorCount = 1;
		++poolSizeCount;
	}
	ANKI_ASSERT(poolSizeCount > 0);
	m_poolCreateInf.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
	m_poolCreateInf.poolSizeCount = poolSizeCount;
	return Error::kNone;
}
  597. Error DSLayoutCacheEntry::getOrCreateDSAllocator(DescriptorSetFactory::DSAllocator*& alloc)
  598. {
  599. alloc = nullptr;
  600. // Get or create thread-local
  601. DescriptorSetFactory::ThreadLocal* threadLocal = DescriptorSetFactory::m_threadLocal;
  602. if(threadLocal == nullptr) [[unlikely]]
  603. {
  604. threadLocal = newInstance<DescriptorSetFactory::ThreadLocal>(GrMemoryPool::getSingleton());
  605. DescriptorSetFactory::m_threadLocal = threadLocal;
  606. LockGuard<Mutex> lock(m_factory->m_allThreadLocalsMtx);
  607. m_factory->m_allThreadLocals.emplaceBack(threadLocal);
  608. }
  609. // Get or create the allocator
  610. if(m_index >= threadLocal->m_allocators.getSize()) [[unlikely]]
  611. {
  612. threadLocal->m_allocators.resize(m_index + 1, nullptr);
  613. alloc = newInstance<DescriptorSetFactory::DSAllocator>(GrMemoryPool::getSingleton(), this);
  614. ANKI_CHECK(alloc->init());
  615. threadLocal->m_allocators[m_index] = alloc;
  616. }
  617. else if(threadLocal->m_allocators[m_index] == nullptr) [[unlikely]]
  618. {
  619. alloc = newInstance<DescriptorSetFactory::DSAllocator>(GrMemoryPool::getSingleton(), this);
  620. ANKI_CHECK(alloc->init());
  621. threadLocal->m_allocators[m_index] = alloc;
  622. }
  623. else
  624. {
  625. alloc = threadLocal->m_allocators[m_index];
  626. }
  627. ANKI_ASSERT(alloc);
  628. return Error::kNone;
  629. }
AnyBinding& DescriptorSetState::getBindingToPopulate(U32 bindingIdx, U32 arrayIdx)
{
	ANKI_ASSERT(bindingIdx < kMaxBindingsPerDescriptorSet);
	AnyBindingExtended& extended = m_bindings[bindingIdx];
	AnyBinding* out;
	// First touch of this binding since the last reset clears any stale array size
	const Bool bindingIsSet = m_bindingSet.get(bindingIdx);
	m_bindingSet.set(bindingIdx);
	extended.m_arraySize = (!bindingIsSet) ? 0 : extended.m_arraySize;
	if(arrayIdx == 0 && extended.m_arraySize <= 1) [[likely]]
	{
		// Array idx is zero, most common case: store inline, no allocation
		out = &extended.m_single;
		extended.m_arraySize = 1;
	}
	else if(arrayIdx < extended.m_arraySize)
	{
		// It's (or was) an array and there is enough space in that array
		out = &extended.m_array[arrayIdx];
	}
	else
	{
		// Need to grow. Double the capacity (at least enough to reach arrayIdx)
		const U32 newSize = max(extended.m_arraySize * 2, arrayIdx + 1);
		AnyBinding* newArr = newArray<AnyBinding>(*m_pool, newSize);
		if(extended.m_arraySize == 1)
		{
			// Promote the inline single element into the new array
			newArr[0] = extended.m_single;
		}
		else if(extended.m_arraySize > 1)
		{
			// Copy old to new.
			memcpy(newArr, extended.m_array, sizeof(AnyBinding) * extended.m_arraySize);
		}
		// Zero the rest
		memset(newArr + extended.m_arraySize, 0, sizeof(AnyBinding) * (newSize - extended.m_arraySize));
		extended.m_arraySize = newSize;
		extended.m_array = newArr;
		// Return
		out = &extended.m_array[arrayIdx];
	}
	ANKI_ASSERT(out);
	return *out;
}
  673. void DescriptorSetState::flush(U64& hash, Array<PtrSize, kMaxBindingsPerDescriptorSet>& dynamicOffsets, U32& dynamicOffsetCount, Bool& bindlessDSet)
  674. {
  675. // Set some values
  676. hash = 0;
  677. dynamicOffsetCount = 0;
  678. bindlessDSet = false;
  679. // There is a chance where the bindless set is bound but the actual shaders have an empty DS layout (maybe because
  680. // the dead code elimination eliminated the bindless set). In that case we can't bind the bindless DS. We have to
  681. // treat it as regular set
  682. ANKI_ASSERT(!(m_layout.m_entry == nullptr && !m_bindlessDSetBound) && "DS layout points to bindless but no bindless is bound");
  683. const Bool reallyBindless = m_bindlessDSetBound && m_layout.m_entry == nullptr;
  684. if(!reallyBindless)
  685. {
  686. // Get cache entry
  687. ANKI_ASSERT(m_layout.m_entry);
  688. const DSLayoutCacheEntry& entry = *m_layout.m_entry;
  689. // Early out if nothing happened
  690. const Bool anyActiveBindingDirty = !!(entry.m_activeBindings & m_dirtyBindings);
  691. if(!anyActiveBindingDirty && !m_layoutDirty)
  692. {
  693. return;
  694. }
  695. Bool dynamicOffsetsDirty = false;
  696. // Compute the hash
  697. Array<U64, kMaxBindingsPerDescriptorSet * 2 * 2> toHash;
  698. U toHashCount = 0;
  699. const U minBinding = entry.m_minBinding;
  700. const U maxBinding = entry.m_maxBinding;
  701. for(U i = minBinding; i <= maxBinding; ++i)
  702. {
  703. if(entry.m_activeBindings.get(i))
  704. {
  705. ANKI_ASSERT(m_bindingSet.get(i) && "Forgot to bind");
  706. ANKI_ASSERT(m_bindings[i].m_arraySize >= entry.m_bindingArraySize[i] && "Bound less");
  707. const Bool crntBindingDirty = m_dirtyBindings.get(i);
  708. m_dirtyBindings.unset(i);
  709. for(U arrIdx = 0; arrIdx < entry.m_bindingArraySize[i]; ++arrIdx)
  710. {
  711. ANKI_ASSERT(arrIdx < m_bindings[i].m_arraySize);
  712. if(arrIdx > 1)
  713. {
  714. ANKI_ASSERT(m_bindings[i].m_array[arrIdx].m_type == m_bindings[i].m_array[arrIdx - 1].m_type);
  715. }
  716. const AnyBinding& anyBinding = (m_bindings[i].m_arraySize == 1) ? m_bindings[i].m_single : m_bindings[i].m_array[arrIdx];
  717. ANKI_ASSERT(anyBinding.m_uuids[0] != 0 && "Forgot to bind");
  718. toHash[toHashCount++] = anyBinding.m_uuids[0];
  719. switch(entry.m_bindingType[i])
  720. {
  721. case DescriptorType::kCombinedTextureSampler:
  722. ANKI_ASSERT(anyBinding.m_type == DescriptorType::kCombinedTextureSampler && "Have bound the wrong type");
  723. toHash[toHashCount++] = anyBinding.m_uuids[1];
  724. toHash[toHashCount++] = U64(anyBinding.m_texAndSampler.m_layout);
  725. break;
  726. case DescriptorType::kTexture:
  727. ANKI_ASSERT(anyBinding.m_type == DescriptorType::kTexture && "Have bound the wrong type");
  728. toHash[toHashCount++] = U64(anyBinding.m_tex.m_layout);
  729. break;
  730. case DescriptorType::kSampler:
  731. ANKI_ASSERT(anyBinding.m_type == DescriptorType::kSampler && "Have bound the wrong type");
  732. break;
  733. case DescriptorType::kUniformBuffer:
  734. ANKI_ASSERT(anyBinding.m_type == DescriptorType::kUniformBuffer && "Have bound the wrong type");
  735. toHash[toHashCount++] = anyBinding.m_buff.m_range;
  736. dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
  737. dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
  738. break;
  739. case DescriptorType::kStorageBuffer:
  740. ANKI_ASSERT(anyBinding.m_type == DescriptorType::kStorageBuffer && "Have bound the wrong type");
  741. toHash[toHashCount++] = anyBinding.m_buff.m_range;
  742. dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
  743. dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
  744. break;
  745. case DescriptorType::kReadTextureBuffer:
  746. ANKI_ASSERT(anyBinding.m_type == DescriptorType::kReadTextureBuffer && "Have bound the wrong type");
  747. toHash[toHashCount++] = anyBinding.m_uuids[1];
  748. break;
  749. case DescriptorType::kReadWriteTextureBuffer:
  750. ANKI_ASSERT(anyBinding.m_type == DescriptorType::kReadWriteTextureBuffer && "Have bound the wrong type");
  751. toHash[toHashCount++] = anyBinding.m_uuids[1];
  752. break;
  753. case DescriptorType::kImage:
  754. ANKI_ASSERT(anyBinding.m_type == DescriptorType::kImage && "Have bound the wrong type");
  755. break;
  756. case DescriptorType::kAccelerationStructure:
  757. ANKI_ASSERT(anyBinding.m_type == DescriptorType::kAccelerationStructure && "Have bound the wrong type");
  758. break;
  759. default:
  760. ANKI_ASSERT(0);
  761. }
  762. }
  763. }
  764. }
  765. const U64 newHash = computeHash(&toHash[0], toHashCount * sizeof(U64));
  766. if(newHash != m_lastHash || dynamicOffsetsDirty || m_layoutDirty)
  767. {
  768. // DS needs rebind
  769. m_lastHash = newHash;
  770. hash = newHash;
  771. }
  772. else
  773. {
  774. // All clean, keep hash equal to 0
  775. }
  776. m_layoutDirty = false;
  777. }
  778. else
  779. {
  780. // Custom set
  781. if(!m_bindlessDSetDirty && !m_layoutDirty)
  782. {
  783. return;
  784. }
  785. bindlessDSet = true;
  786. hash = 1;
  787. m_bindlessDSetDirty = false;
  788. m_layoutDirty = false;
  789. }
  790. }
DescriptorSetFactory::~DescriptorSetFactory()
{
	// Intentionally empty: teardown lives in destroy() — presumably called explicitly before destruction (verify with callers)
}
  794. Error DescriptorSetFactory::init(U32 bindlessTextureCount, U32 bindlessTextureBuffers)
  795. {
  796. m_bindless = newInstance<BindlessDescriptorSet>(GrMemoryPool::getSingleton());
  797. ANKI_CHECK(m_bindless->init(bindlessTextureCount, bindlessTextureBuffers));
  798. m_bindlessTextureCount = bindlessTextureCount;
  799. m_bindlessUniformTexelBufferCount = bindlessTextureBuffers;
  800. return Error::kNone;
  801. }
  802. void DescriptorSetFactory::destroy()
  803. {
  804. for(ThreadLocal* threadLocal : m_allThreadLocals)
  805. {
  806. for(DSAllocator* alloc : threadLocal->m_allocators)
  807. {
  808. deleteInstance(GrMemoryPool::getSingleton(), alloc);
  809. }
  810. deleteInstance(GrMemoryPool::getSingleton(), threadLocal);
  811. }
  812. m_allThreadLocals.destroy();
  813. for(DSLayoutCacheEntry* l : m_caches)
  814. {
  815. deleteInstance(GrMemoryPool::getSingleton(), l);
  816. }
  817. m_caches.destroy();
  818. if(m_bindless)
  819. {
  820. deleteInstance(GrMemoryPool::getSingleton(), m_bindless);
  821. }
  822. }
  823. Error DescriptorSetFactory::newDescriptorSetLayout(const DescriptorSetLayoutInitInfo& init, DescriptorSetLayout& layout)
  824. {
  825. // Compute the hash for the layout
  826. Array<DescriptorBinding, kMaxBindingsPerDescriptorSet> bindings;
  827. const U32 bindingCount = init.m_bindings.getSize();
  828. U64 hash;
  829. if(init.m_bindings.getSize() > 0)
  830. {
  831. memcpy(bindings.getBegin(), init.m_bindings.getBegin(), init.m_bindings.getSizeInBytes());
  832. std::sort(bindings.getBegin(), bindings.getBegin() + bindingCount, [](const DescriptorBinding& a, const DescriptorBinding& b) {
  833. return a.m_binding < b.m_binding;
  834. });
  835. hash = computeHash(&bindings[0], init.m_bindings.getSizeInBytes());
  836. ANKI_ASSERT(hash != 1);
  837. }
  838. else
  839. {
  840. hash = 1;
  841. }
  842. // Identify if the DS is the bindless one. It is if there is at least one binding that matches the criteria
  843. Bool isBindless = false;
  844. if(bindingCount > 0)
  845. {
  846. isBindless = true;
  847. for(U32 i = 0; i < bindingCount; ++i)
  848. {
  849. const DescriptorBinding& binding = bindings[i];
  850. if(binding.m_binding == 0 && binding.m_type == DescriptorType::kTexture && binding.m_arraySize == m_bindlessTextureCount)
  851. {
  852. // All good
  853. }
  854. else if(binding.m_binding == 1 && binding.m_type == DescriptorType::kReadTextureBuffer
  855. && binding.m_arraySize == m_bindlessUniformTexelBufferCount)
  856. {
  857. // All good
  858. }
  859. else
  860. {
  861. isBindless = false;
  862. }
  863. }
  864. }
  865. // Find or create the cache entry
  866. if(isBindless)
  867. {
  868. layout.m_handle = m_bindless->getDescriptorSetLayout();
  869. layout.m_entry = nullptr;
  870. }
  871. else
  872. {
  873. LockGuard<SpinLock> lock(m_cachesMtx);
  874. DSLayoutCacheEntry* cache = nullptr;
  875. U count = 0;
  876. for(DSLayoutCacheEntry* it : m_caches)
  877. {
  878. if(it->m_hash == hash)
  879. {
  880. cache = it;
  881. break;
  882. }
  883. ++count;
  884. }
  885. if(cache == nullptr)
  886. {
  887. cache = newInstance<DSLayoutCacheEntry>(GrMemoryPool::getSingleton(), this, m_caches.getSize());
  888. ANKI_CHECK(cache->init(bindings.getBegin(), bindingCount, hash));
  889. m_caches.emplaceBack(cache);
  890. }
  891. // Set the layout
  892. layout.m_handle = cache->m_layoutHandle;
  893. layout.m_entry = cache;
  894. }
  895. return Error::kNone;
  896. }
  897. Error DescriptorSetFactory::newDescriptorSet(StackMemoryPool& tmpPool, DescriptorSetState& state, DescriptorSet& set, Bool& dirty,
  898. Array<PtrSize, kMaxBindingsPerDescriptorSet>& dynamicOffsets, U32& dynamicOffsetCount)
  899. {
  900. ANKI_TRACE_SCOPED_EVENT(VkDescriptorSetGetOrCreate);
  901. U64 hash;
  902. Bool bindlessDSet;
  903. state.flush(hash, dynamicOffsets, dynamicOffsetCount, bindlessDSet);
  904. if(hash == 0)
  905. {
  906. dirty = false;
  907. return Error::kNone;
  908. }
  909. else
  910. {
  911. dirty = true;
  912. if(!bindlessDSet)
  913. {
  914. DescriptorSetLayout layout = state.m_layout;
  915. DSLayoutCacheEntry& entry = *layout.m_entry;
  916. // Get thread allocator
  917. DSAllocator* alloc;
  918. ANKI_CHECK(entry.getOrCreateDSAllocator(alloc));
  919. // Finally, allocate
  920. const DS* s;
  921. ANKI_CHECK(alloc->getOrCreateSet(hash, state.m_bindings, tmpPool, s));
  922. set.m_handle = s->m_handle;
  923. ANKI_ASSERT(set.m_handle != VK_NULL_HANDLE);
  924. }
  925. else
  926. {
  927. set = m_bindless->getDescriptorSet();
  928. }
  929. }
  930. return Error::kNone;
  931. }
/// Register a texture view with the global bindless set. Returns the index assigned within the set.
U32 DescriptorSetFactory::bindBindlessTexture(const VkImageView view, const VkImageLayout layout)
{
	ANKI_ASSERT(m_bindless);
	return m_bindless->bindTexture(view, layout);
}
/// Register a buffer view with the global bindless set. Returns the index assigned within the set.
U32 DescriptorSetFactory::bindBindlessUniformTexelBuffer(const VkBufferView view)
{
	ANKI_ASSERT(m_bindless);
	return m_bindless->bindUniformTexelBuffer(view);
}
/// Release a bindless texture slot previously returned by bindBindlessTexture().
void DescriptorSetFactory::unbindBindlessTexture(U32 idx)
{
	ANKI_ASSERT(m_bindless);
	m_bindless->unbindTexture(idx);
}
/// Release a bindless texel buffer slot previously returned by bindBindlessUniformTexelBuffer().
void DescriptorSetFactory::unbindBindlessUniformTexelBuffer(U32 idx)
{
	ANKI_ASSERT(m_bindless);
	m_bindless->unbindUniformTexelBuffer(idx);
}
  952. } // end namespace anki