DescriptorSet.cpp

// Copyright (C) 2009-2021, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <AnKi/Gr/Vulkan/DescriptorSet.h>
#include <AnKi/Gr/Buffer.h>
#include <AnKi/Gr/Vulkan/BufferImpl.h>
#include <AnKi/Util/List.h>
#include <AnKi/Util/HashMap.h>
#include <AnKi/Util/Tracer.h>
#include <algorithm>

namespace anki
{

/// Wraps a global descriptor set that is used to store bindless textures.
class DescriptorSetFactory::BindlessDescriptorSet
{
public:
	~BindlessDescriptorSet();

	Error init(const GrAllocator<U8>& alloc, VkDevice dev, const BindlessLimits& bindlessLimits);

	/// Bind a sampled image.
	/// @note It's thread-safe.
	U32 bindTexture(const VkImageView view, const VkImageLayout layout);

	/// Bind a storage image.
	/// @note It's thread-safe.
	U32 bindImage(const VkImageView view);

	/// @note It's thread-safe.
	void unbindTexture(U32 idx)
	{
		unbindCommon(idx, m_freeTexIndices, m_freeTexIndexCount);
	}

	/// @note It's thread-safe.
	void unbindImage(U32 idx)
	{
		unbindCommon(idx, m_freeImgIndices, m_freeImgIndexCount);
	}

	DescriptorSet getDescriptorSet() const
	{
		ANKI_ASSERT(m_dset);
		DescriptorSet out;
		out.m_handle = m_dset;
		return out;
	}

	VkDescriptorSetLayout getDescriptorSetLayout() const
	{
		ANKI_ASSERT(m_layout);
		return m_layout;
	}

private:
	GrAllocator<U8> m_alloc;
	VkDevice m_dev = VK_NULL_HANDLE;
	VkDescriptorSetLayout m_layout = VK_NULL_HANDLE;
	VkDescriptorPool m_pool = VK_NULL_HANDLE;
	VkDescriptorSet m_dset = VK_NULL_HANDLE;
	Mutex m_mtx;
	DynamicArray<U16> m_freeTexIndices;
	DynamicArray<U16> m_freeImgIndices;
	U16 m_freeTexIndexCount ANKI_DEBUG_CODE(= MAX_U16);
	U16 m_freeImgIndexCount ANKI_DEBUG_CODE(= MAX_U16);

	void unbindCommon(U32 idx, DynamicArray<U16>& freeIndices, U16& freeIndexCount);
};

DescriptorSetFactory::BindlessDescriptorSet::~BindlessDescriptorSet()
{
	ANKI_ASSERT(m_freeTexIndexCount == m_freeTexIndices.getSize() && "Forgot to unbind some textures");
	ANKI_ASSERT(m_freeImgIndexCount == m_freeImgIndices.getSize() && "Forgot to unbind some images");

	if(m_pool)
	{
		vkDestroyDescriptorPool(m_dev, m_pool, nullptr);
		m_pool = VK_NULL_HANDLE;
		m_dset = VK_NULL_HANDLE;
	}

	if(m_layout)
	{
		vkDestroyDescriptorSetLayout(m_dev, m_layout, nullptr);
		m_layout = VK_NULL_HANDLE;
	}

	m_freeImgIndices.destroy(m_alloc);
	m_freeTexIndices.destroy(m_alloc);
}

Error DescriptorSetFactory::BindlessDescriptorSet::init(const GrAllocator<U8>& alloc, VkDevice dev,
	const BindlessLimits& bindlessLimits)
{
	ANKI_ASSERT(dev);
	ANKI_ASSERT(bindlessLimits.m_bindlessTextureCount <= MAX_U16);
	ANKI_ASSERT(bindlessLimits.m_bindlessImageCount <= MAX_U16);
	m_alloc = alloc;
	m_dev = dev;

	// Create the layout
	{
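		// The bindless arrays need UPDATE_AFTER_BIND (the set stays bound while new textures get written)
		// and PARTIALLY_BOUND (unused slots don't need valid descriptors), so a binding-flags struct is
		// chained into the create info below.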
		Array<VkDescriptorSetLayoutBinding, 2> bindings = {};
		bindings[0].binding = 0;
		bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
		bindings[0].descriptorCount = bindlessLimits.m_bindlessTextureCount;
		bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;

		bindings[1].binding = 1;
		bindings[1].stageFlags = VK_SHADER_STAGE_ALL;
		bindings[1].descriptorCount = bindlessLimits.m_bindlessImageCount;
		bindings[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;

		Array<VkDescriptorBindingFlagsEXT, 2> bindingFlags = {};
		bindingFlags[0] = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT
			| VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT
			| VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
		bindingFlags[1] = bindingFlags[0];

		VkDescriptorSetLayoutBindingFlagsCreateInfoEXT extraInfos = {};
		extraInfos.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
		extraInfos.bindingCount = bindingFlags.getSize();
		extraInfos.pBindingFlags = &bindingFlags[0];

		VkDescriptorSetLayoutCreateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
		ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
		ci.bindingCount = bindings.getSize();
		ci.pBindings = &bindings[0];
		ci.pNext = &extraInfos;

		ANKI_VK_CHECK(vkCreateDescriptorSetLayout(m_dev, &ci, nullptr, &m_layout));
	}

	// Create the pool
	{
		Array<VkDescriptorPoolSize, 2> sizes = {};
		sizes[0].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
		sizes[0].descriptorCount = bindlessLimits.m_bindlessTextureCount;
		sizes[1].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
		sizes[1].descriptorCount = bindlessLimits.m_bindlessImageCount;

		VkDescriptorPoolCreateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
		ci.maxSets = 1;
		ci.poolSizeCount = sizes.getSize();
		ci.pPoolSizes = &sizes[0];
		ci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;

		ANKI_VK_CHECK(vkCreateDescriptorPool(m_dev, &ci, nullptr, &m_pool));
	}

	// Create the descriptor set
	{
		VkDescriptorSetAllocateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		ci.descriptorPool = m_pool;
		ci.descriptorSetCount = 1;
		ci.pSetLayouts = &m_layout;

		ANKI_VK_CHECK(vkAllocateDescriptorSets(m_dev, &ci, &m_dset));
	}

	// Init the free arrays
	{
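		// Store the free indices in descending order so that popping from the back always hands out the
		// lowest index that is still free.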
		m_freeTexIndices.create(m_alloc, bindlessLimits.m_bindlessTextureCount);
		m_freeTexIndexCount = U16(m_freeTexIndices.getSize());
		for(U32 i = 0; i < m_freeTexIndices.getSize(); ++i)
		{
			m_freeTexIndices[i] = U16(m_freeTexIndices.getSize() - i - 1);
		}

		m_freeImgIndices.create(m_alloc, bindlessLimits.m_bindlessImageCount);
		m_freeImgIndexCount = U16(m_freeImgIndices.getSize());
		for(U32 i = 0; i < m_freeImgIndices.getSize(); ++i)
		{
			m_freeImgIndices[i] = U16(m_freeImgIndices.getSize() - i - 1);
		}
	}

	return Error::NONE;
}

U32 DescriptorSetFactory::BindlessDescriptorSet::bindTexture(const VkImageView view, const VkImageLayout layout)
{
	ANKI_ASSERT(layout == VK_IMAGE_LAYOUT_GENERAL || layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
	ANKI_ASSERT(view);
	LockGuard<Mutex> lock(m_mtx);
	ANKI_ASSERT(m_freeTexIndexCount > 0 && "Out of indices");

	// Get the index
	--m_freeTexIndexCount;
	const U16 idx = m_freeTexIndices[m_freeTexIndexCount];
	ANKI_ASSERT(idx < m_freeTexIndices.getSize());

	// Update the set
	VkDescriptorImageInfo imageInf = {};
	imageInf.imageView = view;
	imageInf.imageLayout = layout;

	VkWriteDescriptorSet write = {};
	write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
	write.pNext = nullptr;
	write.dstSet = m_dset;
	write.dstBinding = 0;
	write.descriptorCount = 1;
	write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
	write.dstArrayElement = idx;
	write.pImageInfo = &imageInf;

	vkUpdateDescriptorSets(m_dev, 1, &write, 0, nullptr);

	return idx;
}

U32 DescriptorSetFactory::BindlessDescriptorSet::bindImage(const VkImageView view)
{
	ANKI_ASSERT(view);
	LockGuard<Mutex> lock(m_mtx);
	ANKI_ASSERT(m_freeImgIndexCount > 0 && "Out of indices");

	// Get the index
	--m_freeImgIndexCount;
	const U32 idx = m_freeImgIndices[m_freeImgIndexCount];
	ANKI_ASSERT(idx < m_freeImgIndices.getSize());

	// Update the set
	VkDescriptorImageInfo imageInf = {};
	imageInf.imageView = view;
	imageInf.imageLayout = VK_IMAGE_LAYOUT_GENERAL; // Storage images are always in general.

	VkWriteDescriptorSet write = {};
	write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
	write.pNext = nullptr;
	write.dstSet = m_dset;
	write.dstBinding = 1;
	write.descriptorCount = 1;
	write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
	write.dstArrayElement = idx;
	write.pImageInfo = &imageInf;

	vkUpdateDescriptorSets(m_dev, 1, &write, 0, nullptr);

	return idx;
}

void DescriptorSetFactory::BindlessDescriptorSet::unbindCommon(U32 idx, DynamicArray<U16>& freeIndices,
	U16& freeIndexCount)
{
	ANKI_ASSERT(idx < freeIndices.getSize());
	LockGuard<Mutex> lock(m_mtx);
	ANKI_ASSERT(freeIndexCount < freeIndices.getSize());

	freeIndices[freeIndexCount] = U16(idx);
	++freeIndexCount;

	// Sort the free indices to minimize fragmentation
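	// (in descending order, to keep the pop-from-the-back behavior of bindTexture()/bindImage() working)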
	std::sort(&freeIndices[0], &freeIndices[0] + freeIndexCount, std::greater<U16>());

	// Make sure there are no duplicates
	for(U32 i = 1; i < freeIndexCount; ++i)
	{
		ANKI_ASSERT(freeIndices[i] != freeIndices[i - 1]);
	}
}

/// Descriptor set internal class.
class DS : public IntrusiveListEnabled<DS>
{
public:
	VkDescriptorSet m_handle = {};
	U64 m_lastFrameUsed = MAX_U64;
	U64 m_hash;
};

/// Per thread allocator.
class alignas(ANKI_CACHE_LINE_SIZE) DSThreadAllocator : public NonCopyable
{
public:
	const DSLayoutCacheEntry* m_layoutEntry; ///< Know your father.
	ThreadId m_tid;

	DynamicArray<VkDescriptorPool> m_pools;
	U32 m_lastPoolDSCount = 0;
	U32 m_lastPoolFreeDSCount = 0;

	IntrusiveList<DS> m_list; ///< At the left of the list are the least used sets.
	HashMap<U64, DS*> m_hashmap;

	DSThreadAllocator(const DSLayoutCacheEntry* layout, ThreadId tid)
		: m_layoutEntry(layout)
		, m_tid(tid)
	{
		ANKI_ASSERT(m_layoutEntry);
	}

	~DSThreadAllocator();

	ANKI_USE_RESULT Error init();
	ANKI_USE_RESULT Error createNewPool();

	ANKI_USE_RESULT Error getOrCreateSet(U64 hash,
		const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, StackAllocator<U8>& tmpAlloc,
		const DS*& out)
	{
		out = tryFindSet(hash);
		if(out == nullptr)
		{
			ANKI_CHECK(newSet(hash, bindings, tmpAlloc, out));
		}

		return Error::NONE;
	}

private:
	ANKI_USE_RESULT const DS* tryFindSet(U64 hash);
	ANKI_USE_RESULT Error newSet(U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
		StackAllocator<U8>& tmpAlloc, const DS*& out);
	void writeSet(const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS& set,
		StackAllocator<U8>& tmpAlloc);
};

/// Cache entry. It's built around a specific descriptor set layout.
class DSLayoutCacheEntry
{
public:
	DescriptorSetFactory* m_factory;

	U64 m_hash = 0; ///< Layout hash.
	VkDescriptorSetLayout m_layoutHandle = {};

	BitSet<MAX_BINDINGS_PER_DESCRIPTOR_SET, U32> m_activeBindings = {false};
	Array<U32, MAX_BINDINGS_PER_DESCRIPTOR_SET> m_bindingArraySize = {};
	Array<DescriptorType, MAX_BINDINGS_PER_DESCRIPTOR_SET> m_bindingType = {};
	U32 m_minBinding = MAX_U32;
	U32 m_maxBinding = 0;

	// Cache the create info
	Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> m_poolSizesCreateInf = {};
	VkDescriptorPoolCreateInfo m_poolCreateInf = {};

	DynamicArray<DSThreadAllocator*> m_threadAllocs;
	RWMutex m_threadAllocsMtx;

	DSLayoutCacheEntry(DescriptorSetFactory* factory)
		: m_factory(factory)
	{
	}

	~DSLayoutCacheEntry();

	ANKI_USE_RESULT Error init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash);

	/// @note Thread-safe.
	ANKI_USE_RESULT Error getOrCreateThreadAllocator(ThreadId tid, DSThreadAllocator*& alloc);
};

DSThreadAllocator::~DSThreadAllocator()
{
	auto alloc = m_layoutEntry->m_factory->m_alloc;

	while(!m_list.isEmpty())
	{
		DS* ds = &m_list.getFront();
		m_list.popFront();
		alloc.deleteInstance(ds);
	}

	for(VkDescriptorPool pool : m_pools)
	{
		vkDestroyDescriptorPool(m_layoutEntry->m_factory->m_dev, pool, nullptr);
	}
	m_pools.destroy(alloc);

	m_hashmap.destroy(alloc);
}

Error DSThreadAllocator::init()
{
	ANKI_CHECK(createNewPool());
	return Error::NONE;
}

Error DSThreadAllocator::createNewPool()
{
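	// Pools grow geometrically: each new pool is DESCRIPTOR_POOL_SIZE_SCALE times the size of the
	// previous one, so long-lived threads end up with a few large pools instead of many small ones.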
	m_lastPoolDSCount = (m_lastPoolDSCount != 0) ? U32(F32(m_lastPoolDSCount) * DESCRIPTOR_POOL_SIZE_SCALE)
		: DESCRIPTOR_POOL_INITIAL_SIZE;
	m_lastPoolFreeDSCount = m_lastPoolDSCount;

	// Set the create info
	Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> poolSizes;
	memcpy(&poolSizes[0], &m_layoutEntry->m_poolSizesCreateInf[0],
		sizeof(poolSizes[0]) * m_layoutEntry->m_poolCreateInf.poolSizeCount);

	for(U i = 0; i < m_layoutEntry->m_poolCreateInf.poolSizeCount; ++i)
	{
		poolSizes[i].descriptorCount *= m_lastPoolDSCount;
		ANKI_ASSERT(poolSizes[i].descriptorCount > 0);
	}

	VkDescriptorPoolCreateInfo ci = m_layoutEntry->m_poolCreateInf;
	ci.pPoolSizes = &poolSizes[0];
	ci.maxSets = m_lastPoolDSCount;

	// Create
	VkDescriptorPool pool;
	ANKI_VK_CHECK(vkCreateDescriptorPool(m_layoutEntry->m_factory->m_dev, &ci, nullptr, &pool));
	ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_POOL_CREATE, 1);

	// Push back
	m_pools.resize(m_layoutEntry->m_factory->m_alloc, m_pools.getSize() + 1);
	m_pools[m_pools.getSize() - 1] = pool;

	return Error::NONE;
}

const DS* DSThreadAllocator::tryFindSet(U64 hash)
{
	ANKI_ASSERT(hash > 0);

	auto it = m_hashmap.find(hash);
	if(it == m_hashmap.getEnd())
	{
		return nullptr;
	}
	else
	{
		DS* ds = *it;

		// Remove from the list and place at the end of the list
		m_list.erase(ds);
		m_list.pushBack(ds);
		ds->m_lastFrameUsed = m_layoutEntry->m_factory->m_frameCount;

		return ds;
	}
}

Error DSThreadAllocator::newSet(U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
	StackAllocator<U8>& tmpAlloc, const DS*& out_)
{
	DS* out = nullptr;

	// First try to see if there are unused to recycle
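	// A set whose last use is more than DESCRIPTOR_FRAME_BUFFERING frames old is assumed to no longer be
	// referenced by in-flight command buffers and can be rewritten.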
	const U64 crntFrame = m_layoutEntry->m_factory->m_frameCount;
	auto it = m_list.getBegin();
	const auto end = m_list.getEnd();
	while(it != end)
	{
		DS* set = &(*it);
		U64 frameDiff = crntFrame - set->m_lastFrameUsed;
		if(frameDiff > DESCRIPTOR_FRAME_BUFFERING)
		{
			// Found something, recycle
			auto it2 = m_hashmap.find(set->m_hash);
			ANKI_ASSERT(it2 != m_hashmap.getEnd());
			m_hashmap.erase(m_layoutEntry->m_factory->m_alloc, it2);
			m_list.erase(set);
			m_list.pushBack(set);
			m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, set);
			out = set;
			break;
		}
		++it;
	}

	if(out == nullptr)
	{
		// Need to allocate one
		if(m_lastPoolFreeDSCount == 0)
		{
			// Can't allocate one from the current pool, create new
			ANKI_CHECK(createNewPool());
		}

		--m_lastPoolFreeDSCount;

		VkDescriptorSetAllocateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		ci.descriptorPool = m_pools.getBack();
		ci.pSetLayouts = &m_layoutEntry->m_layoutHandle;
		ci.descriptorSetCount = 1;

		VkDescriptorSet handle;
		VkResult rez = vkAllocateDescriptorSets(m_layoutEntry->m_factory->m_dev, &ci, &handle);
		(void)rez;
		ANKI_ASSERT(rez == VK_SUCCESS && "That allocation can't fail");
		ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_SET_CREATE, 1);

		out = m_layoutEntry->m_factory->m_alloc.newInstance<DS>();
		out->m_handle = handle;

		m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, out);
		m_list.pushBack(out);
	}

	ANKI_ASSERT(out);
	out->m_lastFrameUsed = crntFrame;
	out->m_hash = hash;

	// Finally, write it
	writeSet(bindings, *out, tmpAlloc);

	out_ = out;
	return Error::NONE;
}

void DSThreadAllocator::writeSet(const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
	const DS& set, StackAllocator<U8>& tmpAlloc)
{
	DynamicArrayAuto<VkWriteDescriptorSet> writeInfos(tmpAlloc);
	DynamicArrayAuto<VkDescriptorImageInfo> texInfos(tmpAlloc);
	DynamicArrayAuto<VkDescriptorBufferInfo> buffInfos(tmpAlloc);
	DynamicArrayAuto<VkWriteDescriptorSetAccelerationStructureKHR> asInfos(tmpAlloc);
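	// The info structs are fully populated in a first pass because emplaceBack() may reallocate the
	// arrays while they grow, which would invalidate pointers already handed to the write infos.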
	// First pass: Populate the VkDescriptorImageInfo and VkDescriptorBufferInfo
	for(U bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
	{
		if(m_layoutEntry->m_activeBindings.get(bindingIdx))
		{
			for(U arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
			{
				ANKI_ASSERT(bindings[bindingIdx].m_arraySize >= m_layoutEntry->m_bindingArraySize[bindingIdx]);
				const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single
					: bindings[bindingIdx].m_array[arrIdx];

				switch(b.m_type)
				{
				case DescriptorType::COMBINED_TEXTURE_SAMPLER:
				{
					VkDescriptorImageInfo& info = *texInfos.emplaceBack();
					info.sampler = b.m_texAndSampler.m_samplerHandle;
					info.imageView = b.m_texAndSampler.m_imgViewHandle;
					info.imageLayout = b.m_texAndSampler.m_layout;
					break;
				}
				case DescriptorType::TEXTURE:
				{
					VkDescriptorImageInfo& info = *texInfos.emplaceBack();
					info.sampler = VK_NULL_HANDLE;
					info.imageView = b.m_tex.m_imgViewHandle;
					info.imageLayout = b.m_tex.m_layout;
					break;
				}
				case DescriptorType::SAMPLER:
				{
					VkDescriptorImageInfo& info = *texInfos.emplaceBack();
					info.sampler = b.m_sampler.m_samplerHandle;
					info.imageView = VK_NULL_HANDLE;
					info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
					break;
				}
				case DescriptorType::UNIFORM_BUFFER:
				case DescriptorType::STORAGE_BUFFER:
				{
					VkDescriptorBufferInfo& info = *buffInfos.emplaceBack();
					info.buffer = b.m_buff.m_buffHandle;
					info.offset = 0;
					info.range = (b.m_buff.m_range == MAX_PTR_SIZE) ? VK_WHOLE_SIZE : b.m_buff.m_range;
					break;
				}
				case DescriptorType::IMAGE:
				{
					VkDescriptorImageInfo& info = *texInfos.emplaceBack();
					info.sampler = VK_NULL_HANDLE;
					info.imageView = b.m_image.m_imgViewHandle;
					info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
					break;
				}
				case DescriptorType::ACCELERATION_STRUCTURE:
				{
					VkWriteDescriptorSetAccelerationStructureKHR& info = *asInfos.emplaceBack();
					info.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR;
					info.pNext = nullptr;
					info.accelerationStructureCount = 1;
					info.pAccelerationStructures = &b.m_accelerationStructure.m_accelerationStructureHandle;
					break;
				}
				default:
					ANKI_ASSERT(0);
				}
			}
		}
	}

	// Second pass: Populate the VkWriteDescriptorSet with VkDescriptorImageInfo and VkDescriptorBufferInfo
	U32 texCounter = 0;
	U32 buffCounter = 0;
	U32 asCounter = 0;

	VkWriteDescriptorSet writeTemplate{};
	writeTemplate.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
	writeTemplate.pNext = nullptr;
	writeTemplate.dstSet = set.m_handle;
	writeTemplate.descriptorCount = 1;

	for(U32 bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
	{
		if(m_layoutEntry->m_activeBindings.get(bindingIdx))
		{
			for(U32 arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
			{
				const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single
					: bindings[bindingIdx].m_array[arrIdx];

				VkWriteDescriptorSet& writeInfo = *writeInfos.emplaceBack(writeTemplate);
				writeInfo.descriptorType = convertDescriptorType(b.m_type);
				writeInfo.dstArrayElement = arrIdx;
				writeInfo.dstBinding = bindingIdx;

				switch(b.m_type)
				{
				case DescriptorType::COMBINED_TEXTURE_SAMPLER:
				case DescriptorType::TEXTURE:
				case DescriptorType::SAMPLER:
				case DescriptorType::IMAGE:
					writeInfo.pImageInfo = &texInfos[texCounter++];
					break;
				case DescriptorType::UNIFORM_BUFFER:
				case DescriptorType::STORAGE_BUFFER:
					writeInfo.pBufferInfo = &buffInfos[buffCounter++];
					break;
				case DescriptorType::ACCELERATION_STRUCTURE:
					writeInfo.pNext = &asInfos[asCounter++];
					break;
				default:
					ANKI_ASSERT(0);
				}
			}
		}
	}

	// Write
	vkUpdateDescriptorSets(m_layoutEntry->m_factory->m_dev, writeInfos.getSize(),
		(writeInfos.getSize() > 0) ? &writeInfos[0] : nullptr, 0, nullptr);
}

DSLayoutCacheEntry::~DSLayoutCacheEntry()
{
	auto alloc = m_factory->m_alloc;

	for(DSThreadAllocator* a : m_threadAllocs)
	{
		alloc.deleteInstance(a);
	}
	m_threadAllocs.destroy(alloc);

	if(m_layoutHandle)
	{
		vkDestroyDescriptorSetLayout(m_factory->m_dev, m_layoutHandle, nullptr);
	}
}

Error DSLayoutCacheEntry::init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash)
{
	ANKI_ASSERT(bindings);
	ANKI_ASSERT(hash > 0);
	m_hash = hash;

	// Create the VK layout
	Array<VkDescriptorSetLayoutBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> vkBindings;
	VkDescriptorSetLayoutCreateInfo ci = {};
	ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;

	for(U i = 0; i < bindingCount; ++i)
	{
		VkDescriptorSetLayoutBinding& vk = vkBindings[i];
		const DescriptorBinding& ak = bindings[i];

		vk.binding = ak.m_binding;
		vk.descriptorCount = ak.m_arraySizeMinusOne + 1;
		vk.descriptorType = convertDescriptorType(ak.m_type);
		vk.pImmutableSamplers = nullptr;
		vk.stageFlags = convertShaderTypeBit(ak.m_stageMask);

		ANKI_ASSERT(m_activeBindings.get(ak.m_binding) == false);
		m_activeBindings.set(ak.m_binding);
		m_bindingType[ak.m_binding] = ak.m_type;
		m_bindingArraySize[ak.m_binding] = ak.m_arraySizeMinusOne + 1;
		m_minBinding = min<U32>(m_minBinding, ak.m_binding);
		m_maxBinding = max<U32>(m_maxBinding, ak.m_binding);
	}

	ci.bindingCount = bindingCount;
	ci.pBindings = &vkBindings[0];

	ANKI_VK_CHECK(vkCreateDescriptorSetLayout(m_factory->m_dev, &ci, nullptr, &m_layoutHandle));

	// Create the pool info
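	// Merge bindings of the same descriptor type into a single VkDescriptorPoolSize entry, accumulating
	// their descriptor counts.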
	U32 poolSizeCount = 0;
	for(U i = 0; i < bindingCount; ++i)
	{
		U j;
		for(j = 0; j < poolSizeCount; ++j)
		{
			if(m_poolSizesCreateInf[j].type == convertDescriptorType(bindings[i].m_type))
			{
				m_poolSizesCreateInf[j].descriptorCount += bindings[i].m_arraySizeMinusOne + 1;
				break;
			}
		}

		if(j == poolSizeCount)
		{
			m_poolSizesCreateInf[poolSizeCount].type = convertDescriptorType(bindings[i].m_type);
			m_poolSizesCreateInf[poolSizeCount].descriptorCount = bindings[i].m_arraySizeMinusOne + 1;
			++poolSizeCount;
		}
	}
	if(poolSizeCount == 0)
	{
		// A poolSizeCount of zero means that the DS layout has 0 descriptors. Since the pool sizes can't
		// be zero, put something in them
		m_poolSizesCreateInf[0].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
		m_poolSizesCreateInf[0].descriptorCount = 1;
		++poolSizeCount;
	}
	ANKI_ASSERT(poolSizeCount > 0);

	m_poolCreateInf.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
	m_poolCreateInf.poolSizeCount = poolSizeCount;

	return Error::NONE;
}

Error DSLayoutCacheEntry::getOrCreateThreadAllocator(ThreadId tid, DSThreadAllocator*& alloc)
{
	alloc = nullptr;

	class Comp
	{
	public:
		Bool operator()(const DSThreadAllocator* a, ThreadId tid) const
		{
			return a->m_tid < tid;
		}

		Bool operator()(ThreadId tid, const DSThreadAllocator* a) const
		{
			return tid < a->m_tid;
		}
	};
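	// Double-checked locking: an optimistic lookup under the read lock and, on a miss, a second lookup
	// under the write lock in case another thread created the allocator in the meantime.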
	// Find using binary search
	{
		RLockGuard<RWMutex> lock(m_threadAllocsMtx);
		auto it = binarySearch(m_threadAllocs.getBegin(), m_threadAllocs.getEnd(), tid, Comp());
		alloc = (it != m_threadAllocs.getEnd()) ? *it : nullptr;
	}

	if(alloc == nullptr)
	{
		// Need to create one
		WLockGuard<RWMutex> lock(m_threadAllocsMtx);

		// Search again
		auto it = binarySearch(m_threadAllocs.getBegin(), m_threadAllocs.getEnd(), tid, Comp());
		alloc = (it != m_threadAllocs.getEnd()) ? *it : nullptr;

		// Create
		if(alloc == nullptr)
		{
			alloc = m_factory->m_alloc.newInstance<DSThreadAllocator>(this, tid);
			ANKI_CHECK(alloc->init());

			m_threadAllocs.resize(m_factory->m_alloc, m_threadAllocs.getSize() + 1);
			m_threadAllocs[m_threadAllocs.getSize() - 1] = alloc;

			// Sort for fast find
			std::sort(m_threadAllocs.getBegin(), m_threadAllocs.getEnd(),
				[](const DSThreadAllocator* a, const DSThreadAllocator* b) { return a->m_tid < b->m_tid; });
		}
	}

	ANKI_ASSERT(alloc);
	ANKI_ASSERT(alloc->m_tid == tid);
	return Error::NONE;
}

void DescriptorSetState::flush(U64& hash, Array<PtrSize, MAX_BINDINGS_PER_DESCRIPTOR_SET>& dynamicOffsets,
	U32& dynamicOffsetCount, Bool& bindlessDSet)
{
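	// Contract: leaving hash equal to 0 tells the caller that nothing relevant changed and the currently
	// bound descriptor set can be reused as-is.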
	// Set some values
	hash = 0;
	dynamicOffsetCount = 0;
	bindlessDSet = false;

	if(!m_bindlessDSetBound)
	{
		// Get cache entry
		ANKI_ASSERT(m_layout.m_entry);
		const DSLayoutCacheEntry& entry = *m_layout.m_entry;

		// Early out if nothing happened
		const Bool anyActiveBindingDirty = !!(entry.m_activeBindings & m_dirtyBindings);
		if(!anyActiveBindingDirty && !m_layoutDirty)
		{
			return;
		}

		Bool dynamicOffsetsDirty = false;

		// Compute the hash
		Array<U64, MAX_BINDINGS_PER_DESCRIPTOR_SET * 2 * 2> toHash;
		U toHashCount = 0;

		const U minBinding = entry.m_minBinding;
		const U maxBinding = entry.m_maxBinding;
		for(U i = minBinding; i <= maxBinding; ++i)
		{
			if(entry.m_activeBindings.get(i))
			{
				ANKI_ASSERT(m_bindingSet.get(i) && "Forgot to bind");
				ANKI_ASSERT(m_bindings[i].m_arraySize >= entry.m_bindingArraySize[i] && "Bound less");

				const Bool crntBindingDirty = m_dirtyBindings.get(i);
				m_dirtyBindings.unset(i);

				for(U arrIdx = 0; arrIdx < entry.m_bindingArraySize[i]; ++arrIdx)
				{
					ANKI_ASSERT(arrIdx < m_bindings[i].m_arraySize);
					if(arrIdx > 0)
					{
						ANKI_ASSERT(m_bindings[i].m_array[arrIdx].m_type == m_bindings[i].m_array[arrIdx - 1].m_type);
					}

					const AnyBinding& anyBinding =
						(m_bindings[i].m_arraySize == 1) ? m_bindings[i].m_single : m_bindings[i].m_array[arrIdx];
					ANKI_ASSERT(anyBinding.m_uuids[0] != 0 && "Forgot to bind");
					toHash[toHashCount++] = anyBinding.m_uuids[0];

					switch(entry.m_bindingType[i])
					{
					case DescriptorType::COMBINED_TEXTURE_SAMPLER:
						ANKI_ASSERT(anyBinding.m_type == DescriptorType::COMBINED_TEXTURE_SAMPLER
							&& "Have bound the wrong type");
						toHash[toHashCount++] = anyBinding.m_uuids[1];
						toHash[toHashCount++] = U64(anyBinding.m_texAndSampler.m_layout);
						break;
					case DescriptorType::TEXTURE:
						ANKI_ASSERT(anyBinding.m_type == DescriptorType::TEXTURE && "Have bound the wrong type");
						toHash[toHashCount++] = U64(anyBinding.m_tex.m_layout);
						break;
					case DescriptorType::SAMPLER:
						ANKI_ASSERT(anyBinding.m_type == DescriptorType::SAMPLER && "Have bound the wrong type");
						break;
					case DescriptorType::UNIFORM_BUFFER:
						ANKI_ASSERT(anyBinding.m_type == DescriptorType::UNIFORM_BUFFER && "Have bound the wrong type");
						toHash[toHashCount++] = anyBinding.m_buff.m_range;
						dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
						dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
						break;
					case DescriptorType::STORAGE_BUFFER:
						ANKI_ASSERT(anyBinding.m_type == DescriptorType::STORAGE_BUFFER && "Have bound the wrong type");
						toHash[toHashCount++] = anyBinding.m_buff.m_range;
						dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
						dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
						break;
					case DescriptorType::IMAGE:
						ANKI_ASSERT(anyBinding.m_type == DescriptorType::IMAGE && "Have bound the wrong type");
						break;
					case DescriptorType::ACCELERATION_STRUCTURE:
						ANKI_ASSERT(anyBinding.m_type == DescriptorType::ACCELERATION_STRUCTURE
							&& "Have bound the wrong type");
						break;
					default:
						ANKI_ASSERT(0);
					}
				}
			}
		}

		const U64 newHash = computeHash(&toHash[0], toHashCount * sizeof(U64));
		if(newHash != m_lastHash || dynamicOffsetsDirty || m_layoutDirty)
		{
			// DS needs rebind
			m_lastHash = newHash;
			hash = newHash;
		}
		else
		{
			// All clean, keep hash equal to 0
		}

		m_layoutDirty = false;
	}
	else
	{
		// Custom set
		if(!m_bindlessDSetDirty && !m_layoutDirty)
		{
			return;
		}

		bindlessDSet = true;
		hash = 1;
		m_bindlessDSetDirty = false;
		m_layoutDirty = false;
	}
}

DescriptorSetFactory::~DescriptorSetFactory()
{
}

Error DescriptorSetFactory::init(const GrAllocator<U8>& alloc, VkDevice dev, const BindlessLimits& bindlessLimits)
{
	m_alloc = alloc;
	m_dev = dev;

	m_bindless = m_alloc.newInstance<BindlessDescriptorSet>();
	ANKI_CHECK(m_bindless->init(alloc, dev, bindlessLimits));
	m_bindlessLimits = bindlessLimits;

	return Error::NONE;
}

void DescriptorSetFactory::destroy()
{
	for(DSLayoutCacheEntry* l : m_caches)
	{
		m_alloc.deleteInstance(l);
	}
	m_caches.destroy(m_alloc);

	if(m_bindless)
	{
		m_alloc.deleteInstance(m_bindless);
	}
}

Error DescriptorSetFactory::newDescriptorSetLayout(const DescriptorSetLayoutInitInfo& init, DescriptorSetLayout& layout)
{
	// Compute the hash for the layout
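	// Sort the bindings first so that two init infos listing the same bindings in different order hash to
	// the same value and share a cache entry.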
	Array<DescriptorBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> bindings;
	const U32 bindingCount = init.m_bindings.getSize();
	U64 hash;

	if(init.m_bindings.getSize() > 0)
	{
		memcpy(bindings.getBegin(), init.m_bindings.getBegin(), init.m_bindings.getSizeInBytes());
		std::sort(bindings.getBegin(), bindings.getBegin() + bindingCount,
			[](const DescriptorBinding& a, const DescriptorBinding& b) { return a.m_binding < b.m_binding; });

		hash = computeHash(&bindings[0], init.m_bindings.getSizeInBytes());
		ANKI_ASSERT(hash != 1);
	}
	else
	{
		hash = 1;
	}
	// Identify if the DS is the bindless one. It is if every binding matches the bindless criteria
	Bool isBindless = false;
	if(bindingCount > 0)
	{
		isBindless = true;
		for(U32 i = 0; i < bindingCount; ++i)
		{
			const DescriptorBinding& binding = bindings[i];
			if(binding.m_binding == 0 && binding.m_type == DescriptorType::TEXTURE
				&& binding.m_arraySizeMinusOne == m_bindlessLimits.m_bindlessTextureCount - 1)
			{
				// All good
			}
			else if(binding.m_binding == 1 && binding.m_type == DescriptorType::IMAGE
				&& binding.m_arraySizeMinusOne == m_bindlessLimits.m_bindlessImageCount - 1)
			{
				// All good
			}
			else
			{
				isBindless = false;
			}
		}
	}

	// Find or create the cache entry
	if(isBindless)
	{
		layout.m_handle = m_bindless->getDescriptorSetLayout();
		layout.m_entry = nullptr;
	}
	else
	{
		LockGuard<SpinLock> lock(m_cachesMtx);

		DSLayoutCacheEntry* cache = nullptr;
		U count = 0;
		for(DSLayoutCacheEntry* it : m_caches)
		{
			if(it->m_hash == hash)
			{
				cache = it;
				break;
			}
			++count;
		}

		if(cache == nullptr)
		{
			cache = m_alloc.newInstance<DSLayoutCacheEntry>(this);
			ANKI_CHECK(cache->init(bindings.getBegin(), bindingCount, hash));

			m_caches.resize(m_alloc, m_caches.getSize() + 1);
			m_caches[m_caches.getSize() - 1] = cache;
		}

		// Set the layout
		layout.m_handle = cache->m_layoutHandle;
		layout.m_entry = cache;
	}

	return Error::NONE;
}

Error DescriptorSetFactory::newDescriptorSet(ThreadId tid, StackAllocator<U8>& tmpAlloc, DescriptorSetState& state,
	DescriptorSet& set, Bool& dirty, Array<PtrSize, MAX_BINDINGS_PER_DESCRIPTOR_SET>& dynamicOffsets,
	U32& dynamicOffsetCount)
{
	ANKI_TRACE_SCOPED_EVENT(VK_DESCRIPTOR_SET_GET_OR_CREATE);

	U64 hash;
	Bool bindlessDSet;
	state.flush(hash, dynamicOffsets, dynamicOffsetCount, bindlessDSet);

	if(hash == 0)
	{
		dirty = false;
		return Error::NONE;
	}
	else
	{
		dirty = true;

		if(!bindlessDSet)
		{
			DescriptorSetLayout layout = state.m_layout;
			DSLayoutCacheEntry& entry = *layout.m_entry;

			// Get thread allocator
			DSThreadAllocator* alloc;
			ANKI_CHECK(entry.getOrCreateThreadAllocator(tid, alloc));

			// Finally, allocate
			const DS* s;
			ANKI_CHECK(alloc->getOrCreateSet(hash, state.m_bindings, tmpAlloc, s));
			set.m_handle = s->m_handle;
			ANKI_ASSERT(set.m_handle != VK_NULL_HANDLE);
		}
		else
		{
			set = m_bindless->getDescriptorSet();
		}
	}

	return Error::NONE;
}

U32 DescriptorSetFactory::bindBindlessTexture(const VkImageView view, const VkImageLayout layout)
{
	ANKI_ASSERT(m_bindless);
	return m_bindless->bindTexture(view, layout);
}

U32 DescriptorSetFactory::bindBindlessImage(const VkImageView view)
{
	ANKI_ASSERT(m_bindless);
	return m_bindless->bindImage(view);
}

void DescriptorSetFactory::unbindBindlessTexture(U32 idx)
{
	ANKI_ASSERT(m_bindless);
	m_bindless->unbindTexture(idx);
}

void DescriptorSetFactory::unbindBindlessImage(U32 idx)
{
	ANKI_ASSERT(m_bindless);
	m_bindless->unbindImage(idx);
}

} // end namespace anki