// Copyright (C) 2009-2022, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <AnKi/Gr/Vulkan/DescriptorSet.h>
#include <AnKi/Gr/Buffer.h>
#include <AnKi/Gr/Vulkan/BufferImpl.h>
#include <AnKi/Util/List.h>
#include <AnKi/Util/HashMap.h>
#include <AnKi/Util/Tracer.h>
#include <algorithm>

namespace anki {

/// Wraps the global descriptor set that stores the bindless textures and images.
class DescriptorSetFactory::BindlessDescriptorSet
{
public:
    ~BindlessDescriptorSet();

    Error init(const GrAllocator<U8>& alloc, VkDevice dev, U32 bindlessTextureCount, U32 bindlessImageCount);

    /// Bind a sampled image.
    /// @note It's thread-safe.
    U32 bindTexture(const VkImageView view, const VkImageLayout layout);

    /// Bind a storage image.
    /// @note It's thread-safe.
    U32 bindImage(const VkImageView view);

    /// @note It's thread-safe.
    void unbindTexture(U32 idx)
    {
        unbindCommon(idx, m_freeTexIndices, m_freeTexIndexCount);
    }

    /// @note It's thread-safe.
    void unbindImage(U32 idx)
    {
        unbindCommon(idx, m_freeImgIndices, m_freeImgIndexCount);
    }

    DescriptorSet getDescriptorSet() const
    {
        ANKI_ASSERT(m_dset);
        DescriptorSet out;
        out.m_handle = m_dset;
        return out;
    }

    VkDescriptorSetLayout getDescriptorSetLayout() const
    {
        ANKI_ASSERT(m_layout);
        return m_layout;
    }

private:
    GrAllocator<U8> m_alloc;
    VkDevice m_dev = VK_NULL_HANDLE;
    VkDescriptorSetLayout m_layout = VK_NULL_HANDLE;
    VkDescriptorPool m_pool = VK_NULL_HANDLE;
    VkDescriptorSet m_dset = VK_NULL_HANDLE;
    Mutex m_mtx;
    DynamicArray<U16> m_freeTexIndices;
    DynamicArray<U16> m_freeImgIndices;
    U16 m_freeTexIndexCount ANKI_DEBUG_CODE(= MAX_U16);
    U16 m_freeImgIndexCount ANKI_DEBUG_CODE(= MAX_U16);

    void unbindCommon(U32 idx, DynamicArray<U16>& freeIndices, U16& freeIndexCount);
};

DescriptorSetFactory::BindlessDescriptorSet::~BindlessDescriptorSet()
{
    ANKI_ASSERT(m_freeTexIndexCount == m_freeTexIndices.getSize() && "Forgot to unbind some textures");
    ANKI_ASSERT(m_freeImgIndexCount == m_freeImgIndices.getSize() && "Forgot to unbind some images");

    if(m_pool)
    {
        vkDestroyDescriptorPool(m_dev, m_pool, nullptr);
        m_pool = VK_NULL_HANDLE;
        m_dset = VK_NULL_HANDLE;
    }

    if(m_layout)
    {
        vkDestroyDescriptorSetLayout(m_dev, m_layout, nullptr);
        m_layout = VK_NULL_HANDLE;
    }

    m_freeImgIndices.destroy(m_alloc);
    m_freeTexIndices.destroy(m_alloc);
}

Error DescriptorSetFactory::BindlessDescriptorSet::init(const GrAllocator<U8>& alloc, VkDevice dev,
                                                        U32 bindlessTextureCount, U32 bindlessImageCount)
{
    ANKI_ASSERT(dev);
    m_alloc = alloc;
    m_dev = dev;

    // Create the layout
    {
        Array<VkDescriptorSetLayoutBinding, 2> bindings = {};
        bindings[0].binding = 0;
        bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
        bindings[0].descriptorCount = bindlessTextureCount;
        bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;

        bindings[1].binding = 1;
        bindings[1].stageFlags = VK_SHADER_STAGE_ALL;
        bindings[1].descriptorCount = bindlessImageCount;
        bindings[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;

        Array<VkDescriptorBindingFlagsEXT, 2> bindingFlags = {};
        bindingFlags[0] = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT
                          | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT
                          | VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
        bindingFlags[1] = bindingFlags[0];

        VkDescriptorSetLayoutBindingFlagsCreateInfoEXT extraInfos = {};
        extraInfos.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
        extraInfos.bindingCount = bindingFlags.getSize();
        extraInfos.pBindingFlags = &bindingFlags[0];

        VkDescriptorSetLayoutCreateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
        ci.bindingCount = bindings.getSize();
        ci.pBindings = &bindings[0];
        ci.pNext = &extraInfos;

        ANKI_VK_CHECK(vkCreateDescriptorSetLayout(m_dev, &ci, nullptr, &m_layout));
    }

    // Create the pool
    {
        Array<VkDescriptorPoolSize, 2> sizes = {};
        sizes[0].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        sizes[0].descriptorCount = bindlessTextureCount;
        sizes[1].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        sizes[1].descriptorCount = bindlessImageCount;

        VkDescriptorPoolCreateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
        ci.maxSets = 1;
        ci.poolSizeCount = sizes.getSize();
        ci.pPoolSizes = &sizes[0];
        ci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;

        ANKI_VK_CHECK(vkCreateDescriptorPool(m_dev, &ci, nullptr, &m_pool));
    }

    // Create the descriptor set
    {
        VkDescriptorSetAllocateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
        ci.descriptorPool = m_pool;
        ci.descriptorSetCount = 1;
        ci.pSetLayouts = &m_layout;

        ANKI_VK_CHECK(vkAllocateDescriptorSets(m_dev, &ci, &m_dset));
    }

    // Init the free arrays
    {
        m_freeTexIndices.create(m_alloc, bindlessTextureCount);
        m_freeTexIndexCount = U16(m_freeTexIndices.getSize());
        for(U32 i = 0; i < m_freeTexIndices.getSize(); ++i)
        {
            m_freeTexIndices[i] = U16(m_freeTexIndices.getSize() - i - 1);
        }

        m_freeImgIndices.create(m_alloc, bindlessImageCount);
        m_freeImgIndexCount = U16(m_freeImgIndices.getSize());
        for(U32 i = 0; i < m_freeImgIndices.getSize(); ++i)
        {
            m_freeImgIndices[i] = U16(m_freeImgIndices.getSize() - i - 1);
        }
    }

    return Error::NONE;
}
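
// A sketch of how shaders are expected to consume the two bindings created above. The set index,
// array names and image format are illustrative assumptions, not something this file defines:
//
//   layout(set = 0, binding = 0) uniform texture2D u_bindlessTextures[];
//   layout(set = 0, binding = 1, rgba8) uniform image2D u_bindlessImages[];
//
// The UPDATE_AFTER_BIND, UPDATE_UNUSED_WHILE_PENDING and PARTIALLY_BOUND flags allow these arrays
// to stay sparsely populated and to be re-written while command buffers that use the set are pending.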

U32 DescriptorSetFactory::BindlessDescriptorSet::bindTexture(const VkImageView view, const VkImageLayout layout)
{
    ANKI_ASSERT(layout == VK_IMAGE_LAYOUT_GENERAL || layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
    ANKI_ASSERT(view);
    LockGuard<Mutex> lock(m_mtx);
    ANKI_ASSERT(m_freeTexIndexCount > 0 && "Out of indices");

    // Get the index
    --m_freeTexIndexCount;
    const U16 idx = m_freeTexIndices[m_freeTexIndexCount];
    ANKI_ASSERT(idx < m_freeTexIndices.getSize());

    // Update the set
    VkDescriptorImageInfo imageInf = {};
    imageInf.imageView = view;
    imageInf.imageLayout = layout;

    VkWriteDescriptorSet write = {};
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.pNext = nullptr;
    write.dstSet = m_dset;
    write.dstBinding = 0;
    write.descriptorCount = 1;
    write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
    write.dstArrayElement = idx;
    write.pImageInfo = &imageInf;

    vkUpdateDescriptorSets(m_dev, 1, &write, 0, nullptr);

    return idx;
}
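
// Minimal lifecycle sketch (the bindless instance and the view handle below are assumed to exist):
//
//   const U32 idx = bindless.bindTexture(view, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
//   // Shaders can now index binding 0 of the bindless set with idx.
//   bindless.unbindTexture(idx); // Return the slot once the GPU is done with the texture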

U32 DescriptorSetFactory::BindlessDescriptorSet::bindImage(const VkImageView view)
{
    ANKI_ASSERT(view);
    LockGuard<Mutex> lock(m_mtx);
    ANKI_ASSERT(m_freeImgIndexCount > 0 && "Out of indices");

    // Get the index
    --m_freeImgIndexCount;
    const U32 idx = m_freeImgIndices[m_freeImgIndexCount];
    ANKI_ASSERT(idx < m_freeImgIndices.getSize());

    // Update the set
    VkDescriptorImageInfo imageInf = {};
    imageInf.imageView = view;
    imageInf.imageLayout = VK_IMAGE_LAYOUT_GENERAL; // Storage images are always in the GENERAL layout

    VkWriteDescriptorSet write = {};
    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.pNext = nullptr;
    write.dstSet = m_dset;
    write.dstBinding = 1;
    write.descriptorCount = 1;
    write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
    write.dstArrayElement = idx;
    write.pImageInfo = &imageInf;

    vkUpdateDescriptorSets(m_dev, 1, &write, 0, nullptr);

    return idx;
}

void DescriptorSetFactory::BindlessDescriptorSet::unbindCommon(U32 idx, DynamicArray<U16>& freeIndices,
                                                               U16& freeIndexCount)
{
    ANKI_ASSERT(idx < freeIndices.getSize());
    LockGuard<Mutex> lock(m_mtx);
    ANKI_ASSERT(freeIndexCount < freeIndices.getSize());

    freeIndices[freeIndexCount] = U16(idx);
    ++freeIndexCount;

    // Sort the free indices to minimize fragmentation
    std::sort(&freeIndices[0], &freeIndices[0] + freeIndexCount, std::greater<U16>());

    // Make sure there are no duplicates
    for(U32 i = 1; i < freeIndexCount; ++i)
    {
        ANKI_ASSERT(freeIndices[i] != freeIndices[i - 1]);
    }
}
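
// Worked example of the free-index bookkeeping: with 4 slots the stack starts as [3, 2, 1, 0] and
// allocation pops from the back, so the first two binds hand out 0 and then 1. Unbinding 0 pushes
// it back and the descending sort restores [3, 2, 0], so the next bind reuses 0. Keeping the list
// sorted this way biases allocation towards low indices and keeps the occupied range compact.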

/// Descriptor set internal class.
class DS : public IntrusiveListEnabled<DS>
{
public:
    VkDescriptorSet m_handle = {};
    U64 m_lastFrameUsed = MAX_U64;
    U64 m_hash;
};

/// Per-thread allocator.
class alignas(ANKI_CACHE_LINE_SIZE) DSThreadAllocator
{
public:
    DSThreadAllocator(const DSThreadAllocator&) = delete; // Non-copyable
    DSThreadAllocator& operator=(const DSThreadAllocator&) = delete; // Non-copyable

    const DSLayoutCacheEntry* m_layoutEntry; ///< Know your father.
    ThreadId m_tid;
    DynamicArray<VkDescriptorPool> m_pools;
    U32 m_lastPoolDSCount = 0;
    U32 m_lastPoolFreeDSCount = 0;

    IntrusiveList<DS> m_list; ///< The front of the list holds the least recently used sets.
    HashMap<U64, DS*> m_hashmap;

    DSThreadAllocator(const DSLayoutCacheEntry* layout, ThreadId tid)
        : m_layoutEntry(layout)
        , m_tid(tid)
    {
        ANKI_ASSERT(m_layoutEntry);
    }

    ~DSThreadAllocator();

    ANKI_USE_RESULT Error init();
    ANKI_USE_RESULT Error createNewPool();

    ANKI_USE_RESULT Error getOrCreateSet(U64 hash,
                                         const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
                                         StackAllocator<U8>& tmpAlloc, const DS*& out)
    {
        out = tryFindSet(hash);
        if(out == nullptr)
        {
            ANKI_CHECK(newSet(hash, bindings, tmpAlloc, out));
        }

        return Error::NONE;
    }

private:
    ANKI_USE_RESULT const DS* tryFindSet(U64 hash);
    ANKI_USE_RESULT Error newSet(U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
                                 StackAllocator<U8>& tmpAlloc, const DS*& out);
    void writeSet(const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS& set,
                  StackAllocator<U8>& tmpAlloc);
};

/// Cache entry. It's built around a specific descriptor set layout.
class DSLayoutCacheEntry
{
public:
    DescriptorSetFactory* m_factory;

    U64 m_hash = 0; ///< Layout hash.
    VkDescriptorSetLayout m_layoutHandle = {};

    BitSet<MAX_BINDINGS_PER_DESCRIPTOR_SET, U32> m_activeBindings = {false};
    Array<U32, MAX_BINDINGS_PER_DESCRIPTOR_SET> m_bindingArraySize = {};
    Array<DescriptorType, MAX_BINDINGS_PER_DESCRIPTOR_SET> m_bindingType = {};
    U32 m_minBinding = MAX_U32;
    U32 m_maxBinding = 0;

    // Cache the create info
    Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> m_poolSizesCreateInf = {};
    VkDescriptorPoolCreateInfo m_poolCreateInf = {};

    DynamicArray<DSThreadAllocator*> m_threadAllocs;
    RWMutex m_threadAllocsMtx;

    DSLayoutCacheEntry(DescriptorSetFactory* factory)
        : m_factory(factory)
    {
    }

    ~DSLayoutCacheEntry();

    ANKI_USE_RESULT Error init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash);

    /// @note Thread-safe.
    ANKI_USE_RESULT Error getOrCreateThreadAllocator(ThreadId tid, DSThreadAllocator*& alloc);
};

DSThreadAllocator::~DSThreadAllocator()
{
    auto alloc = m_layoutEntry->m_factory->m_alloc;

    while(!m_list.isEmpty())
    {
        DS* ds = &m_list.getFront();
        m_list.popFront();
        alloc.deleteInstance(ds);
    }

    for(VkDescriptorPool pool : m_pools)
    {
        vkDestroyDescriptorPool(m_layoutEntry->m_factory->m_dev, pool, nullptr);
    }
    m_pools.destroy(alloc);

    m_hashmap.destroy(alloc);
}

Error DSThreadAllocator::init()
{
    ANKI_CHECK(createNewPool());
    return Error::NONE;
}

Error DSThreadAllocator::createNewPool()
{
    m_lastPoolDSCount = (m_lastPoolDSCount != 0) ? U32(F32(m_lastPoolDSCount) * DESCRIPTOR_POOL_SIZE_SCALE)
                                                 : DESCRIPTOR_POOL_INITIAL_SIZE;
    m_lastPoolFreeDSCount = m_lastPoolDSCount;

    // Set the create info
    Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> poolSizes;
    memcpy(&poolSizes[0], &m_layoutEntry->m_poolSizesCreateInf[0],
           sizeof(poolSizes[0]) * m_layoutEntry->m_poolCreateInf.poolSizeCount);

    for(U i = 0; i < m_layoutEntry->m_poolCreateInf.poolSizeCount; ++i)
    {
        poolSizes[i].descriptorCount *= m_lastPoolDSCount;
        ANKI_ASSERT(poolSizes[i].descriptorCount > 0);
    }

    VkDescriptorPoolCreateInfo ci = m_layoutEntry->m_poolCreateInf;
    ci.pPoolSizes = &poolSizes[0];
    ci.maxSets = m_lastPoolDSCount;

    // Create
    VkDescriptorPool pool;
    ANKI_VK_CHECK(vkCreateDescriptorPool(m_layoutEntry->m_factory->m_dev, &ci, nullptr, &pool));
    ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_POOL_CREATE, 1);

    // Push back
    m_pools.resize(m_layoutEntry->m_factory->m_alloc, m_pools.getSize() + 1);
    m_pools[m_pools.getSize() - 1] = pool;

    return Error::NONE;
}
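
// Pool growth sketch: each new pool is DESCRIPTOR_POOL_SIZE_SCALE times larger than the last one.
// Assuming, for illustration, an initial size of 64 and a scale of 2.0, successive pools hold 64,
// 128, 256, ... sets, so the number of vkCreateDescriptorPool calls grows only logarithmically
// with the total number of sets a thread ever needs.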

const DS* DSThreadAllocator::tryFindSet(U64 hash)
{
    ANKI_ASSERT(hash > 0);

    auto it = m_hashmap.find(hash);
    if(it == m_hashmap.getEnd())
    {
        return nullptr;
    }
    else
    {
        DS* ds = *it;

        // Move the set to the back of the list. The back holds the most recently used sets
        m_list.erase(ds);
        m_list.pushBack(ds);
        ds->m_lastFrameUsed = m_layoutEntry->m_factory->m_frameCount;

        return ds;
    }
}

Error DSThreadAllocator::newSet(U64 hash, const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
                                StackAllocator<U8>& tmpAlloc, const DS*& out_)
{
    DS* out = nullptr;

    // First, try to find an unused set to recycle
    const U64 crntFrame = m_layoutEntry->m_factory->m_frameCount;
    auto it = m_list.getBegin();
    const auto end = m_list.getEnd();
    while(it != end)
    {
        DS* set = &(*it);
        const U64 frameDiff = crntFrame - set->m_lastFrameUsed;
        if(frameDiff > DESCRIPTOR_FRAME_BUFFERING)
        {
            // Found something, recycle it: re-key it in the hashmap and move it to the back of the list
            auto it2 = m_hashmap.find(set->m_hash);
            ANKI_ASSERT(it2 != m_hashmap.getEnd());
            m_hashmap.erase(m_layoutEntry->m_factory->m_alloc, it2);
            m_list.erase(set);

            m_list.pushBack(set);
            m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, set);

            out = set;
            break;
        }
        ++it;
    }

    if(out == nullptr)
    {
        // Need to allocate a new one
        if(m_lastPoolFreeDSCount == 0)
        {
            // Can't allocate from the current pool, create a new one
            ANKI_CHECK(createNewPool());
        }

        --m_lastPoolFreeDSCount;

        VkDescriptorSetAllocateInfo ci = {};
        ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
        ci.descriptorPool = m_pools.getBack();
        ci.pSetLayouts = &m_layoutEntry->m_layoutHandle;
        ci.descriptorSetCount = 1;

        VkDescriptorSet handle;
        const VkResult rez = vkAllocateDescriptorSets(m_layoutEntry->m_factory->m_dev, &ci, &handle);
        (void)rez;
        ANKI_ASSERT(rez == VK_SUCCESS && "That allocation can't fail");
        ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_SET_CREATE, 1);

        out = m_layoutEntry->m_factory->m_alloc.newInstance<DS>();
        out->m_handle = handle;

        m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, out);
        m_list.pushBack(out);
    }

    ANKI_ASSERT(out);
    out->m_lastFrameUsed = crntFrame;
    out->m_hash = hash;

    // Finally, write it
    writeSet(bindings, *out, tmpAlloc);

    out_ = out;
    return Error::NONE;
}
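
// Recycling rationale: a set whose m_lastFrameUsed lags the current frame by more than
// DESCRIPTOR_FRAME_BUFFERING frames can no longer be referenced by an in-flight command buffer, so
// its VkDescriptorSet handle is rewritten in-place (re-keyed under the new hash) instead of
// allocating a fresh set from the pool.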

void DSThreadAllocator::writeSet(const Array<AnyBindingExtended, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings,
                                 const DS& set, StackAllocator<U8>& tmpAlloc)
{
    DynamicArrayAuto<VkWriteDescriptorSet> writeInfos(tmpAlloc);
    DynamicArrayAuto<VkDescriptorImageInfo> texInfos(tmpAlloc);
    DynamicArrayAuto<VkDescriptorBufferInfo> buffInfos(tmpAlloc);
    DynamicArrayAuto<VkWriteDescriptorSetAccelerationStructureKHR> asInfos(tmpAlloc);

    // First pass: Populate the VkDescriptorImageInfo and VkDescriptorBufferInfo
    for(U bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
    {
        if(m_layoutEntry->m_activeBindings.get(bindingIdx))
        {
            for(U arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
            {
                ANKI_ASSERT(bindings[bindingIdx].m_arraySize >= m_layoutEntry->m_bindingArraySize[bindingIdx]);
                const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single
                                                                              : bindings[bindingIdx].m_array[arrIdx];

                switch(b.m_type)
                {
                case DescriptorType::COMBINED_TEXTURE_SAMPLER:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = b.m_texAndSampler.m_samplerHandle;
                    info.imageView = b.m_texAndSampler.m_imgViewHandle;
                    info.imageLayout = b.m_texAndSampler.m_layout;
                    break;
                }
                case DescriptorType::TEXTURE:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = VK_NULL_HANDLE;
                    info.imageView = b.m_tex.m_imgViewHandle;
                    info.imageLayout = b.m_tex.m_layout;
                    break;
                }
                case DescriptorType::SAMPLER:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = b.m_sampler.m_samplerHandle;
                    info.imageView = VK_NULL_HANDLE;
                    info.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
                    break;
                }
                case DescriptorType::UNIFORM_BUFFER:
                case DescriptorType::STORAGE_BUFFER:
                {
                    VkDescriptorBufferInfo& info = *buffInfos.emplaceBack();
                    info.buffer = b.m_buff.m_buffHandle;
                    info.offset = 0;
                    info.range = (b.m_buff.m_range == MAX_PTR_SIZE) ? VK_WHOLE_SIZE : b.m_buff.m_range;
                    break;
                }
                case DescriptorType::IMAGE:
                {
                    VkDescriptorImageInfo& info = *texInfos.emplaceBack();
                    info.sampler = VK_NULL_HANDLE;
                    info.imageView = b.m_image.m_imgViewHandle;
                    info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
                    break;
                }
                case DescriptorType::ACCELERATION_STRUCTURE:
                {
                    VkWriteDescriptorSetAccelerationStructureKHR& info = *asInfos.emplaceBack();
                    info.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR;
                    info.pNext = nullptr;
                    info.accelerationStructureCount = 1;
                    info.pAccelerationStructures = &b.m_accelerationStructure.m_accelerationStructureHandle;
                    break;
                }
                default:
                    ANKI_ASSERT(0);
                }
            }
        }
    }

    // Second pass: Populate the VkWriteDescriptorSet with the VkDescriptorImageInfo and VkDescriptorBufferInfo
    U32 texCounter = 0;
    U32 buffCounter = 0;
    U32 asCounter = 0;

    VkWriteDescriptorSet writeTemplate = {};
    writeTemplate.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    writeTemplate.pNext = nullptr;
    writeTemplate.dstSet = set.m_handle;
    writeTemplate.descriptorCount = 1;

    for(U32 bindingIdx = m_layoutEntry->m_minBinding; bindingIdx <= m_layoutEntry->m_maxBinding; ++bindingIdx)
    {
        if(m_layoutEntry->m_activeBindings.get(bindingIdx))
        {
            for(U32 arrIdx = 0; arrIdx < m_layoutEntry->m_bindingArraySize[bindingIdx]; ++arrIdx)
            {
                const AnyBinding& b = (bindings[bindingIdx].m_arraySize == 1) ? bindings[bindingIdx].m_single
                                                                              : bindings[bindingIdx].m_array[arrIdx];

                VkWriteDescriptorSet& writeInfo = *writeInfos.emplaceBack(writeTemplate);
                writeInfo.descriptorType = convertDescriptorType(b.m_type);
                writeInfo.dstArrayElement = arrIdx;
                writeInfo.dstBinding = bindingIdx;

                switch(b.m_type)
                {
                case DescriptorType::COMBINED_TEXTURE_SAMPLER:
                case DescriptorType::TEXTURE:
                case DescriptorType::SAMPLER:
                case DescriptorType::IMAGE:
                    writeInfo.pImageInfo = &texInfos[texCounter++];
                    break;
                case DescriptorType::UNIFORM_BUFFER:
                case DescriptorType::STORAGE_BUFFER:
                    writeInfo.pBufferInfo = &buffInfos[buffCounter++];
                    break;
                case DescriptorType::ACCELERATION_STRUCTURE:
                    writeInfo.pNext = &asInfos[asCounter++];
                    break;
                default:
                    ANKI_ASSERT(0);
                }
            }
        }
    }

    // Write
    vkUpdateDescriptorSets(m_layoutEntry->m_factory->m_dev, writeInfos.getSize(),
                           (writeInfos.getSize() > 0) ? &writeInfos[0] : nullptr, 0, nullptr);
}
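
// Note on the two passes above: VkWriteDescriptorSet only stores raw pointers into texInfos,
// buffInfos and asInfos. Filling those arrays completely in the first pass means any reallocation
// of their storage happens before the second pass takes pointers into them, so the pointers handed
// to vkUpdateDescriptorSets remain valid.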

DSLayoutCacheEntry::~DSLayoutCacheEntry()
{
    auto alloc = m_factory->m_alloc;

    for(DSThreadAllocator* a : m_threadAllocs)
    {
        alloc.deleteInstance(a);
    }
    m_threadAllocs.destroy(alloc);

    if(m_layoutHandle)
    {
        vkDestroyDescriptorSetLayout(m_factory->m_dev, m_layoutHandle, nullptr);
    }
}

Error DSLayoutCacheEntry::init(const DescriptorBinding* bindings, U32 bindingCount, U64 hash)
{
    ANKI_ASSERT(bindings);
    ANKI_ASSERT(hash > 0);
    m_hash = hash;

    // Create the VK layout
    Array<VkDescriptorSetLayoutBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> vkBindings;
    VkDescriptorSetLayoutCreateInfo ci = {};
    ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;

    for(U i = 0; i < bindingCount; ++i)
    {
        VkDescriptorSetLayoutBinding& vk = vkBindings[i];
        const DescriptorBinding& ak = bindings[i];

        vk.binding = ak.m_binding;
        vk.descriptorCount = ak.m_arraySize;
        vk.descriptorType = convertDescriptorType(ak.m_type);
        vk.pImmutableSamplers = nullptr;
        vk.stageFlags = convertShaderTypeBit(ak.m_stageMask);

        ANKI_ASSERT(m_activeBindings.get(ak.m_binding) == false);
        m_activeBindings.set(ak.m_binding);
        m_bindingType[ak.m_binding] = ak.m_type;
        m_bindingArraySize[ak.m_binding] = ak.m_arraySize;
        m_minBinding = min<U32>(m_minBinding, ak.m_binding);
        m_maxBinding = max<U32>(m_maxBinding, ak.m_binding);
    }

    ci.bindingCount = bindingCount;
    ci.pBindings = &vkBindings[0];

    ANKI_VK_CHECK(vkCreateDescriptorSetLayout(m_factory->m_dev, &ci, nullptr, &m_layoutHandle));

    // Create the pool info. Merge bindings of the same descriptor type into a single pool size
    U32 poolSizeCount = 0;
    for(U i = 0; i < bindingCount; ++i)
    {
        U j;
        for(j = 0; j < poolSizeCount; ++j)
        {
            if(m_poolSizesCreateInf[j].type == convertDescriptorType(bindings[i].m_type))
            {
                m_poolSizesCreateInf[j].descriptorCount += bindings[i].m_arraySize;
                break;
            }
        }

        if(j == poolSizeCount)
        {
            m_poolSizesCreateInf[poolSizeCount].type = convertDescriptorType(bindings[i].m_type);
            m_poolSizesCreateInf[poolSizeCount].descriptorCount = bindings[i].m_arraySize;
            ++poolSizeCount;
        }
    }

    if(poolSizeCount == 0)
    {
        // A zero poolSizeCount means the DS layout has 0 descriptors. Since the pool sizes can't be zero, put
        // something in them
        m_poolSizesCreateInf[0].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        m_poolSizesCreateInf[0].descriptorCount = 1;
        ++poolSizeCount;
    }
    ANKI_ASSERT(poolSizeCount > 0);

    m_poolCreateInf.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    m_poolCreateInf.poolSizeCount = poolSizeCount;

    return Error::NONE;
}
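
// Pool-size merging example: a layout with bindings {UNIFORM_BUFFER x1, TEXTURE x4,
// UNIFORM_BUFFER x2} collapses into two pool sizes, {UNIFORM_BUFFER, 3} and {TEXTURE, 4}.
// createNewPool() later multiplies these per-set counts by the pool's maxSets.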

Error DSLayoutCacheEntry::getOrCreateThreadAllocator(ThreadId tid, DSThreadAllocator*& alloc)
{
    alloc = nullptr;

    class Comp
    {
    public:
        Bool operator()(const DSThreadAllocator* a, ThreadId tid) const
        {
            return a->m_tid < tid;
        }

        Bool operator()(ThreadId tid, const DSThreadAllocator* a) const
        {
            return tid < a->m_tid;
        }
    };

    // Find using binary search
    {
        RLockGuard<RWMutex> lock(m_threadAllocsMtx);
        auto it = binarySearch(m_threadAllocs.getBegin(), m_threadAllocs.getEnd(), tid, Comp());
        alloc = (it != m_threadAllocs.getEnd()) ? *it : nullptr;
    }

    if(alloc == nullptr)
    {
        // Need to create one
        WLockGuard<RWMutex> lock(m_threadAllocsMtx);

        // Search again
        auto it = binarySearch(m_threadAllocs.getBegin(), m_threadAllocs.getEnd(), tid, Comp());
        alloc = (it != m_threadAllocs.getEnd()) ? *it : nullptr;

        // Create
        if(alloc == nullptr)
        {
            alloc = m_factory->m_alloc.newInstance<DSThreadAllocator>(this, tid);
            ANKI_CHECK(alloc->init());

            m_threadAllocs.resize(m_factory->m_alloc, m_threadAllocs.getSize() + 1);
            m_threadAllocs[m_threadAllocs.getSize() - 1] = alloc;

            // Sort for fast find
            std::sort(m_threadAllocs.getBegin(), m_threadAllocs.getEnd(),
                      [](const DSThreadAllocator* a, const DSThreadAllocator* b) {
                          return a->m_tid < b->m_tid;
                      });
        }
    }

    ANKI_ASSERT(alloc);
    ANKI_ASSERT(alloc->m_tid == tid);
    return Error::NONE;
}
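
// This is the classic double-checked locking pattern: an optimistic lookup under the read lock
// and, on a miss, a second lookup under the write lock before creating. The second search is what
// stops two threads that both missed under the read lock from creating duplicate allocators for
// the same ThreadId.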

void DescriptorSetState::flush(U64& hash, Array<PtrSize, MAX_BINDINGS_PER_DESCRIPTOR_SET>& dynamicOffsets,
                               U32& dynamicOffsetCount, Bool& bindlessDSet)
{
    // Set some values
    hash = 0;
    dynamicOffsetCount = 0;
    bindlessDSet = false;

    if(!m_bindlessDSetBound)
    {
        // Get the cache entry
        ANKI_ASSERT(m_layout.m_entry);
        const DSLayoutCacheEntry& entry = *m_layout.m_entry;

        // Early out if nothing happened
        const Bool anyActiveBindingDirty = !!(entry.m_activeBindings & m_dirtyBindings);
        if(!anyActiveBindingDirty && !m_layoutDirty)
        {
            return;
        }

        Bool dynamicOffsetsDirty = false;

        // Compute the hash
        Array<U64, MAX_BINDINGS_PER_DESCRIPTOR_SET * 2 * 2> toHash;
        U toHashCount = 0;

        const U minBinding = entry.m_minBinding;
        const U maxBinding = entry.m_maxBinding;
        for(U i = minBinding; i <= maxBinding; ++i)
        {
            if(entry.m_activeBindings.get(i))
            {
                ANKI_ASSERT(m_bindingSet.get(i) && "Forgot to bind");
                ANKI_ASSERT(m_bindings[i].m_arraySize >= entry.m_bindingArraySize[i] && "Bound less");

                const Bool crntBindingDirty = m_dirtyBindings.get(i);
                m_dirtyBindings.unset(i);

                for(U arrIdx = 0; arrIdx < entry.m_bindingArraySize[i]; ++arrIdx)
                {
                    ANKI_ASSERT(arrIdx < m_bindings[i].m_arraySize);
                    if(arrIdx > 0)
                    {
                        ANKI_ASSERT(m_bindings[i].m_array[arrIdx].m_type == m_bindings[i].m_array[arrIdx - 1].m_type);
                    }

                    const AnyBinding& anyBinding =
                        (m_bindings[i].m_arraySize == 1) ? m_bindings[i].m_single : m_bindings[i].m_array[arrIdx];
                    ANKI_ASSERT(anyBinding.m_uuids[0] != 0 && "Forgot to bind");

                    toHash[toHashCount++] = anyBinding.m_uuids[0];

                    switch(entry.m_bindingType[i])
                    {
                    case DescriptorType::COMBINED_TEXTURE_SAMPLER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::COMBINED_TEXTURE_SAMPLER
                                    && "Bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_uuids[1];
                        toHash[toHashCount++] = U64(anyBinding.m_texAndSampler.m_layout);
                        break;
                    case DescriptorType::TEXTURE:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::TEXTURE && "Bound the wrong type");
                        toHash[toHashCount++] = U64(anyBinding.m_tex.m_layout);
                        break;
                    case DescriptorType::SAMPLER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::SAMPLER && "Bound the wrong type");
                        break;
                    case DescriptorType::UNIFORM_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::UNIFORM_BUFFER && "Bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_buff.m_range;
                        dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
                        dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
                        break;
                    case DescriptorType::STORAGE_BUFFER:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::STORAGE_BUFFER && "Bound the wrong type");
                        toHash[toHashCount++] = anyBinding.m_buff.m_range;
                        dynamicOffsets[dynamicOffsetCount++] = anyBinding.m_buff.m_offset;
                        dynamicOffsetsDirty = dynamicOffsetsDirty || crntBindingDirty;
                        break;
                    case DescriptorType::IMAGE:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::IMAGE && "Bound the wrong type");
                        break;
                    case DescriptorType::ACCELERATION_STRUCTURE:
                        ANKI_ASSERT(anyBinding.m_type == DescriptorType::ACCELERATION_STRUCTURE
                                    && "Bound the wrong type");
                        break;
                    default:
                        ANKI_ASSERT(0);
                    }
                }
            }
        }

        const U64 newHash = computeHash(&toHash[0], toHashCount * sizeof(U64));
        if(newHash != m_lastHash || dynamicOffsetsDirty || m_layoutDirty)
        {
            // The DS needs a rebind
            m_lastHash = newHash;
            hash = newHash;
        }
        else
        {
            // All clean, keep the hash equal to 0
        }

        m_layoutDirty = false;
    }
    else
    {
        // Bindless set
        if(!m_bindlessDSetDirty && !m_layoutDirty)
        {
            return;
        }

        bindlessDSet = true;
        hash = 1;
        m_bindlessDSetDirty = false;
        m_layoutDirty = false;
    }
}
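
// Contract of flush(): hash == 0 means "nothing changed, keep the currently bound set"; hash == 1
// is reserved for the bindless set; any other value identifies the exact binding state and doubles
// as the lookup key in DSThreadAllocator::getOrCreateSet().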

DescriptorSetFactory::~DescriptorSetFactory()
{
}

Error DescriptorSetFactory::init(const GrAllocator<U8>& alloc, VkDevice dev, U32 bindlessTextureCount,
                                 U32 bindlessImageCount)
{
    m_alloc = alloc;
    m_dev = dev;

    m_bindless = m_alloc.newInstance<BindlessDescriptorSet>();
    ANKI_CHECK(m_bindless->init(alloc, dev, bindlessTextureCount, bindlessImageCount));
    m_bindlessTextureCount = bindlessTextureCount;
    m_bindlessImageCount = bindlessImageCount;

    return Error::NONE;
}

void DescriptorSetFactory::destroy()
{
    for(DSLayoutCacheEntry* l : m_caches)
    {
        m_alloc.deleteInstance(l);
    }
    m_caches.destroy(m_alloc);

    if(m_bindless)
    {
        m_alloc.deleteInstance(m_bindless);
    }
}

Error DescriptorSetFactory::newDescriptorSetLayout(const DescriptorSetLayoutInitInfo& init, DescriptorSetLayout& layout)
{
    // Compute the hash for the layout
    Array<DescriptorBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> bindings;
    const U32 bindingCount = init.m_bindings.getSize();
    U64 hash;

    if(init.m_bindings.getSize() > 0)
    {
        memcpy(bindings.getBegin(), init.m_bindings.getBegin(), init.m_bindings.getSizeInBytes());
        std::sort(bindings.getBegin(), bindings.getBegin() + bindingCount,
                  [](const DescriptorBinding& a, const DescriptorBinding& b) {
                      return a.m_binding < b.m_binding;
                  });

        hash = computeHash(&bindings[0], init.m_bindings.getSizeInBytes());
        ANKI_ASSERT(hash != 1);
    }
    else
    {
        hash = 1;
    }

    // Identify if the DS is the bindless one. It is only if every binding matches the bindless criteria
    Bool isBindless = false;
    if(bindingCount > 0)
    {
        isBindless = true;
        for(U32 i = 0; i < bindingCount; ++i)
        {
            const DescriptorBinding& binding = bindings[i];
            if(binding.m_binding == 0 && binding.m_type == DescriptorType::TEXTURE
               && binding.m_arraySize == m_bindlessTextureCount)
            {
                // All good
            }
            else if(binding.m_binding == 1 && binding.m_type == DescriptorType::IMAGE
                    && binding.m_arraySize == m_bindlessImageCount)
            {
                // All good
            }
            else
            {
                isBindless = false;
            }
        }
    }

    // Find or create the cache entry
    if(isBindless)
    {
        layout.m_handle = m_bindless->getDescriptorSetLayout();
        layout.m_entry = nullptr;
    }
    else
    {
        LockGuard<SpinLock> lock(m_cachesMtx);

        DSLayoutCacheEntry* cache = nullptr;
        U count = 0;
        for(DSLayoutCacheEntry* it : m_caches)
        {
            if(it->m_hash == hash)
            {
                cache = it;
                break;
            }
            ++count;
        }

        if(cache == nullptr)
        {
            cache = m_alloc.newInstance<DSLayoutCacheEntry>(this);
            ANKI_CHECK(cache->init(bindings.getBegin(), bindingCount, hash));

            m_caches.resize(m_alloc, m_caches.getSize() + 1);
            m_caches[m_caches.getSize() - 1] = cache;
        }

        // Set the layout
        layout.m_handle = cache->m_layoutHandle;
        layout.m_entry = cache;
    }

    return Error::NONE;
}
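
// Example of a layout that takes the bindless path, assuming the factory was initialized with
// bindlessTextureCount == 1024 and bindlessImageCount == 512:
//
//   binding 0: DescriptorType::TEXTURE, arraySize == 1024
//   binding 1: DescriptorType::IMAGE, arraySize == 512
//
// Any other combination of bindings, types or array sizes falls through to the cached
// DSLayoutCacheEntry path.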

Error DescriptorSetFactory::newDescriptorSet(ThreadId tid, StackAllocator<U8>& tmpAlloc, DescriptorSetState& state,
                                             DescriptorSet& set, Bool& dirty,
                                             Array<PtrSize, MAX_BINDINGS_PER_DESCRIPTOR_SET>& dynamicOffsets,
                                             U32& dynamicOffsetCount)
{
    ANKI_TRACE_SCOPED_EVENT(VK_DESCRIPTOR_SET_GET_OR_CREATE);

    U64 hash;
    Bool bindlessDSet;
    state.flush(hash, dynamicOffsets, dynamicOffsetCount, bindlessDSet);

    if(hash == 0)
    {
        dirty = false;
        return Error::NONE;
    }
    else
    {
        dirty = true;

        if(!bindlessDSet)
        {
            DescriptorSetLayout layout = state.m_layout;
            DSLayoutCacheEntry& entry = *layout.m_entry;

            // Get the thread allocator
            DSThreadAllocator* alloc;
            ANKI_CHECK(entry.getOrCreateThreadAllocator(tid, alloc));

            // Finally, allocate
            const DS* s;
            ANKI_CHECK(alloc->getOrCreateSet(hash, state.m_bindings, tmpAlloc, s));
            set.m_handle = s->m_handle;
            ANKI_ASSERT(set.m_handle != VK_NULL_HANDLE);
        }
        else
        {
            set = m_bindless->getDescriptorSet();
        }
    }

    return Error::NONE;
}
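
// End-to-end sketch of the per-draw/per-dispatch path (the local names below are illustrative):
//
//   DescriptorSet set;
//   Bool dirty;
//   Array<PtrSize, MAX_BINDINGS_PER_DESCRIPTOR_SET> dynOffsets;
//   U32 dynOffsetCount;
//   ANKI_CHECK(factory.newDescriptorSet(tid, tmpAlloc, state, set, dirty, dynOffsets, dynOffsetCount));
//   if(dirty)
//   {
//       // Bind set.m_handle with vkCmdBindDescriptorSets(), passing the dynamic offsets along.
//   }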

U32 DescriptorSetFactory::bindBindlessTexture(const VkImageView view, const VkImageLayout layout)
{
    ANKI_ASSERT(m_bindless);
    return m_bindless->bindTexture(view, layout);
}

U32 DescriptorSetFactory::bindBindlessImage(const VkImageView view)
{
    ANKI_ASSERT(m_bindless);
    return m_bindless->bindImage(view);
}

void DescriptorSetFactory::unbindBindlessTexture(U32 idx)
{
    ANKI_ASSERT(m_bindless);
    m_bindless->unbindTexture(idx);
}

void DescriptorSetFactory::unbindBindlessImage(U32 idx)
{
    ANKI_ASSERT(m_bindless);
    m_bindless->unbindImage(idx);
}

} // end namespace anki