// File: DescriptorSet.cpp
  1. // Copyright (C) 2009-2017, Panagiotis Christopoulos Charitos and contributors.
  2. // All rights reserved.
  3. // Code licensed under the BSD License.
  4. // http://www.anki3d.org/LICENSE
  5. #include <anki/gr/vulkan/DescriptorSet.h>
  6. #include <anki/gr/Buffer.h>
  7. #include <anki/gr/vulkan/BufferImpl.h>
  8. #include <anki/util/List.h>
  9. #include <anki/util/HashMap.h>
  10. #include <anki/core/Trace.h>
  11. #include <algorithm>
  12. namespace anki
  13. {
/// Descriptor set internal class. Lives in both an LRU intrusive list and a hash map owned by DSThreadAllocator.
class DS : public IntrusiveListEnabled<DS>
{
public:
	VkDescriptorSet m_handle = {}; ///< The VK descriptor set. Owned by the pool it was allocated from.
	U64 m_lastFrameUsed = MAX_U64; ///< Frame counter stamp of the last use; drives recycling in newSet().
	U64 m_hash; ///< Hash of the bindings currently written to the set; key in DSThreadAllocator::m_hashmap.
};
/// Per thread allocator. Owns the VK descriptor pools and the cached descriptor sets for one (layout, thread) pair.
/// Being per-thread means no locking is needed on the allocation path.
class DSThreadAllocator : public NonCopyable
{
public:
	const DSLayoutCacheEntry* m_layoutEntry; ///< Know your father.

	ThreadId m_tid; ///< The thread this allocator serves.
	DynamicArray<VkDescriptorPool> m_pools; ///< All pools ever created; sets are allocated from the last one.
	U32 m_lastPoolDSCount = 0; ///< Capacity (in sets) of the most recent pool.
	U32 m_lastPoolFreeDSCount = 0; ///< Remaining free sets in the most recent pool.

	IntrusiveList<DS> m_list; ///< At the left of the list are the least used sets.
	HashMap<U64, DS*> m_hashmap; ///< Bindings-hash -> cached set, for O(1) reuse lookups.

	DSThreadAllocator(const DSLayoutCacheEntry* layout, ThreadId tid)
		: m_layoutEntry(layout)
		, m_tid(tid)
	{
		ANKI_ASSERT(m_layoutEntry);
	}

	~DSThreadAllocator();

	ANKI_USE_RESULT Error init();
	ANKI_USE_RESULT Error createNewPool();

	/// Return a descriptor set written with the given bindings: a cached one on hash hit, a new/recycled one otherwise.
	ANKI_USE_RESULT Error getOrCreateSet(
		U64 hash, const Array<AnyBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS*& out)
	{
		out = tryFindSet(hash);
		if(out == nullptr)
		{
			ANKI_CHECK(newSet(hash, bindings, out));
		}

		return Error::NONE;
	}

private:
	/// Lookup by hash; on hit also refreshes the set's LRU position and frame stamp.
	ANKI_USE_RESULT const DS* tryFindSet(U64 hash);

	/// Recycle an expired set or allocate a fresh one, then write the bindings into it.
	ANKI_USE_RESULT Error newSet(
		U64 hash, const Array<AnyBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS*& out);

	/// Issue the vkUpdateDescriptorSets call for the given bindings.
	void writeSet(const Array<AnyBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS& set);
};
/// Cache entry. It's built around a specific descriptor set layout.
class DSLayoutCacheEntry
{
public:
	DescriptorSetFactory* m_factory; ///< The owning factory (provides allocator, device, frame counter).

	U64 m_hash = 0; ///< Layout hash.
	VkDescriptorSetLayout m_layoutHandle = {}; ///< The VK layout; shared by all sets of this entry.

	BitSet<MAX_BINDINGS_PER_DESCRIPTOR_SET, U8> m_activeBindings = {false}; ///< Which binding slots the layout uses.
	Array<DescriptorType, MAX_BINDINGS_PER_DESCRIPTOR_SET> m_bindingType = {}; ///< Type per active binding slot.
	U32 m_minBinding = MAX_U32; ///< Lowest active binding index.
	U32 m_maxBinding = 0; ///< Highest active binding index.

	// Cache the create info
	Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> m_poolSizesCreateInf = {};
	VkDescriptorPoolCreateInfo m_poolCreateInf = {};

	DynamicArray<DSThreadAllocator*> m_threadAllocs; ///< One allocator per thread, kept sorted by thread id.
	SpinLock m_threadAllocsMtx; ///< Guards m_threadAllocs.

	DSLayoutCacheEntry(DescriptorSetFactory* factory)
		: m_factory(factory)
	{
	}

	~DSLayoutCacheEntry();

	ANKI_USE_RESULT Error init(const DescriptorBinding* bindings, U bindingCount, U64 hash);

	/// Get (or lazily create) the allocator that serves the given thread.
	ANKI_USE_RESULT Error getOrCreateThreadAllocator(ThreadId tid, DSThreadAllocator*& alloc);
};
  82. DSThreadAllocator::~DSThreadAllocator()
  83. {
  84. auto alloc = m_layoutEntry->m_factory->m_alloc;
  85. while(!m_list.isEmpty())
  86. {
  87. DS* ds = &m_list.getFront();
  88. m_list.popFront();
  89. alloc.deleteInstance(ds);
  90. }
  91. for(VkDescriptorPool pool : m_pools)
  92. {
  93. vkDestroyDescriptorPool(m_layoutEntry->m_factory->m_dev, pool, nullptr);
  94. }
  95. m_pools.destroy(alloc);
  96. m_hashmap.destroy(alloc);
  97. }
// Initialize the allocator by creating the first descriptor pool. newSet() grows with more pools on exhaustion.
Error DSThreadAllocator::init()
{
	ANKI_CHECK(createNewPool());
	return Error::NONE;
}
  103. Error DSThreadAllocator::createNewPool()
  104. {
  105. m_lastPoolDSCount =
  106. (m_lastPoolDSCount != 0) ? (m_lastPoolDSCount * DESCRIPTOR_POOL_SIZE_SCALE) : DESCRIPTOR_POOL_INITIAL_SIZE;
  107. m_lastPoolFreeDSCount = m_lastPoolDSCount;
  108. // Set the create info
  109. Array<VkDescriptorPoolSize, U(DescriptorType::COUNT)> poolSizes;
  110. memcpy(&poolSizes[0],
  111. &m_layoutEntry->m_poolSizesCreateInf[0],
  112. sizeof(poolSizes[0]) * m_layoutEntry->m_poolCreateInf.poolSizeCount);
  113. for(U i = 0; i < m_layoutEntry->m_poolCreateInf.poolSizeCount; ++i)
  114. {
  115. poolSizes[i].descriptorCount *= m_lastPoolDSCount;
  116. ANKI_ASSERT(poolSizes[i].descriptorCount > 0);
  117. }
  118. VkDescriptorPoolCreateInfo ci = m_layoutEntry->m_poolCreateInf;
  119. ci.pPoolSizes = &poolSizes[0];
  120. ci.maxSets = m_lastPoolDSCount;
  121. // Create
  122. VkDescriptorPool pool;
  123. ANKI_VK_CHECK(vkCreateDescriptorPool(m_layoutEntry->m_factory->m_dev, &ci, nullptr, &pool));
  124. ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_POOL_CREATE, 1);
  125. // Push back
  126. m_pools.resize(m_layoutEntry->m_factory->m_alloc, m_pools.getSize() + 1);
  127. m_pools[m_pools.getSize() - 1] = pool;
  128. return Error::NONE;
  129. }
  130. const DS* DSThreadAllocator::tryFindSet(U64 hash)
  131. {
  132. ANKI_ASSERT(hash > 0);
  133. auto it = m_hashmap.find(hash);
  134. if(it == m_hashmap.getEnd())
  135. {
  136. return nullptr;
  137. }
  138. else
  139. {
  140. DS* ds = *it;
  141. // Remove from the list and place at the end of the list
  142. m_list.erase(ds);
  143. m_list.pushBack(ds);
  144. ds->m_lastFrameUsed = m_layoutEntry->m_factory->m_frameCount;
  145. return ds;
  146. }
  147. }
// Produce a set for the given bindings hash: prefer recycling a set that hasn't been used for more than
// DESCRIPTOR_FRAME_BUFFERING frames (so the GPU can't still be reading it), otherwise allocate a fresh one.
// Either way the bindings are (re)written into the set before returning.
Error DSThreadAllocator::newSet(
	U64 hash, const Array<AnyBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS*& out_)
{
	DS* out = nullptr;

	// First try to see if there are unused to recycle
	const U64 crntFrame = m_layoutEntry->m_factory->m_frameCount;
	auto it = m_list.getBegin();
	const auto end = m_list.getEnd();
	while(it != end)
	{
		DS* set = &(*it);
		U64 frameDiff = crntFrame - set->m_lastFrameUsed;
		if(frameDiff > DESCRIPTOR_FRAME_BUFFERING)
		{
			// Found something, recycle
			// Re-key the hashmap entry (old bindings hash -> new one) and move the set to the MRU end of the list.
			auto it2 = m_hashmap.find(set->m_hash);
			ANKI_ASSERT(it2 != m_hashmap.getEnd());
			m_hashmap.erase(m_layoutEntry->m_factory->m_alloc, it2);
			m_list.erase(set);
			m_list.pushBack(set);
			m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, set);
			out = set;
			break;
		}
		++it;
	}

	if(out == nullptr)
	{
		// Need to allocate one
		if(m_lastPoolFreeDSCount == 0)
		{
			// Can't allocate one from the current pool, create new
			ANKI_CHECK(createNewPool());
		}

		--m_lastPoolFreeDSCount;

		// Allocate from the newest pool; capacity was reserved above, so failure is a programming error.
		VkDescriptorSetAllocateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		ci.descriptorPool = m_pools.getBack();
		ci.pSetLayouts = &m_layoutEntry->m_layoutHandle;
		ci.descriptorSetCount = 1;

		VkDescriptorSet handle;
		VkResult rez = vkAllocateDescriptorSets(m_layoutEntry->m_factory->m_dev, &ci, &handle);
		(void)rez;
		ANKI_ASSERT(rez == VK_SUCCESS && "That allocation can't fail");
		ANKI_TRACE_INC_COUNTER(VK_DESCRIPTOR_SET_CREATE, 1);

		// Track the new set in both the hashmap and the MRU end of the list.
		out = m_layoutEntry->m_factory->m_alloc.newInstance<DS>();
		out->m_handle = handle;

		m_hashmap.emplace(m_layoutEntry->m_factory->m_alloc, hash, out);
		m_list.pushBack(out);
	}

	ANKI_ASSERT(out);
	out->m_lastFrameUsed = crntFrame;
	out->m_hash = hash;

	// Finally, write it
	writeSet(bindings, *out);

	out_ = out;
	return Error::NONE;
}
// Build the VkWriteDescriptorSet array for every active binding of the layout and flush it with a single
// vkUpdateDescriptorSets call. The image/buffer info structs live in stack arrays; the writes point into them,
// which is safe because vkUpdateDescriptorSets consumes everything before this function returns.
void DSThreadAllocator::writeSet(const Array<AnyBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET>& bindings, const DS& set)
{
	Array<VkWriteDescriptorSet, MAX_BINDINGS_PER_DESCRIPTOR_SET> writes;
	U writeCount = 0;
	Array<VkDescriptorImageInfo, MAX_TEXTURE_BINDINGS + MAX_IMAGE_BINDINGS> tex;
	U texCount = 0;
	Array<VkDescriptorBufferInfo, MAX_UNIFORM_BUFFER_BINDINGS + MAX_STORAGE_BUFFER_BINDINGS> buff;
	U buffCount = 0;

	// Common fields for every write; per-binding fields are patched below.
	VkWriteDescriptorSet writeTemplate = {};
	writeTemplate.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
	writeTemplate.pNext = nullptr;
	writeTemplate.dstSet = set.m_handle;
	writeTemplate.descriptorCount = 1;

	// Only the [minBinding, maxBinding] window can contain active bindings.
	for(U i = m_layoutEntry->m_minBinding; i <= m_layoutEntry->m_maxBinding; ++i)
	{
		if(m_layoutEntry->m_activeBindings.get(i))
		{
			const AnyBinding& b = bindings[i];

			VkWriteDescriptorSet& w = writes[writeCount++];
			w = writeTemplate;
			w.dstBinding = i;
			w.descriptorType = convertDescriptorType(b.m_type);

			switch(b.m_type)
			{
			case DescriptorType::TEXTURE:
				// Combined image+sampler.
				tex[texCount].sampler = b.m_tex.m_sampler->getHandle();
				tex[texCount].imageView = b.m_tex.m_tex->getOrCreateResourceGroupView(b.m_tex.m_aspect);
				tex[texCount].imageLayout = b.m_tex.m_layout;

				w.pImageInfo = &tex[texCount];
				++texCount;
				break;
			case DescriptorType::UNIFORM_BUFFER:
			case DescriptorType::STORAGE_BUFFER:
				// Offset stays 0 here: actual offsets are passed as dynamic offsets at bind time.
				buff[buffCount].buffer = b.m_buff.m_buff->getHandle();
				buff[buffCount].offset = 0;
				buff[buffCount].range = (b.m_buff.m_range == MAX_PTR_SIZE) ? VK_WHOLE_SIZE : b.m_buff.m_range;

				w.pBufferInfo = &buff[buffCount];
				++buffCount;
				break;
			case DescriptorType::IMAGE:
				// Storage image: no sampler, always GENERAL layout.
				tex[texCount].sampler = VK_NULL_HANDLE;
				tex[texCount].imageView = b.m_image.m_texView->m_handle;
				tex[texCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;

				w.pImageInfo = &tex[texCount];
				++texCount;
				break;
			default:
				ANKI_ASSERT(0);
			}
		}
	}

	vkUpdateDescriptorSets(m_layoutEntry->m_factory->m_dev, writeCount, &writes[0], 0, nullptr);
}
  259. DSLayoutCacheEntry::~DSLayoutCacheEntry()
  260. {
  261. auto alloc = m_factory->m_alloc;
  262. for(DSThreadAllocator* a : m_threadAllocs)
  263. {
  264. alloc.deleteInstance(a);
  265. }
  266. m_threadAllocs.destroy(alloc);
  267. if(m_layoutHandle)
  268. {
  269. vkDestroyDescriptorSetLayout(m_factory->m_dev, m_layoutHandle, nullptr);
  270. }
  271. }
  272. Error DSLayoutCacheEntry::init(const DescriptorBinding* bindings, U bindingCount, U64 hash)
  273. {
  274. ANKI_ASSERT(bindings);
  275. ANKI_ASSERT(hash > 0);
  276. m_hash = hash;
  277. // Create the VK layout
  278. Array<VkDescriptorSetLayoutBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> vkBindings;
  279. VkDescriptorSetLayoutCreateInfo ci = {};
  280. ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
  281. for(U i = 0; i < bindingCount; ++i)
  282. {
  283. VkDescriptorSetLayoutBinding& vk = vkBindings[i];
  284. const DescriptorBinding& ak = bindings[i];
  285. vk.binding = ak.m_binding;
  286. vk.descriptorCount = 1;
  287. vk.descriptorType = convertDescriptorType(ak.m_type);
  288. vk.pImmutableSamplers = nullptr;
  289. vk.stageFlags = convertShaderTypeBit(ak.m_stageMask);
  290. ANKI_ASSERT(m_activeBindings.get(ak.m_binding) == false);
  291. m_activeBindings.set(ak.m_binding);
  292. m_bindingType[ak.m_binding] = ak.m_type;
  293. m_minBinding = min<U32>(m_minBinding, ak.m_binding);
  294. m_maxBinding = max<U32>(m_maxBinding, ak.m_binding);
  295. }
  296. ci.bindingCount = bindingCount;
  297. ci.pBindings = &vkBindings[0];
  298. ANKI_VK_CHECK(vkCreateDescriptorSetLayout(m_factory->m_dev, &ci, nullptr, &m_layoutHandle));
  299. // Create the pool info
  300. U poolSizeCount = 0;
  301. for(U i = 0; i < bindingCount; ++i)
  302. {
  303. U j;
  304. for(j = 0; j < poolSizeCount; ++j)
  305. {
  306. if(m_poolSizesCreateInf[j].type == convertDescriptorType(bindings[i].m_type))
  307. {
  308. ++m_poolSizesCreateInf[j].descriptorCount;
  309. break;
  310. }
  311. }
  312. if(j == poolSizeCount)
  313. {
  314. m_poolSizesCreateInf[poolSizeCount].type = convertDescriptorType(bindings[i].m_type);
  315. switch(m_poolSizesCreateInf[poolSizeCount].type)
  316. {
  317. case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
  318. m_poolSizesCreateInf[poolSizeCount].descriptorCount = MAX_TEXTURE_BINDINGS;
  319. break;
  320. case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
  321. m_poolSizesCreateInf[poolSizeCount].descriptorCount = MAX_UNIFORM_BUFFER_BINDINGS;
  322. break;
  323. case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
  324. m_poolSizesCreateInf[poolSizeCount].descriptorCount = MAX_STORAGE_BUFFER_BINDINGS;
  325. break;
  326. case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
  327. m_poolSizesCreateInf[poolSizeCount].descriptorCount = MAX_IMAGE_BINDINGS;
  328. break;
  329. default:
  330. ANKI_ASSERT(0);
  331. }
  332. m_poolSizesCreateInf[poolSizeCount].descriptorCount = 1;
  333. ++poolSizeCount;
  334. }
  335. }
  336. ANKI_ASSERT(poolSizeCount > 0);
  337. m_poolCreateInf.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
  338. m_poolCreateInf.poolSizeCount = poolSizeCount;
  339. return Error::NONE;
  340. }
  341. Error DSLayoutCacheEntry::getOrCreateThreadAllocator(ThreadId tid, DSThreadAllocator*& alloc)
  342. {
  343. alloc = nullptr;
  344. LockGuard<SpinLock> lock(m_threadAllocsMtx);
  345. class Comp
  346. {
  347. public:
  348. Bool operator()(const DSThreadAllocator* a, ThreadId tid) const
  349. {
  350. return a->m_tid < tid;
  351. }
  352. Bool operator()(ThreadId tid, const DSThreadAllocator* a) const
  353. {
  354. return tid < a->m_tid;
  355. }
  356. };
  357. // Find using binary search
  358. auto it = binarySearch(m_threadAllocs.getBegin(), m_threadAllocs.getEnd(), tid, Comp());
  359. if(it != m_threadAllocs.getEnd())
  360. {
  361. ANKI_ASSERT((*it)->m_tid == tid);
  362. alloc = *it;
  363. }
  364. else
  365. {
  366. // Need to create one
  367. alloc = m_factory->m_alloc.newInstance<DSThreadAllocator>(this, tid);
  368. ANKI_CHECK(alloc->init());
  369. m_threadAllocs.resize(m_factory->m_alloc, m_threadAllocs.getSize() + 1);
  370. m_threadAllocs[m_threadAllocs.getSize() - 1] = alloc;
  371. // Sort for fast find
  372. std::sort(m_threadAllocs.getBegin(),
  373. m_threadAllocs.getEnd(),
  374. [](const DSThreadAllocator* a, const DSThreadAllocator* b) { return a->m_tid < b->m_tid; });
  375. }
  376. ANKI_ASSERT(alloc);
  377. return Error::NONE;
  378. }
// Flush the tracked binding state: compute a hash over all active bindings, gather the dynamic buffer offsets,
// and report via stateDirty whether the descriptor set actually needs to be re-fetched/re-bound.
// hash: out, hash of the current bindings. dynamicOffsets/dynamicOffsetCount: out, dynamic offsets of the
// uniform/storage buffer bindings in binding order.
void DescriptorSetState::flush(Bool& stateDirty,
	U64& hash,
	Array<U32, MAX_UNIFORM_BUFFER_BINDINGS + MAX_STORAGE_BUFFER_BINDINGS>& dynamicOffsets,
	U& dynamicOffsetCount)
{
	dynamicOffsetCount = 0;

	// Get cache entry
	ANKI_ASSERT(m_layout.m_entry);
	const DSLayoutCacheEntry& entry = *m_layout.m_entry;

	// Early out if nothing happened
	if(!m_anyBindingDirty && !m_layoutDirty)
	{
		stateDirty = false;
		return;
	}

	Bool dynamicOffsetsDirty = false;

	// Compute the hash
	// Worst case is 4 U64 words per binding (the TEXTURE case below), hence the * 2 * 2 sizing.
	Array<U64, MAX_BINDINGS_PER_DESCRIPTOR_SET * 2 * 2> toHash;
	U toHashCount = 0;

	const U minBinding = entry.m_minBinding;
	const U maxBinding = entry.m_maxBinding;
	for(U i = minBinding; i <= maxBinding; ++i)
	{
		if(entry.m_activeBindings.get(i))
		{
			toHash[toHashCount++] = m_bindings[i].m_uuids[0];

			switch(entry.m_bindingType[i])
			{
			case DescriptorType::TEXTURE:
				toHash[toHashCount++] = m_bindings[i].m_uuids[1];
				toHash[toHashCount++] = U64(m_bindings[i].m_tex.m_aspect);
				toHash[toHashCount++] = U64(m_bindings[i].m_tex.m_layout);
				break;
			case DescriptorType::UNIFORM_BUFFER:
			case DescriptorType::STORAGE_BUFFER:
				// The buffer offset is NOT hashed: it goes through the dynamic-offset path instead, so a
				// changed offset alone doesn't force a new descriptor set.
				toHash[toHashCount++] = m_bindings[i].m_buff.m_range;
				dynamicOffsets[dynamicOffsetCount++] = m_bindings[i].m_buff.m_offset;
				dynamicOffsetsDirty = dynamicOffsetsDirty || m_dynamicOffsetDirty.get(i);
				break;
			case DescriptorType::IMAGE:
				// Nothing
				break;
			default:
				ANKI_ASSERT(0);
			}
		}
	}

	hash = (toHashCount == 1) ? toHash[0] : computeHash(&toHash[0], toHashCount * sizeof(U64));

	// Dirty if the bindings changed or any dynamic offset did.
	if(hash != m_lastHash || dynamicOffsetsDirty)
	{
		m_lastHash = hash;
		stateDirty = true;
	}
	else
	{
		stateDirty = false;
	}

	// Reset all dirty tracking for the next flush.
	m_anyBindingDirty = false;
	m_layoutDirty = false;
	m_dynamicOffsetDirty.unsetAll();
}
DescriptorSetFactory::~DescriptorSetFactory()
{
	// Intentionally empty: the caches are released by an explicit destroy() call, not by the destructor.
}
  443. void DescriptorSetFactory::init(const GrAllocator<U8>& alloc, VkDevice dev)
  444. {
  445. m_alloc = alloc;
  446. m_dev = dev;
  447. }
  448. void DescriptorSetFactory::destroy()
  449. {
  450. for(DSLayoutCacheEntry* l : m_caches)
  451. {
  452. m_alloc.deleteInstance(l);
  453. }
  454. m_caches.destroy(m_alloc);
  455. }
  456. Error DescriptorSetFactory::newDescriptorSetLayout(const DescriptorSetLayoutInitInfo& init, DescriptorSetLayout& layout)
  457. {
  458. // Compute the hash for the layout
  459. Array<DescriptorBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET> bindings;
  460. U bindingCount = init.m_bindings.getSize();
  461. U64 hash;
  462. if(init.m_bindings.getSize() > 0)
  463. {
  464. memcpy(&bindings[0], &init.m_bindings[0], init.m_bindings.getSizeInBytes());
  465. std::sort(&bindings[0],
  466. &bindings[0] + bindingCount,
  467. [](const DescriptorBinding& a, const DescriptorBinding& b) { return a.m_binding < b.m_binding; });
  468. hash = computeHash(&bindings[0], init.m_bindings.getSizeInBytes());
  469. ANKI_ASSERT(hash != 1);
  470. }
  471. else
  472. {
  473. hash = 1;
  474. }
  475. // Find or create the cache entry
  476. LockGuard<SpinLock> lock(m_cachesMtx);
  477. DSLayoutCacheEntry* cache = nullptr;
  478. U count = 0;
  479. for(DSLayoutCacheEntry* it : m_caches)
  480. {
  481. if(it->m_hash == hash)
  482. {
  483. cache = it;
  484. break;
  485. }
  486. ++count;
  487. }
  488. if(cache == nullptr)
  489. {
  490. cache = m_alloc.newInstance<DSLayoutCacheEntry>(this);
  491. ANKI_CHECK(cache->init(&bindings[0], bindingCount, hash));
  492. m_caches.resize(m_alloc, m_caches.getSize() + 1);
  493. m_caches[m_caches.getSize() - 1] = cache;
  494. }
  495. // Set the layout
  496. layout.m_handle = cache->m_layoutHandle;
  497. layout.m_entry = cache;
  498. return Error::NONE;
  499. }
// Get or create a descriptor set for the given state, on behalf of the given thread. If the state's bindings
// haven't changed (dirty == false) the caller can keep using the previously bound set and `set` is left untouched.
// dynamicOffsets/dynamicOffsetCount: out, the dynamic buffer offsets to pass at bind time.
Error DescriptorSetFactory::newDescriptorSet(ThreadId tid,
	DescriptorSetState& state,
	DescriptorSet& set,
	Bool& dirty,
	Array<U32, MAX_UNIFORM_BUFFER_BINDINGS + MAX_STORAGE_BUFFER_BINDINGS>& dynamicOffsets,
	U& dynamicOffsetCount)
{
	ANKI_TRACE_START_EVENT(VK_DESCRIPTOR_SET_GET_OR_CREATE);

	U64 hash;
	state.flush(dirty, hash, dynamicOffsets, dynamicOffsetCount);

	if(!dirty)
	{
		// Nothing changed; stop the trace event before the early return.
		ANKI_TRACE_STOP_EVENT(VK_DESCRIPTOR_SET_GET_OR_CREATE);
		return Error::NONE;
	}

	DescriptorSetLayout layout = state.m_layout;
	DSLayoutCacheEntry& entry = *layout.m_entry;

	// Get thread allocator
	DSThreadAllocator* alloc;
	ANKI_CHECK(entry.getOrCreateThreadAllocator(tid, alloc));

	// Finally, allocate
	const DS* s;
	ANKI_CHECK(alloc->getOrCreateSet(hash, state.m_bindings, s));
	set.m_handle = s->m_handle;
	ANKI_ASSERT(set.m_handle != VK_NULL_HANDLE);

	ANKI_TRACE_STOP_EVENT(VK_DESCRIPTOR_SET_GET_OR_CREATE);
	return Error::NONE;
}
  528. } // end namespace anki