VkDescriptor.cpp 17 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623
  1. // Copyright (C) 2009-present, Panagiotis Christopoulos Charitos and contributors.
  2. // All rights reserved.
  3. // Code licensed under the BSD License.
  4. // http://www.anki3d.org/LICENSE
  5. #include <AnKi/Gr/Vulkan/VkDescriptor.h>
  6. #include <AnKi/Gr/Vulkan/VkGrManager.h>
  7. #include <AnKi/Core/StatsSet.h>
  8. #include <AnKi/Util/Tracer.h>
  9. namespace anki {
// Per-frame stat counters (zeroed every frame): pool blocks created and vkUpdateDescriptorSets calls issued.
ANKI_SVAR(DescriptorSetsAllocated, StatCategory::kGr, "DescriptorSets allocated this frame", StatFlag::kZeroEveryFrame)
ANKI_SVAR(DescriptorSetsWritten, StatCategory::kGr, "DescriptorSets written this frame", StatFlag::kZeroEveryFrame)
/// Contains some constants. It's a class to avoid bugs initializing arrays (m_descriptorCount).
class DSAllocatorConstants
{
public:
	/// Base per-type descriptor counts used to size each new VkDescriptorPool.
	/// NOTE: VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR must stay the LAST element; createNewBlock() drops it
	/// from the pool sizes when ray tracing is disabled.
	Array<std::pair<VkDescriptorType, U32>, 8> m_descriptorCount;

	U32 m_maxSets; ///< Base maxSets of the first pool; subsequent pools scale this up geometrically.

	DSAllocatorConstants()
	{
		m_descriptorCount[0] = {VK_DESCRIPTOR_TYPE_SAMPLER, 8};
		m_descriptorCount[1] = {VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 64};
		m_descriptorCount[2] = {VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 8};
		m_descriptorCount[3] = {VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 32};
		m_descriptorCount[4] = {VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 8};
		m_descriptorCount[5] = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 8};
		m_descriptorCount[6] = {VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 64};
		m_descriptorCount[7] = {VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, 4};
		m_maxSets = 64;
	}
};

/// The single global instance consumed by DescriptorAllocator.
static const DSAllocatorConstants g_dsAllocatorConsts;
  32. static U32 powu(U32 base, U32 exponent)
  33. {
  34. U32 out = 1;
  35. for(U32 i = 1; i <= exponent; i++)
  36. {
  37. out *= base;
  38. }
  39. return out;
  40. }
DescriptorAllocator::~DescriptorAllocator()
{
	// All cleanup (destroying the pool blocks) is delegated to destroy()
	destroy();
}
/// Creates and appends a new VkDescriptorPool block. Each new block is kDescriptorSetGrowScale times larger than
/// the previous one (geometric growth keyed off the current block count).
void DescriptorAllocator::createNewBlock()
{
	ANKI_TRACE_SCOPED_EVENT(GrDescriptorSetCreate);

	const Bool rtEnabled = GrManager::getSingleton().getDeviceCapabilities().m_rayTracingEnabled;

	// Scale the base per-type counts by the growth factor for this block index
	Array<VkDescriptorPoolSize, g_dsAllocatorConsts.m_descriptorCount.getSize()> poolSizes;
	for(U32 i = 0; i < g_dsAllocatorConsts.m_descriptorCount.getSize(); ++i)
	{
		VkDescriptorPoolSize& size = poolSizes[i];
		size.descriptorCount = g_dsAllocatorConsts.m_descriptorCount[i].second * powu(kDescriptorSetGrowScale, m_blocks.getSize());
		size.type = g_dsAllocatorConsts.m_descriptorCount[i].first;
	}

	VkDescriptorPoolCreateInfo inf = {};
	inf.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
	inf.flags = 0;
	inf.maxSets = g_dsAllocatorConsts.m_maxSets * powu(kDescriptorSetGrowScale, m_blocks.getSize());

	// When ray tracing is off, drop the acceleration structure entry. It's the last array element so it can be
	// excluded by simply shrinking poolSizeCount by one.
	ANKI_ASSERT(g_dsAllocatorConsts.m_descriptorCount.getBack().first == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR
				&& "Needs to be the last for the bellow to work");
	inf.poolSizeCount = rtEnabled ? g_dsAllocatorConsts.m_descriptorCount.getSize() : g_dsAllocatorConsts.m_descriptorCount.getSize() - 1;
	inf.pPoolSizes = poolSizes.getBegin();

	VkDescriptorPool handle;
	ANKI_VK_CHECKF(vkCreateDescriptorPool(getVkDevice(), &inf, nullptr, &handle));

	// Register the new block so allocate() can use it
	Block& block = *m_blocks.emplaceBack();
	block.m_pool = handle;
	block.m_maxDsets = inf.maxSets;

	g_svarDescriptorSetsAllocated.increment(1);
}
/// Allocates a descriptor set with the given layout out of the active pool block, advancing to (or creating)
/// the next block on VK_ERROR_OUT_OF_POOL_MEMORY. Fatal on any other Vulkan error.
void DescriptorAllocator::allocate(VkDescriptorSetLayout layout, VkDescriptorSet& set)
{
	ANKI_TRACE_SCOPED_EVENT(GrAllocateDescriptorSet);

	// Lazy init
	if(m_blocks.getSize() == 0)
	{
		createNewBlock();
		ANKI_ASSERT(m_activeBlock == 0);
	}

	// Retry loop, capped at 10 attempts so a misbehaving driver can't spin us forever
	U32 iterationCount = 0;
	do
	{
		VkResult res;
		if(m_blocks[m_activeBlock].m_dsetsAllocatedCount > m_blocks[m_activeBlock].m_maxDsets * 2)
		{
			// The driver doesn't respect VkDescriptorPoolCreateInfo::maxSets. It should have thrown OoM already. To avoid growing the same DS forever
			// force OoM
			res = VK_ERROR_OUT_OF_POOL_MEMORY;
		}
		else
		{
			VkDescriptorSetAllocateInfo ci = {};
			ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
			ci.descriptorPool = m_blocks[m_activeBlock].m_pool;
			ci.descriptorSetCount = 1;
			ci.pSetLayouts = &layout;
			res = vkAllocateDescriptorSets(getVkDevice(), &ci, &set);
		}

		if(res == VK_SUCCESS)
		{
			++m_blocks[m_activeBlock].m_dsetsAllocatedCount;
			break;
		}
		else if(res == VK_ERROR_OUT_OF_POOL_MEMORY)
		{
			// Active block is exhausted. Move to the next one, creating it on demand
			++m_activeBlock;
			if(m_activeBlock >= m_blocks.getSize())
			{
				createNewBlock();
			}
		}
		else
		{
			// Any other error is fatal
			ANKI_VK_CHECKF(res);
		}

		++iterationCount;
	} while(iterationCount < 10);

	// A successful allocation breaks out before the increment, so iterationCount == 10 means every attempt failed
	if(iterationCount == 10)
	{
		ANKI_VK_LOGF("Failed to allocate descriptor set");
	}
}
  123. void DescriptorAllocator::reset()
  124. {
  125. // Trim blocks that were not used last time
  126. const U32 blocksInUse = m_activeBlock + 1;
  127. if(blocksInUse < m_blocks.getSize())
  128. {
  129. for(U32 i = blocksInUse; i < m_blocks.getSize(); ++i)
  130. {
  131. vkDestroyDescriptorPool(getVkDevice(), m_blocks[i].m_pool, nullptr);
  132. }
  133. m_blocks.resize(blocksInUse);
  134. }
  135. // Reset the remaining pools
  136. for(Block& b : m_blocks)
  137. {
  138. if(b.m_dsetsAllocatedCount > 0)
  139. {
  140. vkResetDescriptorPool(getVkDevice(), b.m_pool, 0);
  141. }
  142. b.m_dsetsAllocatedCount = 0;
  143. }
  144. m_activeBlock = 0;
  145. }
  146. BindlessDescriptorSet::~BindlessDescriptorSet()
  147. {
  148. ANKI_ASSERT(m_freeTexIndexCount == m_freeTexIndices.getSize() && "Forgot to unbind some textures");
  149. if(m_dsPool)
  150. {
  151. vkDestroyDescriptorPool(getVkDevice(), m_dsPool, nullptr);
  152. m_dsPool = VK_NULL_HANDLE;
  153. m_dset = VK_NULL_HANDLE;
  154. }
  155. if(m_layout)
  156. {
  157. vkDestroyDescriptorSetLayout(getVkDevice(), m_layout, nullptr);
  158. m_layout = VK_NULL_HANDLE;
  159. }
  160. }
/// Creates the bindless layout, its pool and the single descriptor set, and seeds the free-index stack.
Error BindlessDescriptorSet::init()
{
	const U32 bindlessTextureCount = g_cvarGrMaxBindlessSampledTextureCount;

	// Create the layout
	{
		// A single large array of sampled images at binding 0, visible to all shader stages
		Array<VkDescriptorSetLayoutBinding, 1> bindings = {};
		bindings[0].binding = 0;
		bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
		bindings[0].descriptorCount = bindlessTextureCount;
		bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;

		// Bindless requirements: updatable after bind, updatable while command buffers are pending, and allowed
		// to be only partially populated
		Array<VkDescriptorBindingFlagsEXT, 1> bindingFlags = {};
		bindingFlags[0] = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT
						  | VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;

		VkDescriptorSetLayoutBindingFlagsCreateInfoEXT extraInfos = {};
		extraInfos.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
		extraInfos.bindingCount = bindingFlags.getSize();
		extraInfos.pBindingFlags = &bindingFlags[0];

		VkDescriptorSetLayoutCreateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
		ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
		ci.bindingCount = bindings.getSize();
		ci.pBindings = &bindings[0];
		ci.pNext = &extraInfos;

		ANKI_VK_CHECK(vkCreateDescriptorSetLayout(getVkDevice(), &ci, nullptr, &m_layout));
	}

	// Create the pool
	{
		Array<VkDescriptorPoolSize, 1> sizes = {};
		sizes[0].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
		sizes[0].descriptorCount = bindlessTextureCount;

		VkDescriptorPoolCreateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
		ci.maxSets = 1; // Only a single set is ever allocated from this pool
		ci.poolSizeCount = sizes.getSize();
		ci.pPoolSizes = &sizes[0];
		ci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT; // Required to pair with the layout's update-after-bind flag

		ANKI_VK_CHECK(vkCreateDescriptorPool(getVkDevice(), &ci, nullptr, &m_dsPool));
	}

	// Create the descriptor set
	{
		VkDescriptorSetAllocateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		ci.descriptorPool = m_dsPool;
		ci.descriptorSetCount = 1;
		ci.pSetLayouts = &m_layout;

		ANKI_VK_CHECK(vkAllocateDescriptorSets(getVkDevice(), &ci, &m_dset));
	}

	// Init the free arrays
	{
		// Stack of free array elements, seeded in descending order so bindTexture() pops the smallest index first
		m_freeTexIndices.resize(bindlessTextureCount);
		m_freeTexIndexCount = U16(m_freeTexIndices.getSize());
		for(U32 i = 0; i < m_freeTexIndices.getSize(); ++i)
		{
			m_freeTexIndices[i] = U16(m_freeTexIndices.getSize() - i - 1);
		}
	}

	return Error::kNone;
}
  219. U32 BindlessDescriptorSet::bindTexture(const VkImageView view, const VkImageLayout layout)
  220. {
  221. ANKI_ASSERT(layout == VK_IMAGE_LAYOUT_GENERAL || layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
  222. ANKI_ASSERT(view);
  223. LockGuard<Mutex> lock(m_mtx);
  224. ANKI_ASSERT(m_freeTexIndexCount > 0 && "Out of indices");
  225. // Pop the index
  226. --m_freeTexIndexCount;
  227. const U16 idx = m_freeTexIndices[m_freeTexIndexCount];
  228. ANKI_ASSERT(idx < m_freeTexIndices.getSize());
  229. // Update the set
  230. VkDescriptorImageInfo imageInf = {};
  231. imageInf.imageView = view;
  232. imageInf.imageLayout = layout;
  233. VkWriteDescriptorSet write = {};
  234. write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  235. write.pNext = nullptr;
  236. write.dstSet = m_dset;
  237. write.dstBinding = 0;
  238. write.descriptorCount = 1;
  239. write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
  240. write.dstArrayElement = idx;
  241. write.pImageInfo = &imageInf;
  242. vkUpdateDescriptorSets(getVkDevice(), 1, &write, 0, nullptr);
  243. return idx;
  244. }
  245. void BindlessDescriptorSet::unbindTexture(U32 idx)
  246. {
  247. LockGuard<Mutex> lock(m_mtx);
  248. ANKI_ASSERT(idx < m_freeTexIndices.getSize());
  249. ANKI_ASSERT(m_freeTexIndexCount < m_freeTexIndices.getSize());
  250. m_freeTexIndices[m_freeTexIndexCount] = U16(idx);
  251. ++m_freeTexIndexCount;
  252. // Sort the free indices to minimize fragmentation
  253. std::sort(&m_freeTexIndices[0], &m_freeTexIndices[0] + m_freeTexIndexCount, std::greater<U16>());
  254. // Make sure there are no duplicates
  255. for(U32 i = 1; i < m_freeTexIndexCount; ++i)
  256. {
  257. ANKI_ASSERT(m_freeTexIndices[i] != m_freeTexIndices[i - 1]);
  258. }
  259. }
  260. PipelineLayoutFactory2::~PipelineLayoutFactory2()
  261. {
  262. for(auto it : m_pplLayouts)
  263. {
  264. vkDestroyPipelineLayout(getVkDevice(), it->m_handle, nullptr);
  265. deleteInstance(GrMemoryPool::getSingleton(), it);
  266. }
  267. for(auto it : m_dsLayouts)
  268. {
  269. vkDestroyDescriptorSetLayout(getVkDevice(), it->m_handle, nullptr);
  270. deleteInstance(GrMemoryPool::getSingleton(), it);
  271. }
  272. }
/// Returns a cached VkDescriptorSetLayout matching the given reflection bindings, creating it on first use.
/// NOTE(review): No locking in here; looks like it relies on the caller (getOrCreatePipelineLayout) holding
/// m_mtx — confirm before calling this from another path.
Error PipelineLayoutFactory2::getOrCreateDescriptorSetLayout(ConstWeakArray<ShaderReflectionBinding> reflBindings, DescriptorSetLayout*& layout)
{
	// Packed so computeHash() below hashes contiguous bytes with no padding in between
	ANKI_BEGIN_PACKED_STRUCT
	class DSBinding
	{
	public:
		VkDescriptorType m_type = VK_DESCRIPTOR_TYPE_MAX_ENUM;
		U16 m_arraySize = 0;
		U16 m_binding = kMaxU8;
	};
	ANKI_END_PACKED_STRUCT

	// Compute the hash for the layout
	Array<DSBinding, kMaxBindingsPerRegisterSpace> bindings;
	U64 hash;
	if(reflBindings.getSize())
	{
		// Copy to a new place because we want to sort
		U32 count = 0;
		for(const ShaderReflectionBinding& reflBinding : reflBindings)
		{
			bindings[count].m_type = convertDescriptorType(reflBinding.m_type);
			bindings[count].m_arraySize = reflBinding.m_arraySize;
			bindings[count].m_binding = reflBinding.m_vkBinding;
			++count;
		}

		// Sort by binding number so the hash doesn't depend on the order reflection reported the bindings
		std::sort(bindings.getBegin(), bindings.getBegin() + count, [](const DSBinding& a, const DSBinding& b) {
			return a.m_binding < b.m_binding;
		});

		hash = computeHash(&bindings[0], sizeof(bindings[0]) * count);
		ANKI_ASSERT(hash != 1); // 1 is reserved for the empty layout below
	}
	else
	{
		// No bindings: use the reserved hash for the empty layout
		hash = 1;
	}

	// Search the cache or create it
	auto it = m_dsLayouts.find(hash);
	if(it != m_dsLayouts.getEnd())
	{
		layout = *it;
	}
	else
	{
		const U32 bindingCount = reflBindings.getSize();

		layout = newInstance<DescriptorSetLayout>(GrMemoryPool::getSingleton());
		m_dsLayouts.emplace(hash, layout);

		Array<VkDescriptorSetLayoutBinding, kMaxBindingsPerRegisterSpace> vkBindings;
		VkDescriptorSetLayoutCreateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;

		// Every binding is made visible to all shader stages
		for(U i = 0; i < bindingCount; ++i)
		{
			VkDescriptorSetLayoutBinding& vk = vkBindings[i];
			const DSBinding& ak = bindings[i];
			vk.binding = ak.m_binding;
			vk.descriptorCount = ak.m_arraySize;
			vk.descriptorType = ak.m_type;
			vk.pImmutableSamplers = nullptr;
			vk.stageFlags = VK_SHADER_STAGE_ALL;
		}

		ci.bindingCount = bindingCount;
		ci.pBindings = &vkBindings[0];

		ANKI_VK_CHECK(vkCreateDescriptorSetLayout(getVkDevice(), &ci, nullptr, &layout->m_handle));
	}

	return Error::kNone;
}
/// Returns a cached pipeline layout for the given descriptor reflection, creating and caching it on first use.
/// Thread-safe (takes m_mtx).
Error PipelineLayoutFactory2::getOrCreatePipelineLayout(const ShaderReflectionDescriptorRelated& refl, PipelineLayout2*& layout)
{
	// Compute the hash
	// NOTE(review): Hashes the raw bytes of refl; assumes any struct padding is consistently zeroed by all callers — confirm
	const U64 hash = computeHash(&refl, sizeof(refl));

	LockGuard lock(m_mtx);

	auto it = m_pplLayouts.find(hash);
	if(it != m_pplLayouts.getEnd())
	{
		layout = *it;
	}
	else
	{
		// Create new
		layout = newInstance<PipelineLayout2>(GrMemoryPool::getSingleton());
		m_pplLayouts.emplace(hash, layout);

		layout->m_refl = refl;

		// Find dset count: the highest register space in use plus one. Also tally the descriptors of each set
		// (array elements count individually)
		layout->m_dsetCount = 0;
		for(U8 iset = 0; iset < kMaxRegisterSpaces; ++iset)
		{
			if(refl.m_bindingCounts[iset])
			{
				layout->m_dsetCount = max<U8>(iset + 1u, layout->m_dsetCount);
				for(U32 i = 0; i < refl.m_bindingCounts[iset]; ++i)
				{
					layout->m_descriptorCounts[iset] += refl.m_bindings[iset][i].m_arraySize;
				}
			}
		}

		// The bindless set may sit above the highest reflected set
		if(refl.m_vkBindlessDescriptorSet != kMaxU8)
		{
			layout->m_dsetCount = max<U8>(refl.m_vkBindlessDescriptorSet + 1, layout->m_dsetCount);
		}

		// Create the DS layouts. Register spaces with zero bindings get an empty layout
		for(U32 iset = 0; iset < layout->m_dsetCount; ++iset)
		{
			if(refl.m_vkBindlessDescriptorSet == iset)
			{
				// Reuse the global bindless layout for the bindless set
				layout->m_dsetLayouts[iset] = BindlessDescriptorSet::getSingleton().m_layout;
			}
			else
			{
				DescriptorSetLayout* dlayout;
				ANKI_CHECK(getOrCreateDescriptorSetLayout({refl.m_bindings[iset].getBegin(), refl.m_bindingCounts[iset]}, dlayout));
				layout->m_dsetLayouts[iset] = dlayout->m_handle;
			}
		}

		VkPipelineLayoutCreateInfo ci = {};
		ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
		ci.pSetLayouts = &layout->m_dsetLayouts[0];
		ci.setLayoutCount = layout->m_dsetCount;

		// Fast (push) constants, visible to all stages. pushConstantRange must outlive the create call below
		VkPushConstantRange pushConstantRange;
		if(refl.m_fastConstantsSize > 0)
		{
			pushConstantRange.offset = 0;
			pushConstantRange.size = refl.m_fastConstantsSize;
			pushConstantRange.stageFlags = VK_SHADER_STAGE_ALL;
			ci.pushConstantRangeCount = 1;
			ci.pPushConstantRanges = &pushConstantRange;
		}

		ANKI_VK_CHECK(vkCreatePipelineLayout(getVkDevice(), &ci, nullptr, &layout->m_handle));
	}

	return Error::kNone;
}
/// Flushes the descriptor state into the command buffer: allocates and writes every dirty set, binds the dirty
/// range of sets and uploads the push (fast) constants.
void DescriptorState::flush(VkCommandBuffer cmdb, DescriptorAllocator& dalloc)
{
	ANKI_ASSERT(m_pipelineLayout);
	const ShaderReflectionDescriptorRelated& refl = m_pipelineLayout->getReflection();

	// Small opt to bind the high frequency sets as little as possible
	BitSet<kMaxRegisterSpaces> dirtySets(false);

	for(U32 iset = 0; iset < m_pipelineLayout->m_dsetCount; ++iset)
	{
		DescriptorSet& set = m_sets[iset];

		if(iset == refl.m_vkBindlessDescriptorSet)
		{
			// The global pre-written bindless set: only mark for re-bind if it's not the one currently bound
			if(m_vkDsets[iset] != BindlessDescriptorSet::getSingleton().m_dset)
			{
				dirtySets.set(iset);
				m_vkDsets[iset] = BindlessDescriptorSet::getSingleton().m_dset;
			}
		}
		else if(m_sets[iset].m_dirty)
		{
			// Need to allocate and populate a new DS
			dirtySets.set(iset);
			set.m_dirty = false;

			// Make sure the scratch write array fits one entry per descriptor (array elements count individually)
			if(set.m_writeInfos.getSize() < m_pipelineLayout->m_descriptorCounts[iset])
			{
				set.m_writeInfos.resize(m_pipelineLayout->m_descriptorCounts[iset]);
			}

			dalloc.allocate(m_pipelineLayout->m_dsetLayouts[iset], m_vkDsets[iset]);

			// Write the DS
			VkWriteDescriptorSet writeTemplate = {};
			writeTemplate.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
			writeTemplate.pNext = nullptr;
			writeTemplate.dstSet = m_vkDsets[iset];
			writeTemplate.descriptorCount = 1;

			U32 writeInfoCount = 0;
			for(U32 ibinding = 0; ibinding < refl.m_bindingCounts[iset]; ++ibinding)
			{
				const ShaderReflectionBinding& binding = refl.m_bindings[iset][ibinding];

				// One VkWriteDescriptorSet per array element
				for(U32 arrayIdx = 0; arrayIdx < binding.m_arraySize; ++arrayIdx)
				{
					VkWriteDescriptorSet& writeInfo = set.m_writeInfos[writeInfoCount++];

					// Fetch what the user bound at this register and validate it against the reflection
					const HlslResourceType hlslType = descriptorTypeToHlslResourceType(binding.m_type);
					ANKI_ASSERT(binding.m_registerBindingPoint + arrayIdx < set.m_descriptors[hlslType].getSize() && "Forgot to bind something");
					const Descriptor& desc = set.m_descriptors[hlslType][binding.m_registerBindingPoint + arrayIdx];
					ANKI_ASSERT(desc.m_type == binding.m_type && "Have bound the wrong type");

					writeInfo = writeTemplate;
					writeInfo.descriptorType = convertDescriptorType(binding.m_type);
					writeInfo.dstArrayElement = arrayIdx;
					writeInfo.dstBinding = binding.m_vkBinding;

					// Point the write at the payload matching the descriptor type. The pointed-to data lives in
					// set.m_descriptors so it stays valid until vkUpdateDescriptorSets() below
					switch(writeInfo.descriptorType)
					{
					case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
					case VK_DESCRIPTOR_TYPE_SAMPLER:
					case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
					{
						writeInfo.pImageInfo = &desc.m_image;
						break;
					}
					case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
					case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
					{
						writeInfo.pBufferInfo = &desc.m_buffer;
						break;
					}
					case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
					case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
					{
						writeInfo.pTexelBufferView = &desc.m_bufferView;
						break;
					}
					case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
					{
						// Acceleration structures are attached through the pNext chain
						writeInfo.pNext = &desc.m_as;
						break;
					}
					default:
						ANKI_ASSERT(0);
					}
				}
			}

			if(writeInfoCount > 0)
			{
				vkUpdateDescriptorSets(getVkDevice(), writeInfoCount, set.m_writeInfos.getBegin(), 0, nullptr);
				g_svarDescriptorSetsWritten.increment(1);
			}
		}
		else
		{
			// Do nothing
		}

		ANKI_ASSERT(m_vkDsets[iset] != VK_NULL_HANDLE);
	}

	// Bind the descriptor sets. Bind everything from the lowest dirty set upwards, since binding a set can
	// invalidate higher, incompatible sets
	if(dirtySets.getAnySet())
	{
		const U32 minSetThatNeedsRebind = dirtySets.getLeastSignificantBit();
		const U32 dsetCount = m_pipelineLayout->m_dsetCount - minSetThatNeedsRebind;
		ANKI_ASSERT(dsetCount <= m_pipelineLayout->m_dsetCount);
		vkCmdBindDescriptorSets(cmdb, m_pipelineBindPoint, m_pipelineLayout->m_handle, minSetThatNeedsRebind, dsetCount,
								&m_vkDsets[minSetThatNeedsRebind], 0, nullptr);
	}

	// Set push consts
	if(refl.m_fastConstantsSize)
	{
		ANKI_ASSERT(refl.m_fastConstantsSize == m_pushConstSize && "Possibly forgot to set push constants");
		if(m_pushConstantsDirty)
		{
			vkCmdPushConstants(cmdb, m_pipelineLayout->m_handle, VK_SHADER_STAGE_ALL, 0, m_pushConstSize, m_pushConsts.getBegin());
			m_pushConstantsDirty = false;
		}
	}
}
  513. } // end namespace anki