BsVulkanGpuParams.cpp 26 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsVulkanGpuParams.h"
  4. #include "BsVulkanUtility.h"
  5. #include "BsVulkanRenderAPI.h"
  6. #include "BsVulkanDevice.h"
  7. #include "BsVulkanGpuParamBlockBuffer.h"
  8. #include "BsVulkanGpuBuffer.h"
  9. #include "BsVulkanTexture.h"
  10. #include "BsVulkanHardwareBuffer.h"
  11. #include "BsVulkanDescriptorSet.h"
  12. #include "BsVulkanDescriptorLayout.h"
  13. #include "BsVulkanSamplerState.h"
  14. #include "BsVulkanGpuPipelineParamInfo.h"
  15. #include "BsVulkanCommandBuffer.h"
  16. #include "BsVulkanTextureManager.h"
  17. #include "BsVulkanHardwareBufferManager.h"
  18. #include "BsGpuParamDesc.h"
  19. namespace bs
  20. {
	// Stores pipeline param info and device mask; all per-device data (descriptor
	// sets, write infos, handle caches) is allocated later in initialize(), so
	// the arrays start out empty/null here.
	VulkanGpuParams::VulkanGpuParams(const SPtr<GpuPipelineParamInfoCore>& paramInfo, GpuDeviceFlags deviceMask)
		: GpuParamsCore(paramInfo, deviceMask), mPerDeviceData(), mDeviceMask(deviceMask), mSetsDirty(nullptr)
	{ }
	VulkanGpuParams::~VulkanGpuParams()
	{
		Lock lock(mMutex);

		UINT32 numSets = mParamInfo->getNumSets();
		for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
		{
			// Devices not covered by the device mask never had their data allocated
			if (mPerDeviceData[i].perSetData == nullptr)
				continue;

			for (UINT32 j = 0; j < numSets; j++)
			{
				// Queue the descriptor sets for destruction (they may still be in
				// use by in-flight command buffers; destroy() handles deferral)
				for (auto& entry : mPerDeviceData[i].perSetData[j].sets)
					entry->destroy();

				// The vector was constructed via placement-new into frame-allocator
				// memory in initialize(), so its destructor must be invoked manually —
				// the backing memory itself is owned and freed by mAlloc.
				mPerDeviceData[i].perSetData[j].sets.~Vector<VulkanDescriptorSet*>();
			}
		}
	}
	/**
	 * Allocates and initializes all per-device, per-set descriptor data from a single
	 * frame-allocator block, and pre-populates every descriptor write with dummy
	 * resources so sets are valid to write even before the user binds anything.
	 */
	void VulkanGpuParams::initialize()
	{
		VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);

		VulkanRenderAPI& rapi = static_cast<VulkanRenderAPI&>(RenderAPICore::instance());
		VulkanDevice* devices[BS_MAX_DEVICES];
		VulkanUtility::getDevices(rapi, mDeviceMask, devices);

		UINT32 numDevices = 0;
		for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
		{
			if (devices[i] != nullptr)
				numDevices++;
		}

		UINT32 numParamBlocks = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::ParamBlock);
		UINT32 numTextures = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::Texture);
		UINT32 numStorageTextures = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::LoadStoreTexture);
		UINT32 numBuffers = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::Buffer);
		UINT32 numSamplers = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::SamplerState);
		UINT32 numSets = vkParamInfo.getNumSets();
		UINT32 numBindings = vkParamInfo.getNumElements();

		// Note: I'm assuming a single WriteInfo per binding, but if arrays sizes larger than 1 are eventually supported
		// I'll need to adjust the code.
		// Reserve everything up front in one block; the alloc<>() calls below must
		// exactly mirror these reservations (same types, same counts).
		mAlloc.reserve<bool>(numSets)
			.reserve<PerSetData>(numSets * numDevices)
			.reserve<VkWriteDescriptorSet>(numBindings * numDevices)
			.reserve<WriteInfo>(numBindings * numDevices)
			.reserve<VkImage>(numTextures * numDevices)
			.reserve<VkImage>(numStorageTextures * numDevices)
			.reserve<VkBuffer>(numParamBlocks * numDevices)
			.reserve<VkBuffer>(numBuffers * numDevices)
			.reserve<VkSampler>(numSamplers * numDevices)
			.init();

		Lock lock(mMutex); // Set write operations need to be thread safe

		mSetsDirty = mAlloc.alloc<bool>(numSets);
		bs_zero_out(mSetsDirty, numSets);

		VulkanSamplerStateCore* defaultSampler = static_cast<VulkanSamplerStateCore*>(SamplerStateCore::getDefault().get());
		VulkanTextureCoreManager& vkTexManager = static_cast<VulkanTextureCoreManager&>(TextureCoreManager::instance());
		VulkanHardwareBufferCoreManager& vkBufManager = static_cast<VulkanHardwareBufferCoreManager&>(
			HardwareBufferCoreManager::instance());

		for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
		{
			if (devices[i] == nullptr)
			{
				// Null marks the device as unused; all other methods check this
				mPerDeviceData[i].perSetData = nullptr;
				continue;
			}

			mPerDeviceData[i].perSetData = mAlloc.alloc<PerSetData>(numSets);
			mPerDeviceData[i].sampledImages = mAlloc.alloc<VkImage>(numTextures);
			mPerDeviceData[i].storageImages = mAlloc.alloc<VkImage>(numStorageTextures);
			mPerDeviceData[i].uniformBuffers = mAlloc.alloc<VkBuffer>(numParamBlocks);
			mPerDeviceData[i].buffers = mAlloc.alloc<VkBuffer>(numBuffers);
			mPerDeviceData[i].samplers = mAlloc.alloc<VkSampler>(numSamplers);

			bs_zero_out(mPerDeviceData[i].sampledImages, numTextures);
			bs_zero_out(mPerDeviceData[i].storageImages, numStorageTextures);
			bs_zero_out(mPerDeviceData[i].uniformBuffers, numParamBlocks);
			bs_zero_out(mPerDeviceData[i].buffers, numBuffers);
			bs_zero_out(mPerDeviceData[i].samplers, numSamplers);

			VulkanDescriptorManager& descManager = devices[i]->getDescriptorManager();
			VulkanSampler* vkDefaultSampler = defaultSampler->getResource(i);

			for (UINT32 j = 0; j < numSets; j++)
			{
				UINT32 numBindingsPerSet = vkParamInfo.getNumBindings(j);

				PerSetData& perSetData = mPerDeviceData[i].perSetData[j];
				// Placement-new into frame-allocator memory; matching manual
				// destructor call happens in ~VulkanGpuParams()
				new (&perSetData.sets) Vector<VulkanDescriptorSet*>();

				perSetData.writeSetInfos = mAlloc.alloc<VkWriteDescriptorSet>(numBindingsPerSet);
				perSetData.writeInfos = mAlloc.alloc<WriteInfo>(numBindingsPerSet);

				VulkanDescriptorLayout* layout = vkParamInfo.getLayout(i, j);
				perSetData.numElements = numBindingsPerSet;
				perSetData.latestSet = descManager.createSet(layout);
				perSetData.sets.push_back(perSetData.latestSet);

				VkDescriptorSetLayoutBinding* perSetBindings = vkParamInfo.getBindings(j);
				for (UINT32 k = 0; k < numBindingsPerSet; k++)
				{
					// Note: Instead of using one structure per binding, it's possible to update multiple at once
					// by specifying larger descriptorCount, if they all share type and shader stages.
					VkWriteDescriptorSet& writeSetInfo = perSetData.writeSetInfos[k];
					writeSetInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
					writeSetInfo.pNext = nullptr;
					writeSetInfo.dstSet = VK_NULL_HANDLE;
					writeSetInfo.dstBinding = perSetBindings[k].binding;
					writeSetInfo.dstArrayElement = 0;
					writeSetInfo.descriptorCount = perSetBindings[k].descriptorCount;
					writeSetInfo.descriptorType = perSetBindings[k].descriptorType;

					bool isImage = writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
						writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
						writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;

					if (isImage)
					{
						bool isLoadStore = writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;

						// Point the write at a dummy image/sampler so the set is valid
						// before the user binds real resources
						VkDescriptorImageInfo& imageInfo = perSetData.writeInfos[k].image;
						imageInfo.sampler = vkDefaultSampler->getHandle();
						if(isLoadStore)
						{
							imageInfo.imageView = vkTexManager.getDummyStorageImageView(i);
							imageInfo.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
						}
						else
						{
							imageInfo.imageView = vkTexManager.getDummyReadImageView(i);
							imageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
						}

						writeSetInfo.pImageInfo = &imageInfo;
						writeSetInfo.pBufferInfo = nullptr;
						writeSetInfo.pTexelBufferView = nullptr;
					}
					else
					{
						bool isUniform = writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;

						if (isUniform)
						{
							VkDescriptorBufferInfo& bufferInfo = perSetData.writeInfos[k].buffer;
							bufferInfo.buffer = vkBufManager.getDummyUniformBuffer(i);
							bufferInfo.offset = 0;
							bufferInfo.range = VK_WHOLE_SIZE;

							writeSetInfo.pBufferInfo = &bufferInfo;
							writeSetInfo.pTexelBufferView = nullptr;
						}
						else
						{
							writeSetInfo.pBufferInfo = nullptr;

							bool isLoadStore = writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;

							VkBufferView bufferView;
							if(isLoadStore)
								bufferView = vkBufManager.getDummyStorageBufferView(i);
							else
								bufferView = vkBufManager.getDummyReadBufferView(i);

							// FIXME(review): bufferView is a loop-local — this pointer dangles as soon
							// as the iteration ends, and is dereferenced later when the set is written
							// (vkUpdateDescriptorSets reads pTexelBufferView). The view should be stored
							// persistently (e.g. a bufferView member in WriteInfo) — requires a header change.
							writeSetInfo.pTexelBufferView = &bufferView;
						}

						writeSetInfo.pImageInfo = nullptr;
					}
				}
			}
		}

		GpuParamsCore::initialize();
	}
  175. void VulkanGpuParams::setParamBlockBuffer(UINT32 set, UINT32 slot, const SPtr<GpuParamBlockBufferCore>& paramBlockBuffer)
  176. {
  177. GpuParamsCore::setParamBlockBuffer(set, slot, paramBlockBuffer);
  178. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  179. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  180. if(bindingIdx == -1)
  181. {
  182. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  183. toString(slot) + ".");
  184. return;
  185. }
  186. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::ParamBlock, set, slot);
  187. Lock(mMutex);
  188. VulkanGpuParamBlockBufferCore* vulkanParamBlockBuffer =
  189. static_cast<VulkanGpuParamBlockBufferCore*>(paramBlockBuffer.get());
  190. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  191. {
  192. if (mPerDeviceData[i].perSetData == nullptr)
  193. continue;
  194. VulkanBuffer* bufferRes;
  195. if (vulkanParamBlockBuffer != nullptr)
  196. bufferRes = vulkanParamBlockBuffer->getResource(i);
  197. else
  198. bufferRes = nullptr;
  199. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  200. if (bufferRes != nullptr)
  201. {
  202. VkBuffer buffer = bufferRes->getHandle();
  203. perSetData.writeInfos[bindingIdx].buffer.buffer = buffer;
  204. mPerDeviceData[i].uniformBuffers[sequentialIdx] = buffer;
  205. }
  206. else
  207. {
  208. VulkanHardwareBufferCoreManager& vkBufManager = static_cast<VulkanHardwareBufferCoreManager&>(
  209. HardwareBufferCoreManager::instance());
  210. perSetData.writeInfos[bindingIdx].buffer.buffer = vkBufManager.getDummyUniformBuffer(i);
  211. mPerDeviceData[i].uniformBuffers[sequentialIdx] = VK_NULL_HANDLE;
  212. }
  213. }
  214. mSetsDirty[set] = true;
  215. }
  216. void VulkanGpuParams::setTexture(UINT32 set, UINT32 slot, const SPtr<TextureCore>& texture)
  217. {
  218. GpuParamsCore::setTexture(set, slot, texture);
  219. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  220. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  221. if (bindingIdx == -1)
  222. {
  223. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  224. toString(slot) + ".");
  225. return;
  226. }
  227. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::Texture, set, slot);
  228. Lock(mMutex);
  229. VulkanTextureCore* vulkanTexture = static_cast<VulkanTextureCore*>(texture.get());
  230. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  231. {
  232. if (mPerDeviceData[i].perSetData == nullptr)
  233. continue;
  234. VulkanImage* imageRes;
  235. if (vulkanTexture != nullptr)
  236. imageRes = vulkanTexture->getResource(i);
  237. else
  238. imageRes = nullptr;
  239. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  240. if (imageRes != nullptr)
  241. {
  242. perSetData.writeInfos[bindingIdx].image.imageView = imageRes->getView(false);
  243. mPerDeviceData[i].sampledImages[sequentialIdx] = imageRes->getHandle();
  244. }
  245. else
  246. {
  247. VulkanTextureCoreManager& vkTexManager = static_cast<VulkanTextureCoreManager&>(
  248. TextureCoreManager::instance());
  249. perSetData.writeInfos[bindingIdx].image.imageView = vkTexManager.getDummyReadImageView(i);
  250. mPerDeviceData[i].sampledImages[sequentialIdx] = VK_NULL_HANDLE;
  251. }
  252. }
  253. mSetsDirty[set] = true;
  254. }
  255. void VulkanGpuParams::setLoadStoreTexture(UINT32 set, UINT32 slot, const SPtr<TextureCore>& texture,
  256. const TextureSurface& surface)
  257. {
  258. GpuParamsCore::setLoadStoreTexture(set, slot, texture, surface);
  259. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  260. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  261. if (bindingIdx == -1)
  262. {
  263. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  264. toString(slot) + ".");
  265. return;
  266. }
  267. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::LoadStoreTexture, set, slot);
  268. Lock(mMutex);
  269. VulkanTextureCore* vulkanTexture = static_cast<VulkanTextureCore*>(texture.get());
  270. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  271. {
  272. if (mPerDeviceData[i].perSetData == nullptr)
  273. continue;
  274. VulkanImage* imageRes;
  275. if (vulkanTexture != nullptr)
  276. imageRes = vulkanTexture->getResource(i);
  277. else
  278. imageRes = nullptr;
  279. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  280. if (imageRes != nullptr)
  281. {
  282. perSetData.writeInfos[bindingIdx].image.imageView = imageRes->getView(surface, false);
  283. mPerDeviceData[i].storageImages[sequentialIdx] = imageRes->getHandle();
  284. }
  285. else
  286. {
  287. VulkanTextureCoreManager& vkTexManager = static_cast<VulkanTextureCoreManager&>(
  288. TextureCoreManager::instance());
  289. perSetData.writeInfos[bindingIdx].image.imageView = vkTexManager.getDummyStorageImageView(i);
  290. mPerDeviceData[i].storageImages[sequentialIdx] = VK_NULL_HANDLE;
  291. }
  292. }
  293. mSetsDirty[set] = true;
  294. }
  295. void VulkanGpuParams::setBuffer(UINT32 set, UINT32 slot, const SPtr<GpuBufferCore>& buffer)
  296. {
  297. GpuParamsCore::setBuffer(set, slot, buffer);
  298. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  299. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  300. if (bindingIdx == -1)
  301. {
  302. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  303. toString(slot) + ".");
  304. return;
  305. }
  306. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::Buffer, set, slot);
  307. Lock(mMutex);
  308. VulkanGpuBufferCore* vulkanBuffer = static_cast<VulkanGpuBufferCore*>(buffer.get());
  309. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  310. {
  311. if (mPerDeviceData[i].perSetData == nullptr)
  312. continue;
  313. VulkanBuffer* bufferRes;
  314. if (vulkanBuffer != nullptr)
  315. bufferRes = vulkanBuffer->getResource(i);
  316. else
  317. bufferRes = nullptr;
  318. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  319. VkWriteDescriptorSet& writeSetInfo = perSetData.writeSetInfos[bindingIdx];
  320. VkBufferView bufferView;
  321. if (bufferRes != nullptr)
  322. {
  323. bufferView = bufferRes->getView();
  324. mPerDeviceData[i].buffers[sequentialIdx] = bufferRes->getHandle();
  325. }
  326. else
  327. {
  328. VulkanHardwareBufferCoreManager& vkBufManager = static_cast<VulkanHardwareBufferCoreManager&>(
  329. HardwareBufferCoreManager::instance());
  330. bool isLoadStore = writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
  331. if(isLoadStore)
  332. bufferView = vkBufManager.getDummyStorageBufferView(i);
  333. else
  334. bufferView = vkBufManager.getDummyReadBufferView(i);
  335. mPerDeviceData[i].buffers[sequentialIdx] = 0;
  336. }
  337. writeSetInfo.pTexelBufferView = &bufferView;
  338. }
  339. mSetsDirty[set] = true;
  340. }
  341. void VulkanGpuParams::setSamplerState(UINT32 set, UINT32 slot, const SPtr<SamplerStateCore>& sampler)
  342. {
  343. GpuParamsCore::setSamplerState(set, slot, sampler);
  344. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  345. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  346. if (bindingIdx == -1)
  347. {
  348. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  349. toString(slot) + ".");
  350. return;
  351. }
  352. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::SamplerState, set, slot);
  353. Lock(mMutex);
  354. VulkanSamplerStateCore* vulkanSampler = static_cast<VulkanSamplerStateCore*>(sampler.get());
  355. for(UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  356. {
  357. if (mPerDeviceData[i].perSetData == nullptr)
  358. continue;
  359. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  360. VulkanSampler* samplerRes = vulkanSampler->getResource(i);
  361. if (samplerRes != nullptr)
  362. {
  363. VkSampler vkSampler = samplerRes->getHandle();
  364. perSetData.writeInfos[bindingIdx].image.sampler = vkSampler;
  365. mPerDeviceData[i].samplers[sequentialIdx] = vkSampler;
  366. }
  367. else
  368. {
  369. VulkanSamplerStateCore* defaultSampler =
  370. static_cast<VulkanSamplerStateCore*>(SamplerStateCore::getDefault().get());
  371. VkSampler vkSampler = defaultSampler->getResource(i)->getHandle();;
  372. perSetData.writeInfos[bindingIdx].image.sampler = vkSampler;
  373. mPerDeviceData[i].samplers[sequentialIdx] = 0;
  374. }
  375. }
  376. mSetsDirty[set] = true;
  377. }
  378. void VulkanGpuParams::setLoadStoreSurface(UINT32 set, UINT32 slot, const TextureSurface& surface)
  379. {
  380. GpuParamsCore::setLoadStoreSurface(set, slot, surface);
  381. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  382. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  383. if (bindingIdx == -1)
  384. {
  385. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  386. toString(slot) + ".");
  387. return;
  388. }
  389. SPtr<TextureCore> texture = getLoadStoreTexture(set, slot);
  390. if (texture == nullptr)
  391. return;
  392. Lock(mMutex);
  393. VulkanTextureCore* vulkanTexture = static_cast<VulkanTextureCore*>(texture.get());
  394. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  395. {
  396. if (mPerDeviceData[i].perSetData == nullptr)
  397. continue;
  398. VulkanImage* imageRes = vulkanTexture->getResource(i);
  399. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  400. if (imageRes != nullptr)
  401. perSetData.writeInfos[bindingIdx].image.imageView = imageRes->getView(surface, false);
  402. }
  403. mSetsDirty[set] = true;
  404. }
	/** Returns the number of descriptor sets used by the underlying pipeline param info. */
	UINT32 VulkanGpuParams::getNumSets() const
	{
		return mParamInfo->getNumSets();
	}
  409. void VulkanGpuParams::prepareForBind(VulkanCmdBuffer& buffer, VkDescriptorSet* sets)
  410. {
  411. UINT32 deviceIdx = buffer.getDeviceIdx();
  412. PerDeviceData& perDeviceData = mPerDeviceData[deviceIdx];
  413. if (perDeviceData.perSetData == nullptr)
  414. return;
  415. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  416. UINT32 numParamBlocks = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::ParamBlock);
  417. UINT32 numTextures = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::Texture);
  418. UINT32 numStorageTextures = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::LoadStoreTexture);
  419. UINT32 numBuffers = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::Buffer);
  420. UINT32 numSamplers = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::SamplerState);
  421. UINT32 numSets = vkParamInfo.getNumSets();
  422. Lock(mMutex);
  423. // Registers resources with the command buffer, and check if internal resource handled changed (in which case set
  424. // needs updating - this can happen due to resource writes, as internally system might find it more performant
  425. // to discard used resources and create new ones).
  426. // Note: Makes the assumption that this object (and all of the resources it holds) are externally locked, and will
  427. // not be modified on another thread while being bound.
  428. for (UINT32 i = 0; i < numParamBlocks; i++)
  429. {
  430. if (mParamBlockBuffers[i] == nullptr)
  431. continue;
  432. VulkanGpuParamBlockBufferCore* element = static_cast<VulkanGpuParamBlockBufferCore*>(mParamBlockBuffers[i].get());
  433. VulkanBuffer* resource = element->getResource(deviceIdx);
  434. if (resource == nullptr)
  435. continue;
  436. // Register with command buffer
  437. buffer.registerResource(resource, VK_ACCESS_UNIFORM_READ_BIT, VulkanUseFlag::Read);
  438. // Check if internal resource changed from what was previously bound in the descriptor set
  439. assert(perDeviceData.uniformBuffers[i] != VK_NULL_HANDLE);
  440. VkBuffer vkBuffer = resource->getHandle();
  441. if(perDeviceData.uniformBuffers[i] != vkBuffer)
  442. {
  443. perDeviceData.uniformBuffers[i] = vkBuffer;
  444. UINT32 set, slot;
  445. mParamInfo->getSetSlot(GpuPipelineParamInfo::ParamType::ParamBlock, i, set, slot);
  446. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  447. perDeviceData.perSetData[set].writeInfos[bindingIdx].buffer.buffer = vkBuffer;
  448. mSetsDirty[set] = true;
  449. }
  450. }
  451. for (UINT32 i = 0; i < numBuffers; i++)
  452. {
  453. if (mBuffers[i] == nullptr)
  454. continue;
  455. VulkanGpuBufferCore* element = static_cast<VulkanGpuBufferCore*>(mBuffers[i].get());
  456. VulkanBuffer* resource = element->getResource(deviceIdx);
  457. if (resource == nullptr)
  458. continue;
  459. // Register with command buffer
  460. VkAccessFlags accessFlags = VK_ACCESS_SHADER_READ_BIT;
  461. VulkanUseFlags useFlags = VulkanUseFlag::Read;
  462. if (element->getProperties().getRandomGpuWrite())
  463. {
  464. accessFlags |= VK_ACCESS_SHADER_WRITE_BIT;
  465. useFlags |= VulkanUseFlag::Write;
  466. }
  467. buffer.registerResource(resource, accessFlags, useFlags);
  468. // Check if internal resource changed from what was previously bound in the descriptor set
  469. assert(perDeviceData.buffers[i] != VK_NULL_HANDLE);
  470. VkBuffer vkBuffer = resource->getHandle();
  471. if (perDeviceData.buffers[i] != vkBuffer)
  472. {
  473. perDeviceData.buffers[i] = vkBuffer;
  474. UINT32 set, slot;
  475. mParamInfo->getSetSlot(GpuPipelineParamInfo::ParamType::Buffer, i, set, slot);
  476. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  477. VkBufferView bufferView = resource->getView();
  478. perDeviceData.perSetData[set].writeSetInfos[bindingIdx].pTexelBufferView = &bufferView;
  479. mSetsDirty[set] = true;
  480. }
  481. }
  482. for (UINT32 i = 0; i < numSamplers; i++)
  483. {
  484. if (mSamplerStates[i] == nullptr)
  485. continue;
  486. VulkanSamplerStateCore* element = static_cast<VulkanSamplerStateCore*>(mSamplerStates[i].get());
  487. VulkanSampler* resource = element->getResource(deviceIdx);
  488. if (resource == nullptr)
  489. continue;
  490. // Register with command buffer
  491. buffer.registerResource(resource, VulkanUseFlag::Read);
  492. // Check if internal resource changed from what was previously bound in the descriptor set
  493. assert(perDeviceData.samplers[i] != VK_NULL_HANDLE);
  494. VkSampler vkSampler = resource->getHandle();
  495. if (perDeviceData.samplers[i] != vkSampler)
  496. {
  497. perDeviceData.samplers[i] = vkSampler;
  498. UINT32 set, slot;
  499. mParamInfo->getSetSlot(GpuPipelineParamInfo::ParamType::SamplerState, i, set, slot);
  500. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  501. perDeviceData.perSetData[set].writeInfos[bindingIdx].image.sampler = vkSampler;
  502. mSetsDirty[set] = true;
  503. }
  504. }
  505. for (UINT32 i = 0; i < numStorageTextures; i++)
  506. {
  507. if (mLoadStoreTextures[i] == nullptr)
  508. continue;
  509. VulkanTextureCore* element = static_cast<VulkanTextureCore*>(mLoadStoreTextures[i].get());
  510. VulkanImage* resource = element->getResource(deviceIdx);
  511. if (resource == nullptr)
  512. continue;
  513. // Register with command buffer
  514. VkAccessFlags accessFlags = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  515. VulkanUseFlags useFlags = VulkanUseFlag::Read | VulkanUseFlag::Write;
  516. buffer.registerResource(resource, accessFlags, resource->getLayout(), VK_IMAGE_LAYOUT_GENERAL, useFlags);
  517. // Check if internal resource changed from what was previously bound in the descriptor set
  518. assert(perDeviceData.storageImages[i] != VK_NULL_HANDLE);
  519. VkImage vkImage = resource->getHandle();
  520. if (perDeviceData.storageImages[i] != vkImage)
  521. {
  522. perDeviceData.storageImages[i] = vkImage;
  523. UINT32 set, slot;
  524. mParamInfo->getSetSlot(GpuPipelineParamInfo::ParamType::LoadStoreTexture, i, set, slot);
  525. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  526. const TextureSurface& surface = mLoadStoreSurfaces[i];
  527. perDeviceData.perSetData[set].writeInfos[bindingIdx].image.imageView = resource->getView(surface, false);;
  528. mSetsDirty[set] = true;
  529. }
  530. }
  531. for (UINT32 i = 0; i < numTextures; i++)
  532. {
  533. if (mTextures[i] == nullptr)
  534. continue;
  535. VulkanTextureCore* element = static_cast<VulkanTextureCore*>(mTextures[i].get());
  536. VulkanImage* resource = element->getResource(deviceIdx);
  537. if (resource == nullptr)
  538. continue;
  539. // Register with command buffer
  540. const TextureProperties& props = element->getProperties();
  541. VkImageLayout layout;
  542. // Keep dynamic textures in general layout, so they can be easily mapped by CPU
  543. if (props.getUsage() & TU_DYNAMIC)
  544. layout = VK_IMAGE_LAYOUT_GENERAL;
  545. else
  546. layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  547. buffer.registerResource(resource, VK_ACCESS_SHADER_READ_BIT, resource->getLayout(), layout, VulkanUseFlag::Read);
  548. // Check if internal resource changed from what was previously bound in the descriptor set
  549. assert(perDeviceData.sampledImages[i] != VK_NULL_HANDLE);
  550. VkImage vkImage = resource->getHandle();
  551. if (perDeviceData.sampledImages[i] != vkImage)
  552. {
  553. perDeviceData.sampledImages[i] = vkImage;
  554. UINT32 set, slot;
  555. mParamInfo->getSetSlot(GpuPipelineParamInfo::ParamType::Texture, i, set, slot);
  556. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  557. perDeviceData.perSetData[set].writeInfos[bindingIdx].image.imageView = resource->getView(false);;
  558. mSetsDirty[set] = true;
  559. }
  560. }
  561. // Acquire sets as needed, and updated their contents if dirty
  562. VulkanRenderAPI& rapi = static_cast<VulkanRenderAPI&>(RenderAPICore::instance());
  563. VulkanDevice& device = *rapi._getDevice(deviceIdx);
  564. VulkanDescriptorManager& descManager = device.getDescriptorManager();
  565. for (UINT32 i = 0; i < numSets; i++)
  566. {
  567. PerSetData& perSetData = perDeviceData.perSetData[i];
  568. if (!mSetsDirty[i]) // Set not dirty, just use the last one we wrote (this is fine even across multiple command buffers)
  569. continue;
  570. // Set is dirty, we need to update
  571. //// Use latest unless already used, otherwise try to find an unused one
  572. if(perSetData.latestSet->isBound()) // Checking this is okay, because it's only modified below when we call registerResource, which is under the same lock as this
  573. {
  574. perSetData.latestSet = nullptr;
  575. for(auto& entry : perSetData.sets)
  576. {
  577. if(!entry->isBound())
  578. {
  579. perSetData.latestSet = entry;
  580. break;
  581. }
  582. }
  583. // Cannot find an empty set, allocate a new one
  584. VulkanDescriptorLayout* layout = vkParamInfo.getLayout(deviceIdx, i);
  585. perSetData.latestSet = descManager.createSet(layout);
  586. perSetData.sets.push_back(perSetData.latestSet);
  587. }
  588. // Note: Currently I write to the entire set at once, but it might be beneficial to remember only the exact
  589. // entries that were updated, and only write to them individually.
  590. perSetData.latestSet->write(perSetData.writeSetInfos, perSetData.numElements);
  591. mSetsDirty[i] = false;
  592. }
  593. for (UINT32 i = 0; i < numSets; i++)
  594. {
  595. VulkanDescriptorSet* set = perDeviceData.perSetData[i].latestSet;
  596. buffer.registerResource(set, VulkanUseFlag::Read);
  597. sets[i] = set->getHandle();
  598. }
  599. }
  600. }