// BsVulkanGpuParams.cpp
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsVulkanGpuParams.h"
  4. #include "BsVulkanUtility.h"
  5. #include "BsVulkanRenderAPI.h"
  6. #include "BsVulkanDevice.h"
  7. #include "BsVulkanGpuParamBlockBuffer.h"
  8. #include "BsVulkanGpuBuffer.h"
  9. #include "BsVulkanTexture.h"
  10. #include "BsVulkanHardwareBuffer.h"
  11. #include "BsVulkanDescriptorSet.h"
  12. #include "BsVulkanDescriptorLayout.h"
  13. #include "BsVulkanSamplerState.h"
  14. #include "BsVulkanGpuPipelineParamInfo.h"
  15. #include "BsVulkanCommandBuffer.h"
  16. #include "Managers/BsVulkanTextureManager.h"
  17. #include "Managers/BsVulkanHardwareBufferManager.h"
  18. #include "RenderAPI/BsGpuParamDesc.h"
  19. namespace bs { namespace ct
  20. {
/**
 * Constructs the Vulkan GPU parameter container for the provided pipeline parameter layout.
 * Only stores the construction arguments; the heavy per-device/per-set allocation is
 * deferred to initialize().
 *
 * @param paramInfo		Layout describing all sets/bindings used by the GPU pipeline.
 * @param deviceMask	Mask of devices this object should allocate descriptor data for.
 */
VulkanGpuParams::VulkanGpuParams(const SPtr<GpuPipelineParamInfo>& paramInfo, GpuDeviceFlags deviceMask)
	: GpuParams(paramInfo, deviceMask), mPerDeviceData(), mDeviceMask(deviceMask), mSetsDirty(nullptr)
{
}
  25. VulkanGpuParams::~VulkanGpuParams()
  26. {
  27. Lock Lock lock(mMutex);
  28. UINT32 numSets = mParamInfo->getNumSets();
  29. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  30. {
  31. if (mPerDeviceData[i].perSetData == nullptr)
  32. continue;
  33. for (UINT32 j = 0; j < numSets; j++)
  34. {
  35. for (auto& entry : mPerDeviceData[i].perSetData[j].sets)
  36. entry->destroy();
  37. mPerDeviceData[i].perSetData[j].sets.~Vector<VulkanDescriptorSet*>();
  38. }
  39. }
  40. }
  41. void VulkanGpuParams::initialize()
  42. {
  43. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  44. VulkanRenderAPI& rapi = static_cast<VulkanRenderAPI&>(RenderAPI::instance());
  45. VulkanDevice* devices[BS_MAX_DEVICES];
  46. VulkanUtility::getDevices(rapi, mDeviceMask, devices);
  47. UINT32 numDevices = 0;
  48. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  49. {
  50. if (devices[i] != nullptr)
  51. numDevices++;
  52. }
  53. UINT32 numParamBlocks = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::ParamBlock);
  54. UINT32 numTextures = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::Texture);
  55. UINT32 numStorageTextures = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::LoadStoreTexture);
  56. UINT32 numBuffers = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::Buffer);
  57. UINT32 numSamplers = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::SamplerState);
  58. UINT32 numSets = vkParamInfo.getNumSets();
  59. UINT32 numBindings = vkParamInfo.getNumElements();
  60. if (numSets == 0)
  61. return;
  62. // Note: I'm assuming a single WriteInfo per binding, but if arrays sizes larger than 1 are eventually supported
  63. // I'll need to adjust the code.
  64. mAlloc.reserve<bool>(numSets)
  65. .reserve<PerSetData>(numSets * numDevices)
  66. .reserve<VkWriteDescriptorSet>(numBindings * numDevices)
  67. .reserve<WriteInfo>(numBindings * numDevices)
  68. .reserve<VkImage>(numTextures * numDevices)
  69. .reserve<VkImage>(numStorageTextures * numDevices)
  70. .reserve<VkBuffer>(numParamBlocks * numDevices)
  71. .reserve<VkBuffer>(numBuffers * numDevices)
  72. .reserve<VkSampler>(numSamplers * numDevices)
  73. .init();
  74. Lock Lock lock(mMutex); // Set write operations need to be thread safe
  75. mSetsDirty = mAlloc.alloc<bool>(numSets);
  76. bs_zero_out(mSetsDirty, numSets);
  77. VulkanSamplerState* defaultSampler = static_cast<VulkanSamplerState*>(SamplerState::getDefault().get());
  78. VulkanTextureManager& vkTexManager = static_cast<VulkanTextureManager&>(TextureManager::instance());
  79. VulkanHardwareBufferManager& vkBufManager = static_cast<VulkanHardwareBufferManager&>(
  80. HardwareBufferManager::instance());
  81. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  82. {
  83. if (devices[i] == nullptr)
  84. {
  85. mPerDeviceData[i].perSetData = nullptr;
  86. continue;
  87. }
  88. mPerDeviceData[i].perSetData = mAlloc.alloc<PerSetData>(numSets);
  89. mPerDeviceData[i].sampledImages = mAlloc.alloc<VkImage>(numTextures);
  90. mPerDeviceData[i].storageImages = mAlloc.alloc<VkImage>(numStorageTextures);
  91. mPerDeviceData[i].uniformBuffers = mAlloc.alloc<VkBuffer>(numParamBlocks);
  92. mPerDeviceData[i].buffers = mAlloc.alloc<VkBuffer>(numBuffers);
  93. mPerDeviceData[i].samplers = mAlloc.alloc<VkSampler>(numSamplers);
  94. bs_zero_out(mPerDeviceData[i].sampledImages, numTextures);
  95. bs_zero_out(mPerDeviceData[i].storageImages, numStorageTextures);
  96. bs_zero_out(mPerDeviceData[i].uniformBuffers, numParamBlocks);
  97. bs_zero_out(mPerDeviceData[i].buffers, numBuffers);
  98. bs_zero_out(mPerDeviceData[i].samplers, numSamplers);
  99. VulkanDescriptorManager& descManager = devices[i]->getDescriptorManager();
  100. VulkanSampler* vkDefaultSampler = defaultSampler->getResource(i);
  101. for (UINT32 j = 0; j < numSets; j++)
  102. {
  103. UINT32 numBindingsPerSet = vkParamInfo.getNumBindings(j);
  104. PerSetData& perSetData = mPerDeviceData[i].perSetData[j];
  105. new (&perSetData.sets) Vector<VulkanDescriptorSet*>();
  106. perSetData.writeSetInfos = mAlloc.alloc<VkWriteDescriptorSet>(numBindingsPerSet);
  107. perSetData.writeInfos = mAlloc.alloc<WriteInfo>(numBindingsPerSet);
  108. VulkanDescriptorLayout* layout = vkParamInfo.getLayout(i, j);
  109. perSetData.numElements = numBindingsPerSet;
  110. perSetData.latestSet = descManager.createSet(layout);
  111. perSetData.sets.push_back(perSetData.latestSet);
  112. VkDescriptorSetLayoutBinding* perSetBindings = vkParamInfo.getBindings(j);
  113. GpuParamObjectType* types = vkParamInfo.getLayoutTypes(j);
  114. for (UINT32 k = 0; k < numBindingsPerSet; k++)
  115. {
  116. // Note: Instead of using one structure per binding, it's possible to update multiple at once
  117. // by specifying larger descriptorCount, if they all share type and shader stages.
  118. VkWriteDescriptorSet& writeSetInfo = perSetData.writeSetInfos[k];
  119. writeSetInfo.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  120. writeSetInfo.pNext = nullptr;
  121. writeSetInfo.dstSet = VK_NULL_HANDLE;
  122. writeSetInfo.dstBinding = perSetBindings[k].binding;
  123. writeSetInfo.dstArrayElement = 0;
  124. writeSetInfo.descriptorCount = perSetBindings[k].descriptorCount;
  125. writeSetInfo.descriptorType = perSetBindings[k].descriptorType;
  126. bool isImage = writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
  127. writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
  128. writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
  129. if (isImage)
  130. {
  131. bool isLoadStore = writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  132. VkDescriptorImageInfo& imageInfo = perSetData.writeInfos[k].image;
  133. imageInfo.sampler = vkDefaultSampler->getHandle();
  134. if(isLoadStore)
  135. {
  136. imageInfo.imageView = vkTexManager.getDummyImageView(types[k], i);
  137. imageInfo.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
  138. }
  139. else
  140. {
  141. imageInfo.imageView = vkTexManager.getDummyImageView(types[k], i);
  142. imageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  143. }
  144. writeSetInfo.pImageInfo = &imageInfo;
  145. writeSetInfo.pBufferInfo = nullptr;
  146. writeSetInfo.pTexelBufferView = nullptr;
  147. }
  148. else
  149. {
  150. bool useView = writeSetInfo.descriptorType != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER &&
  151. writeSetInfo.descriptorType != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  152. if (!useView)
  153. {
  154. VkDescriptorBufferInfo& bufferInfo = perSetData.writeInfos[k].buffer;
  155. bufferInfo.offset = 0;
  156. bufferInfo.range = VK_WHOLE_SIZE;
  157. if(writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
  158. bufferInfo.buffer = vkBufManager.getDummyUniformBuffer(i);
  159. else
  160. bufferInfo.buffer = vkBufManager.getDummyStructuredBuffer(i);
  161. writeSetInfo.pBufferInfo = &bufferInfo;
  162. writeSetInfo.pTexelBufferView = nullptr;
  163. }
  164. else
  165. {
  166. writeSetInfo.pBufferInfo = nullptr;
  167. bool isLoadStore = writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
  168. VkBufferView bufferView;
  169. if(isLoadStore)
  170. bufferView = vkBufManager.getDummyStorageBufferView(i);
  171. else
  172. bufferView = vkBufManager.getDummyReadBufferView(i);
  173. perSetData.writeInfos[k].bufferView = bufferView;
  174. writeSetInfo.pBufferInfo = nullptr;
  175. writeSetInfo.pTexelBufferView = &perSetData.writeInfos[k].bufferView;
  176. }
  177. writeSetInfo.pImageInfo = nullptr;
  178. }
  179. }
  180. }
  181. }
  182. GpuParams::initialize();
  183. }
  184. void VulkanGpuParams::setParamBlockBuffer(UINT32 set, UINT32 slot, const SPtr<GpuParamBlockBuffer>& paramBlockBuffer)
  185. {
  186. GpuParams::setParamBlockBuffer(set, slot, paramBlockBuffer);
  187. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  188. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  189. if(bindingIdx == (UINT32)-1)
  190. {
  191. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  192. toString(slot) + ".");
  193. return;
  194. }
  195. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::ParamBlock, set, slot);
  196. Lock lock(mMutex);
  197. VulkanGpuParamBlockBuffer* vulkanParamBlockBuffer =
  198. static_cast<VulkanGpuParamBlockBuffer*>(paramBlockBuffer.get());
  199. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  200. {
  201. if (mPerDeviceData[i].perSetData == nullptr)
  202. continue;
  203. VulkanBuffer* bufferRes;
  204. if (vulkanParamBlockBuffer != nullptr)
  205. bufferRes = vulkanParamBlockBuffer->getResource(i);
  206. else
  207. bufferRes = nullptr;
  208. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  209. if (bufferRes != nullptr)
  210. {
  211. VkBuffer buffer = bufferRes->getHandle();
  212. perSetData.writeInfos[bindingIdx].buffer.buffer = buffer;
  213. mPerDeviceData[i].uniformBuffers[sequentialIdx] = buffer;
  214. }
  215. else
  216. {
  217. VulkanHardwareBufferManager& vkBufManager = static_cast<VulkanHardwareBufferManager&>(
  218. HardwareBufferManager::instance());
  219. perSetData.writeInfos[bindingIdx].buffer.buffer = vkBufManager.getDummyUniformBuffer(i);
  220. mPerDeviceData[i].uniformBuffers[sequentialIdx] = VK_NULL_HANDLE;
  221. }
  222. }
  223. mSetsDirty[set] = true;
  224. }
  225. void VulkanGpuParams::setTexture(UINT32 set, UINT32 slot, const SPtr<Texture>& texture, const TextureSurface& surface)
  226. {
  227. GpuParams::setTexture(set, slot, texture, surface);
  228. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  229. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  230. if (bindingIdx == (UINT32)-1)
  231. {
  232. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  233. toString(slot) + ".");
  234. return;
  235. }
  236. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::Texture, set, slot);
  237. Lock lock(mMutex);
  238. VulkanTexture* vulkanTexture = static_cast<VulkanTexture*>(texture.get());
  239. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  240. {
  241. if (mPerDeviceData[i].perSetData == nullptr)
  242. continue;
  243. VulkanImage* imageRes;
  244. if (vulkanTexture != nullptr)
  245. imageRes = vulkanTexture->getResource(i);
  246. else
  247. imageRes = nullptr;
  248. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  249. if (imageRes != nullptr)
  250. {
  251. auto& texProps = texture->getProperties();
  252. TextureSurface actualSurface = surface;
  253. if (surface.numMipLevels == 0)
  254. actualSurface.numMipLevels = texProps.getNumMipmaps() + 1;
  255. if(surface.numFaces == 0)
  256. actualSurface.numFaces = texProps.getNumFaces();
  257. perSetData.writeInfos[bindingIdx].image.imageView = imageRes->getView(actualSurface, false);
  258. mPerDeviceData[i].sampledImages[sequentialIdx] = imageRes->getHandle();
  259. }
  260. else
  261. {
  262. VulkanTextureManager& vkTexManager = static_cast<VulkanTextureManager&>(
  263. TextureManager::instance());
  264. GpuParamObjectType* types = vkParamInfo.getLayoutTypes(set);
  265. GpuParamObjectType type = types[bindingIdx];
  266. perSetData.writeInfos[bindingIdx].image.imageView = vkTexManager.getDummyImageView(type, i);
  267. mPerDeviceData[i].sampledImages[sequentialIdx] = VK_NULL_HANDLE;
  268. }
  269. }
  270. mSetsDirty[set] = true;
  271. }
  272. void VulkanGpuParams::setLoadStoreTexture(UINT32 set, UINT32 slot, const SPtr<Texture>& texture,
  273. const TextureSurface& surface)
  274. {
  275. GpuParams::setLoadStoreTexture(set, slot, texture, surface);
  276. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  277. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  278. if (bindingIdx == (UINT32)-1)
  279. {
  280. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  281. toString(slot) + ".");
  282. return;
  283. }
  284. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::LoadStoreTexture, set, slot);
  285. Lock lock(mMutex);
  286. VulkanTexture* vulkanTexture = static_cast<VulkanTexture*>(texture.get());
  287. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  288. {
  289. if (mPerDeviceData[i].perSetData == nullptr)
  290. continue;
  291. VulkanImage* imageRes;
  292. if (vulkanTexture != nullptr)
  293. imageRes = vulkanTexture->getResource(i);
  294. else
  295. imageRes = nullptr;
  296. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  297. if (imageRes != nullptr)
  298. {
  299. perSetData.writeInfos[bindingIdx].image.imageView = imageRes->getView(surface, false);
  300. mPerDeviceData[i].storageImages[sequentialIdx] = imageRes->getHandle();
  301. }
  302. else
  303. {
  304. VulkanTextureManager& vkTexManager = static_cast<VulkanTextureManager&>(
  305. TextureManager::instance());
  306. GpuParamObjectType* types = vkParamInfo.getLayoutTypes(set);
  307. GpuParamObjectType type = types[bindingIdx];
  308. perSetData.writeInfos[bindingIdx].image.imageView = vkTexManager.getDummyImageView(type, i);
  309. mPerDeviceData[i].storageImages[sequentialIdx] = VK_NULL_HANDLE;
  310. }
  311. }
  312. mSetsDirty[set] = true;
  313. }
  314. void VulkanGpuParams::setBuffer(UINT32 set, UINT32 slot, const SPtr<GpuBuffer>& buffer)
  315. {
  316. GpuParams::setBuffer(set, slot, buffer);
  317. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  318. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  319. if (bindingIdx == (UINT32)-1)
  320. {
  321. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  322. toString(slot) + ".");
  323. return;
  324. }
  325. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::Buffer, set, slot);
  326. Lock lock(mMutex);
  327. VulkanGpuBuffer* vulkanBuffer = static_cast<VulkanGpuBuffer*>(buffer.get());
  328. for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  329. {
  330. if (mPerDeviceData[i].perSetData == nullptr)
  331. continue;
  332. VulkanBuffer* bufferRes;
  333. if (vulkanBuffer != nullptr)
  334. bufferRes = vulkanBuffer->getResource(i);
  335. else
  336. bufferRes = nullptr;
  337. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  338. VkWriteDescriptorSet& writeSetInfo = perSetData.writeSetInfos[bindingIdx];
  339. bool useView = writeSetInfo.descriptorType != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  340. if (useView)
  341. {
  342. VkBufferView bufferView;
  343. if (bufferRes != nullptr)
  344. {
  345. bufferView = bufferRes->getView();
  346. mPerDeviceData[i].buffers[sequentialIdx] = bufferRes->getHandle();
  347. }
  348. else
  349. {
  350. VulkanHardwareBufferManager& vkBufManager = static_cast<VulkanHardwareBufferManager&>(
  351. HardwareBufferManager::instance());
  352. bool isLoadStore = writeSetInfo.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
  353. if (isLoadStore)
  354. bufferView = vkBufManager.getDummyStorageBufferView(i);
  355. else
  356. bufferView = vkBufManager.getDummyReadBufferView(i);
  357. mPerDeviceData[i].buffers[sequentialIdx] = nullptr;
  358. }
  359. perSetData.writeInfos[bindingIdx].bufferView = bufferView;
  360. writeSetInfo.pTexelBufferView = &perSetData.writeInfos[bindingIdx].bufferView;
  361. }
  362. else // Structured storage buffer
  363. {
  364. if (bufferRes != nullptr)
  365. {
  366. VkBuffer vkBuffer = bufferRes->getHandle();
  367. perSetData.writeInfos[bindingIdx].buffer.buffer = vkBuffer;
  368. mPerDeviceData[i].buffers[sequentialIdx] = vkBuffer;
  369. }
  370. else
  371. {
  372. VulkanHardwareBufferManager& vkBufManager = static_cast<VulkanHardwareBufferManager&>(
  373. HardwareBufferManager::instance());
  374. perSetData.writeInfos[bindingIdx].buffer.buffer = vkBufManager.getDummyStructuredBuffer(i);
  375. mPerDeviceData[i].buffers[sequentialIdx] = nullptr;
  376. }
  377. writeSetInfo.pTexelBufferView = nullptr;
  378. }
  379. }
  380. mSetsDirty[set] = true;
  381. }
  382. void VulkanGpuParams::setSamplerState(UINT32 set, UINT32 slot, const SPtr<SamplerState>& sampler)
  383. {
  384. GpuParams::setSamplerState(set, slot, sampler);
  385. VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);
  386. UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
  387. if (bindingIdx == (UINT32)-1)
  388. {
  389. LOGERR("Provided set/slot combination is not used by the GPU program: " + toString(set) + "," +
  390. toString(slot) + ".");
  391. return;
  392. }
  393. UINT32 sequentialIdx = vkParamInfo.getSequentialSlot(GpuPipelineParamInfo::ParamType::SamplerState, set, slot);
  394. Lock lock(mMutex);
  395. VulkanSamplerState* vulkanSampler = static_cast<VulkanSamplerState*>(sampler.get());
  396. for(UINT32 i = 0; i < BS_MAX_DEVICES; i++)
  397. {
  398. if (mPerDeviceData[i].perSetData == nullptr)
  399. continue;
  400. PerSetData& perSetData = mPerDeviceData[i].perSetData[set];
  401. VulkanSampler* samplerRes;
  402. if (vulkanSampler != nullptr)
  403. samplerRes = vulkanSampler->getResource(i);
  404. else
  405. samplerRes = nullptr;
  406. if (samplerRes != nullptr)
  407. {
  408. VkSampler vkSampler = samplerRes->getHandle();
  409. perSetData.writeInfos[bindingIdx].image.sampler = vkSampler;
  410. mPerDeviceData[i].samplers[sequentialIdx] = vkSampler;
  411. }
  412. else
  413. {
  414. VulkanSamplerState* defaultSampler =
  415. static_cast<VulkanSamplerState*>(SamplerState::getDefault().get());
  416. VkSampler vkSampler = defaultSampler->getResource(i)->getHandle();;
  417. perSetData.writeInfos[bindingIdx].image.sampler = vkSampler;
  418. mPerDeviceData[i].samplers[sequentialIdx] = 0;
  419. }
  420. }
  421. mSetsDirty[set] = true;
  422. }
/** Returns the number of descriptor sets used by the pipeline parameter layout. */
UINT32 VulkanGpuParams::getNumSets() const
{
	return mParamInfo->getNumSets();
}
/**
 * Prepares all descriptor sets of this object for binding on the provided command buffer:
 *  - registers every bound resource (param blocks, buffers, samplers, textures) with the
 *    command buffer,
 *  - detects internal resource handles that changed since the last bind and marks the
 *    affected sets dirty,
 *  - rewrites dirty sets (reusing an unbound descriptor set if possible, allocating a new
 *    one otherwise), and
 *  - outputs one VkDescriptorSet handle per set into @p sets.
 *
 * @param buffer	Command buffer the parameters are about to be bound on.
 * @param sets		Output array; must have room for getNumSets() entries.
 */
void VulkanGpuParams::prepareForBind(VulkanCmdBuffer& buffer, VkDescriptorSet* sets)
{
	UINT32 deviceIdx = buffer.getDeviceIdx();

	PerDeviceData& perDeviceData = mPerDeviceData[deviceIdx];
	if (perDeviceData.perSetData == nullptr)
		return; // This object was never initialized for the command buffer's device

	VulkanGpuPipelineParamInfo& vkParamInfo = static_cast<VulkanGpuPipelineParamInfo&>(*mParamInfo);

	UINT32 numParamBlocks = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::ParamBlock);
	UINT32 numTextures = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::Texture);
	UINT32 numStorageTextures = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::LoadStoreTexture);
	UINT32 numBuffers = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::Buffer);
	UINT32 numSamplers = vkParamInfo.getNumElements(GpuPipelineParamInfo::ParamType::SamplerState);
	UINT32 numSets = vkParamInfo.getNumSets();

	Lock lock(mMutex);

	// Registers resources with the command buffer, and check if internal resource handled changed (in which case set
	// needs updating - this can happen due to resource writes, as internally system might find it more performant
	// to discard used resources and create new ones).
	// Note: Makes the assumption that this object (and all of the resources it holds) are externally locked, and will
	// not be modified on another thread while being bound.
	for (UINT32 i = 0; i < numParamBlocks; i++)
	{
		if (mParamBlockBuffers[i] == nullptr)
			continue;

		VulkanGpuParamBlockBuffer* element = static_cast<VulkanGpuParamBlockBuffer*>(mParamBlockBuffers[i].get());
		VulkanBuffer* resource = element->getResource(deviceIdx);
		if (resource == nullptr)
			continue;

		// Register with command buffer
		buffer.registerResource(resource, VK_ACCESS_UNIFORM_READ_BIT, VulkanUseFlag::Read);

		// Check if internal resource changed from what was previously bound in the descriptor set
		assert(perDeviceData.uniformBuffers[i] != VK_NULL_HANDLE);

		VkBuffer vkBuffer = resource->getHandle();
		if(perDeviceData.uniformBuffers[i] != vkBuffer)
		{
			perDeviceData.uniformBuffers[i] = vkBuffer;

			UINT32 set, slot;
			mParamInfo->getBinding(GpuPipelineParamInfo::ParamType::ParamBlock, i, set, slot);

			UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
			perDeviceData.perSetData[set].writeInfos[bindingIdx].buffer.buffer = vkBuffer;

			mSetsDirty[set] = true;
		}
	}

	for (UINT32 i = 0; i < numBuffers; i++)
	{
		if (mBuffers[i] == nullptr)
			continue;

		VulkanGpuBuffer* element = static_cast<VulkanGpuBuffer*>(mBuffers[i].get());
		VulkanBuffer* resource = element->getResource(deviceIdx);
		if (resource == nullptr)
			continue;

		// Register with command buffer; GPU-writable buffers also need write access/use flags
		VkAccessFlags accessFlags = VK_ACCESS_SHADER_READ_BIT;
		VulkanUseFlags useFlags = VulkanUseFlag::Read;

		if (element->getProperties().getRandomGpuWrite())
		{
			accessFlags |= VK_ACCESS_SHADER_WRITE_BIT;
			useFlags |= VulkanUseFlag::Write;
		}

		buffer.registerResource(resource, accessFlags, useFlags);

		// Check if internal resource changed from what was previously bound in the descriptor set
		assert(perDeviceData.buffers[i] != VK_NULL_HANDLE);

		VkBuffer vkBuffer = resource->getHandle();
		if (perDeviceData.buffers[i] != vkBuffer)
		{
			perDeviceData.buffers[i] = vkBuffer;

			UINT32 set, slot;
			mParamInfo->getBinding(GpuPipelineParamInfo::ParamType::Buffer, i, set, slot);

			UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);

			PerSetData& perSetData = perDeviceData.perSetData[set];
			VkWriteDescriptorSet& writeSetInfo = perSetData.writeSetInfos[bindingIdx];

			// Texel buffers rebind through the buffer view; structured buffers through the buffer info
			bool useView = writeSetInfo.descriptorType != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
			if (useView)
			{
				perSetData.writeInfos[bindingIdx].bufferView = resource->getView();
				perSetData.writeSetInfos[bindingIdx].pTexelBufferView = &perSetData.writeInfos[bindingIdx].bufferView;
			}
			else // Structured storage buffer
			{
				perSetData.writeInfos[bindingIdx].buffer.buffer = vkBuffer;
				perSetData.writeSetInfos[bindingIdx].pTexelBufferView = nullptr;
			}

			mSetsDirty[set] = true;
		}
	}

	for (UINT32 i = 0; i < numSamplers; i++)
	{
		if (mSamplerStates[i] == nullptr)
			continue;

		VulkanSamplerState* element = static_cast<VulkanSamplerState*>(mSamplerStates[i].get());
		VulkanSampler* resource = element->getResource(deviceIdx);
		if (resource == nullptr)
			continue;

		// Register with command buffer
		buffer.registerResource(resource, VulkanUseFlag::Read);

		// Check if internal resource changed from what was previously bound in the descriptor set
		assert(perDeviceData.samplers[i] != VK_NULL_HANDLE);

		VkSampler vkSampler = resource->getHandle();
		if (perDeviceData.samplers[i] != vkSampler)
		{
			perDeviceData.samplers[i] = vkSampler;

			UINT32 set, slot;
			mParamInfo->getBinding(GpuPipelineParamInfo::ParamType::SamplerState, i, set, slot);

			UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
			perDeviceData.perSetData[set].writeInfos[bindingIdx].image.sampler = vkSampler;

			mSetsDirty[set] = true;
		}
	}

	for (UINT32 i = 0; i < numStorageTextures; i++)
	{
		if (mLoadStoreTextureData[i].texture == nullptr)
			continue;

		VulkanTexture* element = static_cast<VulkanTexture*>(mLoadStoreTextureData[i].texture.get());
		VulkanImage* resource = element->getResource(deviceIdx);
		if (resource == nullptr)
			continue;

		const TextureSurface& surface = mLoadStoreTextureData[i].surface;
		VkImageSubresourceRange range = resource->getRange(surface);

		// Register with command buffer; load-store images stay in GENERAL layout for read+write
		VulkanUseFlags useFlags = VulkanUseFlag::Read | VulkanUseFlag::Write;
		buffer.registerResource(resource, range, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL, useFlags,
			ResourceUsage::ShaderBind);

		// Check if internal resource changed from what was previously bound in the descriptor set
		assert(perDeviceData.storageImages[i] != VK_NULL_HANDLE);

		VkImage vkImage = resource->getHandle();
		if (perDeviceData.storageImages[i] != vkImage)
		{
			perDeviceData.storageImages[i] = vkImage;

			UINT32 set, slot;
			mParamInfo->getBinding(GpuPipelineParamInfo::ParamType::LoadStoreTexture, i, set, slot);

			UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);
			perDeviceData.perSetData[set].writeInfos[bindingIdx].image.imageView = resource->getView(surface, false);;

			mSetsDirty[set] = true;
		}
	}

	for (UINT32 i = 0; i < numTextures; i++)
	{
		if (mSampledTextureData[i].texture == nullptr)
			continue;

		VulkanTexture* element = static_cast<VulkanTexture*>(mSampledTextureData[i].texture.get());
		VulkanImage* resource = element->getResource(deviceIdx);
		if (resource == nullptr)
			continue;

		// Register with command buffer
		const TextureProperties& props = element->getProperties();

		const TextureSurface& surface = mSampledTextureData[i].surface;
		VkImageSubresourceRange range = resource->getRange(surface);

		// Keep dynamic textures in general layout, so they can be easily mapped by CPU
		VkImageLayout layout;
		if (props.getUsage() & TU_DYNAMIC)
			layout = VK_IMAGE_LAYOUT_GENERAL;
		else
			layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

		buffer.registerResource(resource, range, layout, layout, VulkanUseFlag::Read, ResourceUsage::ShaderBind);

		// Actual layout might be different than requested if the image is also used as a FB attachment
		layout = buffer.getCurrentLayout(resource, range, true);

		// Check if internal resource changed from what was previously bound in the descriptor set
		assert(perDeviceData.sampledImages[i] != VK_NULL_HANDLE);

		UINT32 set, slot;
		mParamInfo->getBinding(GpuPipelineParamInfo::ParamType::Texture, i, set, slot);

		UINT32 bindingIdx = vkParamInfo.getBindingIdx(set, slot);

		VkDescriptorImageInfo& imgInfo = perDeviceData.perSetData[set].writeInfos[bindingIdx].image;

		VkImage vkImage = resource->getHandle();
		if (perDeviceData.sampledImages[i] != vkImage)
		{
			perDeviceData.sampledImages[i] = vkImage;
			imgInfo.imageView = resource->getView(surface, false);

			mSetsDirty[set] = true;
		}

		// The descriptor must also be rewritten if only the image layout changed
		if(imgInfo.imageLayout != layout)
		{
			imgInfo.imageLayout = layout;
			mSetsDirty[set] = true;
		}
	}

	// Acquire sets as needed, and updated their contents if dirty
	VulkanRenderAPI& rapi = static_cast<VulkanRenderAPI&>(RenderAPI::instance());
	VulkanDevice& device = *rapi._getDevice(deviceIdx);
	VulkanDescriptorManager& descManager = device.getDescriptorManager();

	for (UINT32 i = 0; i < numSets; i++)
	{
		PerSetData& perSetData = perDeviceData.perSetData[i];

		if (!mSetsDirty[i]) // Set not dirty, just use the last one we wrote (this is fine even across multiple command buffers)
			continue;

		// Set is dirty, we need to update
		//// Use latest unless already used, otherwise try to find an unused one
		if(perSetData.latestSet->isBound()) // Checking this is okay, because it's only modified below when we call registerResource, which is under the same lock as this
		{
			perSetData.latestSet = nullptr;

			for(auto& entry : perSetData.sets)
			{
				if(!entry->isBound())
				{
					perSetData.latestSet = entry;
					break;
				}
			}

			// Cannot find an empty set, allocate a new one
			if (perSetData.latestSet == nullptr)
			{
				VulkanDescriptorLayout* layout = vkParamInfo.getLayout(deviceIdx, i);
				perSetData.latestSet = descManager.createSet(layout);
				perSetData.sets.push_back(perSetData.latestSet);
			}
		}

		// Note: Currently I write to the entire set at once, but it might be beneficial to remember only the exact
		// entries that were updated, and only write to them individually.
		perSetData.latestSet->write(perSetData.writeSetInfos, perSetData.numElements);

		mSetsDirty[i] = false;
	}

	// Register the chosen sets with the command buffer and output their handles
	for (UINT32 i = 0; i < numSets; i++)
	{
		VulkanDescriptorSet* set = perDeviceData.perSetData[i].latestSet;

		buffer.registerResource(set, VulkanUseFlag::Read);
		sets[i] = set->getHandle();
	}
}
  643. }}