// BsVulkanGpuPipelineParamInfo.cpp
//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsVulkanGpuPipelineParamInfo.h"
#include "BsVulkanUtility.h"
#include "BsVulkanRenderAPI.h"
#include "BsVulkanDevice.h"
#include "RenderAPI/BsGpuParamDesc.h"
namespace bs { namespace ct
{
/**
 * Records the construction parameters and zero-initializes the Vulkan-specific
 * members; the actual descriptor layout construction happens in initialize().
 *
 * @param desc			Generic description of the pipeline parameters.
 * @param deviceMask	Mask of devices the parameter info should be usable on.
 */
VulkanGpuPipelineParamInfo::VulkanGpuPipelineParamInfo(const GPU_PIPELINE_PARAMS_DESC& desc, GpuDeviceFlags deviceMask)
	: GpuPipelineParamInfo(desc, deviceMask), mDeviceMask(deviceMask), mSetExtraInfos(nullptr), mLayouts()
	, mLayoutInfos()
{ }
  14. VulkanGpuPipelineParamInfo::~VulkanGpuPipelineParamInfo()
  15. {
  16. }
/**
 * Builds Vulkan descriptor set layout descriptions from the generic pipeline
 * parameter information, then acquires a VulkanDescriptorLayout per descriptor
 * set for every device covered by the device mask.
 */
void VulkanGpuPipelineParamInfo::initialize()
{
	VulkanRenderAPI& rapi = static_cast<VulkanRenderAPI&>(RenderAPI::instance());

	VulkanDevice* devices[BS_MAX_DEVICES];
	VulkanUtility::getDevices(rapi, mDeviceMask, devices);

	// Count devices selected by the mask; entries not covered are null
	UINT32 numDevices = 0;
	for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
	{
		if (devices[i] != nullptr)
			numDevices++;
	}

	// Total slot count across all sets, used to size the slot-index lookup tables
	UINT32 totalNumSlots = 0;
	for (UINT32 i = 0; i < mNumSets; i++)
		totalNumSlots += mSetInfos[i].numSlots;

	// Reserve everything up-front so all arrays below come from one grouped
	// allocation; the alloc<>() calls that follow carve pieces out of it
	mAlloc.reserve<VkDescriptorSetLayoutBinding>(mNumElements)
		.reserve<GpuParamObjectType>(mNumElements)
		.reserve<LayoutInfo>(mNumSets)
		.reserve<VulkanDescriptorLayout*>(mNumSets * numDevices)
		.reserve<SetExtraInfo>(mNumSets)
		.reserve<UINT32>(totalNumSlots)
		.init();

	mLayoutInfos = mAlloc.alloc<LayoutInfo>(mNumSets);
	VkDescriptorSetLayoutBinding* bindings = mAlloc.alloc<VkDescriptorSetLayoutBinding>(mNumElements);
	GpuParamObjectType* types = mAlloc.alloc<GpuParamObjectType>(mNumElements);

	// Per-device array of per-set layout pointers; null for masked-out devices
	for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
	{
		if (devices[i] == nullptr)
		{
			mLayouts[i] = nullptr;
			continue;
		}

		mLayouts[i] = mAlloc.alloc<VulkanDescriptorLayout*>(mNumSets);
	}

	mSetExtraInfos = mAlloc.alloc<SetExtraInfo>(mNumSets);

	// Zero so that fields not touched below (e.g. pImmutableSamplers) stay null
	if(bindings != nullptr)
		bs_zero_out(bindings, mNumElements);

	if (types != nullptr)
		bs_zero_out(types, mNumElements);

	// First pass: hand out a global binding index to every used slot, count
	// bindings per set, and record a slot -> global-binding lookup table
	UINT32 globalBindingIdx = 0;
	for (UINT32 i = 0; i < mNumSets; i++)
	{
		mSetExtraInfos[i].slotIndices = mAlloc.alloc<UINT32>(mSetInfos[i].numSlots);

		mLayoutInfos[i].numBindings = 0;
		mLayoutInfos[i].bindings = nullptr;
		mLayoutInfos[i].types = nullptr;

		for (UINT32 j = 0; j < mSetInfos[i].numSlots; j++)
		{
			// Slot unused by any parameter; mark it so lookups can detect that
			if (mSetInfos[i].slotIndices[j] == -1)
			{
				mSetExtraInfos[i].slotIndices[j] = -1;
				continue;
			}

			VkDescriptorSetLayoutBinding& binding = bindings[globalBindingIdx];
			binding.binding = j;

			mSetExtraInfos[i].slotIndices[j] = globalBindingIdx;
			mLayoutInfos[i].numBindings++;
			globalBindingIdx++;
		}
	}

	// Second pass: point each set's layout info at its contiguous sub-range of
	// the shared bindings/types arrays (valid because indices were handed out
	// set-by-set above)
	UINT32 offset = 0;
	for (UINT32 i = 0; i < mNumSets; i++)
	{
		mLayoutInfos[i].bindings = &bindings[offset];
		mLayoutInfos[i].types = &types[offset];
		offset += mLayoutInfos[i].numBindings;
	}

	// Maps GpuProgramType (which is also the index into mParamDescs) to the
	// matching Vulkan shader stage flag
	VkShaderStageFlags stageFlagsLookup[6];
	stageFlagsLookup[GPT_VERTEX_PROGRAM] = VK_SHADER_STAGE_VERTEX_BIT;
	stageFlagsLookup[GPT_HULL_PROGRAM] = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
	stageFlagsLookup[GPT_DOMAIN_PROGRAM] = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
	stageFlagsLookup[GPT_GEOMETRY_PROGRAM] = VK_SHADER_STAGE_GEOMETRY_BIT;
	stageFlagsLookup[GPT_FRAGMENT_PROGRAM] = VK_SHADER_STAGE_FRAGMENT_BIT;
	stageFlagsLookup[GPT_COMPUTE_PROGRAM] = VK_SHADER_STAGE_COMPUTE_BIT;

	// Fill in descriptor type, count and stage flags per binding, one GPU
	// program stage at a time (stages sharing a binding OR their flags together)
	UINT32 numParamDescs = sizeof(mParamDescs) / sizeof(mParamDescs[0]);
	for (UINT32 i = 0; i < numParamDescs; i++)
	{
		const SPtr<GpuParamDesc>& paramDesc = mParamDescs[i];
		if (paramDesc == nullptr)
			continue;

		// Param blocks: descriptor type only; no object type is recorded for them
		auto setUpBlockBindings = [&](auto& params, VkDescriptorType descType)
		{
			for (auto& entry : params)
			{
				UINT32 bindingIdx = getBindingIdx(entry.second.set, entry.second.slot);
				assert(bindingIdx != -1);

				LayoutInfo& layoutInfo = mLayoutInfos[entry.second.set];
				VkDescriptorSetLayoutBinding& binding = layoutInfo.bindings[bindingIdx];
				binding.descriptorCount = 1;
				binding.stageFlags |= stageFlagsLookup[i];
				binding.descriptorType = descType;
			}
		};

		// Object params: same as above, but also records the GPU object type
		auto setUpBindings = [&](auto& params, VkDescriptorType descType)
		{
			for (auto& entry : params)
			{
				UINT32 bindingIdx = getBindingIdx(entry.second.set, entry.second.slot);
				assert(bindingIdx != -1);

				LayoutInfo& layoutInfo = mLayoutInfos[entry.second.set];
				VkDescriptorSetLayoutBinding& binding = layoutInfo.bindings[bindingIdx];
				binding.descriptorCount = 1;
				binding.stageFlags |= stageFlagsLookup[i];
				binding.descriptorType = descType;

				layoutInfo.types[bindingIdx] = entry.second.type;
			}
		};

		// Note: Assuming all textures and samplers use the same set/slot combination, and that they're combined
		setUpBlockBindings(paramDesc->paramBlocks, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
		setUpBindings(paramDesc->textures, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
		setUpBindings(paramDesc->loadStoreTextures, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
		//setUpBindings(paramDesc->samplers, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

		// Set up buffer bindings; descriptor type depends on the buffer kind
		for (auto& entry : paramDesc->buffers)
		{
			UINT32 bindingIdx = getBindingIdx(entry.second.set, entry.second.slot);
			assert(bindingIdx != -1);

			LayoutInfo& layoutInfo = mLayoutInfos[entry.second.set];
			VkDescriptorSetLayoutBinding& binding = layoutInfo.bindings[bindingIdx];
			binding.descriptorCount = 1;
			binding.stageFlags |= stageFlagsLookup[i];

			switch(entry.second.type)
			{
			default:
			case GPOT_BYTE_BUFFER:
				binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
				break;
			case GPOT_RWBYTE_BUFFER:
				binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
				break;
			case GPOT_STRUCTURED_BUFFER:
			case GPOT_RWSTRUCTURED_BUFFER:
				binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
				break;
			}

			layoutInfo.types[bindingIdx] = entry.second.type;
		}
	}

	// Allocate layouts per-device
	for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
	{
		if (mLayouts[i] == nullptr)
			continue;

		VulkanDescriptorManager& descManager = devices[i]->getDescriptorManager();
		for (UINT32 j = 0; j < mNumSets; j++)
			mLayouts[i][j] = descManager.getLayout(mLayoutInfos[j].bindings, mLayoutInfos[j].numBindings);
	}
}
  164. VulkanDescriptorLayout* VulkanGpuPipelineParamInfo::getLayout(UINT32 deviceIdx, UINT32 layoutIdx) const
  165. {
  166. if (deviceIdx >= BS_MAX_DEVICES || mLayouts[deviceIdx] == nullptr)
  167. return nullptr;
  168. return mLayouts[deviceIdx][layoutIdx];
  169. }
}}