BsVulkanCommandBufferManager.cpp 7.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "Managers/BsVulkanCommandBufferManager.h"
#include "BsVulkanCommandBuffer.h"
#include "BsVulkanRenderAPI.h"
#include "BsVulkanDevice.h"
#include "BsVulkanQueue.h"
#include "BsVulkanTexture.h"

#include <algorithm>
  9. namespace bs { namespace ct
  10. {
/**
 * Default-constructs an empty transfer buffer with no device bound.
 * allocate() must not be called on a default-constructed instance (mDevice is
 * null); such objects only exist to be overwritten by the device-aware ctor.
 */
VulkanTransferBuffer::VulkanTransferBuffer()
	:mDevice(nullptr), mType(GQT_GRAPHICS), mQueueIdx(0), mQueue(nullptr), mQueueMask(0), mCB(nullptr), mSyncMask(0)
{ }
  14. VulkanTransferBuffer::VulkanTransferBuffer(VulkanDevice* device, GpuQueueType type, UINT32 queueIdx)
  15. :mDevice(device), mType(type), mQueueIdx(queueIdx), mQueue(nullptr), mQueueMask(0), mCB(nullptr), mSyncMask(0)
  16. {
  17. UINT32 numQueues = device->getNumQueues(mType);
  18. if (numQueues == 0)
  19. {
  20. mType = GQT_GRAPHICS;
  21. numQueues = device->getNumQueues(GQT_GRAPHICS);
  22. }
  23. UINT32 physicalQueueIdx = queueIdx % numQueues;
  24. mQueue = device->getQueue(mType, physicalQueueIdx);
  25. mQueueMask = device->getQueueMask(mType, queueIdx);
  26. }
  27. VulkanTransferBuffer::~VulkanTransferBuffer()
  28. {
  29. if (mCB != nullptr)
  30. mCB->end();
  31. }
  32. void VulkanTransferBuffer::allocate()
  33. {
  34. if (mCB != nullptr)
  35. return;
  36. UINT32 queueFamily = mDevice->getQueueFamily(mType);
  37. mCB = mDevice->getCmdBufferPool().getBuffer(queueFamily, false);
  38. }
/**
 * Records a buffer memory barrier on the internal command buffer.
 * Thin pass-through to VulkanCmdBuffer::memoryBarrier(); mCB must already have
 * been acquired via allocate() or this dereferences null.
 */
void VulkanTransferBuffer::memoryBarrier(VkBuffer buffer, VkAccessFlags srcAccessFlags, VkAccessFlags dstAccessFlags,
	VkPipelineStageFlags srcStage, VkPipelineStageFlags dstStage)
{
	mCB->memoryBarrier(buffer, srcAccessFlags, dstAccessFlags, srcStage, dstStage);
}
/**
 * Records a raw image layout transition on the internal command buffer.
 * Thin pass-through to VulkanCmdBuffer::setLayout(); mCB must already have
 * been acquired via allocate() or this dereferences null.
 */
void VulkanTransferBuffer::setLayout(VkImage image, VkAccessFlags srcAccessFlags, VkAccessFlags dstAccessFlags,
	VkImageLayout oldLayout, VkImageLayout newLayout, const VkImageSubresourceRange& range)
{
	mCB->setLayout(image, srcAccessFlags, dstAccessFlags, oldLayout, newLayout, range);
}
  49. void VulkanTransferBuffer::setLayout(VulkanImage* image, const VkImageSubresourceRange& range,
  50. VkAccessFlags newAccessMask, VkImageLayout newLayout)
  51. {
  52. image->getBarriers(range, mBarriersTemp);
  53. if (mBarriersTemp.size() == 0)
  54. return;
  55. INT32 count = (INT32)mBarriersTemp.size();
  56. for(INT32 i = 0; i < count; i++)
  57. {
  58. VkImageMemoryBarrier& barrier = mBarriersTemp[i];
  59. // Remove barriers that don't signify a layout change
  60. if(barrier.oldLayout == newLayout)
  61. {
  62. if(i < (count - 1))
  63. std::swap(mBarriersTemp[i], mBarriersTemp[count - 1]);
  64. mBarriersTemp.erase(mBarriersTemp.begin() + count - 1);
  65. count--;
  66. i--;
  67. }
  68. }
  69. for(auto& entry : mBarriersTemp)
  70. {
  71. entry.dstAccessMask = newAccessMask;
  72. entry.newLayout = newLayout;
  73. }
  74. vkCmdPipelineBarrier(mCB->getHandle(),
  75. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
  76. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
  77. 0, 0, nullptr,
  78. 0, nullptr,
  79. (UINT32)mBarriersTemp.size(), mBarriersTemp.data());
  80. mBarriersTemp.clear();
  81. }
  82. void VulkanTransferBuffer::flush(bool wait)
  83. {
  84. if (mCB == nullptr)
  85. return;
  86. UINT32 syncMask = mSyncMask & ~mQueueMask; // Don't sync with itself
  87. mCB->end();
  88. mCB->submit(mQueue, mQueueIdx, syncMask);
  89. if (wait)
  90. {
  91. mQueue->waitIdle();
  92. gVulkanCBManager().refreshStates(mDevice->getIndex(), true);
  93. assert(!mCB->isSubmitted());
  94. }
  95. mCB = nullptr;
  96. }
  97. VulkanCommandBufferManager::VulkanCommandBufferManager(const VulkanRenderAPI& rapi)
  98. :mRapi(rapi), mDeviceData(nullptr), mNumDevices(rapi.getNumDevices())
  99. {
  100. mDeviceData = bs_newN<PerDeviceData>(mNumDevices);
  101. for (UINT32 i = 0; i < mNumDevices; i++)
  102. {
  103. SPtr<VulkanDevice> device = rapi._getDevice(i);
  104. for (UINT32 j = 0; j < GQT_COUNT; j++)
  105. {
  106. GpuQueueType queueType = (GpuQueueType)j;
  107. for (UINT32 k = 0; k < BS_MAX_QUEUES_PER_TYPE; k++)
  108. mDeviceData[i].transferBuffers[j][k] = VulkanTransferBuffer(device.get(), queueType, k);
  109. }
  110. }
  111. }
/** Frees the per-device data array allocated in the constructor. */
VulkanCommandBufferManager::~VulkanCommandBufferManager()
{
	bs_deleteN(mDeviceData, mNumDevices);
}
  116. SPtr<CommandBuffer> VulkanCommandBufferManager::createInternal(GpuQueueType type, UINT32 deviceIdx,
  117. UINT32 queueIdx, bool secondary)
  118. {
  119. UINT32 numDevices = mRapi._getNumDevices();
  120. if(deviceIdx >= numDevices)
  121. {
  122. LOGERR("Cannot create command buffer, invalid device index: " + toString(deviceIdx) +
  123. ". Valid range: [0, " + toString(numDevices) + ").");
  124. return nullptr;
  125. }
  126. SPtr<VulkanDevice> device = mRapi._getDevice(deviceIdx);
  127. CommandBuffer* buffer =
  128. new (bs_alloc<VulkanCommandBuffer>()) VulkanCommandBuffer(*device, type, deviceIdx, queueIdx, secondary);
  129. return bs_shared_ptr(buffer);
  130. }
  131. void VulkanCommandBufferManager::getSyncSemaphores(UINT32 deviceIdx, UINT32 syncMask, VulkanSemaphore** semaphores,
  132. UINT32& count)
  133. {
  134. bool semaphoreRequestFailed = false;
  135. SPtr<VulkanDevice> device = mRapi._getDevice(deviceIdx);
  136. UINT32 semaphoreIdx = 0;
  137. for (UINT32 i = 0; i < GQT_COUNT; i++)
  138. {
  139. GpuQueueType queueType = (GpuQueueType)i;
  140. UINT32 numQueues = device->getNumQueues(queueType);
  141. for (UINT32 j = 0; j < numQueues; j++)
  142. {
  143. VulkanQueue* queue = device->getQueue(queueType, j);
  144. VulkanCmdBuffer* lastCB = queue->getLastCommandBuffer();
  145. // Check if a buffer is currently executing on the queue
  146. if (lastCB == nullptr || !lastCB->isSubmitted())
  147. continue;
  148. // Check if we care about this specific queue
  149. UINT32 queueMask = device->getQueueMask(queueType, j);
  150. if ((syncMask & queueMask) == 0)
  151. continue;
  152. VulkanSemaphore* semaphore = lastCB->requestInterQueueSemaphore();
  153. if (semaphore == nullptr)
  154. {
  155. semaphoreRequestFailed = true;
  156. continue;
  157. }
  158. semaphores[semaphoreIdx++] = semaphore;
  159. }
  160. }
  161. count = semaphoreIdx;
  162. if (semaphoreRequestFailed)
  163. {
  164. LOGERR("Failed to allocate semaphores for a command buffer sync. This means some of the dependency requests "
  165. "will not be fulfilled. This happened because a command buffer has too many dependant command "
  166. "buffers. The maximum allowed number is " + toString(BS_MAX_VULKAN_CB_DEPENDENCIES) + " but can be "
  167. "increased by incrementing the value of BS_MAX_VULKAN_CB_DEPENDENCIES.");
  168. }
  169. }
  170. void VulkanCommandBufferManager::refreshStates(UINT32 deviceIdx, bool forceWait)
  171. {
  172. SPtr<VulkanDevice> device = mRapi._getDevice(deviceIdx);
  173. for (UINT32 i = 0; i < GQT_COUNT; i++)
  174. {
  175. UINT32 numQueues = device->getNumQueues((GpuQueueType)i);
  176. for (UINT32 j = 0; j < numQueues; j++)
  177. {
  178. VulkanQueue* queue = device->getQueue((GpuQueueType)i, j);
  179. queue->refreshStates(forceWait, false);
  180. }
  181. }
  182. }
  183. VulkanTransferBuffer* VulkanCommandBufferManager::getTransferBuffer(UINT32 deviceIdx, GpuQueueType type,
  184. UINT32 queueIdx)
  185. {
  186. assert(deviceIdx < mNumDevices);
  187. PerDeviceData& deviceData = mDeviceData[deviceIdx];
  188. VulkanTransferBuffer* transferBuffer = &deviceData.transferBuffers[type][queueIdx];
  189. transferBuffer->allocate();
  190. return transferBuffer;
  191. }
  192. void VulkanCommandBufferManager::flushTransferBuffers(UINT32 deviceIdx)
  193. {
  194. assert(deviceIdx < mNumDevices);
  195. PerDeviceData& deviceData = mDeviceData[deviceIdx];
  196. for (UINT32 i = 0; i < GQT_COUNT; i++)
  197. {
  198. for (UINT32 j = 0; j < BS_MAX_QUEUES_PER_TYPE; j++)
  199. deviceData.transferBuffers[i][j].flush(false);
  200. }
  201. }
/** Returns the global command buffer manager, cast to its Vulkan-specific implementation. */
VulkanCommandBufferManager& gVulkanCBManager()
{
	return static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
}
  206. }}