// BsVulkanCommandBufferManager.cpp
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsVulkanCommandBufferManager.h"
  4. #include "BsVulkanCommandBuffer.h"
  5. #include "BsVulkanRenderAPI.h"
  6. #include "BsVulkanDevice.h"
  7. #include "BsVulkanQueue.h"
  8. namespace bs
  9. {
  10. VulkanTransferBuffer::VulkanTransferBuffer()
  11. :mDevice(nullptr), mType(GQT_GRAPHICS), mQueueIdx(0), mQueue(nullptr), mCB(nullptr), mSyncMask(0), mQueueMask(0)
  12. { }
  13. VulkanTransferBuffer::VulkanTransferBuffer(VulkanDevice* device, GpuQueueType type, UINT32 queueIdx)
  14. :mDevice(device), mType(type), mQueueIdx(queueIdx), mQueue(nullptr), mCB(nullptr), mSyncMask(0), mQueueMask(0)
  15. {
  16. UINT32 numQueues = device->getNumQueues(mType);
  17. if (numQueues == 0)
  18. {
  19. mType = GQT_GRAPHICS;
  20. numQueues = device->getNumQueues(GQT_GRAPHICS);
  21. }
  22. UINT32 physicalQueueIdx = queueIdx % numQueues;
  23. mQueue = device->getQueue(mType, physicalQueueIdx);
  24. mQueueMask = device->getQueueMask(mType, queueIdx);
  25. }
  26. VulkanTransferBuffer::~VulkanTransferBuffer()
  27. {
  28. if (mCB != nullptr)
  29. mCB->end();
  30. }
  31. void VulkanTransferBuffer::allocate()
  32. {
  33. if (mCB != nullptr)
  34. return;
  35. UINT32 queueFamily = mDevice->getQueueFamily(mType);
  36. mCB = mDevice->getCmdBufferPool().getBuffer(queueFamily, false);
  37. }
  38. void VulkanTransferBuffer::memoryBarrier(VkBuffer buffer, VkAccessFlags srcAccessFlags, VkAccessFlags dstAccessFlags,
  39. VkPipelineStageFlags srcStage, VkPipelineStageFlags dstStage)
  40. {
  41. VkBufferMemoryBarrier barrier;
  42. barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  43. barrier.pNext = nullptr;
  44. barrier.srcAccessMask = srcAccessFlags;
  45. barrier.dstAccessMask = dstAccessFlags;
  46. barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  47. barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  48. barrier.buffer = buffer;
  49. barrier.offset = 0;
  50. barrier.size = VK_WHOLE_SIZE;
  51. vkCmdPipelineBarrier(mCB->getHandle(),
  52. srcStage,
  53. dstStage,
  54. 0, 0, nullptr,
  55. 1, &barrier,
  56. 0, nullptr);
  57. }
  58. void VulkanTransferBuffer::setLayout(VkImage image, VkAccessFlags srcAccessFlags, VkAccessFlags dstAccessFlags,
  59. VkImageLayout oldLayout, VkImageLayout newLayout, const VkImageSubresourceRange& range)
  60. {
  61. VkImageMemoryBarrier barrier;
  62. barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  63. barrier.pNext = nullptr;
  64. barrier.srcAccessMask = srcAccessFlags;
  65. barrier.dstAccessMask = dstAccessFlags;
  66. barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  67. barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  68. barrier.oldLayout = oldLayout;
  69. barrier.newLayout = newLayout;
  70. barrier.image = image;
  71. barrier.subresourceRange = range;
  72. vkCmdPipelineBarrier(mCB->getHandle(),
  73. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
  74. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
  75. 0, 0, nullptr,
  76. 0, nullptr,
  77. 1, &barrier);
  78. }
  79. void VulkanTransferBuffer::flush(bool wait)
  80. {
  81. if (mCB == nullptr)
  82. return;
  83. UINT32 syncMask = mSyncMask & ~mQueueMask; // Don't sync with itself
  84. mCB->end();
  85. mCB->submit(mQueue, mQueueIdx, syncMask);
  86. if (wait)
  87. {
  88. mQueue->waitIdle();
  89. gVulkanCBManager().refreshStates(mDevice->getIndex());
  90. }
  91. mCB = nullptr;
  92. }
  93. VulkanCommandBufferManager::VulkanCommandBufferManager(const VulkanRenderAPI& rapi)
  94. :mRapi(rapi), mDeviceData(nullptr), mNumDevices(rapi.getNumDevices())
  95. {
  96. mDeviceData = bs_newN<PerDeviceData>(mNumDevices);
  97. for (UINT32 i = 0; i < mNumDevices; i++)
  98. {
  99. SPtr<VulkanDevice> device = rapi._getDevice(i);
  100. for (UINT32 j = 0; j < GQT_COUNT; j++)
  101. {
  102. GpuQueueType queueType = (GpuQueueType)j;
  103. for (UINT32 k = 0; k < BS_MAX_QUEUES_PER_TYPE; k++)
  104. mDeviceData[i].transferBuffers[j][k] = VulkanTransferBuffer(device.get(), queueType, k);
  105. }
  106. }
  107. }
	/** Destroys the manager, freeing the per-device data array (and with it all transfer buffers). */
	VulkanCommandBufferManager::~VulkanCommandBufferManager()
	{
		bs_deleteN(mDeviceData, mNumDevices);
	}
  112. SPtr<CommandBuffer> VulkanCommandBufferManager::createInternal(GpuQueueType type, UINT32 deviceIdx,
  113. UINT32 queueIdx, bool secondary)
  114. {
  115. UINT32 numDevices = mRapi._getNumDevices();
  116. if(deviceIdx >= numDevices)
  117. {
  118. LOGERR("Cannot create command buffer, invalid device index: " + toString(deviceIdx) +
  119. ". Valid range: [0, " + toString(numDevices) + ").");
  120. return nullptr;
  121. }
  122. SPtr<VulkanDevice> device = mRapi._getDevice(deviceIdx);
  123. CommandBuffer* buffer =
  124. new (bs_alloc<VulkanCommandBuffer>()) VulkanCommandBuffer(*device, type, deviceIdx, queueIdx, secondary);
  125. return bs_shared_ptr(buffer);
  126. }
  127. void VulkanCommandBufferManager::getSyncSemaphores(UINT32 deviceIdx, UINT32 syncMask, VulkanSemaphore** semaphores,
  128. UINT32& count)
  129. {
  130. bool semaphoreRequestFailed = false;
  131. SPtr<VulkanDevice> device = mRapi._getDevice(deviceIdx);
  132. UINT32 semaphoreIdx = 0;
  133. for (UINT32 i = 0; i < GQT_COUNT; i++)
  134. {
  135. GpuQueueType queueType = (GpuQueueType)i;
  136. UINT32 numQueues = device->getNumQueues(queueType);
  137. for (UINT32 j = 0; j < numQueues; j++)
  138. {
  139. VulkanQueue* queue = device->getQueue(queueType, j);
  140. VulkanCmdBuffer* lastCB = queue->getLastCommandBuffer();
  141. // Check if a buffer is currently executing on the queue
  142. if (lastCB == nullptr || !lastCB->isSubmitted())
  143. continue;
  144. // Check if we care about this specific queue
  145. UINT32 queueMask = device->getQueueMask(queueType, j);
  146. if ((syncMask & queueMask) == 0)
  147. continue;
  148. VulkanSemaphore* semaphore = lastCB->requestInterQueueSemaphore();
  149. if (semaphore == nullptr)
  150. {
  151. semaphoreRequestFailed = true;
  152. continue;
  153. }
  154. semaphores[semaphoreIdx++] = semaphore;
  155. }
  156. }
  157. count = semaphoreIdx;
  158. if (semaphoreRequestFailed)
  159. {
  160. LOGERR("Failed to allocate semaphores for a command buffer sync. This means some of the dependency requests "
  161. "will not be fulfilled. This happened because a command buffer has too many dependant command "
  162. "buffers. The maximum allowed number is " + toString(BS_MAX_VULKAN_CB_DEPENDENCIES) + " but can be "
  163. "increased by incrementing the value of BS_MAX_VULKAN_CB_DEPENDENCIES.");
  164. }
  165. }
  166. void VulkanCommandBufferManager::refreshStates(UINT32 deviceIdx)
  167. {
  168. SPtr<VulkanDevice> device = mRapi._getDevice(deviceIdx);
  169. for (UINT32 i = 0; i < GQT_COUNT; i++)
  170. {
  171. UINT32 numQueues = device->getNumQueues((GpuQueueType)i);
  172. for (UINT32 j = 0; j < numQueues; j++)
  173. {
  174. VulkanQueue* queue = device->getQueue((GpuQueueType)i, j);
  175. queue->refreshStates();
  176. }
  177. }
  178. }
  179. VulkanTransferBuffer* VulkanCommandBufferManager::getTransferBuffer(UINT32 deviceIdx, GpuQueueType type,
  180. UINT32 queueIdx)
  181. {
  182. assert(deviceIdx < mNumDevices);
  183. PerDeviceData& deviceData = mDeviceData[deviceIdx];
  184. VulkanTransferBuffer* transferBuffer = &deviceData.transferBuffers[type][queueIdx];
  185. transferBuffer->allocate();
  186. return transferBuffer;
  187. }
  188. void VulkanCommandBufferManager::flushTransferBuffers(UINT32 deviceIdx)
  189. {
  190. assert(deviceIdx < mNumDevices);
  191. PerDeviceData& deviceData = mDeviceData[deviceIdx];
  192. for (UINT32 i = 0; i < GQT_COUNT; i++)
  193. {
  194. for (UINT32 j = 0; j < BS_MAX_QUEUES_PER_TYPE; j++)
  195. deviceData.transferBuffers[i][j].flush(false);
  196. }
  197. }
  198. VulkanCommandBufferManager& gVulkanCBManager()
  199. {
  200. return static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
  201. }
  202. }