BsVulkanCommandBufferManager.h

//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#pragma once

#include "BsVulkanPrerequisites.h"
#include "BsCommandBufferManager.h"
#include "BsVulkanCommandBuffer.h"

namespace BansheeEngine
{
	/** @addtogroup Vulkan
	 *  @{
	 */

	/** Wrapper around a command buffer used specifically for transfer operations. */
	class VulkanTransferBuffer
	{
	public:
		VulkanTransferBuffer();
		VulkanTransferBuffer(VulkanDevice* device, GpuQueueType type, UINT32 queueIdx);
		~VulkanTransferBuffer();

		/**
		 * OR's the provided sync mask with the internal sync mask. The sync mask determines which queues the buffer
		 * must wait on before executing. The sync mask is reset after a flush. See CommandSyncMask on how to generate
		 * a sync mask.
		 */
		void appendMask(UINT32 syncMask) { mSyncMask |= syncMask; }

		/** Resets the sync mask. */
		void clearMask() { mSyncMask = 0; }

		/**
		 * Issues a pipeline barrier on the provided buffer. See vkCmdPipelineBarrier in the Vulkan spec. for usage
		 * information.
		 */
		void memoryBarrier(VkBuffer buffer, VkAccessFlags srcAccessFlags, VkAccessFlags dstAccessFlags,
			VkPipelineStageFlags srcStage, VkPipelineStageFlags dstStage);

		/**
		 * Submits the command buffer on the queue.
		 *
		 * @param[in]	wait	If true, the calling thread will wait until all device operations on the command
		 *						buffer's queue complete.
		 */
		void flush(bool wait);

		/** Returns the internal command buffer. */
		VulkanCmdBuffer* getCB() const { return mCB; }

	private:
		friend class VulkanCommandBufferManager;

		/** Allocates a new internal command buffer. */
		void allocate();

		VulkanDevice* mDevice;
		GpuQueueType mType;
		UINT32 mQueueIdx;
		VulkanQueue* mQueue;
		UINT32 mQueueMask;
		VulkanCmdBuffer* mCB;
		UINT32 mSyncMask;
	};
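	// -----------------------------------------------------------------------------------------------------------------
	// Illustrative sketch only, not part of the engine source: one plausible way a caller could finish a transfer
	// using the public VulkanTransferBuffer API above. The buffer handle, sync mask, and the access/stage flag
	// combination are assumptions chosen for this example.
	// -----------------------------------------------------------------------------------------------------------------
	inline void exampleFinishUpload(VulkanTransferBuffer& transfer, VkBuffer uploadedBuffer, UINT32 syncMask)
	{
		// Make the transfer write visible to later shader reads of the buffer.
		transfer.memoryBarrier(uploadedBuffer,
			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
			VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT);

		// Record which queues must finish before this buffer executes (mask generated via CommandSyncMask).
		transfer.appendMask(syncMask);

		// Submit the internal command buffer; passing true blocks until the queue's device operations complete.
		transfer.flush(true);
	}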
	/**
	 * Handles creation of Vulkan command buffers. See CommandBuffer.
	 *
	 * @note	Core thread only.
	 */
	class VulkanCommandBufferManager : public CommandBufferManager
	{
	public:
		VulkanCommandBufferManager(const VulkanRenderAPI& rapi);
		~VulkanCommandBufferManager();

		/** @copydoc CommandBufferManager::createInternal() */
		SPtr<CommandBuffer> createInternal(GpuQueueType type, UINT32 deviceIdx = 0, UINT32 queueIdx = 0,
			bool secondary = false) override;

		/** Notifies the manager that this buffer was just submitted to the queue for execution. */
		void setActiveBuffer(GpuQueueType type, UINT32 deviceIdx, UINT32 queueIdx, VulkanCmdBuffer* buffer);

		/**
		 * Returns a set of command buffer semaphores depending on the provided sync mask.
		 *
		 * @param[in]	deviceIdx	Index of the device to get the semaphores for.
		 * @param[in]	syncMask	Mask that has a bit enabled for each command buffer whose semaphore should be
		 *							retrieved. If a command buffer is not currently executing, its semaphore won't
		 *							be returned.
		 * @param[out]	semaphores	List containing all the required semaphores. Semaphores are tightly packed at
		 *							the beginning of the array. Must be able to hold at least BS_MAX_COMMAND_BUFFERS
		 *							entries.
		 * @param[out]	count		Number of semaphores provided in the @p semaphores array.
		 */
		void getSyncSemaphores(UINT32 deviceIdx, UINT32 syncMask, VkSemaphore* semaphores, UINT32& count);

		/**
		 * Checks if any of the active command buffers finished executing on the device and updates their states
		 * accordingly.
		 */
		void refreshStates(UINT32 deviceIdx);

		/**
		 * Returns a command buffer that can be used for executing transfer operations on the specified queue.
		 * Transfer buffers are automatically flushed (submitted) whenever a new (normal) command buffer is about to
		 * execute.
		 */
		VulkanTransferBuffer* getTransferBuffer(UINT32 deviceIdx, GpuQueueType type, UINT32 queueIdx);

		/** Submits all transfer command buffers, ensuring all queued transfer operations get executed. */
		void flushTransferBuffers(UINT32 deviceIdx);

	private:
		/** Contains command buffers specific to one device. */
		struct PerDeviceData
		{
			VulkanCmdBuffer* activeBuffers[BS_MAX_UNIQUE_QUEUES];
			VulkanTransferBuffer transferBuffers[GQT_COUNT][BS_MAX_QUEUES_PER_TYPE];
		};

		const VulkanRenderAPI& mRapi;
		PerDeviceData* mDeviceData;
		UINT32 mNumDevices;
	};
	/** Provides easy access to the VulkanCommandBufferManager. */
	VulkanCommandBufferManager& gVulkanCBManager();
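	// -----------------------------------------------------------------------------------------------------------------
	// Illustrative sketch only, not part of the engine source: gathering the semaphores a submission should wait on,
	// after flushing pending transfers. The device index and sync mask values are assumptions chosen for this example.
	// -----------------------------------------------------------------------------------------------------------------
	inline void exampleGatherWaitSemaphores(UINT32 deviceIdx, UINT32 syncMask)
	{
		VulkanCommandBufferManager& cbManager = gVulkanCBManager();

		// Ensure queued transfer operations are submitted before the dependent work executes.
		cbManager.flushTransferBuffers(deviceIdx);

		// Semaphores are returned only for command buffers still executing; the array is tightly packed and
		// count reports how many entries are valid.
		VkSemaphore semaphores[BS_MAX_COMMAND_BUFFERS];
		UINT32 count = 0;
		cbManager.getSyncSemaphores(deviceIdx, syncMask, semaphores, count);

		// semaphores[0..count) would then feed the pWaitSemaphores array of a VkSubmitInfo for the actual submission.
	}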
	/** @} */
}