BsVulkanCommandBufferManager.h

//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#pragma once

#include "BsVulkanPrerequisites.h"
#include "BsCommandBufferManager.h"
#include "BsVulkanCommandBuffer.h"

namespace BansheeEngine
{
	/** @addtogroup Vulkan
	 *  @{
	 */

	/** Wrapper around a command buffer used specifically for transfer operations. */
	class VulkanTransferBufferInfo
	{
	public:
		VulkanTransferBufferInfo(UINT32 queueIdx);

		/**
		 * OR's the provided sync mask with the internal sync mask. The sync mask determines which queues the buffer
		 * should wait on before executing. See CommandSyncMask.
		 */
		void appendMask(UINT32 syncMask) { mSyncMask |= syncMask; }

		/** Resets the sync mask. */
		void clearMask() { mSyncMask = 0; }

		/** Returns the internal command buffer. */
		VulkanCmdBuffer* getCB() const { return mCB; }

	private:
		friend class VulkanCommandBufferManager;

		VulkanCmdBuffer* mCB;
		UINT32 mSyncMask;
		UINT32 mQueueIdx;
	};
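
	// Illustrative sketch only (not part of the original header): how a transfer buffer's sync mask might be
	// built up before submission. Bits in the mask correspond to queues (see CommandSyncMask), and repeated
	// calls simply OR more bits in. The variable name "transferInfo" and the specific bit values are hypothetical.
	//
	//   VulkanTransferBufferInfo* transferInfo = ...; // obtained from VulkanCommandBufferManager
	//   transferInfo->appendMask(1 << 0); // wait on whatever is executing on queue 0
	//   transferInfo->appendMask(1 << 2); // additionally wait on queue 2; masks accumulate via OR
	//   transferInfo->clearMask();        // back to 0 once the dependencies are no longer relevant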

	/**
	 * Handles creation of Vulkan command buffers. See CommandBuffer.
	 *
	 * @note Core thread only.
	 */
	class VulkanCommandBufferManager : public CommandBufferManager
	{
	public:
		VulkanCommandBufferManager(const VulkanRenderAPI& rapi);
		~VulkanCommandBufferManager();

		/** @copydoc CommandBufferManager::createInternal() */
		SPtr<CommandBuffer> createInternal(GpuQueueType type, UINT32 deviceIdx = 0, UINT32 queueIdx = 0,
			bool secondary = false) override;

		/** Notifies the manager that the provided buffer was just submitted to its queue for execution. */
		void setActiveBuffer(GpuQueueType type, UINT32 deviceIdx, UINT32 queueIdx, VulkanCmdBuffer* buffer);

		/**
		 * Returns a set of command buffer semaphores depending on the provided sync mask.
		 *
		 * @param[in]	deviceIdx	Index of the device to get the semaphores for.
		 * @param[in]	syncMask	Mask that has a bit enabled for each command buffer to retrieve the semaphore for.
		 *							If a command buffer is not currently executing, its semaphore won't be returned.
		 * @param[out]	semaphores	List containing all the required semaphores. Semaphores are tightly packed at the
		 *							beginning of the array. Must be able to hold at least BS_MAX_COMMAND_BUFFERS entries.
		 * @param[out]	count		Number of semaphores written to the @p semaphores array.
		 */
		void getSyncSemaphores(UINT32 deviceIdx, UINT32 syncMask, VkSemaphore* semaphores, UINT32& count);
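
		// Illustrative sketch only (not part of the original header): a caller about to submit a command buffer
		// could use getSyncSemaphores() to gather the wait semaphores implied by a sync mask and plug them into a
		// VkSubmitInfo. The names "cbManager", "deviceIdx" and "syncMask" are hypothetical placeholders.
		//
		//   VkSemaphore waitSemaphores[BS_MAX_COMMAND_BUFFERS];
		//   UINT32 numSemaphores = 0;
		//   cbManager.getSyncSemaphores(deviceIdx, syncMask, waitSemaphores, numSemaphores);
		//
		//   VkSubmitInfo submitInfo = {};
		//   submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
		//   submitInfo.waitSemaphoreCount = numSemaphores;
		//   submitInfo.pWaitSemaphores = waitSemaphores;
		//   // pWaitDstStageMask, command buffers etc. are filled out as usual before calling vkQueueSubmit().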

		/**
		 * Checks if any of the active command buffers finished executing on the device and updates their states
		 * accordingly.
		 */
		void refreshStates(UINT32 deviceIdx);

		/**
		 * Returns a command buffer that can be used for executing transfer operations on the specified queue.
		 * Transfer buffers are automatically flushed (submitted) whenever a new (normal) command buffer is about to
		 * execute.
		 */
		VulkanTransferBufferInfo* getTransferBuffer(UINT32 deviceIdx, GpuQueueType type, UINT32 queueIdx);

		/** Submits all transfer command buffers, ensuring all queued transfer operations get executed. */
		void flushTransferBuffers(UINT32 deviceIdx);
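
		// Illustrative sketch only (not part of the original header): a typical transfer flow would fetch the
		// per-queue transfer buffer, record copy commands on its internal command buffer, register any queue
		// dependencies through the sync mask, and then either flush explicitly or rely on the automatic flush
		// that happens before a normal command buffer executes. "cbManager", "deviceIdx", "queueIdx" and
		// "syncMask" are hypothetical placeholders, and a graphics-type queue enum value (GQT_GRAPHICS) is assumed.
		//
		//   VulkanTransferBufferInfo* transfer = cbManager.getTransferBuffer(deviceIdx, GQT_GRAPHICS, queueIdx);
		//   VulkanCmdBuffer* cb = transfer->getCB();   // record buffer/image copies on this internal command buffer
		//   transfer->appendMask(syncMask);             // wait on queues still touching the copy source/destination
		//   cbManager.flushTransferBuffers(deviceIdx);  // explicit flush; otherwise flushed automatically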

	private:
		/** Contains command buffers specific to one device. */
		struct PerDeviceData
		{
			VulkanCmdBuffer* activeBuffers[BS_MAX_UNIQUE_QUEUES];
			VulkanTransferBufferInfo transferBuffers[BS_MAX_UNIQUE_QUEUES];
		};

		const VulkanRenderAPI& mRapi;

		PerDeviceData* mDeviceData;
		UINT32 mNumDevices;
	};

	/** @} */
}