//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#pragma once

#include "BsVulkanPrerequisites.h"
#include "BsCommandBuffer.h"
#include "BsVulkanRenderAPI.h"
#include "BsVulkanResource.h"
#include "BsVulkanGpuPipelineState.h"

namespace bs
{
	class VulkanImage;

	/** @addtogroup Vulkan
	 *  @{
	 */

#define BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY (BS_MAX_QUEUES_PER_TYPE * 32)

	// Maximum number of command buffers that another command buffer can be dependent on (via a sync mask)
#define BS_MAX_VULKAN_CB_DEPENDENCIES 2

	/** Wrapper around a Vulkan semaphore object that manages its usage and lifetime. */
	class VulkanSemaphore : public VulkanResource
	{
	public:
		VulkanSemaphore(VulkanResourceManager* owner);
		~VulkanSemaphore();

		/** Returns the internal handle to the Vulkan object. */
		VkSemaphore getHandle() const { return mSemaphore; }

	private:
		VkSemaphore mSemaphore;
	};

	class VulkanCmdBuffer;

	/** Pool that allocates and distributes Vulkan command buffers. */
	class VulkanCmdBufferPool
	{
	public:
		VulkanCmdBufferPool(VulkanDevice& device);
		~VulkanCmdBufferPool();

		/**
		 * Attempts to find a free command buffer, or creates a new one if none is found. The caller must guarantee
		 * that the provided queue family is valid.
		 */
		VulkanCmdBuffer* getBuffer(UINT32 queueFamily, bool secondary);

	private:
		/** Command buffer pool and related information. */
		struct PoolInfo
		{
			VkCommandPool pool = VK_NULL_HANDLE;
			VulkanCmdBuffer* buffers[BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY];
			UINT32 queueFamily = -1;
		};

		/** Creates a new command buffer. */
		VulkanCmdBuffer* createBuffer(UINT32 queueFamily, bool secondary);

		VulkanDevice& mDevice;
		UnorderedMap<UINT32, PoolInfo> mPools;
		UINT32 mNextId;
	};
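
	/* Illustrative sketch (not part of this header): how a low-level command buffer might be retrieved from the
	 * pool. "device" and "queueFamilyIdx" are placeholders for a valid VulkanDevice reference and queue family
	 * index obtained elsewhere by the caller.
	 *
	 *     VulkanCmdBufferPool pool(device);
	 *
	 *     // Request a primary command buffer for the given queue family; the pool either re-uses a free buffer
	 *     // or allocates a new one from its internal VkCommandPool for that family.
	 *     VulkanCmdBuffer* cmdBuffer = pool.getBuffer(queueFamilyIdx, false);
	 *
	 *     // Secondary command buffers are requested the same way, with the second parameter set to true.
	 *     VulkanCmdBuffer* secondaryCmdBuffer = pool.getBuffer(queueFamilyIdx, true);
	 */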

	/** Determines where the current descriptor sets are bound to. */
	enum class DescriptorSetBindFlag
	{
		None = 0,
		Graphics = 1 << 0,
		Compute = 1 << 1
	};

	typedef Flags<DescriptorSetBindFlag> DescriptorSetBindFlags;
	BS_FLAGS_OPERATORS(DescriptorSetBindFlag)
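
	/* Illustrative sketch (not part of this header): combining and testing the bind flags, assuming the engine's
	 * Flags<> helper exposes the usual bitwise operators and an isSet()-style query.
	 *
	 *     DescriptorSetBindFlags bindState = DescriptorSetBindFlag::Graphics | DescriptorSetBindFlag::Compute;
	 *
	 *     if (bindState.isSet(DescriptorSetBindFlag::Graphics))
	 *     {
	 *         // Descriptor sets are currently bound to the graphics pipeline.
	 *     }
	 */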

	/**
	 * Represents a direct wrapper over an internal Vulkan command buffer. This is unlike VulkanCommandBuffer, which is
	 * a higher-level class that allows re-use by internally managing multiple of these low-level command buffers.
	 */
	class VulkanCmdBuffer
	{
		/** Possible states a command buffer can be in. */
		enum class State
		{
			/** Buffer is ready to be re-used. */
			Ready,
			/** Buffer is currently recording commands, but isn't recording a render pass. */
			Recording,
			/** Buffer is currently recording render pass commands. */
			RecordingRenderPass,
			/** Buffer is done recording but hasn't been submitted. */
			RecordingDone,
			/** Buffer is done recording and is currently submitted on a queue. */
			Submitted
		};

	public:
		VulkanCmdBuffer(VulkanDevice& device, UINT32 id, VkCommandPool pool, UINT32 queueFamily, bool secondary);
		~VulkanCmdBuffer();

		/** Returns a unique identifier of this command buffer. */
		UINT32 getId() const { return mId; }

		/** Returns the index of the queue family this command buffer is executing on. */
		UINT32 getQueueFamily() const { return mQueueFamily; }

		/** Returns the index of the device this command buffer will execute on. */
		UINT32 getDeviceIdx() const;

		/** Makes the command buffer ready to start recording commands. */
		void begin();

		/** Ends command buffer command recording (as started with begin()). */
		void end();

		/** Begins render pass recording. Must be called within begin()/end() calls. */
		void beginRenderPass();

		/** Ends render pass recording (as started with beginRenderPass()). */
		void endRenderPass();

		/**
		 * Submits the command buffer for execution.
		 *
		 * @param[in]	queue		Queue to submit the command buffer on.
		 * @param[in]	queueIdx	Index of the queue the command buffer was submitted on. Note that this may be
		 *							different from the actual VulkanQueue index since multiple command buffer queue
		 *							indices can map to the same queue.
		 * @param[in]	syncMask	Mask that controls which other command buffers this command buffer depends upon
		 *							(if any). See the description of the @p syncMask parameter in
		 *							RenderAPICore::executeCommands().
		 */
		void submit(VulkanQueue* queue, UINT32 queueIdx, UINT32 syncMask);
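
		/* Illustrative sketch (not part of this header): the expected recording and submission lifecycle, with
		 * state transitions inferred from the State enum above. "cmdBuffer", "queue" and "queueIdx" are
		 * placeholders for objects obtained elsewhere (e.g. from VulkanCmdBufferPool and VulkanDevice).
		 *
		 *     cmdBuffer->begin();              // Ready -> Recording
		 *     // ... record commands outside of a render pass (resource registration, clears, ...) ...
		 *     cmdBuffer->beginRenderPass();    // Recording -> RecordingRenderPass
		 *     // ... record draw commands ...
		 *     cmdBuffer->endRenderPass();      // RecordingRenderPass -> Recording
		 *     cmdBuffer->end();                // Recording -> RecordingDone
		 *
		 *     // Submit; a zero sync mask is assumed to mean no dependencies on other command buffers.
		 *     cmdBuffer->submit(queue, queueIdx, 0);
		 */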

		/** Returns the handle to the internal Vulkan command buffer wrapped by this object. */
		VkCommandBuffer getHandle() const { return mCmdBuffer; }

		/** Returns a fence that can be used for tracking when the command buffer is done executing. */
		VkFence getFence() const { return mFence; }

		/**
		 * Returns a semaphore that may be used for synchronizing execution between command buffers executing on the
		 * same queue.
		 */
		VulkanSemaphore* getIntraQueueSemaphore() const { return mIntraQueueSemaphore; }

		/**
		 * Returns a semaphore that may be used for synchronizing execution between command buffers executing on
		 * different queues. Note that these semaphores get used up each time they are requested, and only a fixed
		 * number is available. If all are used up, null will be returned. New semaphores are generated when
		 * allocateSemaphores() is called.
		 */
		VulkanSemaphore* requestInterQueueSemaphore() const;

		/**
		 * Allocates a new set of semaphores that may be used for synchronizing execution between different command
		 * buffers. Releases the previously allocated semaphores, if they exist. Use getIntraQueueSemaphore() &
		 * requestInterQueueSemaphore() to retrieve the latest allocated semaphores.
		 */
		void allocateSemaphores();
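
		/* Illustrative sketch (not part of this header): retrieving synchronization semaphores from a command
		 * buffer. "cmdBuffer" is a placeholder for a VulkanCmdBuffer obtained elsewhere.
		 *
		 *     // Semaphore used when the dependent command buffer executes on the same queue.
		 *     VulkanSemaphore* intraQueue = cmdBuffer->getIntraQueueSemaphore();
		 *
		 *     // Semaphore used when the dependent command buffer executes on a different queue. Only a fixed
		 *     // number (BS_MAX_VULKAN_CB_DEPENDENCIES) is available per allocation, so null must be handled.
		 *     VulkanSemaphore* interQueue = cmdBuffer->requestInterQueueSemaphore();
		 *     if (interQueue == nullptr)
		 *     {
		 *         // All inter-queue semaphores are used up; new ones only become available once
		 *         // allocateSemaphores() is called again.
		 *     }
		 */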

		/** Returns true if the command buffer is currently being processed by the device. */
		bool isSubmitted() const { return mState == State::Submitted; }

		/** Returns true if the command buffer is currently recording (but not within a render pass). */
		bool isRecording() const { return mState == State::Recording; }

		/** Returns true if the command buffer is ready to be submitted to a queue. */
		bool isReadyForSubmit() const { return mState == State::RecordingDone; }

		/** Returns true if the command buffer is currently recording a render pass. */
		bool isInRenderPass() const { return mState == State::RecordingRenderPass; }

		/** Returns a counter that gets incremented whenever the command buffer is done executing. */
		UINT32 getFenceCounter() const { return mFenceCounter; }

		/** Checks the internal fence and changes the command buffer state if it is done executing. */
		void refreshFenceStatus();

		/**
		 * Lets the command buffer know that the provided resource has been queued on it, and will be used by the
		 * device when the command buffer is submitted. If the resource is an image or a buffer, use the more specific
		 * registerResource() overload.
		 */
		void registerResource(VulkanResource* res, VulkanUseFlags flags);

		/**
		 * Lets the command buffer know that the provided image resource has been queued on it, and will be used by the
		 * device when the command buffer is submitted.
		 */
		void registerResource(VulkanImage* res, VkAccessFlags accessFlags, VkImageLayout currentLayout,
			VkImageLayout newLayout, VulkanUseFlags flags, bool isFBAttachment = false);

		/**
		 * Lets the command buffer know that the provided buffer resource has been queued on it, and will be used by
		 * the device when the command buffer is submitted.
		 */
		void registerResource(VulkanBuffer* res, VkAccessFlags accessFlags, VulkanUseFlags flags);

		/**
		 * Lets the command buffer know that the provided framebuffer resource has been queued on it, and will be used
		 * by the device when the command buffer is submitted.
		 */
		void registerResource(VulkanFramebuffer* res, VulkanUseFlags flags);
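
		/* Illustrative sketch (not part of this header): registering resources so their lifetime, access and layout
		 * are tracked while the command buffer is in flight. "cmdBuffer", "resource", "image" and "buffer" are
		 * placeholders, and VulkanUseFlag::Read/Write are assumed names for the use-flag enum behind VulkanUseFlags;
		 * only the VK_ACCESS_* / VK_IMAGE_LAYOUT_* constants are standard Vulkan.
		 *
		 *     // Generic resource, only read by the GPU.
		 *     cmdBuffer->registerResource(resource, VulkanUseFlag::Read);
		 *
		 *     // Image that will be sampled in a shader, transitioned to the shader-read-only layout.
		 *     cmdBuffer->registerResource(image, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
		 *         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VulkanUseFlag::Read);
		 *
		 *     // Buffer written by a compute shader.
		 *     cmdBuffer->registerResource(buffer, VK_ACCESS_SHADER_WRITE_BIT, VulkanUseFlag::Write);
		 */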

		/************************************************************************/
		/* 								COMMANDS								*/
		/************************************************************************/

		/**
		 * Assigns a render target to the command buffer. This render target's framebuffer and render pass will be used
		 * when beginRenderPass() is called. The command buffer must not be currently recording a render pass.
		 */
		void setRenderTarget(const SPtr<RenderTargetCore>& rt, bool readOnlyDepthStencil, RenderSurfaceMask loadMask);

		/** Clears the entirety of the currently bound render target. */
		void clearRenderTarget(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask);

		/** Clears the viewport portion of the currently bound render target. */
		void clearViewport(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask);

		/** Assigns a pipeline state to use for subsequent draw commands. */
		void setPipelineState(const SPtr<GraphicsPipelineStateCore>& state);

		/** Assigns a pipeline state to use for subsequent dispatch commands. */
		void setPipelineState(const SPtr<ComputePipelineStateCore>& state);

		/** Assigns GPU params to the GPU programs bound by the pipeline state. */
		void setGpuParams(const SPtr<GpuParamsCore>& gpuParams);

		/** Sets the current viewport, which determines the portion of the render target to render to. */
		void setViewport(const Rect2& area);

		/**
		 * Sets the scissor rectangle, which determines the area of the viewport in which fragments are allowed to be
		 * generated. Only relevant if enabled on the pipeline state.
		 */
		void setScissorRect(const Rect2I& area);

		/** Sets a stencil reference value that will be used for comparisons in stencil operations, if enabled. */
		void setStencilRef(UINT32 value);

		/** Changes how primitives are interpreted during rendering. */
		void setDrawOp(DrawOperationType drawOp);

		/** Sets one or multiple vertex buffers that will be used for subsequent draw() or drawIndexed() calls. */
		void setVertexBuffers(UINT32 index, SPtr<VertexBufferCore>* buffers, UINT32 numBuffers);

		/** Sets an index buffer that will be used for subsequent drawIndexed() calls. */
		void setIndexBuffer(const SPtr<IndexBufferCore>& buffer);

		/** Sets a declaration that determines how vertex buffer contents are interpreted. */
		void setVertexDeclaration(const SPtr<VertexDeclarationCore>& decl);

		/** Executes a draw command using the currently bound graphics pipeline, vertex buffer and render target. */
		void draw(UINT32 vertexOffset, UINT32 vertexCount, UINT32 instanceCount);

		/** Executes a draw command using the currently bound graphics pipeline, index & vertex buffer and render target. */
		void drawIndexed(UINT32 startIndex, UINT32 indexCount, UINT32 vertexOffset, UINT32 instanceCount);

		/** Executes a dispatch command using the currently bound compute pipeline. */
		void dispatch(UINT32 numGroupsX, UINT32 numGroupsY, UINT32 numGroupsZ);
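
		/* Illustrative sketch (not part of this header): recording an indexed draw. "cmdBuffer" and the bound
		 * objects (render target, pipeline state, GPU params, buffers, vertex declaration) are placeholders created
		 * elsewhere through the engine; RT_NONE and DOT_TRIANGLE_LIST are assumed values of the engine's
		 * RenderSurfaceMask and DrawOperationType enums, and the viewport rectangle is assumed to be normalized.
		 *
		 *     cmdBuffer->setRenderTarget(renderTarget, false, RT_NONE);
		 *     cmdBuffer->setPipelineState(graphicsPipelineState);
		 *     cmdBuffer->setGpuParams(gpuParams);
		 *     cmdBuffer->setVertexBuffers(0, &vertexBuffer, 1);
		 *     cmdBuffer->setIndexBuffer(indexBuffer);
		 *     cmdBuffer->setVertexDeclaration(vertexDeclaration);
		 *     cmdBuffer->setDrawOp(DOT_TRIANGLE_LIST);
		 *     cmdBuffer->setViewport(Rect2(0.0f, 0.0f, 1.0f, 1.0f));
		 *
		 *     cmdBuffer->beginRenderPass();
		 *     cmdBuffer->drawIndexed(0, numIndices, 0, 1);
		 *     cmdBuffer->endRenderPass();
		 */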

		/**
		 * Registers a command that signals the event when executed. Will be delayed until the end of the current
		 * render pass, if any.
		 */
		void setEvent(VulkanEvent* event);

		/**
		 * Registers a command that resets the query. The command will be delayed until the next submit() if a render
		 * pass is currently in progress, but is guaranteed to execute before this command buffer is submitted.
		 */
		void resetQuery(VulkanQuery* query);

	private:
		friend class VulkanCmdBufferPool;
		friend class VulkanCommandBuffer;
		friend class VulkanQueue;

		/** Contains information about a single Vulkan resource bound/used on this command buffer. */
		struct ResourceUseHandle
		{
			bool used;
			VulkanUseFlags flags;
		};

		/** Contains information about a single Vulkan buffer resource bound/used on this command buffer. */
		struct BufferInfo
		{
			VkAccessFlags accessFlags;
			ResourceUseHandle useHandle;
		};

		/** Contains information about a single Vulkan image resource bound/used on this command buffer. */
		struct ImageInfo
		{
			VkAccessFlags accessFlags;
			VkImageSubresourceRange range;
			ResourceUseHandle useHandle;

			// Only relevant for layout transitions
			VkImageLayout currentLayout;
			VkImageLayout requiredLayout;
			VkImageLayout finalLayout;

			bool isFBAttachment : 1;
			bool isShaderInput : 1;
		};

		/** Checks if all the prerequisites for rendering have been met (e.g. render target and pipeline state are set). */
		bool isReadyForRender();

		/** Marks the command buffer as submitted on a queue. */
		void setIsSubmitted() { mState = State::Submitted; }

		/** Binds the current graphics pipeline to the command buffer. Returns true if the bind was successful. */
		bool bindGraphicsPipeline();

		/**
		 * Binds any dynamic states to the pipeline, as required.
		 *
		 * @param[in]	forceAll	If true all states will be bound. If false only states marked as dirty will be bound.
		 */
		void bindDynamicStates(bool forceAll);

		/** Clears the specified area of the currently bound render target. */
		void clearViewport(const Rect2I& area, UINT32 buffers, const Color& color, float depth, UINT16 stencil,
			UINT8 targetMask);

		/** Starts and ends a render pass, intended only for a clear operation. */
		void executeClearPass();

		/** Executes any queued layout transitions by issuing a pipeline barrier. */
		void executeLayoutTransitions();

		UINT32 mId;
		UINT32 mQueueFamily;
		State mState;
		VulkanDevice& mDevice;
		VkCommandPool mPool;
		VkCommandBuffer mCmdBuffer;
		VkFence mFence;
		UINT32 mFenceCounter;

		VulkanSemaphore* mIntraQueueSemaphore;
		VulkanSemaphore* mInterQueueSemaphores[BS_MAX_VULKAN_CB_DEPENDENCIES];
		mutable UINT32 mNumUsedInterQueueSemaphores;

		VulkanFramebuffer* mFramebuffer;
		UINT32 mRenderTargetWidth;
		UINT32 mRenderTargetHeight;
		bool mRenderTargetDepthReadOnly;
		RenderSurfaceMask mRenderTargetLoadMask;

		UnorderedMap<VulkanResource*, ResourceUseHandle> mResources;
		UnorderedMap<VulkanResource*, UINT32> mImages;
		UnorderedMap<VulkanResource*, BufferInfo> mBuffers;
		Vector<ImageInfo> mImageInfos;
		UINT32 mGlobalQueueIdx;

		SPtr<VulkanGraphicsPipelineStateCore> mGraphicsPipeline;
		SPtr<VulkanComputePipelineStateCore> mComputePipeline;
		SPtr<VertexDeclarationCore> mVertexDecl;
		Rect2 mViewport;
		Rect2I mScissor;
		UINT32 mStencilRef;
		DrawOperationType mDrawOp;
		UINT32 mNumBoundDescriptorSets;
		bool mGfxPipelineRequiresBind : 1;
		bool mCmpPipelineRequiresBind : 1;
		bool mViewportRequiresBind : 1;
		bool mStencilRefRequiresBind : 1;
		bool mScissorRequiresBind : 1;
		DescriptorSetBindFlags mDescriptorSetsBindState;

		std::array<VkClearValue, BS_MAX_MULTIPLE_RENDER_TARGETS + 1> mClearValues;
		ClearMask mClearMask;
		Rect2I mClearArea;

		VulkanSemaphore* mSemaphoresTemp[BS_MAX_UNIQUE_QUEUES];
		VkBuffer mVertexBuffersTemp[BS_MAX_BOUND_VERTEX_BUFFERS];
		VkDeviceSize mVertexBufferOffsetsTemp[BS_MAX_BOUND_VERTEX_BUFFERS];
		VkDescriptorSet* mDescriptorSetsTemp;
		UnorderedMap<UINT32, TransitionInfo> mTransitionInfoTemp;
		Vector<VkImageMemoryBarrier> mLayoutTransitionBarriersTemp;
		UnorderedMap<VulkanImage*, UINT32> mQueuedLayoutTransitions;
		Vector<VulkanEvent*> mQueuedEvents;
		Vector<VulkanQuery*> mQueuedQueryResets;
	};

	/** CommandBuffer implementation for Vulkan. */
	class VulkanCommandBuffer : public CommandBuffer
	{
	public:
		/**
		 * Submits the command buffer for execution.
		 *
		 * @param[in]	syncMask	Mask that controls which other command buffers this command buffer depends upon
		 *							(if any). See the description of the @p syncMask parameter in
		 *							RenderAPICore::executeCommands().
		 */
		void submit(UINT32 syncMask);

		/**
		 * Returns the internal command buffer.
		 *
		 * @note	This buffer will change after a submit() call.
		 */
		VulkanCmdBuffer* getInternal() const { return mBuffer; }
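
		/* Illustrative sketch (not part of this header): typical use of the high-level command buffer once it has
		 * been obtained through the engine. Its constructor is private, so creation is assumed to go through
		 * VulkanCommandBufferManager / the engine's CommandBuffer interface; "vulkanCommandBuffer" is a placeholder.
		 *
		 *     // Record commands against the current internal low-level buffer.
		 *     VulkanCmdBuffer* internalBuffer = vulkanCommandBuffer->getInternal();
		 *
		 *     // Submit; a zero sync mask is assumed to mean no dependencies on other command buffers.
		 *     vulkanCommandBuffer->submit(0);
		 *
		 *     // Per the note on getInternal(), the internal buffer changes after submit(), so re-query it rather
		 *     // than caching the pointer across submissions.
		 *     internalBuffer = vulkanCommandBuffer->getInternal();
		 */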

	private:
		friend class VulkanCommandBufferManager;

		VulkanCommandBuffer(VulkanDevice& device, GpuQueueType type, UINT32 deviceIdx, UINT32 queueIdx,
			bool secondary);

		/**
		 * Tasks the command buffer to find a new internal command buffer. Call this after the command buffer has been
		 * submitted to a queue (it's not allowed to be used until the queue is done with it).
		 */
		void acquireNewBuffer();

		VulkanCmdBuffer* mBuffer;
		VulkanDevice& mDevice;
		VulkanQueue* mQueue;
		UINT32 mIdMask;
	};

	/** @} */
}