// BsVulkanCommandBuffer.cpp
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsVulkanCommandBuffer.h"
  4. #include "BsVulkanCommandBufferManager.h"
  5. #include "BsVulkanUtility.h"
  6. #include "BsVulkanDevice.h"
  7. #include "BsVulkanGpuParams.h"
  8. #include "BsVulkanQueue.h"
  9. #include "BsVulkanTexture.h"
  10. #include "BsVulkanIndexBuffer.h"
  11. #include "BsVulkanVertexBuffer.h"
  12. #include "BsVulkanHardwareBuffer.h"
  13. #include "BsVulkanFramebuffer.h"
  14. #include "BsVulkanVertexInputManager.h"
  15. #include "BsVulkanEventQuery.h"
  16. #include "BsVulkanQueryManager.h"
  17. #if BS_PLATFORM == BS_PLATFORM_WIN32
  18. #include "Win32/BsWin32RenderWindow.h"
  19. #else
  20. static_assert(false, "Other platforms go here");
  21. #endif
  22. namespace bs
  23. {
  24. VulkanSemaphore::VulkanSemaphore(VulkanResourceManager* owner)
  25. :VulkanResource(owner, true)
  26. {
  27. VkSemaphoreCreateInfo semaphoreCI;
  28. semaphoreCI.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
  29. semaphoreCI.pNext = nullptr;
  30. semaphoreCI.flags = 0;
  31. VkResult result = vkCreateSemaphore(owner->getDevice().getLogical(), &semaphoreCI, gVulkanAllocator, &mSemaphore);
  32. assert(result == VK_SUCCESS);
  33. }
VulkanSemaphore::~VulkanSemaphore()
{
	// NOTE(review): assumes the resource manager only destroys this once the GPU no longer waits on/signals the
	// semaphore (the resource was registered via VulkanResource(owner, true)) — confirm against VulkanResource docs
	vkDestroySemaphore(mOwner->getDevice().getLogical(), mSemaphore, gVulkanAllocator);
}
  38. VulkanCmdBufferPool::VulkanCmdBufferPool(VulkanDevice& device)
  39. :mDevice(device), mNextId(1)
  40. {
  41. for (UINT32 i = 0; i < GQT_COUNT; i++)
  42. {
  43. UINT32 familyIdx = device.getQueueFamily((GpuQueueType)i);
  44. if (familyIdx == (UINT32)-1)
  45. continue;
  46. VkCommandPoolCreateInfo poolCI;
  47. poolCI.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
  48. poolCI.pNext = nullptr;
  49. poolCI.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  50. poolCI.queueFamilyIndex = familyIdx;
  51. PoolInfo& poolInfo = mPools[familyIdx];
  52. poolInfo.queueFamily = familyIdx;
  53. memset(poolInfo.buffers, 0, sizeof(poolInfo.buffers));
  54. vkCreateCommandPool(device.getLogical(), &poolCI, gVulkanAllocator, &poolInfo.pool);
  55. }
  56. }
  57. VulkanCmdBufferPool::~VulkanCmdBufferPool()
  58. {
  59. // Note: Shutdown should be the only place command buffers are destroyed at, as the system relies on the fact that
  60. // they won't be destroyed during normal operation.
  61. for(auto& entry : mPools)
  62. {
  63. PoolInfo& poolInfo = entry.second;
  64. for (UINT32 i = 0; i < BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY; i++)
  65. {
  66. VulkanCmdBuffer* buffer = poolInfo.buffers[i];
  67. if (buffer == nullptr)
  68. break;
  69. bs_delete(buffer);
  70. }
  71. vkDestroyCommandPool(mDevice.getLogical(), poolInfo.pool, gVulkanAllocator);
  72. }
  73. }
  74. VulkanCmdBuffer* VulkanCmdBufferPool::getBuffer(UINT32 queueFamily, bool secondary)
  75. {
  76. auto iterFind = mPools.find(queueFamily);
  77. if (iterFind == mPools.end())
  78. return nullptr;
  79. VulkanCmdBuffer** buffers = iterFind->second.buffers;
  80. UINT32 i = 0;
  81. for(; i < BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY; i++)
  82. {
  83. if (buffers[i] == nullptr)
  84. break;
  85. if(buffers[i]->mState == VulkanCmdBuffer::State::Ready)
  86. {
  87. buffers[i]->begin();
  88. return buffers[i];
  89. }
  90. }
  91. assert(i < BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY &&
  92. "Too many command buffers allocated. Increment BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY to a higher value. ");
  93. buffers[i] = createBuffer(queueFamily, secondary);
  94. buffers[i]->begin();
  95. return buffers[i];
  96. }
  97. VulkanCmdBuffer* VulkanCmdBufferPool::createBuffer(UINT32 queueFamily, bool secondary)
  98. {
  99. auto iterFind = mPools.find(queueFamily);
  100. if (iterFind == mPools.end())
  101. return nullptr;
  102. const PoolInfo& poolInfo = iterFind->second;
  103. return bs_new<VulkanCmdBuffer>(mDevice, mNextId++, poolInfo.pool, poolInfo.queueFamily, secondary);
  104. }
  105. VulkanCmdBuffer::VulkanCmdBuffer(VulkanDevice& device, UINT32 id, VkCommandPool pool, UINT32 queueFamily, bool secondary)
  106. : mId(id), mQueueFamily(queueFamily), mState(State::Ready), mDevice(device), mPool(pool)
  107. , mIntraQueueSemaphore(nullptr), mInterQueueSemaphores(), mNumUsedInterQueueSemaphores(0)
  108. , mFramebuffer(nullptr), mRenderTargetWidth(0)
  109. , mRenderTargetHeight(0), mRenderTargetDepthReadOnly(false), mRenderTargetLoadMask(RT_NONE), mGlobalQueueIdx(-1)
  110. , mViewport(0.0f, 0.0f, 1.0f, 1.0f), mScissor(0, 0, 0, 0), mStencilRef(0), mDrawOp(DOT_TRIANGLE_LIST)
  111. , mNumBoundDescriptorSets(0), mGfxPipelineRequiresBind(true), mCmpPipelineRequiresBind(true)
  112. , mViewportRequiresBind(true), mStencilRefRequiresBind(true), mScissorRequiresBind(true), mBoundParamsDirty(false)
  113. , mClearValues(), mClearMask(), mVertexBuffersTemp(), mVertexBufferOffsetsTemp()
  114. {
  115. UINT32 maxBoundDescriptorSets = device.getDeviceProperties().limits.maxBoundDescriptorSets;
  116. mDescriptorSetsTemp = (VkDescriptorSet*)bs_alloc(sizeof(VkDescriptorSet) * maxBoundDescriptorSets);
  117. VkCommandBufferAllocateInfo cmdBufferAllocInfo;
  118. cmdBufferAllocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
  119. cmdBufferAllocInfo.pNext = nullptr;
  120. cmdBufferAllocInfo.commandPool = pool;
  121. cmdBufferAllocInfo.level = secondary ? VK_COMMAND_BUFFER_LEVEL_SECONDARY : VK_COMMAND_BUFFER_LEVEL_PRIMARY;
  122. cmdBufferAllocInfo.commandBufferCount = 1;
  123. VkResult result = vkAllocateCommandBuffers(mDevice.getLogical(), &cmdBufferAllocInfo, &mCmdBuffer);
  124. assert(result == VK_SUCCESS);
  125. VkFenceCreateInfo fenceCI;
  126. fenceCI.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
  127. fenceCI.pNext = nullptr;
  128. fenceCI.flags = 0;
  129. result = vkCreateFence(mDevice.getLogical(), &fenceCI, gVulkanAllocator, &mFence);
  130. assert(result == VK_SUCCESS);
  131. }
VulkanCmdBuffer::~VulkanCmdBuffer()
{
	VkDevice device = mDevice.getLogical();

	if(mState == State::Submitted)
	{
		// Buffer may still be executing on the GPU; give it up to a second to finish before tearing down
		// Wait 1s
		UINT64 waitTime = 1000 * 1000 * 1000;
		VkResult result = vkWaitForFences(device, 1, &mFence, true, waitTime);
		assert(result == VK_SUCCESS || result == VK_TIMEOUT);

		if (result == VK_TIMEOUT)
			LOGWRN("Freeing a command buffer before done executing because fence wait expired!");

		// Resources have been marked as used, make sure to notify them we're done with them
		reset();
	}
	else if(mState != State::Ready)
	{
		// Buffer was recorded (or recording) but never submitted, so resources are bound but not yet "used"
		// Notify any resources that they are no longer bound
		for (auto& entry : mResources)
		{
			ResourceUseHandle& useHandle = entry.second;
			assert(useHandle.used);

			entry.first->notifyUnbound();
		}

		for (auto& entry : mImages)
		{
			UINT32 imageInfoIdx = entry.second;
			ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

			ResourceUseHandle& useHandle = imageInfo.useHandle;
			assert(useHandle.used);

			entry.first->notifyUnbound();
		}

		for (auto& entry : mBuffers)
		{
			ResourceUseHandle& useHandle = entry.second.useHandle;
			assert(useHandle.used);

			entry.first->notifyUnbound();
		}
	}

	// Release any semaphores allocated for synchronizing this buffer with others
	if (mIntraQueueSemaphore != nullptr)
		mIntraQueueSemaphore->destroy();

	for(UINT32 i = 0; i < BS_MAX_VULKAN_CB_DEPENDENCIES; i++)
	{
		if (mInterQueueSemaphores[i] != nullptr)
			mInterQueueSemaphores[i]->destroy();
	}

	vkDestroyFence(device, mFence, gVulkanAllocator);
	vkFreeCommandBuffers(device, mPool, 1, &mCmdBuffer);

	bs_free(mDescriptorSetsTemp);
}
UINT32 VulkanCmdBuffer::getDeviceIdx() const
{
	// Index of the device this buffer records commands for (relevant in multi-GPU configurations)
	return mDevice.getIndex();
}
  185. void VulkanCmdBuffer::begin()
  186. {
  187. assert(mState == State::Ready);
  188. VkCommandBufferBeginInfo beginInfo;
  189. beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  190. beginInfo.pNext = nullptr;
  191. beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  192. beginInfo.pInheritanceInfo = nullptr;
  193. VkResult result = vkBeginCommandBuffer(mCmdBuffer, &beginInfo);
  194. assert(result == VK_SUCCESS);
  195. mState = State::Recording;
  196. }
  197. void VulkanCmdBuffer::end()
  198. {
  199. assert(mState == State::Recording);
  200. // If a clear is queued, execute the render pass with no additional instructions
  201. if (mClearMask)
  202. executeClearPass();
  203. VkResult result = vkEndCommandBuffer(mCmdBuffer);
  204. assert(result == VK_SUCCESS);
  205. mState = State::RecordingDone;
  206. }
void VulkanCmdBuffer::beginRenderPass()
{
	assert(mState == State::Recording);

	if (mFramebuffer == nullptr)
	{
		LOGWRN("Attempting to begin a render pass but no render target is bound to the command buffer.");
		return;
	}

	if(mClearMask != CLEAR_NONE)
	{
		// If a previous clear is queued, but it doesn't match the rendered area, need to execute a separate pass
		// just for it
		Rect2I rtArea(0, 0, mRenderTargetWidth, mRenderTargetHeight);
		if (mClearArea != rtArea)
			executeClearPass();
	}

	// Transition attachment images to the layouts the render pass expects before it begins
	executeLayoutTransitions();

	// Check if any frame-buffer attachments are also used as shader inputs, in which case we make them read-only
	RenderSurfaceMask readMask = RT_NONE;

	UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
	for(UINT32 i = 0; i < numColorAttachments; i++)
	{
		VulkanImage* image = mFramebuffer->getColorAttachment(i).image;

		// NOTE(review): operator[] default-inserts if the image was never registered — presumably attachments are
		// always registered via setRenderTarget/registerResource before this runs; confirm
		UINT32 imageInfoIdx = mImages[image];
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		bool readOnly = imageInfo.isShaderInput;

		if(readOnly)
			readMask.set((RenderSurfaceMaskBits)(1 << i));
	}

	if(mFramebuffer->hasDepthAttachment())
	{
		VulkanImage* image = mFramebuffer->getDepthStencilAttachment().image;

		UINT32 imageInfoIdx = mImages[image];
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		bool readOnly = imageInfo.isShaderInput;

		if (readOnly)
			readMask.set(RT_DEPTH);
	}

	// Reset flags that signal image usage (since those only matter for the render-pass' purposes)
	for (auto& entry : mImages)
	{
		UINT32 imageInfoIdx = entry.second;
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		imageInfo.isFBAttachment = false;
		imageInfo.isShaderInput = false;
	}

	// Pick the framebuffer/render-pass variant matching the requested load/read/clear behavior
	VkRenderPassBeginInfo renderPassBeginInfo;
	renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	renderPassBeginInfo.pNext = nullptr;
	renderPassBeginInfo.framebuffer = mFramebuffer->getFramebuffer(mRenderTargetLoadMask, readMask, mClearMask);
	renderPassBeginInfo.renderPass = mFramebuffer->getRenderPass(mRenderTargetLoadMask, readMask, mClearMask);
	renderPassBeginInfo.renderArea.offset.x = 0;
	renderPassBeginInfo.renderArea.offset.y = 0;
	renderPassBeginInfo.renderArea.extent.width = mRenderTargetWidth;
	renderPassBeginInfo.renderArea.extent.height = mRenderTargetHeight;
	renderPassBeginInfo.clearValueCount = mFramebuffer->getNumAttachments();
	renderPassBeginInfo.pClearValues = mClearValues.data();

	vkCmdBeginRenderPass(mCmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);

	// Any queued clear is consumed by the render pass' load operations
	mClearMask = CLEAR_NONE;

	mState = State::RecordingRenderPass;
}
void VulkanCmdBuffer::endRenderPass()
{
	assert(mState == State::RecordingRenderPass);

	vkCmdEndRenderPass(mCmdBuffer);

	// Execute any queued events (events cannot be set while a render pass is active)
	for(auto& entry : mQueuedEvents)
		vkCmdSetEvent(mCmdBuffer, entry->getHandle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);

	mQueuedEvents.clear();

	// Update any layout transitions that were performed by subpass dependencies, reset flags that signal image usage
	// and reset access flags
	for (auto& entry : mImages)
	{
		UINT32 imageInfoIdx = entry.second;
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		imageInfo.isFBAttachment = false;
		imageInfo.isShaderInput = false;
		imageInfo.accessFlags = 0;
	}

	updateFinalLayouts();

	mState = State::Recording;
}
  289. void VulkanCmdBuffer::allocateSemaphores()
  290. {
  291. if (mIntraQueueSemaphore != nullptr)
  292. mIntraQueueSemaphore->destroy();
  293. mIntraQueueSemaphore = mDevice.getResourceManager().create<VulkanSemaphore>();
  294. for (UINT32 i = 0; i < BS_MAX_VULKAN_CB_DEPENDENCIES; i++)
  295. {
  296. if (mInterQueueSemaphores[i] != nullptr)
  297. mInterQueueSemaphores[i]->destroy();
  298. mInterQueueSemaphores[i] = mDevice.getResourceManager().create<VulkanSemaphore>();
  299. }
  300. mNumUsedInterQueueSemaphores = 0;
  301. }
  302. VulkanSemaphore* VulkanCmdBuffer::requestInterQueueSemaphore() const
  303. {
  304. if (mNumUsedInterQueueSemaphores >= BS_MAX_VULKAN_CB_DEPENDENCIES)
  305. return nullptr;
  306. return mInterQueueSemaphores[mNumUsedInterQueueSemaphores++];
  307. }
  308. void VulkanCmdBuffer::submit(VulkanQueue* queue, UINT32 queueIdx, UINT32 syncMask)
  309. {
  310. assert(isReadyForSubmit());
  311. // Make sure to reset the CB fence before we submit it
  312. VkResult result = vkResetFences(mDevice.getLogical(), 1, &mFence);
  313. assert(result == VK_SUCCESS);
  314. // If there are any query resets needed, execute those first
  315. VulkanDevice& device = queue->getDevice();
  316. if(!mQueuedQueryResets.empty())
  317. {
  318. VulkanCmdBuffer* cmdBuffer = device.getCmdBufferPool().getBuffer(mQueueFamily, false);
  319. VkCommandBuffer vkCmdBuffer = cmdBuffer->getHandle();
  320. for (auto& entry : mQueuedQueryResets)
  321. entry->reset(vkCmdBuffer);
  322. cmdBuffer->end();
  323. queue->queueSubmit(cmdBuffer, nullptr, 0);
  324. mQueuedQueryResets.clear();
  325. }
  326. // Issue pipeline barriers for queue transitions (need to happen on original queue first, then on new queue)
  327. for (auto& entry : mBuffers)
  328. {
  329. VulkanBuffer* resource = static_cast<VulkanBuffer*>(entry.first);
  330. if (!resource->isExclusive())
  331. continue;
  332. UINT32 currentQueueFamily = resource->getQueueFamily();
  333. if (currentQueueFamily != -1 && currentQueueFamily != mQueueFamily)
  334. {
  335. Vector<VkBufferMemoryBarrier>& barriers = mTransitionInfoTemp[currentQueueFamily].bufferBarriers;
  336. barriers.push_back(VkBufferMemoryBarrier());
  337. VkBufferMemoryBarrier& barrier = barriers.back();
  338. barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  339. barrier.pNext = nullptr;
  340. barrier.srcAccessMask = entry.second.accessFlags;
  341. barrier.dstAccessMask = entry.second.accessFlags;
  342. barrier.srcQueueFamilyIndex = currentQueueFamily;
  343. barrier.dstQueueFamilyIndex = mQueueFamily;
  344. barrier.buffer = resource->getHandle();
  345. barrier.offset = 0;
  346. barrier.size = VK_WHOLE_SIZE;
  347. }
  348. }
  349. // For images issue queue transitions, as above. Also issue layout transitions to their inital layouts.
  350. for (auto& entry : mImages)
  351. {
  352. VulkanImage* resource = static_cast<VulkanImage*>(entry.first);
  353. ImageInfo& imageInfo = mImageInfos[entry.second];
  354. UINT32 currentQueueFamily = resource->getQueueFamily();
  355. bool queueMismatch = resource->isExclusive() && currentQueueFamily != -1 && currentQueueFamily != mQueueFamily;
  356. VkImageLayout currentLayout = resource->getLayout();
  357. VkImageLayout initialLayout = imageInfo.initialLayout;
  358. if (queueMismatch || (currentLayout != initialLayout && initialLayout != VK_IMAGE_LAYOUT_UNDEFINED))
  359. {
  360. Vector<VkImageMemoryBarrier>& barriers = mTransitionInfoTemp[currentQueueFamily].imageBarriers;
  361. if (initialLayout == VK_IMAGE_LAYOUT_UNDEFINED)
  362. initialLayout = currentLayout;
  363. barriers.push_back(VkImageMemoryBarrier());
  364. VkImageMemoryBarrier& barrier = barriers.back();
  365. barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  366. barrier.pNext = nullptr;
  367. barrier.srcAccessMask = resource->getAccessFlags(currentLayout);
  368. barrier.dstAccessMask = imageInfo.accessFlags;
  369. barrier.oldLayout = currentLayout;
  370. barrier.newLayout = initialLayout;
  371. barrier.image = resource->getHandle();
  372. barrier.subresourceRange = imageInfo.range;
  373. barrier.srcQueueFamilyIndex = currentQueueFamily;
  374. barrier.dstQueueFamilyIndex = mQueueFamily;
  375. // Check if queue transition needed
  376. if (queueMismatch)
  377. {
  378. barrier.srcQueueFamilyIndex = currentQueueFamily;
  379. barrier.dstQueueFamilyIndex = mQueueFamily;
  380. }
  381. else
  382. {
  383. barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  384. barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  385. }
  386. }
  387. resource->setLayout(imageInfo.finalLayout);
  388. }
  389. for (auto& entry : mTransitionInfoTemp)
  390. {
  391. bool empty = entry.second.imageBarriers.size() == 0 && entry.second.bufferBarriers.size() == 0;
  392. if (empty)
  393. continue;
  394. UINT32 entryQueueFamily = entry.first;
  395. // No queue transition needed for entries on this queue (this entry is most likely an image layout transition)
  396. if (entryQueueFamily == -1 || entryQueueFamily == mQueueFamily)
  397. continue;
  398. VulkanCmdBuffer* cmdBuffer = device.getCmdBufferPool().getBuffer(entryQueueFamily, false);
  399. VkCommandBuffer vkCmdBuffer = cmdBuffer->getHandle();
  400. TransitionInfo& barriers = entry.second;
  401. UINT32 numImgBarriers = (UINT32)barriers.imageBarriers.size();
  402. UINT32 numBufferBarriers = (UINT32)barriers.bufferBarriers.size();
  403. vkCmdPipelineBarrier(vkCmdBuffer,
  404. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // Note: VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT might be more correct here, according to the spec
  405. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // The main idea is that the barrier executes before the semaphore triggers, no actual stage dependencies are needed.
  406. 0, 0, nullptr,
  407. numBufferBarriers, barriers.bufferBarriers.data(),
  408. numImgBarriers, barriers.imageBarriers.data());
  409. // Find an appropriate queue to execute on
  410. UINT32 otherQueueIdx = 0;
  411. VulkanQueue* otherQueue = nullptr;
  412. GpuQueueType otherQueueType = GQT_GRAPHICS;
  413. for (UINT32 i = 0; i < GQT_COUNT; i++)
  414. {
  415. otherQueueType = (GpuQueueType)i;
  416. if (device.getQueueFamily(otherQueueType) != entryQueueFamily)
  417. continue;
  418. UINT32 numQueues = device.getNumQueues(otherQueueType);
  419. for (UINT32 j = 0; j < numQueues; j++)
  420. {
  421. // Try to find a queue not currently executing
  422. VulkanQueue* curQueue = device.getQueue(otherQueueType, j);
  423. if (!curQueue->isExecuting())
  424. {
  425. otherQueue = curQueue;
  426. otherQueueIdx = j;
  427. }
  428. }
  429. // Can't find empty one, use the first one then
  430. if (otherQueue == nullptr)
  431. {
  432. otherQueue = device.getQueue(otherQueueType, 0);
  433. otherQueueIdx = 0;
  434. }
  435. break;
  436. }
  437. syncMask |= CommandSyncMask::getGlobalQueueMask(otherQueueType, otherQueueIdx);
  438. cmdBuffer->end();
  439. otherQueue->submit(cmdBuffer, nullptr, 0);
  440. // If there are any layout transitions, reset them as we don't need them for the second pipeline barrier
  441. for (auto& barrierEntry : barriers.imageBarriers)
  442. barrierEntry.oldLayout = barrierEntry.newLayout;
  443. }
  444. UINT32 deviceIdx = device.getIndex();
  445. VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
  446. UINT32 numSemaphores;
  447. cbm.getSyncSemaphores(deviceIdx, syncMask, mSemaphoresTemp, numSemaphores);
  448. // Issue second part of transition pipeline barriers (on this queue)
  449. for (auto& entry : mTransitionInfoTemp)
  450. {
  451. bool empty = entry.second.imageBarriers.size() == 0 && entry.second.bufferBarriers.size() == 0;
  452. if (empty)
  453. continue;
  454. VulkanCmdBuffer* cmdBuffer = device.getCmdBufferPool().getBuffer(mQueueFamily, false);
  455. VkCommandBuffer vkCmdBuffer = cmdBuffer->getHandle();
  456. TransitionInfo& barriers = entry.second;
  457. UINT32 numImgBarriers = (UINT32)barriers.imageBarriers.size();
  458. UINT32 numBufferBarriers = (UINT32)barriers.bufferBarriers.size();
  459. vkCmdPipelineBarrier(vkCmdBuffer,
  460. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // Note: VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT might be more correct here, according to the spec
  461. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
  462. 0, 0, nullptr,
  463. numBufferBarriers, barriers.bufferBarriers.data(),
  464. numImgBarriers, barriers.imageBarriers.data());
  465. cmdBuffer->end();
  466. queue->queueSubmit(cmdBuffer, mSemaphoresTemp, numSemaphores);
  467. numSemaphores = 0; // Semaphores are only needed the first time, since we're adding the buffers on the same queue
  468. }
  469. queue->queueSubmit(this, mSemaphoresTemp, numSemaphores);
  470. queue->submitQueued();
  471. mGlobalQueueIdx = CommandSyncMask::getGlobalQueueIdx(queue->getType(), queueIdx);
  472. for (auto& entry : mResources)
  473. {
  474. ResourceUseHandle& useHandle = entry.second;
  475. assert(!useHandle.used);
  476. useHandle.used = true;
  477. entry.first->notifyUsed(mGlobalQueueIdx, mQueueFamily, useHandle.flags);
  478. }
  479. for (auto& entry : mImages)
  480. {
  481. UINT32 imageInfoIdx = entry.second;
  482. ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
  483. ResourceUseHandle& useHandle = imageInfo.useHandle;
  484. assert(!useHandle.used);
  485. useHandle.used = true;
  486. entry.first->notifyUsed(mGlobalQueueIdx, mQueueFamily, useHandle.flags);
  487. }
  488. for (auto& entry : mBuffers)
  489. {
  490. ResourceUseHandle& useHandle = entry.second.useHandle;
  491. assert(!useHandle.used);
  492. useHandle.used = true;
  493. entry.first->notifyUsed(mGlobalQueueIdx, mQueueFamily, useHandle.flags);
  494. }
  495. // Note: Uncomment for debugging only, prevents any device concurrency issues.
  496. // vkQueueWaitIdle(queue->getHandle());
  497. // Clear vectors but don't clear the actual map, as we want to re-use the memory since we expect queue family
  498. // indices to be the same
  499. for (auto& entry : mTransitionInfoTemp)
  500. {
  501. entry.second.imageBarriers.clear();
  502. entry.second.bufferBarriers.clear();
  503. }
  504. mGraphicsPipeline = nullptr;
  505. mComputePipeline = nullptr;
  506. mGfxPipelineRequiresBind = true;
  507. mCmpPipelineRequiresBind = true;
  508. mFramebuffer = nullptr;
  509. mDescriptorSetsBindState = DescriptorSetBindFlag::Graphics | DescriptorSetBindFlag::Compute;
  510. mQueuedLayoutTransitions.clear();
  511. mBoundParams = nullptr;
  512. mBoundParams = false;
  513. }
  514. bool VulkanCmdBuffer::checkFenceStatus() const
  515. {
  516. VkResult result = vkGetFenceStatus(mDevice.getLogical(), mFence);
  517. assert(result == VK_SUCCESS || result == VK_NOT_READY);
  518. return result == VK_SUCCESS;
  519. }
  520. void VulkanCmdBuffer::reset()
  521. {
  522. if (mState != State::Submitted)
  523. return;
  524. mState = State::Ready;
  525. vkResetCommandBuffer(mCmdBuffer, VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT); // Note: Maybe better not to release resources?
  526. for (auto& entry : mResources)
  527. {
  528. ResourceUseHandle& useHandle = entry.second;
  529. assert(useHandle.used);
  530. entry.first->notifyDone(mGlobalQueueIdx, useHandle.flags);
  531. }
  532. for (auto& entry : mImages)
  533. {
  534. UINT32 imageInfoIdx = entry.second;
  535. ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
  536. ResourceUseHandle& useHandle = imageInfo.useHandle;
  537. assert(useHandle.used);
  538. entry.first->notifyDone(mGlobalQueueIdx, useHandle.flags);
  539. }
  540. for (auto& entry : mBuffers)
  541. {
  542. ResourceUseHandle& useHandle = entry.second.useHandle;
  543. assert(useHandle.used);
  544. entry.first->notifyDone(mGlobalQueueIdx, useHandle.flags);
  545. }
  546. mResources.clear();
  547. mImages.clear();
  548. mBuffers.clear();
  549. mImageInfos.clear();
  550. }
void VulkanCmdBuffer::setRenderTarget(const SPtr<RenderTargetCore>& rt, bool readOnlyDepthStencil,
	RenderSurfaceMask loadMask)
{
	assert(mState != State::Submitted);

	// Resolve the Vulkan framebuffer backing the render target (null target unbinds)
	VulkanFramebuffer* newFB;
	if(rt != nullptr)
	{
		if (rt->getProperties().isWindow())
		{
			// Window targets need their swap-chain back buffer acquired before use
			Win32RenderWindowCore* window = static_cast<Win32RenderWindowCore*>(rt.get());
			window->acquireBackBuffer();
		}

		rt->getCustomAttribute("FB", &newFB);
	}
	else
		newFB = nullptr;

	// Nothing to do if the same framebuffer is already bound with the same settings
	if (mFramebuffer == newFB && mRenderTargetDepthReadOnly == readOnlyDepthStencil && mRenderTargetLoadMask == loadMask)
		return;

	if (isInRenderPass())
		endRenderPass();
	else
	{
		// If a clear is queued for previous FB, execute the render pass with no additional instructions
		if (mClearMask)
			executeClearPass();
	}

	if(newFB == nullptr)
	{
		mFramebuffer = nullptr;
		mRenderTargetWidth = 0;
		mRenderTargetHeight = 0;
		mRenderTargetDepthReadOnly = false;
		mRenderTargetLoadMask = RT_NONE;
	}
	else
	{
		mFramebuffer = newFB;
		mRenderTargetWidth = rt->getProperties().getWidth();
		mRenderTargetHeight = rt->getProperties().getHeight();
		mRenderTargetDepthReadOnly = readOnlyDepthStencil;
		mRenderTargetLoadMask = loadMask;
	}

	// Reset flags that signal image usage
	for (auto& entry : mImages)
	{
		UINT32 imageInfoIdx = entry.second;
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		imageInfo.isFBAttachment = false;
	}

	// Bound GPU params referenced the old target's attachments; force a re-bind
	setGpuParams(nullptr);

	if(mFramebuffer != nullptr)
		registerResource(mFramebuffer, loadMask, VulkanUseFlag::Write);

	mGfxPipelineRequiresBind = true;
}
/**
 * Clears the requested buffers of the currently bound framebuffer, restricted to the provided area.
 *
 * If a render pass is currently active the clear is recorded immediately via vkCmdClearAttachments.
 * Otherwise the clear is queued and executed as a load-op clear when the next render pass begins
 * (or via executeClearPass() if it must be flushed earlier).
 *
 * @param area			Pixel rectangle to clear.
 * @param buffers		Combination of FBT_COLOR/FBT_DEPTH/FBT_STENCIL flags.
 * @param color			Clear color for color attachments.
 * @param depth			Clear value for the depth buffer.
 * @param stencil		Clear value for the stencil buffer.
 * @param targetMask	Bitmask selecting which color attachments (by attachment index) to clear.
 */
void VulkanCmdBuffer::clearViewport(const Rect2I& area, UINT32 buffers, const Color& color, float depth, UINT16 stencil,
	UINT8 targetMask)
{
	if (buffers == 0 || mFramebuffer == nullptr)
		return;

	// Add clear command if currently in render pass
	if (isInRenderPass())
	{
		// One entry per color attachment, plus one for depth/stencil
		VkClearAttachment attachments[BS_MAX_MULTIPLE_RENDER_TARGETS + 1];
		UINT32 baseLayer = 0;

		UINT32 attachmentIdx = 0;
		if ((buffers & FBT_COLOR) != 0)
		{
			UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
			for (UINT32 i = 0; i < numColorAttachments; i++)
			{
				const VulkanFramebufferAttachment& attachment = mFramebuffer->getColorAttachment(i);

				// Skip attachments not selected by the target mask
				if (((1 << attachment.index) & targetMask) == 0)
					continue;

				attachments[attachmentIdx].aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
				attachments[attachmentIdx].colorAttachment = i;

				VkClearColorValue& colorValue = attachments[attachmentIdx].clearValue.color;
				colorValue.float32[0] = color.r;
				colorValue.float32[1] = color.g;
				colorValue.float32[2] = color.b;
				colorValue.float32[3] = color.a;

				// All cleared attachments must share the same starting layer, since a single
				// VkClearRect is issued for all of them
				UINT32 curBaseLayer = attachment.baseLayer;
				if (attachmentIdx == 0)
					baseLayer = curBaseLayer;
				else
				{
					if (baseLayer != curBaseLayer)
					{
						// Note: This could be supported relatively easily: we would need to issue multiple separate
						// clear commands for such framebuffers.
						LOGERR("Attempting to clear a texture that has multiple multi-layer surfaces with mismatching "
							"starting layers. This is currently not supported.");
					}
				}

				attachmentIdx++;
			}
		}

		if ((buffers & FBT_DEPTH) != 0 || (buffers & FBT_STENCIL) != 0)
		{
			if (mFramebuffer->hasDepthAttachment())
			{
				attachments[attachmentIdx].aspectMask = 0;

				if ((buffers & FBT_DEPTH) != 0)
				{
					attachments[attachmentIdx].aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
					attachments[attachmentIdx].clearValue.depthStencil.depth = depth;
				}

				if ((buffers & FBT_STENCIL) != 0)
				{
					attachments[attachmentIdx].aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
					attachments[attachmentIdx].clearValue.depthStencil.stencil = stencil;
				}

				attachments[attachmentIdx].colorAttachment = 0;

				UINT32 curBaseLayer = mFramebuffer->getDepthStencilAttachment().baseLayer;
				if (attachmentIdx == 0)
					baseLayer = curBaseLayer;
				else
				{
					if (baseLayer != curBaseLayer)
					{
						// Note: This could be supported relatively easily: we would need to issue multiple separate
						// clear commands for such framebuffers.
						LOGERR("Attempting to clear a texture that has multiple multi-layer surfaces with mismatching "
							"starting layers. This is currently not supported.");
					}
				}

				attachmentIdx++;
			}
		}

		UINT32 numAttachments = attachmentIdx;
		if (numAttachments == 0)
			return;

		VkClearRect clearRect;
		clearRect.baseArrayLayer = baseLayer;
		clearRect.layerCount = mFramebuffer->getNumLayers();
		clearRect.rect.offset.x = area.x;
		clearRect.rect.offset.y = area.y;
		clearRect.rect.extent.width = area.width;
		clearRect.rect.extent.height = area.height;

		vkCmdClearAttachments(mCmdBuffer, numAttachments, attachments, 1, &clearRect);
	}
	// Otherwise we use a render pass that performs a clear on begin
	else
	{
		ClearMask clearMask;
		std::array<VkClearValue, BS_MAX_MULTIPLE_RENDER_TARGETS + 1> clearValues = mClearValues;

		UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
		if ((buffers & FBT_COLOR) != 0)
		{
			for (UINT32 i = 0; i < numColorAttachments; i++)
			{
				const VulkanFramebufferAttachment& attachment = mFramebuffer->getColorAttachment(i);

				if (((1 << attachment.index) & targetMask) == 0)
					continue;

				clearMask |= (ClearMaskBits)(1 << attachment.index);

				VkClearColorValue& colorValue = clearValues[i].color;
				colorValue.float32[0] = color.r;
				colorValue.float32[1] = color.g;
				colorValue.float32[2] = color.b;
				colorValue.float32[3] = color.a;
			}
		}

		if ((buffers & FBT_DEPTH) != 0 || (buffers & FBT_STENCIL) != 0)
		{
			if (mFramebuffer->hasDepthAttachment())
			{
				// Depth/stencil clear value follows the color attachments in the clear-value array
				UINT32 depthAttachmentIdx = numColorAttachments;

				if ((buffers & FBT_DEPTH) != 0)
				{
					clearValues[depthAttachmentIdx].depthStencil.depth = depth;
					clearMask |= CLEAR_DEPTH;
				}

				if ((buffers & FBT_STENCIL) != 0)
				{
					clearValues[depthAttachmentIdx].depthStencil.stencil = stencil;
					clearMask |= CLEAR_STENCIL;
				}
			}
		}

		if (!clearMask)
			return;

		// Some previous clear operation is already queued, execute it first
		bool previousClearNeedsToFinish = (mClearMask & clearMask) != CLEAR_NONE;
		if(previousClearNeedsToFinish)
			executeClearPass();

		mClearMask |= clearMask;
		mClearValues = clearValues;
		mClearArea = area;
	}
}
  740. void VulkanCmdBuffer::clearRenderTarget(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask)
  741. {
  742. Rect2I area(0, 0, mRenderTargetWidth, mRenderTargetHeight);
  743. clearViewport(area, buffers, color, depth, stencil, targetMask);
  744. }
  745. void VulkanCmdBuffer::clearViewport(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask)
  746. {
  747. Rect2I area;
  748. area.x = (UINT32)(mViewport.x * mRenderTargetWidth);
  749. area.y = (UINT32)(mViewport.y * mRenderTargetHeight);
  750. area.width = (UINT32)(mViewport.width * mRenderTargetWidth);
  751. area.height = (UINT32)(mViewport.height * mRenderTargetHeight);
  752. clearViewport(area, buffers, color, depth, stencil, targetMask);
  753. }
  754. void VulkanCmdBuffer::setPipelineState(const SPtr<GraphicsPipelineStateCore>& state)
  755. {
  756. if (mGraphicsPipeline == state)
  757. return;
  758. mGraphicsPipeline = std::static_pointer_cast<VulkanGraphicsPipelineStateCore>(state);
  759. mGfxPipelineRequiresBind = true;
  760. }
  761. void VulkanCmdBuffer::setPipelineState(const SPtr<ComputePipelineStateCore>& state)
  762. {
  763. if (mComputePipeline == state)
  764. return;
  765. mComputePipeline = std::static_pointer_cast<VulkanComputePipelineStateCore>(state);
  766. mCmpPipelineRequiresBind = true;
  767. }
  768. void VulkanCmdBuffer::setGpuParams(const SPtr<GpuParamsCore>& gpuParams)
  769. {
  770. // Note: We keep an internal reference to GPU params even though we shouldn't keep a reference to a core thread
  771. // object. But it should be fine since we expect the resource to be externally synchronized so it should never
  772. // be allowed to go out of scope on a non-core thread anyway.
  773. mBoundParams = std::static_pointer_cast<VulkanGpuParams>(gpuParams);
  774. if (mBoundParams != nullptr)
  775. mBoundParamsDirty = true;
  776. else
  777. {
  778. mNumBoundDescriptorSets = 0;
  779. mBoundParamsDirty = false;
  780. }
  781. mDescriptorSetsBindState = DescriptorSetBindFlag::Graphics | DescriptorSetBindFlag::Compute;
  782. }
  783. void VulkanCmdBuffer::setViewport(const Rect2& area)
  784. {
  785. if (mViewport == area)
  786. return;
  787. mViewport = area;
  788. mViewportRequiresBind = true;
  789. }
  790. void VulkanCmdBuffer::setScissorRect(const Rect2I& value)
  791. {
  792. if (mScissor == value)
  793. return;
  794. mScissor = value;
  795. mScissorRequiresBind = true;
  796. }
  797. void VulkanCmdBuffer::setStencilRef(UINT32 value)
  798. {
  799. if (mStencilRef == value)
  800. return;
  801. mStencilRef = value;
  802. mStencilRefRequiresBind = true;
  803. }
  804. void VulkanCmdBuffer::setDrawOp(DrawOperationType drawOp)
  805. {
  806. if (mDrawOp == drawOp)
  807. return;
  808. mDrawOp = drawOp;
  809. mGfxPipelineRequiresBind = true;
  810. }
  811. void VulkanCmdBuffer::setVertexBuffers(UINT32 index, SPtr<VertexBufferCore>* buffers, UINT32 numBuffers)
  812. {
  813. if (numBuffers == 0)
  814. return;
  815. for(UINT32 i = 0; i < numBuffers; i++)
  816. {
  817. VulkanVertexBufferCore* vertexBuffer = static_cast<VulkanVertexBufferCore*>(buffers[i].get());
  818. if (vertexBuffer != nullptr)
  819. {
  820. VulkanBuffer* resource = vertexBuffer->getResource(mDevice.getIndex());
  821. if (resource != nullptr)
  822. {
  823. mVertexBuffersTemp[i] = resource->getHandle();
  824. registerResource(resource, VulkanUseFlag::Read);
  825. }
  826. else
  827. mVertexBuffersTemp[i] = VK_NULL_HANDLE;
  828. }
  829. else
  830. mVertexBuffersTemp[i] = VK_NULL_HANDLE;
  831. }
  832. vkCmdBindVertexBuffers(mCmdBuffer, index, numBuffers, mVertexBuffersTemp, mVertexBufferOffsetsTemp);
  833. }
  834. void VulkanCmdBuffer::setIndexBuffer(const SPtr<IndexBufferCore>& buffer)
  835. {
  836. VulkanIndexBufferCore* indexBuffer = static_cast<VulkanIndexBufferCore*>(buffer.get());
  837. VkBuffer vkBuffer = VK_NULL_HANDLE;
  838. VkIndexType indexType = VK_INDEX_TYPE_UINT32;
  839. if (indexBuffer != nullptr)
  840. {
  841. VulkanBuffer* resource = indexBuffer->getResource(mDevice.getIndex());
  842. if (resource != nullptr)
  843. {
  844. vkBuffer = resource->getHandle();
  845. indexType = VulkanUtility::getIndexType(buffer->getProperties().getType());
  846. registerResource(resource, VulkanUseFlag::Read);
  847. }
  848. }
  849. vkCmdBindIndexBuffer(mCmdBuffer, vkBuffer, 0, indexType);
  850. }
  851. void VulkanCmdBuffer::setVertexDeclaration(const SPtr<VertexDeclarationCore>& decl)
  852. {
  853. if (mVertexDecl == decl)
  854. return;
  855. mVertexDecl = decl;
  856. mGfxPipelineRequiresBind = true;
  857. }
  858. bool VulkanCmdBuffer::isReadyForRender()
  859. {
  860. if (mGraphicsPipeline == nullptr)
  861. return false;
  862. SPtr<VertexDeclarationCore> inputDecl = mGraphicsPipeline->getInputDeclaration();
  863. if (inputDecl == nullptr)
  864. return false;
  865. return mFramebuffer != nullptr && mVertexDecl != nullptr;
  866. }
/**
 * Resolves and binds the current graphics pipeline (for the active framebuffer, draw operation and vertex input),
 * registers its resources, and re-binds all dynamic states.
 *
 * @return	False if a valid pipeline object could not be created, true otherwise.
 */
bool VulkanCmdBuffer::bindGraphicsPipeline()
{
	SPtr<VertexDeclarationCore> inputDecl = mGraphicsPipeline->getInputDeclaration();
	SPtr<VulkanVertexInput> vertexInput = VulkanVertexInputManager::instance().getVertexInfo(mVertexDecl, inputDecl);

	// Pipeline variant depends on framebuffer, depth read-only state, topology and vertex input
	VulkanPipeline* pipeline = mGraphicsPipeline->getPipeline(mDevice.getIndex(), mFramebuffer,
		mRenderTargetDepthReadOnly, mDrawOp, vertexInput);

	if (pipeline == nullptr)
		return false;

	// Check that pipeline matches the read-only state of any framebuffer attachments
	UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
	for (UINT32 i = 0; i < numColorAttachments; i++)
	{
		VulkanImage* image = mFramebuffer->getColorAttachment(i).image;

		UINT32 imageInfoIdx = mImages[image];
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		// Writing to an attachment that is simultaneously read by a shader is undefined
		if (imageInfo.isShaderInput && !pipeline->isColorReadOnly(i))
		{
			LOGWRN("Framebuffer attachment also used as a shader input, but color writes aren't disabled. This will"
				" result in undefined behavior.");
		}
	}

	if (mFramebuffer->hasDepthAttachment())
	{
		VulkanImage* image = mFramebuffer->getDepthStencilAttachment().image;

		UINT32 imageInfoIdx = mImages[image];
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		if (imageInfo.isShaderInput && !pipeline->isDepthStencilReadOnly())
		{
			LOGWRN("Framebuffer attachment also used as a shader input, but depth/stencil writes aren't disabled. "
				"This will result in undefined behavior.");
		}
	}

	// Keep pipeline and its dependencies alive for the lifetime of the command buffer
	mGraphicsPipeline->registerPipelineResources(this);
	registerResource(pipeline, VulkanUseFlag::Read);

	vkCmdBindPipeline(mCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline->getHandle());
	// A fresh pipeline invalidates all dynamic state, so force-bind everything
	bindDynamicStates(true);

	mGfxPipelineRequiresBind = false;
	return true;
}
  906. void VulkanCmdBuffer::bindDynamicStates(bool forceAll)
  907. {
  908. if (mViewportRequiresBind || forceAll)
  909. {
  910. VkViewport viewport;
  911. viewport.x = mViewport.x * mRenderTargetWidth;
  912. viewport.y = mViewport.y * mRenderTargetHeight;
  913. viewport.width = mViewport.width * mRenderTargetWidth;
  914. viewport.height = mViewport.height * mRenderTargetHeight;
  915. viewport.minDepth = 0.0f;
  916. viewport.maxDepth = 1.0f;
  917. vkCmdSetViewport(mCmdBuffer, 0, 1, &viewport);
  918. mViewportRequiresBind = false;
  919. }
  920. if(mStencilRefRequiresBind || forceAll)
  921. {
  922. vkCmdSetStencilReference(mCmdBuffer, VK_STENCIL_FRONT_AND_BACK, mStencilRef);
  923. mStencilRefRequiresBind = false;
  924. }
  925. if(mScissorRequiresBind || forceAll)
  926. {
  927. VkRect2D scissorRect;
  928. if(mGraphicsPipeline->isScissorEnabled())
  929. {
  930. scissorRect.offset.x = mScissor.x;
  931. scissorRect.offset.y = mScissor.y;
  932. scissorRect.extent.width = mScissor.width;
  933. scissorRect.extent.height = mScissor.height;
  934. }
  935. else
  936. {
  937. scissorRect.offset.x = 0;
  938. scissorRect.offset.y = 0;
  939. scissorRect.extent.width = mRenderTargetWidth;
  940. scissorRect.extent.height = mRenderTargetHeight;
  941. }
  942. vkCmdSetScissor(mCmdBuffer, 0, 1, &scissorRect);
  943. mScissorRequiresBind = false;
  944. }
  945. }
  946. void VulkanCmdBuffer::bindGpuParams()
  947. {
  948. if (mBoundParamsDirty)
  949. {
  950. if (mBoundParams != nullptr)
  951. {
  952. mNumBoundDescriptorSets = mBoundParams->getNumSets();
  953. mBoundParams->prepareForBind(*this, mDescriptorSetsTemp);
  954. }
  955. else
  956. mNumBoundDescriptorSets = 0;
  957. mBoundParamsDirty = false;
  958. }
  959. else
  960. {
  961. mNumBoundDescriptorSets = 0;
  962. }
  963. }
  964. void VulkanCmdBuffer::executeLayoutTransitions()
  965. {
  966. auto createLayoutTransitionBarrier = [&](VulkanImage* image, ImageInfo& imageInfo)
  967. {
  968. mLayoutTransitionBarriersTemp.push_back(VkImageMemoryBarrier());
  969. VkImageMemoryBarrier& barrier = mLayoutTransitionBarriersTemp.back();
  970. barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  971. barrier.pNext = nullptr;
  972. barrier.srcAccessMask = image->getAccessFlags(imageInfo.currentLayout);
  973. barrier.dstAccessMask = imageInfo.accessFlags;
  974. barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  975. barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  976. barrier.oldLayout = imageInfo.currentLayout;
  977. barrier.newLayout = imageInfo.requiredLayout;
  978. barrier.image = image->getHandle();
  979. barrier.subresourceRange = imageInfo.range;
  980. imageInfo.currentLayout = imageInfo.requiredLayout;
  981. imageInfo.accessFlags = 0;
  982. imageInfo.hasTransitioned = true;
  983. };
  984. // Note: These layout transitions will contain transitions for offscreen framebuffer attachments (while they
  985. // transition to shader read-only layout). This can be avoided, since they're immediately used by the render pass
  986. // as color attachments, making the layout change redundant.
  987. for (auto& entry : mQueuedLayoutTransitions)
  988. {
  989. UINT32 imageInfoIdx = entry.second;
  990. ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
  991. createLayoutTransitionBarrier(entry.first, imageInfo);
  992. }
  993. vkCmdPipelineBarrier(mCmdBuffer,
  994. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // Note: VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT might be more correct here, according to the spec
  995. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
  996. 0, 0, nullptr,
  997. 0, nullptr,
  998. (UINT32)mLayoutTransitionBarriersTemp.size(), mLayoutTransitionBarriersTemp.data());
  999. mQueuedLayoutTransitions.clear();
  1000. mLayoutTransitionBarriersTemp.clear();
  1001. }
  1002. void VulkanCmdBuffer::updateFinalLayouts()
  1003. {
  1004. if (mFramebuffer == nullptr)
  1005. return;
  1006. UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
  1007. for (UINT32 i = 0; i < numColorAttachments; i++)
  1008. {
  1009. const VulkanFramebufferAttachment& attachment = mFramebuffer->getColorAttachment(i);
  1010. UINT32 imageInfoIdx = mImages[attachment.image];
  1011. ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
  1012. imageInfo.currentLayout = imageInfo.finalLayout;
  1013. imageInfo.requiredLayout = imageInfo.finalLayout;
  1014. imageInfo.hasTransitioned = true;
  1015. }
  1016. if (mFramebuffer->hasDepthAttachment())
  1017. {
  1018. const VulkanFramebufferAttachment& attachment = mFramebuffer->getDepthStencilAttachment();
  1019. UINT32 imageInfoIdx = mImages[attachment.image];
  1020. ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
  1021. imageInfo.currentLayout = imageInfo.finalLayout;
  1022. imageInfo.requiredLayout = imageInfo.finalLayout;
  1023. imageInfo.hasTransitioned = true;
  1024. }
  1025. }
/**
 * Executes any queued clear operations by beginning and immediately ending a render pass whose load
 * operations perform the clear (as set up by clearViewport()). Resets the queued clear mask.
 */
void VulkanCmdBuffer::executeClearPass()
{
	assert(mState == State::Recording);

	// Images must be in their required layouts before the render pass starts
	executeLayoutTransitions();

	VkRenderPassBeginInfo renderPassBeginInfo;
	renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	renderPassBeginInfo.pNext = nullptr;
	// RT_NONE load/read masks: this pass exists solely to perform the clear load-ops
	renderPassBeginInfo.framebuffer = mFramebuffer->getFramebuffer(RT_NONE, RT_NONE, mClearMask);
	renderPassBeginInfo.renderPass = mFramebuffer->getRenderPass(RT_NONE, RT_NONE, mClearMask);
	renderPassBeginInfo.renderArea.offset.x = mClearArea.x;
	renderPassBeginInfo.renderArea.offset.y = mClearArea.y;
	renderPassBeginInfo.renderArea.extent.width = mClearArea.width;
	renderPassBeginInfo.renderArea.extent.height = mClearArea.height;
	renderPassBeginInfo.clearValueCount = mFramebuffer->getNumAttachments();
	renderPassBeginInfo.pClearValues = mClearValues.data();

	// Empty pass: begin triggers the clear load-ops, then we end right away
	vkCmdBeginRenderPass(mCmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
	vkCmdEndRenderPass(mCmdBuffer);

	// The render pass transitioned attachments to their final layouts
	updateFinalLayouts();

	mClearMask = CLEAR_NONE;
}
/**
 * Records a non-indexed draw call. Ensures a render pass is active, the graphics pipeline is bound,
 * dynamic states are up to date and descriptor sets are bound before issuing vkCmdDraw.
 *
 * @param vertexOffset	Index of the first vertex to draw.
 * @param vertexCount	Number of vertices to draw.
 * @param instanceCount	Number of instances to draw.
 */
void VulkanCmdBuffer::draw(UINT32 vertexOffset, UINT32 vertexCount, UINT32 instanceCount)
{
	if (!isReadyForRender())
		return;

	// Prepare descriptor sets before the render pass, as preparation may trigger layout transitions
	bindGpuParams();

	if (!isInRenderPass())
		beginRenderPass();

	if (mGfxPipelineRequiresBind)
	{
		// bindGraphicsPipeline() also force-binds all dynamic states
		if (!bindGraphicsPipeline())
			return;
	}
	else
		bindDynamicStates(false);

	if (mDescriptorSetsBindState.isSet(DescriptorSetBindFlag::Graphics))
	{
		if (mNumBoundDescriptorSets > 0)
		{
			UINT32 deviceIdx = mDevice.getIndex();
			VkPipelineLayout pipelineLayout = mGraphicsPipeline->getPipelineLayout(deviceIdx);

			vkCmdBindDescriptorSets(mCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0,
				mNumBoundDescriptorSets, mDescriptorSetsTemp, 0, nullptr);
		}

		mDescriptorSetsBindState.unset(DescriptorSetBindFlag::Graphics);
	}

	vkCmdDraw(mCmdBuffer, vertexCount, instanceCount, vertexOffset, 0);
}
/**
 * Records an indexed draw call. Ensures a render pass is active, the graphics pipeline is bound,
 * dynamic states are up to date and descriptor sets are bound before issuing vkCmdDrawIndexed.
 *
 * @param startIndex	Index of the first entry in the index buffer to use.
 * @param indexCount	Number of indices to draw.
 * @param vertexOffset	Value added to each index before fetching the vertex.
 * @param instanceCount	Number of instances to draw.
 */
void VulkanCmdBuffer::drawIndexed(UINT32 startIndex, UINT32 indexCount, UINT32 vertexOffset, UINT32 instanceCount)
{
	if (!isReadyForRender())
		return;

	// Prepare descriptor sets before the render pass, as preparation may trigger layout transitions
	bindGpuParams();

	if (!isInRenderPass())
		beginRenderPass();

	if (mGfxPipelineRequiresBind)
	{
		// bindGraphicsPipeline() also force-binds all dynamic states
		if (!bindGraphicsPipeline())
			return;
	}
	else
		bindDynamicStates(false);

	if (mDescriptorSetsBindState.isSet(DescriptorSetBindFlag::Graphics))
	{
		if (mNumBoundDescriptorSets > 0)
		{
			UINT32 deviceIdx = mDevice.getIndex();
			VkPipelineLayout pipelineLayout = mGraphicsPipeline->getPipelineLayout(deviceIdx);

			vkCmdBindDescriptorSets(mCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0,
				mNumBoundDescriptorSets, mDescriptorSetsTemp, 0, nullptr);
		}

		mDescriptorSetsBindState.unset(DescriptorSetBindFlag::Graphics);
	}

	vkCmdDrawIndexed(mCmdBuffer, indexCount, instanceCount, startIndex, vertexOffset, 0);
}
/**
 * Records a compute dispatch. Ends any active render pass (compute cannot run inside one), binds the compute
 * pipeline and its descriptor sets if required, then issues vkCmdDispatch.
 *
 * @param numGroupsX	Number of work groups along X.
 * @param numGroupsY	Number of work groups along Y.
 * @param numGroupsZ	Number of work groups along Z.
 */
void VulkanCmdBuffer::dispatch(UINT32 numGroupsX, UINT32 numGroupsY, UINT32 numGroupsZ)
{
	if (mComputePipeline == nullptr)
		return;

	bindGpuParams();

	// Compute dispatches are not allowed inside a render pass
	if (isInRenderPass())
		endRenderPass();

	UINT32 deviceIdx = mDevice.getIndex();
	if(mCmpPipelineRequiresBind)
	{
		VulkanPipeline* pipeline = mComputePipeline->getPipeline(deviceIdx);
		if (pipeline == nullptr)
			return;

		// Keep pipeline and its dependencies alive for the lifetime of the command buffer
		registerResource(pipeline, VulkanUseFlag::Read);
		mComputePipeline->registerPipelineResources(this);

		vkCmdBindPipeline(mCmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline->getHandle());
		mCmpPipelineRequiresBind = false;
	}

	if(mDescriptorSetsBindState.isSet(DescriptorSetBindFlag::Compute))
	{
		if (mNumBoundDescriptorSets > 0)
		{
			VkPipelineLayout pipelineLayout = mComputePipeline->getPipelineLayout(deviceIdx);

			vkCmdBindDescriptorSets(mCmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipelineLayout, 0,
				mNumBoundDescriptorSets, mDescriptorSetsTemp, 0, nullptr);
		}

		mDescriptorSetsBindState.unset(DescriptorSetBindFlag::Compute);
	}

	vkCmdDispatch(mCmdBuffer, numGroupsX, numGroupsY, numGroupsZ);
}
  1130. void VulkanCmdBuffer::setEvent(VulkanEvent* event)
  1131. {
  1132. if(isInRenderPass())
  1133. mQueuedEvents.push_back(event);
  1134. else
  1135. vkCmdSetEvent(mCmdBuffer, event->getHandle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
  1136. }
  1137. void VulkanCmdBuffer::resetQuery(VulkanQuery* query)
  1138. {
  1139. if (isInRenderPass())
  1140. mQueuedQueryResets.push_back(query);
  1141. else
  1142. query->reset(mCmdBuffer);
  1143. }
  1144. void VulkanCmdBuffer::registerResource(VulkanResource* res, VulkanUseFlags flags)
  1145. {
  1146. auto insertResult = mResources.insert(std::make_pair(res, ResourceUseHandle()));
  1147. if(insertResult.second) // New element
  1148. {
  1149. ResourceUseHandle& useHandle = insertResult.first->second;
  1150. useHandle.used = false;
  1151. useHandle.flags = flags;
  1152. res->notifyBound();
  1153. }
  1154. else // Existing element
  1155. {
  1156. ResourceUseHandle& useHandle = insertResult.first->second;
  1157. assert(!useHandle.used);
  1158. useHandle.flags |= flags;
  1159. }
  1160. }
  1161. void VulkanCmdBuffer::registerResource(VulkanImage* res, VulkanUseFlags flags)
  1162. {
  1163. VkImageLayout layout = res->getOptimalLayout();
  1164. VkAccessFlags accessFlags = res->getAccessFlags(layout);
  1165. registerResource(res, accessFlags, VK_IMAGE_LAYOUT_UNDEFINED, layout, flags, false);
  1166. }
/**
 * Registers an image resource along with the layout/access information needed for tracking and
 * layout-transition management.
 *
 * @param res				Image being registered.
 * @param accessFlags		Access flags describing how the image will be accessed.
 * @param newLayout			Layout the image must be transitioned to before use, or VK_IMAGE_LAYOUT_UNDEFINED
 *							if no transition is requested.
 * @param finalLayout		Layout the image is expected to be in once the current render pass ends.
 * @param flags				Read/write usage flags.
 * @param isFBAttachment	True if the image is being registered as a framebuffer attachment (as opposed
 *							to a shader input).
 */
void VulkanCmdBuffer::registerResource(VulkanImage* res, VkAccessFlags accessFlags, VkImageLayout newLayout,
	VkImageLayout finalLayout, VulkanUseFlags flags, bool isFBAttachment)
{
	// Note: I currently always perform pipeline barriers (layout transitions and similar), over the entire image.
	//       In the case of render and storage images, the case is often that only a specific subresource requires
	//       it. However this makes grouping and tracking of current image layouts much more difficult.
	//       If this is ever requires we'll need to track image layout per-subresource instead per-image, and we
	//       might also need a smart way to group layout transitions for multiple sub-resources on the same image.
	VkImageSubresourceRange range = res->getRange();

	UINT32 nextImageInfoIdx = (UINT32)mImageInfos.size();
	auto insertResult = mImages.insert(std::make_pair(res, nextImageInfoIdx));
	if (insertResult.second) // New element
	{
		// First time this image is seen: record all provided state as-is
		UINT32 imageInfoIdx = insertResult.first->second;
		mImageInfos.push_back(ImageInfo());

		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
		imageInfo.accessFlags = accessFlags;
		imageInfo.currentLayout = newLayout;
		imageInfo.initialLayout = newLayout;
		imageInfo.requiredLayout = newLayout;
		imageInfo.finalLayout = finalLayout;
		imageInfo.range = range;
		imageInfo.isFBAttachment = isFBAttachment;
		imageInfo.isShaderInput = !isFBAttachment;
		imageInfo.hasTransitioned = false;

		imageInfo.useHandle.used = false;
		imageInfo.useHandle.flags = flags;

		res->notifyBound();
	}
	else // Existing element
	{
		// Image already tracked: merge the new usage into the existing state
		UINT32 imageInfoIdx = insertResult.first->second;
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		assert(!imageInfo.useHandle.used);
		imageInfo.useHandle.flags |= flags;

		imageInfo.accessFlags |= accessFlags;

		// New layout is valid, check for transitions (UNDEFINED signifies the caller doesn't want a layout transition)
		if (newLayout != VK_IMAGE_LAYOUT_UNDEFINED)
		{
			// If layout transition was requested by framebuffer bind, respect it because render-pass will only accept a
			// specific layout (in certain cases), and we have no choice.
			// In the case when a FB attachment is also bound for shader reads, this will override the layout required for
			// shader read (GENERAL or DEPTH_READ_ONLY), but that is fine because those transitions are handled
			// automatically by render-pass layout transitions.
			// Any other texture (non FB attachment) will only even be bound in a single layout and we can keep the one it
			// was originally registered with.
			if (isFBAttachment)
				imageInfo.requiredLayout = newLayout;
			else if(!imageInfo.isFBAttachment) // Layout transition is not being done on a FB image
			{
				// Check if the image had a layout previously assigned, and if so check if multiple different layouts
				// were requested. In that case we wish to transfer the image to GENERAL layout.
				bool firstUseInRenderPass = !imageInfo.isShaderInput && !imageInfo.isFBAttachment;
				if (firstUseInRenderPass || imageInfo.requiredLayout == VK_IMAGE_LAYOUT_UNDEFINED)
					imageInfo.requiredLayout = newLayout;
				else if (imageInfo.requiredLayout != newLayout)
					imageInfo.requiredLayout = VK_IMAGE_LAYOUT_GENERAL;
			}
		}

		// If attached to FB, then the final layout is set by the FB (provided as layout param here), otherwise its
		// the same as required layout
		if(!isFBAttachment && !imageInfo.isFBAttachment)
			imageInfo.finalLayout = imageInfo.requiredLayout;
		else
		{
			if (isFBAttachment)
				imageInfo.finalLayout = finalLayout;
		}

		// If we haven't done a layout transition yet, we can just overwrite the previously written values, and the
		// transition will be handled as the first thing in submit(), otherwise we queue a non-initial transition
		// below.
		if (!imageInfo.hasTransitioned)
		{
			imageInfo.initialLayout = imageInfo.requiredLayout;
			imageInfo.currentLayout = imageInfo.requiredLayout;
		}
		else
		{
			if (imageInfo.currentLayout != imageInfo.requiredLayout)
				mQueuedLayoutTransitions[res] = imageInfoIdx;
		}

		// If a FB attachment was just bound as a shader input, we might need to restart the render pass with a FB
		// attachment that supports read-only attachments using the GENERAL layout
		bool requiresReadOnlyFB = false;
		if (isFBAttachment)
		{
			if (!imageInfo.isFBAttachment)
			{
				imageInfo.isFBAttachment = true;
				requiresReadOnlyFB = imageInfo.isShaderInput;
			}
		}
		else
		{
			if (!imageInfo.isShaderInput)
			{
				imageInfo.isShaderInput = true;
				requiresReadOnlyFB = imageInfo.isFBAttachment;
			}
		}

		// If we need to switch frame-buffers, end current render pass
		if (requiresReadOnlyFB && isInRenderPass())
			endRenderPass();
	}

	// Register any sub-resources
	for(UINT32 i = 0; i < range.layerCount; i++)
	{
		for(UINT32 j = 0; j < range.levelCount; j++)
		{
			UINT32 layer = range.baseArrayLayer + i;
			UINT32 mipLevel = range.baseMipLevel + j;

			registerResource(res->getSubresource(layer, mipLevel), flags);
		}
	}
}
  1282. void VulkanCmdBuffer::registerResource(VulkanBuffer* res, VkAccessFlags accessFlags, VulkanUseFlags flags)
  1283. {
  1284. auto insertResult = mBuffers.insert(std::make_pair(res, BufferInfo()));
  1285. if (insertResult.second) // New element
  1286. {
  1287. BufferInfo& bufferInfo = insertResult.first->second;
  1288. bufferInfo.accessFlags = accessFlags;
  1289. bufferInfo.useHandle.used = false;
  1290. bufferInfo.useHandle.flags = flags;
  1291. res->notifyBound();
  1292. }
  1293. else // Existing element
  1294. {
  1295. BufferInfo& bufferInfo = insertResult.first->second;
  1296. assert(!bufferInfo.useHandle.used);
  1297. bufferInfo.useHandle.flags |= flags;
  1298. bufferInfo.accessFlags |= accessFlags;
  1299. }
  1300. }
/**
 * Registers a framebuffer and all of its attachment images with the command buffer.
 *
 * @param res		Framebuffer being registered.
 * @param loadMask	Mask determining which attachments will have their contents loaded by the render pass
 *					(those need an explicit layout transition; others start as UNDEFINED).
 * @param flags		Read/write usage flags for the framebuffer itself.
 */
void VulkanCmdBuffer::registerResource(VulkanFramebuffer* res, RenderSurfaceMask loadMask, VulkanUseFlags flags)
{
	auto insertResult = mResources.insert(std::make_pair(res, ResourceUseHandle()));
	if (insertResult.second) // New element
	{
		ResourceUseHandle& useHandle = insertResult.first->second;
		useHandle.used = false;
		useHandle.flags = flags;

		res->notifyBound();
	}
	else // Existing element
	{
		ResourceUseHandle& useHandle = insertResult.first->second;

		assert(!useHandle.used);
		useHandle.flags |= flags;
	}

	// Register any sub-resources
	UINT32 numColorAttachments = res->getNumColorAttachments();
	for (UINT32 i = 0; i < numColorAttachments; i++)
	{
		const VulkanFramebufferAttachment& attachment = res->getColorAttachment(i);

		VkImageLayout layout;
		VkAccessFlags accessMask;

		// If image is being loaded, we need to transfer it to correct layout, otherwise it doesn't matter
		if (loadMask.isSet((RenderSurfaceMaskBits)(1 << i)))
		{
			layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
			accessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
		}
		else
		{
			layout = VK_IMAGE_LAYOUT_UNDEFINED;
			accessMask = 0;
		}

		registerResource(attachment.image, accessMask, layout, attachment.finalLayout, VulkanUseFlag::Write, true);
	}

	if(res->hasDepthAttachment())
	{
		const VulkanFramebufferAttachment& attachment = res->getDepthStencilAttachment();

		VkImageLayout layout;
		VkAccessFlags accessMask;

		// If image is being loaded, we need to transfer it to correct layout, otherwise it doesn't matter
		if (loadMask.isSet(RT_DEPTH))
		{
			layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
			accessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
		}
		else
		{
			layout = VK_IMAGE_LAYOUT_UNDEFINED;
			accessMask = 0;
		}

		registerResource(attachment.image, accessMask, layout, attachment.finalLayout, VulkanUseFlag::Write, true);
	}
}
/**
 * Constructs a high-level command buffer wrapper, picks the queue it will submit to (falling back to a
 * graphics queue when the requested queue type has none), and acquires an initial internal buffer.
 *
 * @param device	Device the command buffer records commands for.
 * @param type		Requested queue type (graphics/compute/upload).
 * @param deviceIdx	Index of the device among all devices.
 * @param queueIdx	Requested queue index within the queue type; wrapped by the available queue count.
 * @param secondary	True if this is a secondary command buffer.
 */
VulkanCommandBuffer::VulkanCommandBuffer(VulkanDevice& device, GpuQueueType type, UINT32 deviceIdx,
	UINT32 queueIdx, bool secondary)
	: CommandBuffer(type, deviceIdx, queueIdx, secondary), mBuffer(nullptr)
	, mDevice(device), mQueue(nullptr), mIdMask(0)
{
	UINT32 numQueues = device.getNumQueues(mType);
	if (numQueues == 0) // Fall back to graphics queue
	{
		mType = GQT_GRAPHICS;
		numQueues = device.getNumQueues(GQT_GRAPHICS);
	}

	// Wrap the requested index so it always maps to an existing queue
	mQueue = device.getQueue(mType, mQueueIdx % numQueues);
	mIdMask = device.getQueueMask(mType, mQueueIdx);

	acquireNewBuffer();
}
  1371. void VulkanCommandBuffer::acquireNewBuffer()
  1372. {
  1373. VulkanCmdBufferPool& pool = mDevice.getCmdBufferPool();
  1374. if (mBuffer != nullptr)
  1375. assert(mBuffer->isSubmitted());
  1376. UINT32 queueFamily = mDevice.getQueueFamily(mType);
  1377. mBuffer = pool.getBuffer(queueFamily, mIsSecondary);
  1378. }
/**
 * Finalizes and submits the internal command buffer to its queue, then acquires a fresh internal buffer
 * for further recording.
 *
 * @param syncMask	Mask of queues this submission must synchronize with (this buffer's own queue is excluded).
 */
void VulkanCommandBuffer::submit(UINT32 syncMask)
{
	// Ignore myself
	syncMask &= ~mIdMask;

	if (mBuffer->isInRenderPass())
		mBuffer->endRenderPass();

	// Execute any queued layout transitions that weren't already handled by the render pass
	mBuffer->executeLayoutTransitions();

	if (mBuffer->isRecording())
		mBuffer->end();

	if (!mBuffer->isReadyForSubmit()) // Possibly nothing was recorded in the buffer
		return;

	mBuffer->submit(mQueue, mQueueIdx, syncMask);

	// The submitted buffer is now in-flight; start recording into a fresh one
	acquireNewBuffer();
	gVulkanCBManager().refreshStates(mDeviceIdx);
}
  1395. }