// BsVulkanCommandBuffer.cpp
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsVulkanCommandBuffer.h"
  4. #include "BsVulkanCommandBufferManager.h"
  5. #include "BsVulkanUtility.h"
  6. #include "BsVulkanDevice.h"
  7. #include "BsVulkanGpuParams.h"
  8. #include "BsVulkanQueue.h"
  9. #include "BsVulkanTexture.h"
  10. #include "BsVulkanIndexBuffer.h"
  11. #include "BsVulkanVertexBuffer.h"
  12. #include "BsVulkanHardwareBuffer.h"
  13. #include "BsVulkanFramebuffer.h"
  14. #include "BsVulkanVertexInputManager.h"
  15. #include "BsVulkanEventQuery.h"
  16. #include "BsVulkanQueryManager.h"
  17. #if BS_PLATFORM == BS_PLATFORM_WIN32
  18. #include "Win32/BsWin32RenderWindow.h"
  19. #else
  20. static_assert(false, "Other platforms go here");
  21. #endif
  22. namespace bs { namespace ct
  23. {
  24. VulkanSemaphore::VulkanSemaphore(VulkanResourceManager* owner)
  25. :VulkanResource(owner, true)
  26. {
  27. VkSemaphoreCreateInfo semaphoreCI;
  28. semaphoreCI.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
  29. semaphoreCI.pNext = nullptr;
  30. semaphoreCI.flags = 0;
  31. VkResult result = vkCreateSemaphore(owner->getDevice().getLogical(), &semaphoreCI, gVulkanAllocator, &mSemaphore);
  32. assert(result == VK_SUCCESS);
  33. }
VulkanSemaphore::~VulkanSemaphore()
{
	// Destroys the underlying Vulkan semaphore. Lifetime/queue-safety is managed
	// by the owning VulkanResourceManager (destruction is deferred via destroy()
	// elsewhere in this file), so no wait is needed here.
	vkDestroySemaphore(mOwner->getDevice().getLogical(), mSemaphore, gVulkanAllocator);
}
  38. VulkanCmdBufferPool::VulkanCmdBufferPool(VulkanDevice& device)
  39. :mDevice(device), mNextId(1)
  40. {
  41. for (UINT32 i = 0; i < GQT_COUNT; i++)
  42. {
  43. UINT32 familyIdx = device.getQueueFamily((GpuQueueType)i);
  44. if (familyIdx == (UINT32)-1)
  45. continue;
  46. VkCommandPoolCreateInfo poolCI;
  47. poolCI.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
  48. poolCI.pNext = nullptr;
  49. poolCI.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  50. poolCI.queueFamilyIndex = familyIdx;
  51. PoolInfo& poolInfo = mPools[familyIdx];
  52. poolInfo.queueFamily = familyIdx;
  53. memset(poolInfo.buffers, 0, sizeof(poolInfo.buffers));
  54. vkCreateCommandPool(device.getLogical(), &poolCI, gVulkanAllocator, &poolInfo.pool);
  55. }
  56. }
  57. VulkanCmdBufferPool::~VulkanCmdBufferPool()
  58. {
  59. // Note: Shutdown should be the only place command buffers are destroyed at, as the system relies on the fact that
  60. // they won't be destroyed during normal operation.
  61. for(auto& entry : mPools)
  62. {
  63. PoolInfo& poolInfo = entry.second;
  64. for (UINT32 i = 0; i < BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY; i++)
  65. {
  66. VulkanCmdBuffer* buffer = poolInfo.buffers[i];
  67. if (buffer == nullptr)
  68. break;
  69. bs_delete(buffer);
  70. }
  71. vkDestroyCommandPool(mDevice.getLogical(), poolInfo.pool, gVulkanAllocator);
  72. }
  73. }
  74. VulkanCmdBuffer* VulkanCmdBufferPool::getBuffer(UINT32 queueFamily, bool secondary)
  75. {
  76. auto iterFind = mPools.find(queueFamily);
  77. if (iterFind == mPools.end())
  78. return nullptr;
  79. VulkanCmdBuffer** buffers = iterFind->second.buffers;
  80. UINT32 i = 0;
  81. for(; i < BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY; i++)
  82. {
  83. if (buffers[i] == nullptr)
  84. break;
  85. if(buffers[i]->mState == VulkanCmdBuffer::State::Ready)
  86. {
  87. buffers[i]->begin();
  88. return buffers[i];
  89. }
  90. }
  91. assert(i < BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY &&
  92. "Too many command buffers allocated. Increment BS_MAX_VULKAN_CB_PER_QUEUE_FAMILY to a higher value. ");
  93. buffers[i] = createBuffer(queueFamily, secondary);
  94. buffers[i]->begin();
  95. return buffers[i];
  96. }
  97. VulkanCmdBuffer* VulkanCmdBufferPool::createBuffer(UINT32 queueFamily, bool secondary)
  98. {
  99. auto iterFind = mPools.find(queueFamily);
  100. if (iterFind == mPools.end())
  101. return nullptr;
  102. const PoolInfo& poolInfo = iterFind->second;
  103. return bs_new<VulkanCmdBuffer>(mDevice, mNextId++, poolInfo.pool, poolInfo.queueFamily, secondary);
  104. }
// Allocates the underlying VkCommandBuffer from the provided pool, plus a fence used
// to track GPU completion of a submission, and a scratch array for descriptor sets.
// The buffer starts out in the Ready state (begin() must be called before recording).
VulkanCmdBuffer::VulkanCmdBuffer(VulkanDevice& device, UINT32 id, VkCommandPool pool, UINT32 queueFamily, bool secondary)
	: mId(id), mQueueFamily(queueFamily), mState(State::Ready), mDevice(device), mPool(pool)
	, mIntraQueueSemaphore(nullptr), mInterQueueSemaphores(), mNumUsedInterQueueSemaphores(0)
	, mFramebuffer(nullptr), mRenderTargetWidth(0)
	, mRenderTargetHeight(0), mRenderTargetDepthReadOnly(false), mRenderTargetLoadMask(RT_NONE), mGlobalQueueIdx(-1)
	, mViewport(0.0f, 0.0f, 1.0f, 1.0f), mScissor(0, 0, 0, 0), mStencilRef(0), mDrawOp(DOT_TRIANGLE_LIST)
	, mNumBoundDescriptorSets(0), mGfxPipelineRequiresBind(true), mCmpPipelineRequiresBind(true)
	, mViewportRequiresBind(true), mStencilRefRequiresBind(true), mScissorRequiresBind(true), mBoundParamsDirty(false)
	, mClearValues(), mClearMask(), mVertexBuffersTemp(), mVertexBufferOffsetsTemp()
{
	// Scratch array for descriptor set binds, sized to the device-reported limit
	UINT32 maxBoundDescriptorSets = device.getDeviceProperties().limits.maxBoundDescriptorSets;
	mDescriptorSetsTemp = (VkDescriptorSet*)bs_alloc(sizeof(VkDescriptorSet) * maxBoundDescriptorSets);

	VkCommandBufferAllocateInfo cmdBufferAllocInfo;
	cmdBufferAllocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
	cmdBufferAllocInfo.pNext = nullptr;
	cmdBufferAllocInfo.commandPool = pool;
	cmdBufferAllocInfo.level = secondary ? VK_COMMAND_BUFFER_LEVEL_SECONDARY : VK_COMMAND_BUFFER_LEVEL_PRIMARY;
	cmdBufferAllocInfo.commandBufferCount = 1;

	VkResult result = vkAllocateCommandBuffers(mDevice.getLogical(), &cmdBufferAllocInfo, &mCmdBuffer);
	assert(result == VK_SUCCESS);

	// Fence starts unsignaled; it is reset before each submit and queried via checkFenceStatus()
	VkFenceCreateInfo fenceCI;
	fenceCI.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
	fenceCI.pNext = nullptr;
	fenceCI.flags = 0;

	result = vkCreateFence(mDevice.getLogical(), &fenceCI, gVulkanAllocator, &mFence);
	assert(result == VK_SUCCESS);
}
// Tears down the command buffer. If still executing on the GPU, waits (up to 1s) on
// the fence and uses reset() to release resource use-handles; if recorded but never
// submitted, manually notifies every tracked resource that it is no longer bound.
VulkanCmdBuffer::~VulkanCmdBuffer()
{
	VkDevice device = mDevice.getLogical();

	if(mState == State::Submitted)
	{
		// Wait 1s
		UINT64 waitTime = 1000 * 1000 * 1000;
		VkResult result = vkWaitForFences(device, 1, &mFence, true, waitTime);
		assert(result == VK_SUCCESS || result == VK_TIMEOUT);

		if (result == VK_TIMEOUT)
			LOGWRN("Freeing a command buffer before done executing because fence wait expired!");

		// Resources have been marked as used, make sure to notify them we're done with them
		reset();
	}
	else if(mState != State::Ready)
	{
		// Recording started but nothing was submitted: resources were registered as
		// bound (not used), so only an unbound notification is required.

		// Notify any resources that they are no longer bound
		for (auto& entry : mResources)
		{
			ResourceUseHandle& useHandle = entry.second;
			assert(useHandle.used);

			entry.first->notifyUnbound();
		}

		// Image bookkeeping is indirect: mImages maps to an index into mImageInfos
		for (auto& entry : mImages)
		{
			UINT32 imageInfoIdx = entry.second;
			ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

			ResourceUseHandle& useHandle = imageInfo.useHandle;
			assert(useHandle.used);

			entry.first->notifyUnbound();
		}

		for (auto& entry : mBuffers)
		{
			ResourceUseHandle& useHandle = entry.second.useHandle;
			assert(useHandle.used);

			entry.first->notifyUnbound();
		}
	}

	// Semaphores are owned by the resource manager; destroy() schedules their release
	if (mIntraQueueSemaphore != nullptr)
		mIntraQueueSemaphore->destroy();

	for(UINT32 i = 0; i < BS_MAX_VULKAN_CB_DEPENDENCIES; i++)
	{
		if (mInterQueueSemaphores[i] != nullptr)
			mInterQueueSemaphores[i]->destroy();
	}

	vkDestroyFence(device, mFence, gVulkanAllocator);
	vkFreeCommandBuffers(device, mPool, 1, &mCmdBuffer);

	bs_free(mDescriptorSetsTemp);
}
// Index of the device this command buffer belongs to (relevant for multi-GPU setups).
UINT32 VulkanCmdBuffer::getDeviceIdx() const
{
	return mDevice.getIndex();
}
  185. void VulkanCmdBuffer::begin()
  186. {
  187. assert(mState == State::Ready);
  188. VkCommandBufferBeginInfo beginInfo;
  189. beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  190. beginInfo.pNext = nullptr;
  191. beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  192. beginInfo.pInheritanceInfo = nullptr;
  193. VkResult result = vkBeginCommandBuffer(mCmdBuffer, &beginInfo);
  194. assert(result == VK_SUCCESS);
  195. mState = State::Recording;
  196. }
  197. void VulkanCmdBuffer::end()
  198. {
  199. assert(mState == State::Recording);
  200. // If a clear is queued, execute the render pass with no additional instructions
  201. if (mClearMask)
  202. executeClearPass();
  203. VkResult result = vkEndCommandBuffer(mCmdBuffer);
  204. assert(result == VK_SUCCESS);
  205. mState = State::RecordingDone;
  206. }
// Starts a render pass on the currently bound framebuffer. Flushes any queued clear
// that doesn't cover the full render target, performs pending layout transitions,
// and selects a render pass variant based on which attachments are also bound as
// shader inputs (those become read-only attachments).
void VulkanCmdBuffer::beginRenderPass()
{
	assert(mState == State::Recording);

	if (mFramebuffer == nullptr)
	{
		LOGWRN("Attempting to begin a render pass but no render target is bound to the command buffer.");
		return;
	}

	if(mClearMask != CLEAR_NONE)
	{
		// If a previous clear is queued, but it doesn't match the rendered area, need to execute a separate pass
		// just for it
		Rect2I rtArea(0, 0, mRenderTargetWidth, mRenderTargetHeight);
		if (mClearArea != rtArea)
			executeClearPass();
	}

	executeLayoutTransitions();

	// Check if any frame-buffer attachments are also used as shader inputs, in which case we make them read-only
	RenderSurfaceMask readMask = RT_NONE;

	UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
	for(UINT32 i = 0; i < numColorAttachments; i++)
	{
		VulkanImage* image = mFramebuffer->getColorAttachment(i).image;
		UINT32 imageInfoIdx = mImages[image];
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		bool readOnly = imageInfo.isShaderInput;

		if(readOnly)
			readMask.set((RenderSurfaceMaskBits)(1 << i));
	}

	if(mFramebuffer->hasDepthAttachment())
	{
		VulkanImage* image = mFramebuffer->getDepthStencilAttachment().image;
		UINT32 imageInfoIdx = mImages[image];
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		bool readOnly = imageInfo.isShaderInput;

		if (readOnly)
			readMask.set(RT_DEPTH);
	}

	// Reset flags that signal image usage (since those only matter for the render-pass' purposes)
	for (auto& entry : mImages)
	{
		UINT32 imageInfoIdx = entry.second;
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
		imageInfo.isFBAttachment = false;
		imageInfo.isShaderInput = false;
	}

	// The framebuffer/render-pass pair is looked up per (load mask, read mask, clear
	// mask) combination, so a pending clear is folded into the pass itself
	VkRenderPassBeginInfo renderPassBeginInfo;
	renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	renderPassBeginInfo.pNext = nullptr;
	renderPassBeginInfo.framebuffer = mFramebuffer->getFramebuffer(mRenderTargetLoadMask, readMask, mClearMask);
	renderPassBeginInfo.renderPass = mFramebuffer->getRenderPass(mRenderTargetLoadMask, readMask, mClearMask);
	renderPassBeginInfo.renderArea.offset.x = 0;
	renderPassBeginInfo.renderArea.offset.y = 0;
	renderPassBeginInfo.renderArea.extent.width = mRenderTargetWidth;
	renderPassBeginInfo.renderArea.extent.height = mRenderTargetHeight;
	renderPassBeginInfo.clearValueCount = mFramebuffer->getNumAttachments();
	renderPassBeginInfo.pClearValues = mClearValues.data();

	vkCmdBeginRenderPass(mCmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);

	// Clear was consumed by the render pass (if any was queued)
	mClearMask = CLEAR_NONE;
	mState = State::RecordingRenderPass;
}
// Ends the active render pass, flushes events that had to wait until outside the
// pass, and resets per-pass image usage bookkeeping.
void VulkanCmdBuffer::endRenderPass()
{
	assert(mState == State::RecordingRenderPass);

	vkCmdEndRenderPass(mCmdBuffer);

	// Execute any queued events
	for(auto& entry : mQueuedEvents)
		vkCmdSetEvent(mCmdBuffer, entry->getHandle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);

	mQueuedEvents.clear();

	// Update any layout transitions that were performed by subpass dependencies, reset flags that signal image usage
	// and reset read-only state.
	// Note: It's okay reset these even those they might still be bound on the GPU, because these values only matter
	// for state transitions.
	for (auto& entry : mImages)
	{
		UINT32 imageInfoIdx = entry.second;
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
		imageInfo.isFBAttachment = false;
		imageInfo.isShaderInput = false;
		imageInfo.isReadOnly = true;
	}

	updateFinalLayouts();

	mState = State::Recording;
}
  291. void VulkanCmdBuffer::allocateSemaphores()
  292. {
  293. if (mIntraQueueSemaphore != nullptr)
  294. mIntraQueueSemaphore->destroy();
  295. mIntraQueueSemaphore = mDevice.getResourceManager().create<VulkanSemaphore>();
  296. for (UINT32 i = 0; i < BS_MAX_VULKAN_CB_DEPENDENCIES; i++)
  297. {
  298. if (mInterQueueSemaphores[i] != nullptr)
  299. mInterQueueSemaphores[i]->destroy();
  300. mInterQueueSemaphores[i] = mDevice.getResourceManager().create<VulkanSemaphore>();
  301. }
  302. mNumUsedInterQueueSemaphores = 0;
  303. }
  304. VulkanSemaphore* VulkanCmdBuffer::requestInterQueueSemaphore() const
  305. {
  306. if (mNumUsedInterQueueSemaphores >= BS_MAX_VULKAN_CB_DEPENDENCIES)
  307. return nullptr;
  308. return mInterQueueSemaphores[mNumUsedInterQueueSemaphores++];
  309. }
  310. void VulkanCmdBuffer::submit(VulkanQueue* queue, UINT32 queueIdx, UINT32 syncMask)
  311. {
  312. assert(isReadyForSubmit());
  313. // Make sure to reset the CB fence before we submit it
  314. VkResult result = vkResetFences(mDevice.getLogical(), 1, &mFence);
  315. assert(result == VK_SUCCESS);
  316. // If there are any query resets needed, execute those first
  317. VulkanDevice& device = queue->getDevice();
  318. if(!mQueuedQueryResets.empty())
  319. {
  320. VulkanCmdBuffer* cmdBuffer = device.getCmdBufferPool().getBuffer(mQueueFamily, false);
  321. VkCommandBuffer vkCmdBuffer = cmdBuffer->getHandle();
  322. for (auto& entry : mQueuedQueryResets)
  323. entry->reset(vkCmdBuffer);
  324. cmdBuffer->end();
  325. queue->queueSubmit(cmdBuffer, nullptr, 0);
  326. mQueuedQueryResets.clear();
  327. }
  328. // Issue pipeline barriers for queue transitions (need to happen on original queue first, then on new queue)
  329. for (auto& entry : mBuffers)
  330. {
  331. VulkanBuffer* resource = static_cast<VulkanBuffer*>(entry.first);
  332. if (!resource->isExclusive())
  333. continue;
  334. UINT32 currentQueueFamily = resource->getQueueFamily();
  335. if (currentQueueFamily != -1 && currentQueueFamily != mQueueFamily)
  336. {
  337. Vector<VkBufferMemoryBarrier>& barriers = mTransitionInfoTemp[currentQueueFamily].bufferBarriers;
  338. barriers.push_back(VkBufferMemoryBarrier());
  339. VkBufferMemoryBarrier& barrier = barriers.back();
  340. barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  341. barrier.pNext = nullptr;
  342. barrier.srcAccessMask = entry.second.accessFlags;
  343. barrier.dstAccessMask = entry.second.accessFlags;
  344. barrier.srcQueueFamilyIndex = currentQueueFamily;
  345. barrier.dstQueueFamilyIndex = mQueueFamily;
  346. barrier.buffer = resource->getHandle();
  347. barrier.offset = 0;
  348. barrier.size = VK_WHOLE_SIZE;
  349. }
  350. }
  351. // For images issue queue transitions, as above. Also issue layout transitions to their inital layouts.
  352. for (auto& entry : mImages)
  353. {
  354. VulkanImage* resource = static_cast<VulkanImage*>(entry.first);
  355. ImageInfo& imageInfo = mImageInfos[entry.second];
  356. UINT32 currentQueueFamily = resource->getQueueFamily();
  357. bool queueMismatch = resource->isExclusive() && currentQueueFamily != -1 && currentQueueFamily != mQueueFamily;
  358. VkImageLayout currentLayout = resource->getLayout();
  359. VkImageLayout initialLayout = imageInfo.initialLayout;
  360. if (queueMismatch || (currentLayout != initialLayout && initialLayout != VK_IMAGE_LAYOUT_UNDEFINED))
  361. {
  362. Vector<VkImageMemoryBarrier>& barriers = mTransitionInfoTemp[currentQueueFamily].imageBarriers;
  363. if (initialLayout == VK_IMAGE_LAYOUT_UNDEFINED)
  364. initialLayout = currentLayout;
  365. barriers.push_back(VkImageMemoryBarrier());
  366. VkImageMemoryBarrier& barrier = barriers.back();
  367. barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  368. barrier.pNext = nullptr;
  369. barrier.srcAccessMask = resource->getAccessFlags(currentLayout);
  370. barrier.dstAccessMask = resource->getAccessFlags(initialLayout, imageInfo.isInitialReadOnly);
  371. barrier.oldLayout = currentLayout;
  372. barrier.newLayout = initialLayout;
  373. barrier.image = resource->getHandle();
  374. barrier.subresourceRange = imageInfo.range;
  375. barrier.srcQueueFamilyIndex = currentQueueFamily;
  376. barrier.dstQueueFamilyIndex = mQueueFamily;
  377. // Check if queue transition needed
  378. if (queueMismatch)
  379. {
  380. barrier.srcQueueFamilyIndex = currentQueueFamily;
  381. barrier.dstQueueFamilyIndex = mQueueFamily;
  382. }
  383. else
  384. {
  385. barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  386. barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  387. }
  388. }
  389. resource->setLayout(imageInfo.finalLayout);
  390. }
  391. for (auto& entry : mTransitionInfoTemp)
  392. {
  393. bool empty = entry.second.imageBarriers.size() == 0 && entry.second.bufferBarriers.size() == 0;
  394. if (empty)
  395. continue;
  396. UINT32 entryQueueFamily = entry.first;
  397. // No queue transition needed for entries on this queue (this entry is most likely an image layout transition)
  398. if (entryQueueFamily == -1 || entryQueueFamily == mQueueFamily)
  399. continue;
  400. VulkanCmdBuffer* cmdBuffer = device.getCmdBufferPool().getBuffer(entryQueueFamily, false);
  401. VkCommandBuffer vkCmdBuffer = cmdBuffer->getHandle();
  402. TransitionInfo& barriers = entry.second;
  403. UINT32 numImgBarriers = (UINT32)barriers.imageBarriers.size();
  404. UINT32 numBufferBarriers = (UINT32)barriers.bufferBarriers.size();
  405. vkCmdPipelineBarrier(vkCmdBuffer,
  406. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // Note: VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT might be more correct here, according to the spec
  407. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // The main idea is that the barrier executes before the semaphore triggers, no actual stage dependencies are needed.
  408. 0, 0, nullptr,
  409. numBufferBarriers, barriers.bufferBarriers.data(),
  410. numImgBarriers, barriers.imageBarriers.data());
  411. // Find an appropriate queue to execute on
  412. UINT32 otherQueueIdx = 0;
  413. VulkanQueue* otherQueue = nullptr;
  414. GpuQueueType otherQueueType = GQT_GRAPHICS;
  415. for (UINT32 i = 0; i < GQT_COUNT; i++)
  416. {
  417. otherQueueType = (GpuQueueType)i;
  418. if (device.getQueueFamily(otherQueueType) != entryQueueFamily)
  419. continue;
  420. UINT32 numQueues = device.getNumQueues(otherQueueType);
  421. for (UINT32 j = 0; j < numQueues; j++)
  422. {
  423. // Try to find a queue not currently executing
  424. VulkanQueue* curQueue = device.getQueue(otherQueueType, j);
  425. if (!curQueue->isExecuting())
  426. {
  427. otherQueue = curQueue;
  428. otherQueueIdx = j;
  429. }
  430. }
  431. // Can't find empty one, use the first one then
  432. if (otherQueue == nullptr)
  433. {
  434. otherQueue = device.getQueue(otherQueueType, 0);
  435. otherQueueIdx = 0;
  436. }
  437. break;
  438. }
  439. syncMask |= CommandSyncMask::getGlobalQueueMask(otherQueueType, otherQueueIdx);
  440. cmdBuffer->end();
  441. otherQueue->submit(cmdBuffer, nullptr, 0);
  442. // If there are any layout transitions, reset them as we don't need them for the second pipeline barrier
  443. for (auto& barrierEntry : barriers.imageBarriers)
  444. barrierEntry.oldLayout = barrierEntry.newLayout;
  445. }
  446. UINT32 deviceIdx = device.getIndex();
  447. VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
  448. UINT32 numSemaphores;
  449. cbm.getSyncSemaphores(deviceIdx, syncMask, mSemaphoresTemp, numSemaphores);
  450. // Issue second part of transition pipeline barriers (on this queue)
  451. for (auto& entry : mTransitionInfoTemp)
  452. {
  453. bool empty = entry.second.imageBarriers.size() == 0 && entry.second.bufferBarriers.size() == 0;
  454. if (empty)
  455. continue;
  456. VulkanCmdBuffer* cmdBuffer = device.getCmdBufferPool().getBuffer(mQueueFamily, false);
  457. VkCommandBuffer vkCmdBuffer = cmdBuffer->getHandle();
  458. TransitionInfo& barriers = entry.second;
  459. UINT32 numImgBarriers = (UINT32)barriers.imageBarriers.size();
  460. UINT32 numBufferBarriers = (UINT32)barriers.bufferBarriers.size();
  461. vkCmdPipelineBarrier(vkCmdBuffer,
  462. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // Note: VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT might be more correct here, according to the spec
  463. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
  464. 0, 0, nullptr,
  465. numBufferBarriers, barriers.bufferBarriers.data(),
  466. numImgBarriers, barriers.imageBarriers.data());
  467. cmdBuffer->end();
  468. queue->queueSubmit(cmdBuffer, mSemaphoresTemp, numSemaphores);
  469. numSemaphores = 0; // Semaphores are only needed the first time, since we're adding the buffers on the same queue
  470. }
  471. queue->queueSubmit(this, mSemaphoresTemp, numSemaphores);
  472. queue->submitQueued();
  473. mGlobalQueueIdx = CommandSyncMask::getGlobalQueueIdx(queue->getType(), queueIdx);
  474. for (auto& entry : mResources)
  475. {
  476. ResourceUseHandle& useHandle = entry.second;
  477. assert(!useHandle.used);
  478. useHandle.used = true;
  479. entry.first->notifyUsed(mGlobalQueueIdx, mQueueFamily, useHandle.flags);
  480. }
  481. for (auto& entry : mImages)
  482. {
  483. UINT32 imageInfoIdx = entry.second;
  484. ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
  485. ResourceUseHandle& useHandle = imageInfo.useHandle;
  486. assert(!useHandle.used);
  487. useHandle.used = true;
  488. entry.first->notifyUsed(mGlobalQueueIdx, mQueueFamily, useHandle.flags);
  489. }
  490. for (auto& entry : mBuffers)
  491. {
  492. ResourceUseHandle& useHandle = entry.second.useHandle;
  493. assert(!useHandle.used);
  494. useHandle.used = true;
  495. entry.first->notifyUsed(mGlobalQueueIdx, mQueueFamily, useHandle.flags);
  496. }
  497. // Note: Uncomment for debugging only, prevents any device concurrency issues.
  498. // vkQueueWaitIdle(queue->getHandle());
  499. // Clear vectors but don't clear the actual map, as we want to re-use the memory since we expect queue family
  500. // indices to be the same
  501. for (auto& entry : mTransitionInfoTemp)
  502. {
  503. entry.second.imageBarriers.clear();
  504. entry.second.bufferBarriers.clear();
  505. }
  506. mGraphicsPipeline = nullptr;
  507. mComputePipeline = nullptr;
  508. mGfxPipelineRequiresBind = true;
  509. mCmpPipelineRequiresBind = true;
  510. mFramebuffer = nullptr;
  511. mDescriptorSetsBindState = DescriptorSetBindFlag::Graphics | DescriptorSetBindFlag::Compute;
  512. mQueuedLayoutTransitions.clear();
  513. mBoundParams = nullptr;
  514. mBoundParams = false;
  515. }
  516. bool VulkanCmdBuffer::checkFenceStatus() const
  517. {
  518. VkResult result = vkGetFenceStatus(mDevice.getLogical(), mFence);
  519. assert(result == VK_SUCCESS || result == VK_NOT_READY);
  520. return result == VK_SUCCESS;
  521. }
// Returns the buffer to the Ready state so it can be re-used. If it was previously
// submitted, notifies all tracked resources that the GPU is done with them; otherwise
// (recording was abandoned) notifies them they are merely unbound. Clears all
// resource tracking afterwards.
void VulkanCmdBuffer::reset()
{
	bool wasSubmitted = mState == State::Submitted;

	mState = State::Ready;
	vkResetCommandBuffer(mCmdBuffer, VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT); // Note: Maybe better not to release resources?

	if (wasSubmitted)
	{
		// Submission path set useHandle.used = true, so the matching call is notifyDone()
		for (auto& entry : mResources)
		{
			ResourceUseHandle& useHandle = entry.second;
			assert(useHandle.used);

			entry.first->notifyDone(mGlobalQueueIdx, useHandle.flags);
		}

		for (auto& entry : mImages)
		{
			UINT32 imageInfoIdx = entry.second;
			ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

			ResourceUseHandle& useHandle = imageInfo.useHandle;
			assert(useHandle.used);

			entry.first->notifyDone(mGlobalQueueIdx, useHandle.flags);
		}

		for (auto& entry : mBuffers)
		{
			ResourceUseHandle& useHandle = entry.second.useHandle;
			assert(useHandle.used);

			entry.first->notifyDone(mGlobalQueueIdx, useHandle.flags);
		}
	}
	else
	{
		// Never submitted: resources were only bound, not used
		for (auto& entry : mResources)
			entry.first->notifyUnbound();

		for (auto& entry : mImages)
			entry.first->notifyUnbound();

		for (auto& entry : mBuffers)
			entry.first->notifyUnbound();
	}

	mResources.clear();
	mImages.clear();
	mBuffers.clear();
	mImageInfos.clear();
}
// Binds a render target (or unbinds, when rt is null). Ends any active render pass,
// flushes clears queued for the previous framebuffer, resets FB-attachment tracking
// and clears bound GPU params. For window targets the back buffer is acquired first.
void VulkanCmdBuffer::setRenderTarget(const SPtr<RenderTarget>& rt, bool readOnlyDepthStencil,
	RenderSurfaceMask loadMask)
{
	assert(mState != State::Submitted);

	VulkanFramebuffer* newFB;
	if(rt != nullptr)
	{
		if (rt->getProperties().isWindow())
		{
			// Win32-only build (see static_assert at the top of the file)
			Win32RenderWindow* window = static_cast<Win32RenderWindow*>(rt.get());
			window->acquireBackBuffer();
		}

		rt->getCustomAttribute("FB", &newFB);
	}
	else
		newFB = nullptr;

	// Binding the same target with identical flags is a no-op
	if (mFramebuffer == newFB && mRenderTargetDepthReadOnly == readOnlyDepthStencil && mRenderTargetLoadMask == loadMask)
		return;

	if (isInRenderPass())
		endRenderPass();
	else
	{
		// If a clear is queued for previous FB, execute the render pass with no additional instructions
		if (mClearMask)
			executeClearPass();
	}

	if(newFB == nullptr)
	{
		mFramebuffer = nullptr;
		mRenderTargetWidth = 0;
		mRenderTargetHeight = 0;
		mRenderTargetDepthReadOnly = false;
		mRenderTargetLoadMask = RT_NONE;
	}
	else
	{
		mFramebuffer = newFB;
		mRenderTargetWidth = rt->getProperties().getWidth();
		mRenderTargetHeight = rt->getProperties().getHeight();
		mRenderTargetDepthReadOnly = readOnlyDepthStencil;
		mRenderTargetLoadMask = loadMask;
	}

	// Reset flags that signal image usage
	for (auto& entry : mImages)
	{
		UINT32 imageInfoIdx = entry.second;
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
		imageInfo.isFBAttachment = false;
	}

	setGpuParams(nullptr);

	if(mFramebuffer != nullptr)
		registerResource(mFramebuffer, loadMask, VulkanUseFlag::Write);

	mGfxPipelineRequiresBind = true;
}
/**
 * Clears the requested surfaces of the currently bound framebuffer within @p area.
 *
 * Two paths exist:
 *  - If a render pass is active, the clear is recorded immediately via vkCmdClearAttachments.
 *  - Otherwise the clear is queued (mClearMask/mClearValues/mClearArea) and later performed by a
 *    dedicated load-op clear pass (executeClearPass()) or folded into the next render pass begin.
 *
 * @param area       Pixel rectangle to clear.
 * @param buffers    Combination of FBT_COLOR/FBT_DEPTH/FBT_STENCIL flags.
 * @param color      Clear color for color attachments.
 * @param depth      Clear value for the depth aspect.
 * @param stencil    Clear value for the stencil aspect.
 * @param targetMask Bitmask selecting which color attachments (by attachment index) to clear.
 */
void VulkanCmdBuffer::clearViewport(const Rect2I& area, UINT32 buffers, const Color& color, float depth, UINT16 stencil,
	UINT8 targetMask)
{
	if (buffers == 0 || mFramebuffer == nullptr)
		return;

	// Add clear command if currently in render pass
	if (isInRenderPass())
	{
		// +1 for a potential depth-stencil attachment on top of the color attachments
		VkClearAttachment attachments[BS_MAX_MULTIPLE_RENDER_TARGETS + 1];
		UINT32 baseLayer = 0;

		UINT32 attachmentIdx = 0;
		if ((buffers & FBT_COLOR) != 0)
		{
			UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
			for (UINT32 i = 0; i < numColorAttachments; i++)
			{
				const VulkanFramebufferAttachment& attachment = mFramebuffer->getColorAttachment(i);

				// Skip attachments the caller didn't select
				if (((1 << attachment.index) & targetMask) == 0)
					continue;

				attachments[attachmentIdx].aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
				attachments[attachmentIdx].colorAttachment = i;

				VkClearColorValue& colorValue = attachments[attachmentIdx].clearValue.color;
				colorValue.float32[0] = color.r;
				colorValue.float32[1] = color.g;
				colorValue.float32[2] = color.b;
				colorValue.float32[3] = color.a;

				// A single VkClearRect is used for all attachments, so all must share a base layer
				UINT32 curBaseLayer = attachment.baseLayer;
				if (attachmentIdx == 0)
					baseLayer = curBaseLayer;
				else
				{
					if (baseLayer != curBaseLayer)
					{
						// Note: This could be supported relatively easily: we would need to issue multiple separate
						// clear commands for such framebuffers.
						LOGERR("Attempting to clear a texture that has multiple multi-layer surfaces with mismatching "
							"starting layers. This is currently not supported.");
					}
				}

				attachmentIdx++;
			}
		}

		if ((buffers & FBT_DEPTH) != 0 || (buffers & FBT_STENCIL) != 0)
		{
			if (mFramebuffer->hasDepthAttachment())
			{
				attachments[attachmentIdx].aspectMask = 0;

				if ((buffers & FBT_DEPTH) != 0)
				{
					attachments[attachmentIdx].aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
					attachments[attachmentIdx].clearValue.depthStencil.depth = depth;
				}

				if ((buffers & FBT_STENCIL) != 0)
				{
					attachments[attachmentIdx].aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
					attachments[attachmentIdx].clearValue.depthStencil.stencil = stencil;
				}

				// colorAttachment is ignored for depth/stencil clears but must be initialized
				attachments[attachmentIdx].colorAttachment = 0;

				UINT32 curBaseLayer = mFramebuffer->getDepthStencilAttachment().baseLayer;
				if (attachmentIdx == 0)
					baseLayer = curBaseLayer;
				else
				{
					if (baseLayer != curBaseLayer)
					{
						// Note: This could be supported relatively easily: we would need to issue multiple separate
						// clear commands for such framebuffers.
						LOGERR("Attempting to clear a texture that has multiple multi-layer surfaces with mismatching "
							"starting layers. This is currently not supported.");
					}
				}

				attachmentIdx++;
			}
		}

		UINT32 numAttachments = attachmentIdx;
		if (numAttachments == 0)
			return;

		VkClearRect clearRect;
		clearRect.baseArrayLayer = baseLayer;
		clearRect.layerCount = mFramebuffer->getNumLayers();
		clearRect.rect.offset.x = area.x;
		clearRect.rect.offset.y = area.y;
		clearRect.rect.extent.width = area.width;
		clearRect.rect.extent.height = area.height;

		vkCmdClearAttachments(mCmdBuffer, numAttachments, attachments, 1, &clearRect);
	}
	// Otherwise we use a render pass that performs a clear on begin
	else
	{
		ClearMask clearMask;
		// Work on a copy so a no-op (empty mask) leaves the queued values untouched
		std::array<VkClearValue, BS_MAX_MULTIPLE_RENDER_TARGETS + 1> clearValues = mClearValues;

		UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
		if ((buffers & FBT_COLOR) != 0)
		{
			for (UINT32 i = 0; i < numColorAttachments; i++)
			{
				const VulkanFramebufferAttachment& attachment = mFramebuffer->getColorAttachment(i);

				if (((1 << attachment.index) & targetMask) == 0)
					continue;

				clearMask |= (ClearMaskBits)(1 << attachment.index);

				VkClearColorValue& colorValue = clearValues[i].color;
				colorValue.float32[0] = color.r;
				colorValue.float32[1] = color.g;
				colorValue.float32[2] = color.b;
				colorValue.float32[3] = color.a;
			}
		}

		if ((buffers & FBT_DEPTH) != 0 || (buffers & FBT_STENCIL) != 0)
		{
			if (mFramebuffer->hasDepthAttachment())
			{
				// Depth-stencil clear value lives after all color attachment slots
				UINT32 depthAttachmentIdx = numColorAttachments;

				if ((buffers & FBT_DEPTH) != 0)
				{
					clearValues[depthAttachmentIdx].depthStencil.depth = depth;
					clearMask |= CLEAR_DEPTH;
				}

				if ((buffers & FBT_STENCIL) != 0)
				{
					clearValues[depthAttachmentIdx].depthStencil.stencil = stencil;
					clearMask |= CLEAR_STENCIL;
				}
			}
		}

		if (!clearMask)
			return;

		// Some previous clear operation is already queued, execute it first
		bool previousClearNeedsToFinish = (mClearMask & clearMask) != CLEAR_NONE;
		if(previousClearNeedsToFinish)
			executeClearPass();

		mClearMask |= clearMask;
		mClearValues = clearValues;
		mClearArea = area;
	}
}
  753. void VulkanCmdBuffer::clearRenderTarget(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask)
  754. {
  755. Rect2I area(0, 0, mRenderTargetWidth, mRenderTargetHeight);
  756. clearViewport(area, buffers, color, depth, stencil, targetMask);
  757. }
  758. void VulkanCmdBuffer::clearViewport(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask)
  759. {
  760. Rect2I area;
  761. area.x = (UINT32)(mViewport.x * mRenderTargetWidth);
  762. area.y = (UINT32)(mViewport.y * mRenderTargetHeight);
  763. area.width = (UINT32)(mViewport.width * mRenderTargetWidth);
  764. area.height = (UINT32)(mViewport.height * mRenderTargetHeight);
  765. clearViewport(area, buffers, color, depth, stencil, targetMask);
  766. }
  767. void VulkanCmdBuffer::setPipelineState(const SPtr<GraphicsPipelineState>& state)
  768. {
  769. if (mGraphicsPipeline == state)
  770. return;
  771. mGraphicsPipeline = std::static_pointer_cast<VulkanGraphicsPipelineState>(state);
  772. mGfxPipelineRequiresBind = true;
  773. }
  774. void VulkanCmdBuffer::setPipelineState(const SPtr<ComputePipelineState>& state)
  775. {
  776. if (mComputePipeline == state)
  777. return;
  778. mComputePipeline = std::static_pointer_cast<VulkanComputePipelineState>(state);
  779. mCmpPipelineRequiresBind = true;
  780. }
  781. void VulkanCmdBuffer::setGpuParams(const SPtr<GpuParams>& gpuParams)
  782. {
  783. // Note: We keep an internal reference to GPU params even though we shouldn't keep a reference to a core thread
  784. // object. But it should be fine since we expect the resource to be externally synchronized so it should never
  785. // be allowed to go out of scope on a non-core thread anyway.
  786. mBoundParams = std::static_pointer_cast<VulkanGpuParams>(gpuParams);
  787. if (mBoundParams != nullptr)
  788. mBoundParamsDirty = true;
  789. else
  790. {
  791. mNumBoundDescriptorSets = 0;
  792. mBoundParamsDirty = false;
  793. }
  794. mDescriptorSetsBindState = DescriptorSetBindFlag::Graphics | DescriptorSetBindFlag::Compute;
  795. }
  796. void VulkanCmdBuffer::setViewport(const Rect2& area)
  797. {
  798. if (mViewport == area)
  799. return;
  800. mViewport = area;
  801. mViewportRequiresBind = true;
  802. }
  803. void VulkanCmdBuffer::setScissorRect(const Rect2I& value)
  804. {
  805. if (mScissor == value)
  806. return;
  807. mScissor = value;
  808. mScissorRequiresBind = true;
  809. }
  810. void VulkanCmdBuffer::setStencilRef(UINT32 value)
  811. {
  812. if (mStencilRef == value)
  813. return;
  814. mStencilRef = value;
  815. mStencilRefRequiresBind = true;
  816. }
  817. void VulkanCmdBuffer::setDrawOp(DrawOperationType drawOp)
  818. {
  819. if (mDrawOp == drawOp)
  820. return;
  821. mDrawOp = drawOp;
  822. mGfxPipelineRequiresBind = true;
  823. }
  824. void VulkanCmdBuffer::setVertexBuffers(UINT32 index, SPtr<VertexBuffer>* buffers, UINT32 numBuffers)
  825. {
  826. if (numBuffers == 0)
  827. return;
  828. for(UINT32 i = 0; i < numBuffers; i++)
  829. {
  830. VulkanVertexBuffer* vertexBuffer = static_cast<VulkanVertexBuffer*>(buffers[i].get());
  831. if (vertexBuffer != nullptr)
  832. {
  833. VulkanBuffer* resource = vertexBuffer->getResource(mDevice.getIndex());
  834. if (resource != nullptr)
  835. {
  836. mVertexBuffersTemp[i] = resource->getHandle();
  837. registerResource(resource, VulkanUseFlag::Read);
  838. }
  839. else
  840. mVertexBuffersTemp[i] = VK_NULL_HANDLE;
  841. }
  842. else
  843. mVertexBuffersTemp[i] = VK_NULL_HANDLE;
  844. }
  845. vkCmdBindVertexBuffers(mCmdBuffer, index, numBuffers, mVertexBuffersTemp, mVertexBufferOffsetsTemp);
  846. }
  847. void VulkanCmdBuffer::setIndexBuffer(const SPtr<IndexBuffer>& buffer)
  848. {
  849. VulkanIndexBuffer* indexBuffer = static_cast<VulkanIndexBuffer*>(buffer.get());
  850. VkBuffer vkBuffer = VK_NULL_HANDLE;
  851. VkIndexType indexType = VK_INDEX_TYPE_UINT32;
  852. if (indexBuffer != nullptr)
  853. {
  854. VulkanBuffer* resource = indexBuffer->getResource(mDevice.getIndex());
  855. if (resource != nullptr)
  856. {
  857. vkBuffer = resource->getHandle();
  858. indexType = VulkanUtility::getIndexType(buffer->getProperties().getType());
  859. registerResource(resource, VulkanUseFlag::Read);
  860. }
  861. }
  862. vkCmdBindIndexBuffer(mCmdBuffer, vkBuffer, 0, indexType);
  863. }
  864. void VulkanCmdBuffer::setVertexDeclaration(const SPtr<VertexDeclaration>& decl)
  865. {
  866. if (mVertexDecl == decl)
  867. return;
  868. mVertexDecl = decl;
  869. mGfxPipelineRequiresBind = true;
  870. }
  871. bool VulkanCmdBuffer::isReadyForRender()
  872. {
  873. if (mGraphicsPipeline == nullptr)
  874. return false;
  875. SPtr<VertexDeclaration> inputDecl = mGraphicsPipeline->getInputDeclaration();
  876. if (inputDecl == nullptr)
  877. return false;
  878. return mFramebuffer != nullptr && mVertexDecl != nullptr;
  879. }
/**
 * Resolves and binds the Vulkan pipeline object matching the current framebuffer, draw operation and
 * vertex input, then binds all dynamic states. Returns false if a pipeline object couldn't be created.
 * Assumes mGraphicsPipeline and mFramebuffer are non-null (callers check isReadyForRender()).
 */
bool VulkanCmdBuffer::bindGraphicsPipeline()
{
	SPtr<VertexDeclaration> inputDecl = mGraphicsPipeline->getInputDeclaration();
	SPtr<VulkanVertexInput> vertexInput = VulkanVertexInputManager::instance().getVertexInfo(mVertexDecl, inputDecl);

	VulkanPipeline* pipeline = mGraphicsPipeline->getPipeline(mDevice.getIndex(), mFramebuffer,
		mRenderTargetDepthReadOnly, mDrawOp, vertexInput);

	if (pipeline == nullptr)
		return false;

	// Check that pipeline matches the read-only state of any framebuffer attachments
	// Note: operator[] assumes attachment images were already registered (done when the framebuffer was
	// bound via setRenderTarget); otherwise it would default-insert index 0 — confirm with callers.
	UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
	for (UINT32 i = 0; i < numColorAttachments; i++)
	{
		VulkanImage* image = mFramebuffer->getColorAttachment(i).image;
		UINT32 imageInfoIdx = mImages[image];
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		if (imageInfo.isShaderInput && !pipeline->isColorReadOnly(i))
		{
			LOGWRN("Framebuffer attachment also used as a shader input, but color writes aren't disabled. This will"
				" result in undefined behavior.");
		}
	}

	if (mFramebuffer->hasDepthAttachment())
	{
		VulkanImage* image = mFramebuffer->getDepthStencilAttachment().image;
		UINT32 imageInfoIdx = mImages[image];
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		if (imageInfo.isShaderInput && !pipeline->isDepthStencilReadOnly())
		{
			LOGWRN("Framebuffer attachment also used as a shader input, but depth/stencil writes aren't disabled. "
				"This will result in undefined behavior.");
		}
	}

	// Track the pipeline (and whatever resources it references) for lifetime management
	mGraphicsPipeline->registerPipelineResources(this);
	registerResource(pipeline, VulkanUseFlag::Read);

	vkCmdBindPipeline(mCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline->getHandle());
	bindDynamicStates(true);

	mGfxPipelineRequiresBind = false;
	return true;
}
  919. void VulkanCmdBuffer::bindDynamicStates(bool forceAll)
  920. {
  921. if (mViewportRequiresBind || forceAll)
  922. {
  923. VkViewport viewport;
  924. viewport.x = mViewport.x * mRenderTargetWidth;
  925. viewport.y = mViewport.y * mRenderTargetHeight;
  926. viewport.width = mViewport.width * mRenderTargetWidth;
  927. viewport.height = mViewport.height * mRenderTargetHeight;
  928. viewport.minDepth = 0.0f;
  929. viewport.maxDepth = 1.0f;
  930. vkCmdSetViewport(mCmdBuffer, 0, 1, &viewport);
  931. mViewportRequiresBind = false;
  932. }
  933. if(mStencilRefRequiresBind || forceAll)
  934. {
  935. vkCmdSetStencilReference(mCmdBuffer, VK_STENCIL_FRONT_AND_BACK, mStencilRef);
  936. mStencilRefRequiresBind = false;
  937. }
  938. if(mScissorRequiresBind || forceAll)
  939. {
  940. VkRect2D scissorRect;
  941. if(mGraphicsPipeline->isScissorEnabled())
  942. {
  943. scissorRect.offset.x = mScissor.x;
  944. scissorRect.offset.y = mScissor.y;
  945. scissorRect.extent.width = mScissor.width;
  946. scissorRect.extent.height = mScissor.height;
  947. }
  948. else
  949. {
  950. scissorRect.offset.x = 0;
  951. scissorRect.offset.y = 0;
  952. scissorRect.extent.width = mRenderTargetWidth;
  953. scissorRect.extent.height = mRenderTargetHeight;
  954. }
  955. vkCmdSetScissor(mCmdBuffer, 0, 1, &scissorRect);
  956. mScissorRequiresBind = false;
  957. }
  958. }
  959. void VulkanCmdBuffer::bindGpuParams()
  960. {
  961. if (mBoundParamsDirty)
  962. {
  963. if (mBoundParams != nullptr)
  964. {
  965. mNumBoundDescriptorSets = mBoundParams->getNumSets();
  966. mBoundParams->prepareForBind(*this, mDescriptorSetsTemp);
  967. }
  968. else
  969. mNumBoundDescriptorSets = 0;
  970. mBoundParamsDirty = false;
  971. }
  972. else
  973. {
  974. mNumBoundDescriptorSets = 0;
  975. }
  976. }
  977. void VulkanCmdBuffer::executeLayoutTransitions()
  978. {
  979. auto createLayoutTransitionBarrier = [&](VulkanImage* image, ImageInfo& imageInfo)
  980. {
  981. mLayoutTransitionBarriersTemp.push_back(VkImageMemoryBarrier());
  982. VkImageMemoryBarrier& barrier = mLayoutTransitionBarriersTemp.back();
  983. barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  984. barrier.pNext = nullptr;
  985. barrier.srcAccessMask = image->getAccessFlags(imageInfo.currentLayout);
  986. barrier.dstAccessMask = image->getAccessFlags(imageInfo.requiredLayout, imageInfo.isReadOnly);
  987. barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  988. barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  989. barrier.oldLayout = imageInfo.currentLayout;
  990. barrier.newLayout = imageInfo.requiredLayout;
  991. barrier.image = image->getHandle();
  992. barrier.subresourceRange = imageInfo.range;
  993. imageInfo.currentLayout = imageInfo.requiredLayout;
  994. imageInfo.isReadOnly = true;
  995. imageInfo.hasTransitioned = true;
  996. };
  997. // Note: These layout transitions will contain transitions for offscreen framebuffer attachments (while they
  998. // transition to shader read-only layout). This can be avoided, since they're immediately used by the render pass
  999. // as color attachments, making the layout change redundant.
  1000. for (auto& entry : mQueuedLayoutTransitions)
  1001. {
  1002. UINT32 imageInfoIdx = entry.second;
  1003. ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
  1004. createLayoutTransitionBarrier(entry.first, imageInfo);
  1005. }
  1006. vkCmdPipelineBarrier(mCmdBuffer,
  1007. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // Note: VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT might be more correct here, according to the spec
  1008. VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
  1009. 0, 0, nullptr,
  1010. 0, nullptr,
  1011. (UINT32)mLayoutTransitionBarriersTemp.size(), mLayoutTransitionBarriersTemp.data());
  1012. mQueuedLayoutTransitions.clear();
  1013. mLayoutTransitionBarriersTemp.clear();
  1014. }
  1015. void VulkanCmdBuffer::updateFinalLayouts()
  1016. {
  1017. if (mFramebuffer == nullptr)
  1018. return;
  1019. UINT32 numColorAttachments = mFramebuffer->getNumColorAttachments();
  1020. for (UINT32 i = 0; i < numColorAttachments; i++)
  1021. {
  1022. const VulkanFramebufferAttachment& attachment = mFramebuffer->getColorAttachment(i);
  1023. UINT32 imageInfoIdx = mImages[attachment.image];
  1024. ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
  1025. imageInfo.currentLayout = imageInfo.finalLayout;
  1026. imageInfo.requiredLayout = imageInfo.finalLayout;
  1027. imageInfo.hasTransitioned = true;
  1028. }
  1029. if (mFramebuffer->hasDepthAttachment())
  1030. {
  1031. const VulkanFramebufferAttachment& attachment = mFramebuffer->getDepthStencilAttachment();
  1032. UINT32 imageInfoIdx = mImages[attachment.image];
  1033. ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
  1034. imageInfo.currentLayout = imageInfo.finalLayout;
  1035. imageInfo.requiredLayout = imageInfo.finalLayout;
  1036. imageInfo.hasTransitioned = true;
  1037. }
  1038. }
/**
 * Flushes a queued clear (mClearMask/mClearValues/mClearArea) by running an empty render pass whose
 * load operations perform the clear. Assumes a framebuffer is bound and a clear is actually queued.
 */
void VulkanCmdBuffer::executeClearPass()
{
	assert(mState == State::Recording);

	// Attachments must be in their required layouts before the render pass begins
	executeLayoutTransitions();

	VkRenderPassBeginInfo renderPassBeginInfo;
	renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	renderPassBeginInfo.pNext = nullptr;
	// RT_NONE load/read masks: the pass exists purely to apply the clear load-ops in mClearMask
	renderPassBeginInfo.framebuffer = mFramebuffer->getFramebuffer(RT_NONE, RT_NONE, mClearMask);
	renderPassBeginInfo.renderPass = mFramebuffer->getRenderPass(RT_NONE, RT_NONE, mClearMask);
	renderPassBeginInfo.renderArea.offset.x = mClearArea.x;
	renderPassBeginInfo.renderArea.offset.y = mClearArea.y;
	renderPassBeginInfo.renderArea.extent.width = mClearArea.width;
	renderPassBeginInfo.renderArea.extent.height = mClearArea.height;
	renderPassBeginInfo.clearValueCount = mFramebuffer->getNumAttachments();
	renderPassBeginInfo.pClearValues = mClearValues.data();

	// No draw commands: begin/end immediately, letting the load-op clears do the work
	vkCmdBeginRenderPass(mCmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
	vkCmdEndRenderPass(mCmdBuffer);

	// The render pass transitioned attachments to their final layouts; record that
	updateFinalLayouts();

	mClearMask = CLEAR_NONE;
}
/**
 * Records a non-indexed draw. Lazily begins the render pass, binds the pipeline (or just refreshes
 * dynamic state) and binds graphics descriptor sets, all only when required.
 * Silently no-ops if the required state isn't present (see isReadyForRender()).
 */
void VulkanCmdBuffer::draw(UINT32 vertexOffset, UINT32 vertexCount, UINT32 instanceCount)
{
	if (!isReadyForRender())
		return;

	// Note: must happen before beginRenderPass() — prepareForBind() may queue layout transitions,
	// which can only be recorded outside a render pass
	bindGpuParams();

	if (!isInRenderPass())
		beginRenderPass();

	if (mGfxPipelineRequiresBind)
	{
		// Binding the pipeline also (re)binds all dynamic states
		if (!bindGraphicsPipeline())
			return;
	}
	else
		bindDynamicStates(false);

	if (mDescriptorSetsBindState.isSet(DescriptorSetBindFlag::Graphics))
	{
		if (mNumBoundDescriptorSets > 0)
		{
			UINT32 deviceIdx = mDevice.getIndex();
			VkPipelineLayout pipelineLayout = mGraphicsPipeline->getPipelineLayout(deviceIdx);

			vkCmdBindDescriptorSets(mCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0,
				mNumBoundDescriptorSets, mDescriptorSetsTemp, 0, nullptr);
		}

		mDescriptorSetsBindState.unset(DescriptorSetBindFlag::Graphics);
	}

	vkCmdDraw(mCmdBuffer, vertexCount, instanceCount, vertexOffset, 0);
}
/**
 * Records an indexed draw. Same lazy bind sequence as draw(): GPU params are prepared before the
 * render pass begins, the pipeline/dynamic states are (re)bound only if dirty, and graphics
 * descriptor sets are bound only when flagged. No-ops if required state is missing.
 */
void VulkanCmdBuffer::drawIndexed(UINT32 startIndex, UINT32 indexCount, UINT32 vertexOffset, UINT32 instanceCount)
{
	if (!isReadyForRender())
		return;

	// Must precede beginRenderPass() — may queue layout transitions that can't be recorded in-pass
	bindGpuParams();

	if (!isInRenderPass())
		beginRenderPass();

	if (mGfxPipelineRequiresBind)
	{
		// Binding the pipeline also (re)binds all dynamic states
		if (!bindGraphicsPipeline())
			return;
	}
	else
		bindDynamicStates(false);

	if (mDescriptorSetsBindState.isSet(DescriptorSetBindFlag::Graphics))
	{
		if (mNumBoundDescriptorSets > 0)
		{
			UINT32 deviceIdx = mDevice.getIndex();
			VkPipelineLayout pipelineLayout = mGraphicsPipeline->getPipelineLayout(deviceIdx);

			vkCmdBindDescriptorSets(mCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0,
				mNumBoundDescriptorSets, mDescriptorSetsTemp, 0, nullptr);
		}

		mDescriptorSetsBindState.unset(DescriptorSetBindFlag::Graphics);
	}

	vkCmdDrawIndexed(mCmdBuffer, indexCount, instanceCount, startIndex, vertexOffset, 0);
}
/**
 * Records a compute dispatch of the given group counts. Ends any active render pass first
 * (dispatches are not allowed inside one), lazily binds the compute pipeline and compute
 * descriptor sets. No-ops when no compute pipeline is set.
 */
void VulkanCmdBuffer::dispatch(UINT32 numGroupsX, UINT32 numGroupsY, UINT32 numGroupsZ)
{
	if (mComputePipeline == nullptr)
		return;

	// Prepare descriptor sets (may queue layout transitions, so do it before any pass handling)
	bindGpuParams();

	if (isInRenderPass())
		endRenderPass();

	UINT32 deviceIdx = mDevice.getIndex();
	if(mCmpPipelineRequiresBind)
	{
		VulkanPipeline* pipeline = mComputePipeline->getPipeline(deviceIdx);
		if (pipeline == nullptr)
			return;

		// Track pipeline & referenced resources for lifetime management
		registerResource(pipeline, VulkanUseFlag::Read);
		mComputePipeline->registerPipelineResources(this);

		vkCmdBindPipeline(mCmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline->getHandle());
		mCmpPipelineRequiresBind = false;
	}

	if(mDescriptorSetsBindState.isSet(DescriptorSetBindFlag::Compute))
	{
		if (mNumBoundDescriptorSets > 0)
		{
			VkPipelineLayout pipelineLayout = mComputePipeline->getPipelineLayout(deviceIdx);

			vkCmdBindDescriptorSets(mCmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipelineLayout, 0,
				mNumBoundDescriptorSets, mDescriptorSetsTemp, 0, nullptr);
		}

		mDescriptorSetsBindState.unset(DescriptorSetBindFlag::Compute);
	}

	vkCmdDispatch(mCmdBuffer, numGroupsX, numGroupsY, numGroupsZ);
}
  1143. void VulkanCmdBuffer::setEvent(VulkanEvent* event)
  1144. {
  1145. if(isInRenderPass())
  1146. mQueuedEvents.push_back(event);
  1147. else
  1148. vkCmdSetEvent(mCmdBuffer, event->getHandle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
  1149. }
  1150. void VulkanCmdBuffer::resetQuery(VulkanQuery* query)
  1151. {
  1152. if (isInRenderPass())
  1153. mQueuedQueryResets.push_back(query);
  1154. else
  1155. query->reset(mCmdBuffer);
  1156. }
  1157. void VulkanCmdBuffer::registerResource(VulkanResource* res, VulkanUseFlags flags)
  1158. {
  1159. auto insertResult = mResources.insert(std::make_pair(res, ResourceUseHandle()));
  1160. if(insertResult.second) // New element
  1161. {
  1162. ResourceUseHandle& useHandle = insertResult.first->second;
  1163. useHandle.used = false;
  1164. useHandle.flags = flags;
  1165. res->notifyBound();
  1166. }
  1167. else // Existing element
  1168. {
  1169. ResourceUseHandle& useHandle = insertResult.first->second;
  1170. assert(!useHandle.used);
  1171. useHandle.flags |= flags;
  1172. }
  1173. }
  1174. void VulkanCmdBuffer::registerResource(VulkanImage* res, VulkanUseFlags flags)
  1175. {
  1176. VkImageLayout layout = res->getOptimalLayout();
  1177. registerResource(res, VK_IMAGE_LAYOUT_UNDEFINED, layout, flags, false);
  1178. }
/**
 * Tracks an image together with its layout requirements.
 *
 * @param res			Image being used by this command buffer.
 * @param newLayout		Layout the image must be in before use (UNDEFINED = no transition wanted).
 * @param finalLayout	Layout the image ends up in after use (meaningful for FB attachments, where
 *						the render pass itself performs the final transition).
 * @param flags			Read/Write usage flags.
 * @param isFBAttachment	True when registered as a framebuffer attachment, false for shader access.
 */
void VulkanCmdBuffer::registerResource(VulkanImage* res, VkImageLayout newLayout, VkImageLayout finalLayout,
	VulkanUseFlags flags, bool isFBAttachment)
{
	// Note: I currently always perform pipeline barriers (layout transitions and similar), over the entire image.
	//       In the case of render and storage images, the case is often that only a specific subresource requires
	//       it. However this makes grouping and tracking of current image layouts much more difficult.
	//       If this is ever requires we'll need to track image layout per-subresource instead per-image, and we
	//       might also need a smart way to group layout transitions for multiple sub-resources on the same image.
	VkImageSubresourceRange range = res->getRange();
	UINT32 nextImageInfoIdx = (UINT32)mImageInfos.size();
	auto insertResult = mImages.insert(std::make_pair(res, nextImageInfoIdx));
	if (insertResult.second) // New element
	{
		UINT32 imageInfoIdx = insertResult.first->second;
		mImageInfos.push_back(ImageInfo());

		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];
		imageInfo.currentLayout = newLayout;
		imageInfo.initialLayout = newLayout;
		imageInfo.requiredLayout = newLayout;
		imageInfo.finalLayout = finalLayout;
		imageInfo.range = range;
		imageInfo.isFBAttachment = isFBAttachment;
		imageInfo.isShaderInput = !isFBAttachment;
		imageInfo.hasTransitioned = false;
		imageInfo.isReadOnly = !flags.isSet(VulkanUseFlag::Write);
		imageInfo.isInitialReadOnly = imageInfo.isReadOnly;
		imageInfo.useHandle.used = false;
		imageInfo.useHandle.flags = flags;

		res->notifyBound();
	}
	else // Existing element
	{
		UINT32 imageInfoIdx = insertResult.first->second;
		ImageInfo& imageInfo = mImageInfos[imageInfoIdx];

		assert(!imageInfo.useHandle.used);
		imageInfo.useHandle.flags |= flags;

		// Any write use makes the image non-read-only for barrier purposes
		imageInfo.isReadOnly &= !flags.isSet(VulkanUseFlag::Write);

		// New layout is valid, check for transitions (UNDEFINED signifies the caller doesn't want a layout transition)
		if (newLayout != VK_IMAGE_LAYOUT_UNDEFINED)
		{
			// If layout transition was requested by framebuffer bind, respect it because render-pass will only accept a
			// specific layout (in certain cases), and we have no choice.
			// In the case when a FB attachment is also bound for shader reads, this will override the layout required for
			// shader read (GENERAL or DEPTH_READ_ONLY), but that is fine because those transitions are handled
			// automatically by render-pass layout transitions.
			// Any other texture (non FB attachment) will only even be bound in a single layout and we can keep the one it
			// was originally registered with.
			if (isFBAttachment)
				imageInfo.requiredLayout = newLayout;
			else if(!imageInfo.isFBAttachment) // Layout transition is not being done on a FB image
			{
				// Check if the image had a layout previously assigned, and if so check if multiple different layouts
				// were requested. In that case we wish to transfer the image to GENERAL layout.
				bool firstUseInRenderPass = !imageInfo.isShaderInput && !imageInfo.isFBAttachment;
				if (firstUseInRenderPass || imageInfo.requiredLayout == VK_IMAGE_LAYOUT_UNDEFINED)
					imageInfo.requiredLayout = newLayout;
				else if (imageInfo.requiredLayout != newLayout)
					imageInfo.requiredLayout = VK_IMAGE_LAYOUT_GENERAL;
			}
		}

		// If attached to FB, then the final layout is set by the FB (provided as layout param here), otherwise its
		// the same as required layout
		if(!isFBAttachment && !imageInfo.isFBAttachment)
			imageInfo.finalLayout = imageInfo.requiredLayout;
		else
		{
			if (isFBAttachment)
				imageInfo.finalLayout = finalLayout;
		}

		// If we haven't done a layout transition yet, we can just overwrite the previously written values, and the
		// transition will be handled as the first thing in submit(), otherwise we queue a non-initial transition
		// below.
		if (!imageInfo.hasTransitioned)
		{
			imageInfo.initialLayout = imageInfo.requiredLayout;
			imageInfo.currentLayout = imageInfo.requiredLayout;
			imageInfo.isInitialReadOnly = imageInfo.isReadOnly;
		}
		else
		{
			if (imageInfo.currentLayout != imageInfo.requiredLayout)
				mQueuedLayoutTransitions[res] = imageInfoIdx;
		}

		// If a FB attachment was just bound as a shader input, we might need to restart the render pass with a FB
		// attachment that supports read-only attachments using the GENERAL layout
		bool requiresReadOnlyFB = false;
		if (isFBAttachment)
		{
			if (!imageInfo.isFBAttachment)
			{
				imageInfo.isFBAttachment = true;
				requiresReadOnlyFB = imageInfo.isShaderInput;
			}
		}
		else
		{
			if (!imageInfo.isShaderInput)
			{
				imageInfo.isShaderInput = true;
				requiresReadOnlyFB = imageInfo.isFBAttachment;
			}
		}

		// If we need to switch frame-buffers, end current render pass
		if (requiresReadOnlyFB && isInRenderPass())
			endRenderPass();
	}

	// Register any sub-resources
	for(UINT32 i = 0; i < range.layerCount; i++)
	{
		for(UINT32 j = 0; j < range.levelCount; j++)
		{
			UINT32 layer = range.baseArrayLayer + i;
			UINT32 mipLevel = range.baseMipLevel + j;

			registerResource(res->getSubresource(layer, mipLevel), flags);
		}
	}
}
  1296. void VulkanCmdBuffer::registerResource(VulkanBuffer* res, VkAccessFlags accessFlags, VulkanUseFlags flags)
  1297. {
  1298. auto insertResult = mBuffers.insert(std::make_pair(res, BufferInfo()));
  1299. if (insertResult.second) // New element
  1300. {
  1301. BufferInfo& bufferInfo = insertResult.first->second;
  1302. bufferInfo.accessFlags = accessFlags;
  1303. bufferInfo.useHandle.used = false;
  1304. bufferInfo.useHandle.flags = flags;
  1305. res->notifyBound();
  1306. }
  1307. else // Existing element
  1308. {
  1309. BufferInfo& bufferInfo = insertResult.first->second;
  1310. assert(!bufferInfo.useHandle.used);
  1311. bufferInfo.useHandle.flags |= flags;
  1312. bufferInfo.accessFlags |= accessFlags;
  1313. }
  1314. }
  1315. void VulkanCmdBuffer::registerResource(VulkanFramebuffer* res, RenderSurfaceMask loadMask, VulkanUseFlags flags)
  1316. {
  1317. auto insertResult = mResources.insert(std::make_pair(res, ResourceUseHandle()));
  1318. if (insertResult.second) // New element
  1319. {
  1320. ResourceUseHandle& useHandle = insertResult.first->second;
  1321. useHandle.used = false;
  1322. useHandle.flags = flags;
  1323. res->notifyBound();
  1324. }
  1325. else // Existing element
  1326. {
  1327. ResourceUseHandle& useHandle = insertResult.first->second;
  1328. assert(!useHandle.used);
  1329. useHandle.flags |= flags;
  1330. }
  1331. // Register any sub-resources
  1332. UINT32 numColorAttachments = res->getNumColorAttachments();
  1333. for (UINT32 i = 0; i < numColorAttachments; i++)
  1334. {
  1335. const VulkanFramebufferAttachment& attachment = res->getColorAttachment(i);
  1336. // If image is being loaded, we need to transfer it to correct layout, otherwise it doesn't matter
  1337. VkImageLayout layout;
  1338. if (loadMask.isSet((RenderSurfaceMaskBits)(1 << i)))
  1339. layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  1340. else
  1341. layout = VK_IMAGE_LAYOUT_UNDEFINED;
  1342. registerResource(attachment.image, layout, attachment.finalLayout, VulkanUseFlag::Write, true);
  1343. }
  1344. if(res->hasDepthAttachment())
  1345. {
  1346. const VulkanFramebufferAttachment& attachment = res->getDepthStencilAttachment();
  1347. // If image is being loaded, we need to transfer it to correct layout, otherwise it doesn't matter
  1348. VkImageLayout layout;
  1349. if (loadMask.isSet(RT_DEPTH))
  1350. layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  1351. else
  1352. layout = VK_IMAGE_LAYOUT_UNDEFINED;
  1353. registerResource(attachment.image, layout, attachment.finalLayout, VulkanUseFlag::Write, true);
  1354. }
  1355. }
/**
 * Constructs a command buffer wrapper for the given device and queue type, falling back to the
 * graphics queue when no queue of the requested type exists, then acquires an initial internal
 * buffer from the pool.
 */
VulkanCommandBuffer::VulkanCommandBuffer(VulkanDevice& device, GpuQueueType type, UINT32 deviceIdx,
	UINT32 queueIdx, bool secondary)
	: CommandBuffer(type, deviceIdx, queueIdx, secondary), mBuffer(nullptr)
	, mDevice(device), mQueue(nullptr), mIdMask(0)
{
	UINT32 numQueues = device.getNumQueues(mType);
	if (numQueues == 0) // Fall back to graphics queue
	{
		mType = GQT_GRAPHICS;
		numQueues = device.getNumQueues(GQT_GRAPHICS);
	}

	// More external queue indices may exist than physical queues; wrap around with modulo
	mQueue = device.getQueue(mType, mQueueIdx % numQueues);
	mIdMask = device.getQueueMask(mType, mQueueIdx);

	acquireNewBuffer();
}
VulkanCommandBuffer::~VulkanCommandBuffer()
{
	// Reset the internal buffer so it can be reused — presumably returns it to the pool's
	// available set (see VulkanCmdBuffer::reset()); confirm against the pool implementation.
	mBuffer->reset();
}
  1375. void VulkanCommandBuffer::acquireNewBuffer()
  1376. {
  1377. VulkanCmdBufferPool& pool = mDevice.getCmdBufferPool();
  1378. if (mBuffer != nullptr)
  1379. assert(mBuffer->isSubmitted());
  1380. UINT32 queueFamily = mDevice.getQueueFamily(mType);
  1381. mBuffer = pool.getBuffer(queueFamily, mIsSecondary);
  1382. }
/**
 * Submits the internal command buffer to its queue, first finalizing recording (ending any open
 * render pass, flushing pending layout transitions), then acquires a fresh internal buffer for
 * further recording.
 *
 * @param syncMask	Mask of other queues this submission must wait on; this buffer's own queue bit
 *					is stripped automatically.
 */
void VulkanCommandBuffer::submit(UINT32 syncMask)
{
	// Ignore myself
	syncMask &= ~mIdMask;

	if (mBuffer->isInRenderPass())
		mBuffer->endRenderPass();

	// Execute any queued layout transitions that weren't already handled by the render pass
	mBuffer->executeLayoutTransitions();

	if (mBuffer->isRecording())
		mBuffer->end();

	if (!mBuffer->isReadyForSubmit()) // Possibly nothing was recorded in the buffer
		return;

	mBuffer->submit(mQueue, mQueueIdx, syncMask);

	// The submitted buffer is now owned by the GPU; start recording into a new one
	acquireNewBuffer();

	gVulkanCBManager().refreshStates(mDeviceIdx);
}
  1399. }}