// Copyright (C) 2009-2017, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <anki/gr/vulkan/CommandBufferImpl.h>
#include <anki/gr/vulkan/GrManagerImpl.h>
#include <anki/gr/vulkan/TextureImpl.h>
#include <anki/gr/OcclusionQuery.h>
#include <anki/gr/vulkan/OcclusionQueryImpl.h>
#include <anki/core/Trace.h>

namespace anki
{

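// The three setStencil* methods below share one pattern: cache the per-face value and record the Vulkan
// dynamic-state command only when something actually changed, so redundant vkCmdSetStencil* calls never reach the
// driver. Usage sketch (hypothetical caller code, not part of this file):
//
//   cmdb.setStencilCompareMask(FaceSelectionBit::FRONT, 0xFF); // Records vkCmdSetStencilCompareMask
//   cmdb.setStencilCompareMask(FaceSelectionBit::FRONT, 0xFF); // No-op, cached value matches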
inline void CommandBufferImpl::setStencilCompareMask(FaceSelectionBit face, U32 mask)
{
    commandCommon();

    VkStencilFaceFlags flags = 0;

    if(!!(face & FaceSelectionBit::FRONT) && m_stencilCompareMasks[0] != mask)
    {
        m_stencilCompareMasks[0] = mask;
        flags = VK_STENCIL_FACE_FRONT_BIT;
    }

    if(!!(face & FaceSelectionBit::BACK) && m_stencilCompareMasks[1] != mask)
    {
        m_stencilCompareMasks[1] = mask;
        flags |= VK_STENCIL_FACE_BACK_BIT;
    }

    if(flags)
    {
        ANKI_CMD(vkCmdSetStencilCompareMask(m_handle, flags, mask), ANY_OTHER_COMMAND);
    }
}

inline void CommandBufferImpl::setStencilWriteMask(FaceSelectionBit face, U32 mask)
{
    commandCommon();

    VkStencilFaceFlags flags = 0;

    if(!!(face & FaceSelectionBit::FRONT) && m_stencilWriteMasks[0] != mask)
    {
        m_stencilWriteMasks[0] = mask;
        flags = VK_STENCIL_FACE_FRONT_BIT;
    }

    if(!!(face & FaceSelectionBit::BACK) && m_stencilWriteMasks[1] != mask)
    {
        m_stencilWriteMasks[1] = mask;
        flags |= VK_STENCIL_FACE_BACK_BIT;
    }

    if(flags)
    {
        ANKI_CMD(vkCmdSetStencilWriteMask(m_handle, flags, mask), ANY_OTHER_COMMAND);
    }
}

inline void CommandBufferImpl::setStencilReference(FaceSelectionBit face, U32 ref)
{
    commandCommon();

    VkStencilFaceFlags flags = 0;

    if(!!(face & FaceSelectionBit::FRONT) && m_stencilReferenceMasks[0] != ref)
    {
        m_stencilReferenceMasks[0] = ref;
        flags = VK_STENCIL_FACE_FRONT_BIT;
    }

    if(!!(face & FaceSelectionBit::BACK) && m_stencilReferenceMasks[1] != ref)
    {
        m_stencilReferenceMasks[1] = ref;
        flags |= VK_STENCIL_FACE_BACK_BIT;
    }

    if(flags)
    {
        ANKI_CMD(vkCmdSetStencilReference(m_handle, flags, ref), ANY_OTHER_COMMAND);
    }
}

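// Record a raw VkImageMemoryBarrier. With ANKI_BATCH_COMMANDS the barrier is not recorded immediately: it's appended
// to m_imgBarriers and the stage masks are accumulated so that consecutive barriers can be flushed later as a single
// vkCmdPipelineBarrier (see flushBatches()/flushBarriers()).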
inline void CommandBufferImpl::setImageBarrier(VkPipelineStageFlags srcStage,
    VkAccessFlags srcAccess,
    VkImageLayout prevLayout,
    VkPipelineStageFlags dstStage,
    VkAccessFlags dstAccess,
    VkImageLayout newLayout,
    VkImage img,
    const VkImageSubresourceRange& range)
{
    ANKI_ASSERT(img);
    commandCommon();

    VkImageMemoryBarrier inf = {};
    inf.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    inf.srcAccessMask = srcAccess;
    inf.dstAccessMask = dstAccess;
    inf.oldLayout = prevLayout;
    inf.newLayout = newLayout;
    inf.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    inf.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    inf.image = img;
    inf.subresourceRange = range;

#if ANKI_BATCH_COMMANDS
    flushBatches(CommandBufferCommandType::SET_BARRIER);

    if(m_imgBarriers.getSize() <= m_imgBarrierCount)
    {
        m_imgBarriers.resize(m_alloc, max<U>(2, m_imgBarrierCount * 2));
    }

    m_imgBarriers[m_imgBarrierCount++] = inf;
    m_srcStageMask |= srcStage;
    m_dstStageMask |= dstStage;
#else
    ANKI_CMD(vkCmdPipelineBarrier(m_handle, srcStage, dstStage, 0, 0, nullptr, 0, nullptr, 1, &inf), ANY_OTHER_COMMAND);
    ANKI_TRACE_INC_COUNTER(VK_PIPELINE_BARRIERS, 1);
#endif
}

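// Translate an engine-level usage transition to Vulkan terms. TextureImpl derives the stage/access masks and the
// old/new layouts from the TextureUsageBit pair; since the layouts are computed from range.baseMipLevel alone, the
// whole range is presumably expected to share one layout.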
inline void CommandBufferImpl::setTextureBarrierRange(
    TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage, const VkImageSubresourceRange& range)
{
    const TextureImpl& impl = *tex->m_impl;
    ANKI_ASSERT(impl.usageValid(prevUsage));
    ANKI_ASSERT(impl.usageValid(nextUsage));

    VkPipelineStageFlags srcStage;
    VkAccessFlags srcAccess;
    VkImageLayout oldLayout;
    VkPipelineStageFlags dstStage;
    VkAccessFlags dstAccess;
    VkImageLayout newLayout;
    impl.computeBarrierInfo(prevUsage, nextUsage, range.baseMipLevel, srcStage, srcAccess, dstStage, dstAccess);
    oldLayout = impl.computeLayout(prevUsage, range.baseMipLevel);
    newLayout = impl.computeLayout(nextUsage, range.baseMipLevel);

    setImageBarrier(srcStage, srcAccess, oldLayout, dstStage, dstAccess, newLayout, impl.m_imageHandle, range);

    m_microCmdb->pushObjectRef(tex);
}

inline void CommandBufferImpl::setTextureSurfaceBarrier(
    TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage, const TextureSurfaceInfo& surf)
{
    if(surf.m_level > 0)
    {
        ANKI_ASSERT(!(nextUsage & TextureUsageBit::GENERATE_MIPMAPS)
            && "This transition happens inside CommandBufferImpl::generateMipmapsX");
    }

    const TextureImpl& impl = *tex->m_impl;
    impl.checkSurfaceOrVolume(surf);

    VkImageSubresourceRange range;
    impl.computeSubResourceRange(surf, impl.m_akAspect, range);
    setTextureBarrierRange(tex, prevUsage, nextUsage, range);

    impl.updateTracker(surf, nextUsage, m_texUsageTracker);
}

inline void CommandBufferImpl::setTextureVolumeBarrier(
    TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage, const TextureVolumeInfo& vol)
{
    if(vol.m_level > 0)
    {
        ANKI_ASSERT(!(nextUsage & TextureUsageBit::GENERATE_MIPMAPS)
            && "This transition happens inside CommandBufferImpl::generateMipmaps");
    }

    const TextureImpl& impl = *tex->m_impl;
    impl.checkSurfaceOrVolume(vol);

    VkImageSubresourceRange range;
    impl.computeSubResourceRange(vol, impl.m_akAspect, range);
    setTextureBarrierRange(tex, prevUsage, nextUsage, range);

    impl.updateTracker(vol, nextUsage, m_texUsageTracker);
}

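// Buffer flavor of setImageBarrier() with the identical batching strategy, feeding m_buffBarriers instead.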
inline void CommandBufferImpl::setBufferBarrier(VkPipelineStageFlags srcStage,
    VkAccessFlags srcAccess,
    VkPipelineStageFlags dstStage,
    VkAccessFlags dstAccess,
    PtrSize offset,
    PtrSize size,
    VkBuffer buff)
{
    ANKI_ASSERT(buff);
    commandCommon();

    VkBufferMemoryBarrier b = {};
    b.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
    b.srcAccessMask = srcAccess;
    b.dstAccessMask = dstAccess;
    b.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    b.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    b.buffer = buff;
    b.offset = offset;
    b.size = size;

#if ANKI_BATCH_COMMANDS
    flushBatches(CommandBufferCommandType::SET_BARRIER);

    if(m_buffBarriers.getSize() <= m_buffBarrierCount)
    {
        m_buffBarriers.resize(m_alloc, max<U>(2, m_buffBarrierCount * 2));
    }

    m_buffBarriers[m_buffBarrierCount++] = b;
    m_srcStageMask |= srcStage;
    m_dstStageMask |= dstStage;
#else
    ANKI_CMD(vkCmdPipelineBarrier(m_handle, srcStage, dstStage, 0, 0, nullptr, 1, &b, 0, nullptr), ANY_OTHER_COMMAND);
    ANKI_TRACE_INC_COUNTER(VK_PIPELINE_BARRIERS, 1);
#endif
}

inline void CommandBufferImpl::setBufferBarrier(
    BufferPtr buff, BufferUsageBit before, BufferUsageBit after, PtrSize offset, PtrSize size)
{
    const BufferImpl& impl = *buff->m_impl;

    VkPipelineStageFlags srcStage;
    VkAccessFlags srcAccess;
    VkPipelineStageFlags dstStage;
    VkAccessFlags dstAccess;
    impl.computeBarrierInfo(before, after, srcStage, srcAccess, dstStage, dstAccess);

    setBufferBarrier(srcStage, srcAccess, dstStage, dstAccess, offset, size, impl.getHandle());

    m_microCmdb->pushObjectRef(buff);
}

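// The draw entry points below all funnel through drawcallCommon(), which lazily begins the render pass, binds the
// pipeline and the dirty descriptor sets and flushes dynamic state before the vkCmdDraw* is recorded. Usage sketch
// (hypothetical caller code; the TRIANGLES enumerant is an assumption):
//
//   cmdb.drawArrays(PrimitiveTopology::TRIANGLES, 3, 1, 0, 0); // One triangle, one instance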
inline void CommandBufferImpl::drawArrays(
    PrimitiveTopology topology, U32 count, U32 instanceCount, U32 first, U32 baseInstance)
{
    m_state.setPrimitiveTopology(topology);
    drawcallCommon();
    ANKI_CMD(vkCmdDraw(m_handle, count, instanceCount, first, baseInstance), ANY_OTHER_COMMAND);
}

inline void CommandBufferImpl::drawElements(
    PrimitiveTopology topology, U32 count, U32 instanceCount, U32 firstIndex, U32 baseVertex, U32 baseInstance)
{
    m_state.setPrimitiveTopology(topology);
    drawcallCommon();
    ANKI_CMD(vkCmdDrawIndexed(m_handle, count, instanceCount, firstIndex, baseVertex, baseInstance), ANY_OTHER_COMMAND);
}

inline void CommandBufferImpl::drawArraysIndirect(
    PrimitiveTopology topology, U32 drawCount, PtrSize offset, BufferPtr& buff)
{
    m_state.setPrimitiveTopology(topology);
    drawcallCommon();
    const BufferImpl& impl = *buff->m_impl;
    ANKI_ASSERT(impl.usageValid(BufferUsageBit::INDIRECT));
    ANKI_ASSERT((offset % 4) == 0);
    ANKI_ASSERT((offset + sizeof(DrawArraysIndirectInfo) * drawCount) <= impl.getSize());

    ANKI_CMD(vkCmdDrawIndirect(m_handle, impl.getHandle(), offset, drawCount, sizeof(DrawArraysIndirectInfo)),
        ANY_OTHER_COMMAND);
}

inline void CommandBufferImpl::drawElementsIndirect(
    PrimitiveTopology topology, U32 drawCount, PtrSize offset, BufferPtr& buff)
{
    m_state.setPrimitiveTopology(topology);
    drawcallCommon();
    const BufferImpl& impl = *buff->m_impl;
    ANKI_ASSERT(impl.usageValid(BufferUsageBit::INDIRECT));
    ANKI_ASSERT((offset % 4) == 0);
    ANKI_ASSERT((offset + sizeof(DrawElementsIndirectInfo) * drawCount) <= impl.getSize());

    ANKI_CMD(vkCmdDrawIndexedIndirect(m_handle, impl.getHandle(), offset, drawCount, sizeof(DrawElementsIndirectInfo)),
        ANY_OTHER_COMMAND);
}

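// Compute path. The compute pipeline itself was already bound in bindShaderProgram(), so only the descriptor sets
// need binding (at the compute bind point) before vkCmdDispatch is recorded.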
inline void CommandBufferImpl::dispatchCompute(U32 groupCountX, U32 groupCountY, U32 groupCountZ)
{
    ANKI_ASSERT(m_computeProg);
    commandCommon();

    // Bind descriptors
    for(U i = 0; i < MAX_DESCRIPTOR_SETS; ++i)
    {
        if(m_computeProg->getReflectionInfo().m_descriptorSetMask.get(i))
        {
            DescriptorSet dset;
            Bool dirty;
            Array<U32, MAX_UNIFORM_BUFFER_BINDINGS + MAX_STORAGE_BUFFER_BINDINGS> dynamicOffsets;
            U dynamicOffsetCount;
            if(getGrManagerImpl().getDescriptorSetFactory().newDescriptorSet(
                   m_tid, m_dsetState[i], dset, dirty, dynamicOffsets, dynamicOffsetCount))
            {
                ANKI_VK_LOGF("Cannot recover");
            }

            if(dirty)
            {
                VkDescriptorSet dsHandle = dset.getHandle();

                ANKI_CMD(vkCmdBindDescriptorSets(m_handle,
                             VK_PIPELINE_BIND_POINT_COMPUTE,
                             m_computeProg->getPipelineLayout().getHandle(),
                             i,
                             1,
                             &dsHandle,
                             dynamicOffsetCount,
                             &dynamicOffsets[0]),
                    ANY_OTHER_COMMAND);
            }
        }
    }

    ANKI_CMD(vkCmdDispatch(m_handle, groupCountX, groupCountY, groupCountZ), ANY_OTHER_COMMAND);
}

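// Occlusion query commands. With ANKI_BATCH_COMMANDS the pool resets are gathered as QueryResetAtom records and
// flushed in one go later (see flushQueryResets()); begin/end are always recorded immediately.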
inline void CommandBufferImpl::resetOcclusionQuery(OcclusionQueryPtr query)
{
    commandCommon();

    VkQueryPool handle = query->m_impl->m_handle.m_pool;
    U32 idx = query->m_impl->m_handle.m_queryIndex;
    ANKI_ASSERT(handle);

#if ANKI_BATCH_COMMANDS
    flushBatches(CommandBufferCommandType::RESET_OCCLUSION_QUERY);

    if(m_queryResetAtoms.getSize() <= m_queryResetAtomCount)
    {
        m_queryResetAtoms.resize(m_alloc, max<U>(2, m_queryResetAtomCount * 2));
    }

    QueryResetAtom atom;
    atom.m_pool = handle;
    atom.m_queryIdx = idx;
    m_queryResetAtoms[m_queryResetAtomCount++] = atom;
#else
    ANKI_CMD(vkCmdResetQueryPool(m_handle, handle, idx, 1), ANY_OTHER_COMMAND);
#endif

    m_microCmdb->pushObjectRef(query);
}

inline void CommandBufferImpl::beginOcclusionQuery(OcclusionQueryPtr query)
{
    commandCommon();

    VkQueryPool handle = query->m_impl->m_handle.m_pool;
    U32 idx = query->m_impl->m_handle.m_queryIndex;
    ANKI_ASSERT(handle);

    ANKI_CMD(vkCmdBeginQuery(m_handle, handle, idx, 0), ANY_OTHER_COMMAND);

    m_microCmdb->pushObjectRef(query);
}

inline void CommandBufferImpl::endOcclusionQuery(OcclusionQueryPtr query)
{
    commandCommon();

    VkQueryPool handle = query->m_impl->m_handle.m_pool;
    U32 idx = query->m_impl->m_handle.m_queryIndex;
    ANKI_ASSERT(handle);

    ANKI_CMD(vkCmdEndQuery(m_handle, handle, idx), ANY_OTHER_COMMAND);

    m_microCmdb->pushObjectRef(query);
}

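// Clear outside a render pass. Only the color aspect is implemented; the image is expected to already be in
// VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, e.g. via a prior setTextureSurfaceBarrier() call.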
inline void CommandBufferImpl::clearTextureInternal(
    TexturePtr tex, const ClearValue& clearValue, const VkImageSubresourceRange& range)
{
    commandCommon();

    VkClearColorValue vclear;
    static_assert(sizeof(vclear) == sizeof(clearValue), "See file");
    memcpy(&vclear, &clearValue, sizeof(clearValue));

    const TextureImpl& impl = *tex->m_impl;
    if(impl.m_aspect == VK_IMAGE_ASPECT_COLOR_BIT)
    {
        ANKI_CMD(vkCmdClearColorImage(
                     m_handle, impl.m_imageHandle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &vclear, 1, &range),
            ANY_OTHER_COMMAND);
    }
    else
    {
        ANKI_ASSERT(0 && "TODO");
    }

    m_microCmdb->pushObjectRef(tex);
}

inline void CommandBufferImpl::clearTextureSurface(
    TexturePtr tex, const TextureSurfaceInfo& surf, const ClearValue& clearValue, DepthStencilAspectBit aspect)
{
    const TextureImpl& impl = *tex->m_impl;
    ANKI_ASSERT(impl.m_type != TextureType::_3D && "Not for 3D");

    VkImageSubresourceRange range;
    impl.computeSubResourceRange(surf, aspect, range);
    clearTextureInternal(tex, clearValue, range);
}

inline void CommandBufferImpl::clearTextureVolume(
    TexturePtr tex, const TextureVolumeInfo& vol, const ClearValue& clearValue, DepthStencilAspectBit aspect)
{
    const TextureImpl& impl = *tex->m_impl;
    ANKI_ASSERT(impl.m_type == TextureType::_3D && "Only for 3D");

    VkImageSubresourceRange range;
    impl.computeSubResourceRange(vol, aspect, range);
    clearTextureInternal(tex, clearValue, range);
}

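// Secondary command buffer execution. The first push decides the subpass contents
// (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS) and triggers the lazy render pass begin; with ANKI_BATCH_COMMANDS
// the handles pile up in m_secondLevelAtoms and are executed with a single vkCmdExecuteCommands on flush.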
inline void CommandBufferImpl::pushSecondLevelCommandBuffer(CommandBufferPtr cmdb)
{
    commandCommon();
    ANKI_ASSERT(insideRenderPass());
    ANKI_ASSERT(m_subpassContents == VK_SUBPASS_CONTENTS_MAX_ENUM
        || m_subpassContents == VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
    ANKI_ASSERT(cmdb->m_impl->m_finalized);

    m_subpassContents = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS;

    if(ANKI_UNLIKELY(m_rpCommandCount == 0))
    {
        beginRenderPassInternal();
    }

#if ANKI_BATCH_COMMANDS
    flushBatches(CommandBufferCommandType::PUSH_SECOND_LEVEL);

    if(m_secondLevelAtoms.getSize() <= m_secondLevelAtomCount)
    {
        m_secondLevelAtoms.resize(m_alloc, max<U>(8, m_secondLevelAtomCount * 2));
    }

    m_secondLevelAtoms[m_secondLevelAtomCount++] = cmdb->m_impl->m_handle;
#else
    ANKI_CMD(vkCmdExecuteCommands(m_handle, 1, &cmdb->m_impl->m_handle), ANY_OTHER_COMMAND);
#endif

    ++m_rpCommandCount;

    m_microCmdb->pushObjectRef(cmdb);
}

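// Common prologue for every drawcall: lazily begin the render pass on the first draw, fetch or create the pipeline
// matching the current state, bind the dirty descriptor sets and flush the viewport/scissor if needed.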
inline void CommandBufferImpl::drawcallCommon()
{
    // Preconditions
    commandCommon();
    ANKI_ASSERT(insideRenderPass() || secondLevel());
    ANKI_ASSERT(m_subpassContents == VK_SUBPASS_CONTENTS_MAX_ENUM || m_subpassContents == VK_SUBPASS_CONTENTS_INLINE);
    m_subpassContents = VK_SUBPASS_CONTENTS_INLINE;

    if(ANKI_UNLIKELY(m_rpCommandCount == 0) && !secondLevel())
    {
        beginRenderPassInternal();
    }

    ++m_rpCommandCount;

    // Get or create ppline
    ANKI_ASSERT(m_graphicsProg);
    Pipeline ppline;
    Bool stateDirty;
    m_graphicsProg->getPipelineFactory().newPipeline(m_state, ppline, stateDirty);

    if(stateDirty)
    {
        ANKI_CMD(vkCmdBindPipeline(m_handle, VK_PIPELINE_BIND_POINT_GRAPHICS, ppline.getHandle()), ANY_OTHER_COMMAND);
    }

    // Bind dsets
    for(U i = 0; i < MAX_DESCRIPTOR_SETS; ++i)
    {
        if(m_graphicsProg->getReflectionInfo().m_descriptorSetMask.get(i))
        {
            DescriptorSet dset;
            Bool dirty;
            Array<U32, MAX_UNIFORM_BUFFER_BINDINGS + MAX_STORAGE_BUFFER_BINDINGS> dynamicOffsets;
            U dynamicOffsetCount;
            if(getGrManagerImpl().getDescriptorSetFactory().newDescriptorSet(
                   m_tid, m_dsetState[i], dset, dirty, dynamicOffsets, dynamicOffsetCount))
            {
                ANKI_VK_LOGF("Cannot recover");
            }

            if(dirty)
            {
                VkDescriptorSet dsHandle = dset.getHandle();

                ANKI_CMD(vkCmdBindDescriptorSets(m_handle,
                             VK_PIPELINE_BIND_POINT_GRAPHICS,
                             m_graphicsProg->getPipelineLayout().getHandle(),
                             i,
                             1,
                             &dsHandle,
                             dynamicOffsetCount,
                             &dynamicOffsets[0]),
                    ANY_OTHER_COMMAND);
            }
        }
    }

    // Flush viewport
    if(ANKI_UNLIKELY(m_viewportDirty))
    {
        const I minx = m_viewport[0];
        const I miny = m_viewport[1];
        const I maxx = m_viewport[2];
        const I maxy = m_viewport[3];

        VkViewport s;
        s.x = minx;
        s.y = miny;
        s.width = maxx - minx;
        s.height = maxy - miny;
        s.minDepth = 0.0;
        s.maxDepth = 1.0;
        ANKI_CMD(vkCmdSetViewport(m_handle, 0, 1, &s), ANY_OTHER_COMMAND);

        VkRect2D scissor = {};
        scissor.extent.width = maxx - minx;
        scissor.extent.height = maxy - miny;
        scissor.offset.x = minx;
        scissor.offset.y = miny;
        ANKI_CMD(vkCmdSetScissor(m_handle, 0, 1, &scissor), ANY_OTHER_COMMAND);

        m_viewportDirty = false;
    }

    ANKI_TRACE_INC_COUNTER(GR_DRAWCALLS, 1);
}

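// Bookkeeping shared by every command. The first recorded command triggers beginRecording(), which presumably wraps
// vkBeginCommandBuffer; the rest is thread/lifetime validation and the m_empty flag.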
inline void CommandBufferImpl::commandCommon()
{
    ANKI_ASSERT(Thread::getCurrentThreadId() == m_tid
        && "Commands must be recorded and flushed by the thread this command buffer was created in");
    ANKI_ASSERT(!m_finalized);
    ANKI_ASSERT(m_handle);

#if ANKI_EXTRA_CHECKS
    ++m_commandCount;
#endif
    m_empty = false;

    if(ANKI_UNLIKELY(!m_beganRecording))
    {
        beginRecording();
        m_beganRecording = true;
    }
}

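// The batching state machine: whenever the command type changes, flush whatever the previous type accumulated.
// ANKI_CMD presumably routes every recorded command through here when ANKI_BATCH_COMMANDS is enabled, so plain
// commands (ANY_OTHER_COMMAND) transparently flush pending barriers, query resets, query copies and secondary
// buffers.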
inline void CommandBufferImpl::flushBatches(CommandBufferCommandType type)
{
    if(type != m_lastCmdType)
    {
        switch(m_lastCmdType)
        {
        case CommandBufferCommandType::SET_BARRIER:
            flushBarriers();
            break;
        case CommandBufferCommandType::RESET_OCCLUSION_QUERY:
            flushQueryResets();
            break;
        case CommandBufferCommandType::WRITE_QUERY_RESULT:
            flushWriteQueryResults();
            break;
        case CommandBufferCommandType::PUSH_SECOND_LEVEL:
            ANKI_ASSERT(m_secondLevelAtomCount > 0);
            vkCmdExecuteCommands(m_handle, m_secondLevelAtomCount, &m_secondLevelAtoms[0]);
            m_secondLevelAtomCount = 0;
            break;
        case CommandBufferCommandType::ANY_OTHER_COMMAND:
            break;
        default:
            ANKI_ASSERT(0);
        }

        m_lastCmdType = type;
    }
}

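// vkCmdFillBuffer requires offset and size to be multiples of 4, hence the asserts below. Passing
// size == MAX_PTR_SIZE fills from offset to the end of the buffer.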
inline void CommandBufferImpl::fillBuffer(BufferPtr buff, PtrSize offset, PtrSize size, U32 value)
{
    commandCommon();
    ANKI_ASSERT(!insideRenderPass());
    const BufferImpl& impl = *buff->m_impl;
    ANKI_ASSERT(impl.usageValid(BufferUsageBit::FILL));

    ANKI_ASSERT(offset < impl.getSize());
    ANKI_ASSERT((offset % 4) == 0 && "Should be multiple of 4");

    size = (size == MAX_PTR_SIZE) ? (impl.getSize() - offset) : size;
    ANKI_ASSERT(offset + size <= impl.getSize());
    ANKI_ASSERT((size % 4) == 0 && "Should be multiple of 4");

    ANKI_CMD(vkCmdFillBuffer(m_handle, impl.getHandle(), offset, size, value), ANY_OTHER_COMMAND);

    m_microCmdb->pushObjectRef(buff);
}

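// Copy a single 32bit query result into a buffer. VK_QUERY_RESULT_PARTIAL_BIT means the copy doesn't wait for the
// query to become available and may write an intermediate value.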
inline void CommandBufferImpl::writeOcclusionQueryResultToBuffer(
    OcclusionQueryPtr query, PtrSize offset, BufferPtr buff)
{
    commandCommon();
    ANKI_ASSERT(!insideRenderPass());

    const BufferImpl& impl = *buff->m_impl;
    ANKI_ASSERT(impl.usageValid(BufferUsageBit::QUERY_RESULT));
    ANKI_ASSERT((offset % 4) == 0);
    ANKI_ASSERT((offset + sizeof(U32)) <= impl.getSize());

    const OcclusionQueryImpl& q = *query->m_impl;

#if ANKI_BATCH_COMMANDS
    flushBatches(CommandBufferCommandType::WRITE_QUERY_RESULT);

    if(m_writeQueryAtoms.getSize() <= m_writeQueryAtomCount)
    {
        m_writeQueryAtoms.resize(m_alloc, max<U>(2, m_writeQueryAtomCount * 2));
    }

    WriteQueryAtom atom;
    atom.m_pool = q.m_handle.m_pool;
    atom.m_queryIdx = q.m_handle.m_queryIndex;
    atom.m_buffer = impl.getHandle();
    atom.m_offset = offset;
    m_writeQueryAtoms[m_writeQueryAtomCount++] = atom;
#else
    ANKI_CMD(vkCmdCopyQueryPoolResults(m_handle,
                 q.m_handle.m_pool,
                 q.m_handle.m_queryIndex,
                 1,
                 impl.getHandle(),
                 offset,
                 sizeof(U32),
                 VK_QUERY_RESULT_PARTIAL_BIT),
        ANY_OTHER_COMMAND);
#endif

    m_microCmdb->pushObjectRef(query);
    m_microCmdb->pushObjectRef(buff);
}

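// Program binding. Compute pipelines don't depend on render state, so they're bound immediately; graphics pipelines
// depend on state that is only known at draw time, so binding is deferred to drawcallCommon() via the pipeline
// factory.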
inline void CommandBufferImpl::bindShaderProgram(ShaderProgramPtr& prog)
{
    ShaderProgramImpl& impl = *prog->m_impl;

    if(impl.isGraphics())
    {
        m_graphicsProg = &impl;
        m_computeProg = nullptr; // Unbind the compute prog; only one program is tracked, unlike Vulkan's separate bind points
        m_state.bindShaderProgram(prog);
    }
    else
    {
        m_computeProg = &impl;
        m_graphicsProg = nullptr; // See comment in the if()

        // Bind the pipeline now
        ANKI_CMD(vkCmdBindPipeline(m_handle, VK_PIPELINE_BIND_POINT_COMPUTE, impl.getComputePipelineHandle()),
            ANY_OTHER_COMMAND);
    }

    for(U i = 0; i < MAX_DESCRIPTOR_SETS; ++i)
    {
        if(impl.getReflectionInfo().m_descriptorSetMask.get(i))
        {
            m_dsetState[i].setLayout(impl.getDescriptorSetLayout(i));
        }
    }

    m_microCmdb->pushObjectRef(prog);
}

inline void CommandBufferImpl::copyBufferToBuffer(
    BufferPtr& src, PtrSize srcOffset, BufferPtr& dst, PtrSize dstOffset, PtrSize range)
{
    commandCommon();

    VkBufferCopy region = {};
    region.srcOffset = srcOffset;
    region.dstOffset = dstOffset;
    region.size = range;

    ANKI_CMD(
        vkCmdCopyBuffer(m_handle, src->m_impl->getHandle(), dst->m_impl->getHandle(), 1, &region), ANY_OTHER_COMMAND);

    m_microCmdb->pushObjectRef(src);
    m_microCmdb->pushObjectRef(dst);
}

} // end namespace anki