CommandBuffer.cpp

// Copyright (C) 2009-2021, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <AnKi/Gr/CommandBuffer.h>
#include <AnKi/Gr/gl/CommandBufferImpl.h>
#include <AnKi/Gr/GrManager.h>
#include <AnKi/Gr/gl/GrManagerImpl.h>
#include <AnKi/Gr/gl/RenderingThread.h>
#include <AnKi/Gr/gl/GlState.h>
#include <AnKi/Gr/gl/FramebufferImpl.h>
#include <AnKi/Gr/gl/OcclusionQueryImpl.h>
#include <AnKi/Gr/gl/TextureImpl.h>
#include <AnKi/Gr/gl/BufferImpl.h>
#include <AnKi/Gr/gl/SamplerImpl.h>
#include <AnKi/Gr/gl/ShaderProgramImpl.h>
#include <AnKi/Gr/gl/TextureViewImpl.h>
#include <AnKi/Core/Trace.h>

namespace anki
{

CommandBuffer* CommandBuffer::newInstance(GrManager* manager, const CommandBufferInitInfo& inf)
{
	CommandBufferImpl* impl = manager->getAllocator().newInstance<CommandBufferImpl>(manager, inf.getName());
	impl->init(inf);
	return impl;
}

void CommandBuffer::flush(FencePtr* fence)
{
	ANKI_GL_SELF(CommandBufferImpl);

	if(!self.isSecondLevel())
	{
		ANKI_ASSERT(!self.m_state.insideRenderPass());
	}
	else
	{
		ANKI_ASSERT(fence == nullptr);
	}

	if(!self.isSecondLevel())
	{
		static_cast<GrManagerImpl&>(getManager())
			.getRenderingThread()
			.flushCommandBuffer(CommandBufferPtr(this), fence);
	}
}
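
// Note on the pattern used in the rest of this file: each public CommandBuffer method records a small GlCommand
// subclass through pushBackNewCommand() instead of touching GL directly. The GL calls run later, on the rendering
// thread, when the command's operator()(GlState&) executes. The m_state tracker is consulted first so that
// redundant state changes are dropped without recording a command.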
void CommandBuffer::bindVertexBuffer(U32 binding, BufferPtr buff, PtrSize offset, PtrSize stride,
	VertexStepRate stepRate)
{
	class Cmd final : public GlCommand
	{
	public:
		BufferPtr m_buff;
		U32 m_binding;
		PtrSize m_offset;
		PtrSize m_stride;
		Bool m_instanced;

		Cmd(U32 binding, BufferPtr buff, PtrSize offset, PtrSize stride, Bool instanced)
			: m_buff(buff)
			, m_binding(binding)
			, m_offset(offset)
			, m_stride(stride)
			, m_instanced(instanced)
		{
		}

		Error operator()(GlState& state)
		{
			glBindVertexBuffer(m_binding, static_cast<const BufferImpl&>(*m_buff).getGlName(), m_offset, m_stride);
			glVertexBindingDivisor(m_binding, (m_instanced) ? 1 : 0);
			return Error::NONE;
		}
	};

	ANKI_ASSERT(buff);
	ANKI_ASSERT(stride > 0);
	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.bindVertexBuffer(binding, buff, offset, stride, stepRate))
	{
		self.pushBackNewCommand<Cmd>(binding, buff, offset, stride, stepRate == VertexStepRate::INSTANCE);
	}
}

void CommandBuffer::setVertexAttribute(U32 location, U32 buffBinding, Format fmt, PtrSize relativeOffset)
{
	class Cmd final : public GlCommand
	{
	public:
		U32 m_location;
		U32 m_buffBinding;
		U8 m_compSize;
		GLenum m_fmt;
		Bool m_normalized;
		PtrSize m_relativeOffset;

		Cmd(U32 location, U32 buffBinding, U8 compSize, GLenum fmt, Bool normalized, PtrSize relativeOffset)
			: m_location(location)
			, m_buffBinding(buffBinding)
			, m_compSize(compSize)
			, m_fmt(fmt)
			, m_normalized(normalized)
			, m_relativeOffset(relativeOffset)
		{
		}

		Error operator()(GlState& state)
		{
			glVertexAttribFormat(m_location, m_compSize, m_fmt, m_normalized, m_relativeOffset);
			glVertexAttribBinding(m_location, m_buffBinding);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setVertexAttribute(location, buffBinding, fmt, relativeOffset))
	{
		U compCount;
		GLenum type;
		Bool normalized;
		convertVertexFormat(fmt, compCount, type, normalized);

		self.pushBackNewCommand<Cmd>(location, buffBinding, compCount, type, normalized, relativeOffset);
	}
}

void CommandBuffer::bindIndexBuffer(BufferPtr buff, PtrSize offset, IndexType type)
{
	class Cmd final : public GlCommand
	{
	public:
		BufferPtr m_buff;

		Cmd(BufferPtr buff)
			: m_buff(buff)
		{
		}

		Error operator()(GlState& state)
		{
			glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, static_cast<const BufferImpl&>(*m_buff).getGlName());
			return Error::NONE;
		}
	};

	ANKI_ASSERT(buff);
	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.bindIndexBuffer(buff, offset, type))
	{
		self.pushBackNewCommand<Cmd>(buff);
	}
}

void CommandBuffer::setPrimitiveRestart(Bool enable)
{
	class Cmd final : public GlCommand
	{
	public:
		Bool m_enable;

		Cmd(Bool enable)
			: m_enable(enable)
		{
		}

		Error operator()(GlState& state)
		{
			if(m_enable)
			{
				glEnable(GL_PRIMITIVE_RESTART);
			}
			else
			{
				glDisable(GL_PRIMITIVE_RESTART);
			}

			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setPrimitiveRestart(enable))
	{
		self.pushBackNewCommand<Cmd>(enable);
	}
}

void CommandBuffer::setViewport(U32 minx, U32 miny, U32 width, U32 height)
{
	class ViewportCommand final : public GlCommand
	{
	public:
		Array<U32, 4> m_value;

		ViewportCommand(U32 a, U32 b, U32 c, U32 d)
		{
			m_value = {{a, b, c, d}};
		}

		Error operator()(GlState& state)
		{
			glViewport(m_value[0], m_value[1], m_value[2], m_value[3]);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setViewport(minx, miny, width, height))
	{
		self.pushBackNewCommand<ViewportCommand>(minx, miny, width, height);
	}
}

void CommandBuffer::setScissor(U32 minx, U32 miny, U32 width, U32 height)
{
	ANKI_ASSERT(minx < MAX_U32 && miny < MAX_U32);
	ANKI_ASSERT(width > 0 && height > 0);

	class ScissorCommand final : public GlCommand
	{
	public:
		Array<GLsizei, 4> m_value;

		ScissorCommand(GLsizei a, GLsizei b, GLsizei c, GLsizei d)
		{
			m_value = {{a, b, c, d}};
		}

		Error operator()(GlState& state)
		{
			if(state.m_scissor[0] != m_value[0] || state.m_scissor[1] != m_value[1] || state.m_scissor[2] != m_value[2]
				|| state.m_scissor[3] != m_value[3])
			{
				state.m_scissor = m_value;
				glScissor(m_value[0], m_value[1], m_value[2], m_value[3]);
			}

			return Error::NONE;
		}
	};

	// Limit the width and height to GLsizei
	const GLsizei iwidth = (width == MAX_U32) ? MAX_I32 : width;
	const GLsizei iheight = (height == MAX_U32) ? MAX_I32 : height;
	const GLsizei iminx = minx;
	const GLsizei iminy = miny;

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setScissor(iminx, iminy, iwidth, iheight))
	{
		self.pushBackNewCommand<ScissorCommand>(iminx, iminy, iwidth, iheight);
	}
}

void CommandBuffer::setFillMode(FillMode mode)
{
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_fillMode;

		Cmd(GLenum fillMode)
			: m_fillMode(fillMode)
		{
		}

		Error operator()(GlState& state)
		{
			glPolygonMode(GL_FRONT_AND_BACK, m_fillMode);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setFillMode(mode))
	{
		self.pushBackNewCommand<Cmd>(convertFillMode(mode));
	}
}

void CommandBuffer::setCullMode(FaceSelectionBit mode)
{
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_mode;

		Cmd(GLenum mode)
			: m_mode(mode)
		{
		}

		Error operator()(GlState& state)
		{
			glCullFace(m_mode);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setCullMode(mode))
	{
		self.pushBackNewCommand<Cmd>(convertFaceMode(mode));
	}
}

void CommandBuffer::setPolygonOffset(F32 factor, F32 units)
{
	class Cmd final : public GlCommand
	{
	public:
		F32 m_factor;
		F32 m_units;

		Cmd(F32 factor, F32 units)
			: m_factor(factor)
			, m_units(units)
		{
		}

		Error operator()(GlState& state)
		{
			if(m_factor == 0.0 && m_units == 0.0)
			{
				glDisable(GL_POLYGON_OFFSET_FILL);
			}
			else
			{
				glEnable(GL_POLYGON_OFFSET_FILL);
				glPolygonOffset(m_factor, m_units);
			}

			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setPolygonOffset(factor, units))
	{
		self.pushBackNewCommand<Cmd>(factor, units);
	}
}

void CommandBuffer::setStencilOperations(FaceSelectionBit face, StencilOperation stencilFail,
	StencilOperation stencilPassDepthFail, StencilOperation stencilPassDepthPass)
{
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_face;
		GLenum m_stencilFail;
		GLenum m_stencilPassDepthFail;
		GLenum m_stencilPassDepthPass;

		Cmd(GLenum face, GLenum stencilFail, GLenum stencilPassDepthFail, GLenum stencilPassDepthPass)
			: m_face(face)
			, m_stencilFail(stencilFail)
			, m_stencilPassDepthFail(stencilPassDepthFail)
			, m_stencilPassDepthPass(stencilPassDepthPass)
		{
		}

		Error operator()(GlState& state)
		{
			glStencilOpSeparate(m_face, m_stencilFail, m_stencilPassDepthFail, m_stencilPassDepthPass);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setStencilOperations(face, stencilFail, stencilPassDepthFail, stencilPassDepthPass))
	{
		self.pushBackNewCommand<Cmd>(convertFaceMode(face), convertStencilOperation(stencilFail),
			convertStencilOperation(stencilPassDepthFail),
			convertStencilOperation(stencilPassDepthPass));
	}
}

void CommandBuffer::setStencilCompareOperation(FaceSelectionBit face, CompareOperation comp)
{
	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.setStencilCompareOperation(face, comp);
}

void CommandBuffer::setStencilCompareMask(FaceSelectionBit face, U32 mask)
{
	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.setStencilCompareMask(face, mask);
}

void CommandBuffer::setStencilWriteMask(FaceSelectionBit face, U32 mask)
{
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_face;
		U32 m_mask;

		Cmd(GLenum face, U32 mask)
			: m_face(face)
			, m_mask(mask)
		{
		}

		Error operator()(GlState& state)
		{
			glStencilMaskSeparate(m_face, m_mask);

			if(m_face == GL_FRONT)
			{
				state.m_stencilWriteMask[0] = m_mask;
			}
			else if(m_face == GL_BACK)
			{
				state.m_stencilWriteMask[1] = m_mask;
			}
			else
			{
				ANKI_ASSERT(m_face == GL_FRONT_AND_BACK);
				state.m_stencilWriteMask[0] = state.m_stencilWriteMask[1] = m_mask;
			}

			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setStencilWriteMask(face, mask))
	{
		self.pushBackNewCommand<Cmd>(convertFaceMode(face), mask);
	}
}

void CommandBuffer::setStencilReference(FaceSelectionBit face, U32 ref)
{
	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.setStencilReference(face, ref);
}

void CommandBuffer::setDepthWrite(Bool enable)
{
	class Cmd final : public GlCommand
	{
	public:
		Bool m_enable;

		Cmd(Bool enable)
			: m_enable(enable)
		{
		}

		Error operator()(GlState& state)
		{
			glDepthMask(m_enable);
			state.m_depthWriteMask = m_enable;
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setDepthWrite(enable))
	{
		self.pushBackNewCommand<Cmd>(enable);
	}
}

void CommandBuffer::setDepthCompareOperation(CompareOperation op)
{
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_op;

		Cmd(GLenum op)
			: m_op(op)
		{
		}

		Error operator()(GlState& state)
		{
			glDepthFunc(m_op);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setDepthCompareOperation(op))
	{
		self.pushBackNewCommand<Cmd>(convertCompareOperation(op));
	}
}

void CommandBuffer::setAlphaToCoverage(Bool enable)
{
	ANKI_ASSERT(!"TODO");
}

void CommandBuffer::setColorChannelWriteMask(U32 attachment, ColorBit mask)
{
	class Cmd final : public GlCommand
	{
	public:
		U8 m_attachment;
		ColorBit m_mask;

		Cmd(U8 attachment, ColorBit mask)
			: m_attachment(attachment)
			, m_mask(mask)
		{
		}

		Error operator()(GlState& state)
		{
			const Bool r = !!(m_mask & ColorBit::RED);
			const Bool g = !!(m_mask & ColorBit::GREEN);
			const Bool b = !!(m_mask & ColorBit::BLUE);
			const Bool a = !!(m_mask & ColorBit::ALPHA);

			glColorMaski(m_attachment, r, g, b, a);
			state.m_colorWriteMasks[m_attachment] = {{r, g, b, a}};

			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setColorChannelWriteMask(attachment, mask))
	{
		self.pushBackNewCommand<Cmd>(attachment, mask);
	}
}

void CommandBuffer::setBlendFactors(U32 attachment, BlendFactor srcRgb, BlendFactor dstRgb, BlendFactor srcA,
	BlendFactor dstA)
{
	class Cmd final : public GlCommand
	{
	public:
		U8 m_attachment;
		GLenum m_srcRgb;
		GLenum m_dstRgb;
		GLenum m_srcA;
		GLenum m_dstA;

		Cmd(U8 att, GLenum srcRgb, GLenum dstRgb, GLenum srcA, GLenum dstA)
			: m_attachment(att)
			, m_srcRgb(srcRgb)
			, m_dstRgb(dstRgb)
			, m_srcA(srcA)
			, m_dstA(dstA)
		{
		}

		Error operator()(GlState&)
		{
			glBlendFuncSeparatei(m_attachment, m_srcRgb, m_dstRgb, m_srcA, m_dstA);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setBlendFactors(attachment, srcRgb, dstRgb, srcA, dstA))
	{
		self.pushBackNewCommand<Cmd>(attachment, convertBlendFactor(srcRgb), convertBlendFactor(dstRgb),
			convertBlendFactor(srcA), convertBlendFactor(dstA));
	}
}

void CommandBuffer::setBlendOperation(U32 attachment, BlendOperation funcRgb, BlendOperation funcA)
{
	class Cmd final : public GlCommand
	{
	public:
		U8 m_attachment;
		GLenum m_funcRgb;
		GLenum m_funcA;

		Cmd(U8 att, GLenum funcRgb, GLenum funcA)
			: m_attachment(att)
			, m_funcRgb(funcRgb)
			, m_funcA(funcA)
		{
		}

		Error operator()(GlState&)
		{
			glBlendEquationSeparatei(m_attachment, m_funcRgb, m_funcA);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.setBlendOperation(attachment, funcRgb, funcA))
	{
		self.pushBackNewCommand<Cmd>(attachment, convertBlendOperation(funcRgb), convertBlendOperation(funcA));
	}
}
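
// The bind* methods below emulate descriptor sets: GL has no set concept, so a (set, binding) pair is flattened
// into a single GL unit index with binding + MAX_*_BINDINGS * set before the command is recorded.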
void CommandBuffer::bindTextureAndSampler(U32 set, U32 binding, TextureViewPtr texView, SamplerPtr sampler,
	TextureUsageBit usage)
{
	class Cmd final : public GlCommand
	{
	public:
		U32 m_unit;
		TextureViewPtr m_texView;
		SamplerPtr m_sampler;

		Cmd(U32 unit, TextureViewPtr texView, SamplerPtr sampler)
			: m_unit(unit)
			, m_texView(texView)
			, m_sampler(sampler)
		{
		}

		Error operator()(GlState&)
		{
			glBindTextureUnit(m_unit, static_cast<const TextureViewImpl&>(*m_texView).m_view.m_glName);
			glBindSampler(m_unit, static_cast<const SamplerImpl&>(*m_sampler).getGlName());
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	ANKI_ASSERT(static_cast<const TextureViewImpl&>(*texView).m_tex->isSubresourceGoodForSampling(
		static_cast<const TextureViewImpl&>(*texView).getSubresource()));

	if(self.m_state.bindTextureViewAndSampler(set, binding, texView, sampler))
	{
		U unit = binding + MAX_TEXTURE_BINDINGS * set;
		self.pushBackNewCommand<Cmd>(unit, texView, sampler);
	}
}

void CommandBuffer::bindUniformBuffer(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
{
	class Cmd final : public GlCommand
	{
	public:
		BufferPtr m_buff;
		PtrSize m_binding;
		PtrSize m_offset;
		PtrSize m_range;

		Cmd(U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
			: m_buff(buff)
			, m_binding(binding)
			, m_offset(offset)
			, m_range(range)
		{
		}

		Error operator()(GlState&)
		{
			static_cast<const BufferImpl&>(*m_buff).bind(GL_UNIFORM_BUFFER, m_binding, m_offset, m_range);
			return Error::NONE;
		}
	};

	ANKI_ASSERT(buff);
	ANKI_ASSERT(range > 0);
	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.bindUniformBuffer(set, binding, buff, offset, range))
	{
		binding = binding + MAX_UNIFORM_BUFFER_BINDINGS * set;
		self.pushBackNewCommand<Cmd>(binding, buff, offset, range);
	}
}

void CommandBuffer::bindStorageBuffer(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
{
	class Cmd final : public GlCommand
	{
	public:
		BufferPtr m_buff;
		PtrSize m_binding;
		PtrSize m_offset;
		PtrSize m_range;

		Cmd(U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
			: m_buff(buff)
			, m_binding(binding)
			, m_offset(offset)
			, m_range(range)
		{
		}

		Error operator()(GlState&)
		{
			static_cast<const BufferImpl&>(*m_buff).bind(GL_SHADER_STORAGE_BUFFER, m_binding, m_offset, m_range);
			return Error::NONE;
		}
	};

	ANKI_ASSERT(buff);
	ANKI_ASSERT(range > 0);
	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.bindStorageBuffer(set, binding, buff, offset, range))
	{
		binding = binding + MAX_STORAGE_BUFFER_BINDINGS * set;
		self.pushBackNewCommand<Cmd>(binding, buff, offset, range);
	}
}

void CommandBuffer::bindImage(U32 set, U32 binding, TextureViewPtr img)
{
	class Cmd final : public GlCommand
	{
	public:
		TextureViewPtr m_img;
		U16 m_unit;

		Cmd(U32 unit, TextureViewPtr img)
			: m_img(img)
			, m_unit(unit)
		{
		}

		Error operator()(GlState&)
		{
			const TextureViewImpl& view = static_cast<const TextureViewImpl&>(*m_img);
			glBindImageTexture(m_unit, view.m_view.m_glName, 0, GL_TRUE, 0, GL_READ_WRITE,
				static_cast<const TextureImpl&>(*view.m_tex).m_internalFormat);
			return Error::NONE;
		}
	};

	ANKI_ASSERT(img);
	ANKI_GL_SELF(CommandBufferImpl);
	ANKI_ASSERT(static_cast<const TextureViewImpl&>(*img).m_tex->isSubresourceGoodForImageLoadStore(
		static_cast<const TextureViewImpl&>(*img).getSubresource()));

	if(self.m_state.bindImage(set, binding, img))
	{
		binding = binding + set * MAX_IMAGE_BINDINGS;
		self.pushBackNewCommand<Cmd>(binding, img);
	}
}

void CommandBuffer::bindTextureBuffer(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range, Format fmt)
{
	class Cmd final : public GlCommand
	{
	public:
		U32 m_set;
		U32 m_binding;
		BufferPtr m_buff;
		PtrSize m_offset;
		PtrSize m_range;
		GLenum m_fmt;

		Cmd(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range, GLenum fmt)
			: m_set(set)
			, m_binding(binding)
			, m_buff(buff)
			, m_offset(offset)
			, m_range(range)
			, m_fmt(fmt)
		{
		}

		Error operator()(GlState& state)
		{
			ANKI_ASSERT(m_offset + m_range <= m_buff->getSize());
			const GLuint tex = state.m_texBuffTextures[m_set][m_binding];
			glTextureBufferRange(tex, m_fmt, static_cast<const BufferImpl&>(*m_buff).getGlName(), m_offset, m_range);
			return Error::NONE;
		}
	};

	Bool compressed;
	GLenum format;
	GLenum internalFormat;
	GLenum type;
	DepthStencilAspectBit dsAspect;
	convertTextureInformation(fmt, compressed, format, internalFormat, type, dsAspect);
	(void)compressed;
	(void)format;
	(void)type;
	(void)dsAspect;

	ANKI_GL_SELF(CommandBufferImpl);
	self.pushBackNewCommand<Cmd>(set, binding, buff, offset, range, internalFormat);
}

void CommandBuffer::bindShaderProgram(ShaderProgramPtr prog)
{
	class Cmd final : public GlCommand
	{
	public:
		ShaderProgramPtr m_prog;

		Cmd(const ShaderProgramPtr& prog)
			: m_prog(prog)
		{
		}

		Error operator()(GlState& state)
		{
			state.m_crntProg = m_prog;
			glUseProgram(static_cast<const ShaderProgramImpl&>(*m_prog).getGlName());
			return Error::NONE;
		}
	};

	ANKI_ASSERT(prog);
	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.bindShaderProgram(prog))
	{
		self.pushBackNewCommand<Cmd>(prog);
	}
	else
	{
		ANKI_TRACE_INC_COUNTER(GL_PROGS_SKIPPED, 1);
	}
}
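
// Render passes are emulated: beginRenderPass() only records a framebuffer bind and endRenderPass() defers to
// FramebufferImpl. Note that the attachment usage arguments are not used by this GL backend.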
void CommandBuffer::beginRenderPass(FramebufferPtr fb,
	const Array<TextureUsageBit, MAX_COLOR_ATTACHMENTS>& colorAttachmentUsages,
	TextureUsageBit depthStencilAttachmentUsage, U32 minx, U32 miny, U32 width,
	U32 height)
{
	class BindFramebufferCommand final : public GlCommand
	{
	public:
		FramebufferPtr m_fb;
		Array<U32, 4> m_renderArea;

		BindFramebufferCommand(FramebufferPtr fb, U32 minx, U32 miny, U32 width, U32 height)
			: m_fb(fb)
			, m_renderArea{{minx, miny, width, height}}
		{
		}

		Error operator()(GlState& state)
		{
			static_cast<const FramebufferImpl&>(*m_fb).bind(state, m_renderArea[0], m_renderArea[1], m_renderArea[2],
				m_renderArea[3]);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);

	if(self.m_state.beginRenderPass(fb))
	{
		self.pushBackNewCommand<BindFramebufferCommand>(fb, minx, miny, width, height);
	}
}

void CommandBuffer::endRenderPass()
{
	class Command final : public GlCommand
	{
	public:
		const FramebufferImpl* m_fb;

		Command(const FramebufferImpl* fb)
			: m_fb(fb)
		{
			ANKI_ASSERT(fb);
		}

		Error operator()(GlState&)
		{
			m_fb->endRenderPass();
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.pushBackNewCommand<Command>(self.m_state.m_fb);
	self.m_state.endRenderPass();
}
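
// The draw methods first validate and flush any pending state (flushDrawcall) and then record the GL draw.
// For indexed draws, firstIndex is converted from an index count into a byte offset using the bound index type.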
void CommandBuffer::drawElements(PrimitiveTopology topology, U32 count, U32 instanceCount, U32 firstIndex,
	U32 baseVertex, U32 baseInstance)
{
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_topology;
		GLenum m_indexType;
		DrawElementsIndirectInfo m_info;

		Cmd(GLenum topology, GLenum indexType, const DrawElementsIndirectInfo& info)
			: m_topology(topology)
			, m_indexType(indexType)
			, m_info(info)
		{
		}

		Error operator()(GlState&)
		{
			glDrawElementsInstancedBaseVertexBaseInstance(
				m_topology, m_info.m_count, m_indexType, numberToPtr<void*>(m_info.m_firstIndex),
				m_info.m_instanceCount, m_info.m_baseVertex, m_info.m_baseInstance);

			ANKI_TRACE_INC_COUNTER(GR_DRAWCALLS, 1);
			ANKI_TRACE_INC_COUNTER(GR_VERTICES, m_info.m_instanceCount * m_info.m_count);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.checkIndexedDracall();
	self.flushDrawcall(*this);

	U idxBytes;
	if(self.m_state.m_idx.m_indexType == GL_UNSIGNED_SHORT)
	{
		idxBytes = sizeof(U16);
	}
	else
	{
		ANKI_ASSERT(self.m_state.m_idx.m_indexType == GL_UNSIGNED_INT);
		idxBytes = sizeof(U32);
	}

	firstIndex = firstIndex * idxBytes + self.m_state.m_idx.m_offset;

	DrawElementsIndirectInfo info(count, instanceCount, firstIndex, baseVertex, baseInstance);
	self.pushBackNewCommand<Cmd>(convertPrimitiveTopology(topology), self.m_state.m_idx.m_indexType, info);
}

void CommandBuffer::drawArrays(PrimitiveTopology topology, U32 count, U32 instanceCount, U32 first, U32 baseInstance)
{
	class DrawArraysCommand final : public GlCommand
	{
	public:
		GLenum m_topology;
		DrawArraysIndirectInfo m_info;

		DrawArraysCommand(GLenum topology, const DrawArraysIndirectInfo& info)
			: m_topology(topology)
			, m_info(info)
		{
		}

		Error operator()(GlState& state)
		{
			glDrawArraysInstancedBaseInstance(m_topology, m_info.m_first, m_info.m_count, m_info.m_instanceCount,
				m_info.m_baseInstance);

			ANKI_TRACE_INC_COUNTER(GR_DRAWCALLS, 1);
			ANKI_TRACE_INC_COUNTER(GR_VERTICES, m_info.m_instanceCount * m_info.m_count);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.checkNonIndexedDrawcall();
	self.flushDrawcall(*this);

	DrawArraysIndirectInfo info(count, instanceCount, first, baseInstance);
	self.pushBackNewCommand<DrawArraysCommand>(convertPrimitiveTopology(topology), info);
}

void CommandBuffer::drawElementsIndirect(PrimitiveTopology topology, U32 drawCount, PtrSize offset,
	BufferPtr indirectBuff)
{
	class DrawElementsIndirectCommand final : public GlCommand
	{
	public:
		GLenum m_topology;
		GLenum m_indexType;
		U32 m_drawCount;
		PtrSize m_offset;
		BufferPtr m_buff;

		DrawElementsIndirectCommand(GLenum topology, GLenum indexType, U32 drawCount, PtrSize offset, BufferPtr buff)
			: m_topology(topology)
			, m_indexType(indexType)
			, m_drawCount(drawCount)
			, m_offset(offset)
			, m_buff(buff)
		{
			ANKI_ASSERT(drawCount > 0);
			ANKI_ASSERT((m_offset % 4) == 0);
		}

		Error operator()(GlState&)
		{
			const BufferImpl& buff = static_cast<const BufferImpl&>(*m_buff);
			ANKI_ASSERT(m_offset + sizeof(DrawElementsIndirectInfo) * m_drawCount <= buff.getSize());

			glBindBuffer(GL_DRAW_INDIRECT_BUFFER, buff.getGlName());
			glMultiDrawElementsIndirect(m_topology, m_indexType, numberToPtr<void*>(m_offset), m_drawCount,
				sizeof(DrawElementsIndirectInfo));
			glBindBuffer(GL_DRAW_INDIRECT_BUFFER, 0);

			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.checkIndexedDracall();
	self.flushDrawcall(*this);
	self.pushBackNewCommand<DrawElementsIndirectCommand>(
		convertPrimitiveTopology(topology), self.m_state.m_idx.m_indexType, drawCount, offset, indirectBuff);
}

void CommandBuffer::drawArraysIndirect(PrimitiveTopology topology, U32 drawCount, PtrSize offset,
	BufferPtr indirectBuff)
{
	class DrawArraysIndirectCommand final : public GlCommand
	{
	public:
		GLenum m_topology;
		U32 m_drawCount;
		PtrSize m_offset;
		BufferPtr m_buff;

		DrawArraysIndirectCommand(GLenum topology, U32 drawCount, PtrSize offset, BufferPtr buff)
			: m_topology(topology)
			, m_drawCount(drawCount)
			, m_offset(offset)
			, m_buff(buff)
		{
			ANKI_ASSERT(drawCount > 0);
			ANKI_ASSERT((m_offset % 4) == 0);
		}

		Error operator()(GlState& state)
		{
			const BufferImpl& buff = static_cast<const BufferImpl&>(*m_buff);
			ANKI_ASSERT(m_offset + sizeof(DrawArraysIndirectInfo) * m_drawCount <= buff.getSize());

			glBindBuffer(GL_DRAW_INDIRECT_BUFFER, buff.getGlName());
			glMultiDrawArraysIndirect(m_topology, numberToPtr<void*>(m_offset), m_drawCount,
				sizeof(DrawArraysIndirectInfo));
			glBindBuffer(GL_DRAW_INDIRECT_BUFFER, 0);

			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.checkNonIndexedDrawcall();
	self.flushDrawcall(*this);
	self.pushBackNewCommand<DrawArraysIndirectCommand>(convertPrimitiveTopology(topology), drawCount, offset,
		indirectBuff);
}

void CommandBuffer::dispatchCompute(U32 groupCountX, U32 groupCountY, U32 groupCountZ)
{
	class DispatchCommand final : public GlCommand
	{
	public:
		Array<U32, 3> m_size;

		DispatchCommand(U32 x, U32 y, U32 z)
			: m_size({{x, y, z}})
		{
		}

		Error operator()(GlState&)
		{
			glDispatchCompute(m_size[0], m_size[1], m_size[2]);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	ANKI_ASSERT(!!(self.m_flags & CommandBufferFlag::COMPUTE_WORK));
	self.m_state.checkDispatch();
	self.pushBackNewCommand<DispatchCommand>(groupCountX, groupCountY, groupCountZ);
}

void CommandBuffer::resetOcclusionQuery(OcclusionQueryPtr query)
{
	// Nothing for GL
}

void CommandBuffer::beginOcclusionQuery(OcclusionQueryPtr query)
{
	class OqBeginCommand final : public GlCommand
	{
	public:
		OcclusionQueryPtr m_handle;

		OqBeginCommand(const OcclusionQueryPtr& handle)
			: m_handle(handle)
		{
		}

		Error operator()(GlState&)
		{
			static_cast<OcclusionQueryImpl&>(*m_handle).begin();
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.pushBackNewCommand<OqBeginCommand>(query);
}

void CommandBuffer::endOcclusionQuery(OcclusionQueryPtr query)
{
	class OqEndCommand final : public GlCommand
	{
	public:
		OcclusionQueryPtr m_handle;

		OqEndCommand(const OcclusionQueryPtr& handle)
			: m_handle(handle)
		{
		}

		Error operator()(GlState&)
		{
			static_cast<OcclusionQueryImpl&>(*m_handle).end();
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.pushBackNewCommand<OqEndCommand>(query);
}
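
// Transfer-style operations (copies, clears, fills, mipmap generation) are only legal outside a render pass,
// which the assertions below enforce.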
void CommandBuffer::copyBufferToTextureView(BufferPtr buff, PtrSize offset, PtrSize range, TextureViewPtr texView)
{
	class TexSurfUploadCommand final : public GlCommand
	{
	public:
		BufferPtr m_buff;
		PtrSize m_offset;
		PtrSize m_range;
		TextureViewPtr m_texView;

		TexSurfUploadCommand(BufferPtr buff, PtrSize offset, PtrSize range, TextureViewPtr texView)
			: m_buff(buff)
			, m_offset(offset)
			, m_range(range)
			, m_texView(texView)
		{
		}

		Error operator()(GlState&)
		{
			const TextureViewImpl& viewImpl = static_cast<TextureViewImpl&>(*m_texView);
			const TextureImpl& texImpl = static_cast<TextureImpl&>(*viewImpl.m_tex);

			texImpl.copyFromBuffer(viewImpl.getSubresource(), static_cast<const BufferImpl&>(*m_buff).getGlName(),
				m_offset, m_range);

			return Error::NONE;
		}
	};

	ANKI_ASSERT(texView);
	ANKI_ASSERT(buff);
	ANKI_ASSERT(range > 0);
	ANKI_GL_SELF(CommandBufferImpl);
	ANKI_ASSERT(!self.m_state.insideRenderPass());
	self.pushBackNewCommand<TexSurfUploadCommand>(buff, offset, range, texView);
}

void CommandBuffer::copyBufferToBuffer(BufferPtr src, PtrSize srcOffset, BufferPtr dst, PtrSize dstOffset,
	PtrSize range)
{
	class Cmd final : public GlCommand
	{
	public:
		BufferPtr m_src;
		PtrSize m_srcOffset;
		BufferPtr m_dst;
		PtrSize m_dstOffset;
		PtrSize m_range;

		Cmd(BufferPtr src, PtrSize srcOffset, BufferPtr dst, PtrSize dstOffset, PtrSize range)
			: m_src(src)
			, m_srcOffset(srcOffset)
			, m_dst(dst)
			, m_dstOffset(dstOffset)
			, m_range(range)
		{
		}

		Error operator()(GlState& state)
		{
			static_cast<BufferImpl&>(*m_dst).write(static_cast<const BufferImpl&>(*m_src).getGlName(), m_srcOffset,
				m_dstOffset, m_range);
			return Error::NONE;
		}
	};

	ANKI_ASSERT(src);
	ANKI_ASSERT(dst);
	ANKI_ASSERT(range > 0);
	ANKI_GL_SELF(CommandBufferImpl);
	ANKI_ASSERT(!self.m_state.insideRenderPass());
	self.pushBackNewCommand<Cmd>(src, srcOffset, dst, dstOffset, range);
}

void CommandBuffer::generateMipmaps2d(TextureViewPtr texView)
{
	class GenMipsCommand final : public GlCommand
	{
	public:
		TextureViewPtr m_texView;

		GenMipsCommand(const TextureViewPtr& view)
			: m_texView(view)
		{
		}

		Error operator()(GlState&)
		{
			const TextureViewImpl& viewImpl = static_cast<TextureViewImpl&>(*m_texView);
			const TextureImpl& texImpl = static_cast<TextureImpl&>(*viewImpl.m_tex);
			texImpl.generateMipmaps2d(viewImpl);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	ANKI_ASSERT(!self.m_state.insideRenderPass());
	self.pushBackNewCommand<GenMipsCommand>(texView);
}

void CommandBuffer::generateMipmaps3d(TextureViewPtr tex)
{
	ANKI_ASSERT(!"TODO");
}

void CommandBuffer::pushSecondLevelCommandBuffer(CommandBufferPtr cmdb)
{
	class ExecCmdbCommand final : public GlCommand
	{
	public:
		CommandBufferPtr m_cmdb;

		ExecCmdbCommand(const CommandBufferPtr& cmdb)
			: m_cmdb(cmdb)
		{
		}

		Error operator()(GlState&)
		{
			ANKI_TRACE_SCOPED_EVENT(GL_2ND_LEVEL_CMD_BUFFER);
			return static_cast<CommandBufferImpl&>(*m_cmdb).executeAllCommands();
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.m_lastSecondLevelCmdb = static_cast<CommandBufferImpl*>(cmdb.get());
	self.pushBackNewCommand<ExecCmdbCommand>(cmdb);
}

Bool CommandBuffer::isEmpty() const
{
	ANKI_GL_SELF_CONST(CommandBufferImpl);
	return self.isEmpty();
}

void CommandBuffer::blitTextureViews(TextureViewPtr srcView, TextureViewPtr destView)
{
	ANKI_ASSERT(!"TODO");
}
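
// GL has no fine-grained barriers: the usage flags are only used to select glMemoryBarrier() bits, and the
// buffer/subresource arguments of the barrier methods below are ignored.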
void CommandBuffer::setBufferBarrier(BufferPtr buff, BufferUsageBit prevUsage, BufferUsageBit nextUsage, PtrSize offset,
	PtrSize size)
{
	class SetBufferMemBarrierCommand final : public GlCommand
	{
	public:
		GLenum m_barrier;

		SetBufferMemBarrierCommand(GLenum barrier)
			: m_barrier(barrier)
		{
		}

		Error operator()(GlState&)
		{
			glMemoryBarrier(m_barrier);
			return Error::NONE;
		}
	};

	GLenum d = GL_NONE;
	BufferUsageBit all = prevUsage | nextUsage;

	if(!!(all & BufferUsageBit::UNIFORM_ALL))
	{
		d |= GL_UNIFORM_BARRIER_BIT;
	}

	if(!!(all & BufferUsageBit::STORAGE_ALL))
	{
		d |= GL_SHADER_STORAGE_BARRIER_BIT;
	}

	if(!!(all & BufferUsageBit::INDEX))
	{
		d |= GL_ELEMENT_ARRAY_BARRIER_BIT;
	}

	if(!!(all & BufferUsageBit::VERTEX))
	{
		d |= GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT;
	}

	if(!!(all & BufferUsageBit::INDIRECT_ALL))
	{
		d |= GL_COMMAND_BARRIER_BIT;
	}

	if(!!(all
		& (BufferUsageBit::FILL | BufferUsageBit::BUFFER_UPLOAD_SOURCE | BufferUsageBit::BUFFER_UPLOAD_DESTINATION)))
	{
		d |= GL_BUFFER_UPDATE_BARRIER_BIT;
	}

	if(!!(all & BufferUsageBit::QUERY_RESULT))
	{
		d |= GL_QUERY_BUFFER_BARRIER_BIT;
	}

	ANKI_ASSERT(d);
	ANKI_GL_SELF(CommandBufferImpl);
	self.pushBackNewCommand<SetBufferMemBarrierCommand>(d);
}

void CommandBuffer::setTextureSurfaceBarrier(TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage,
	const TextureSurfaceInfo& surf)
{
	TextureSubresourceInfo subresource;
	setTextureBarrier(tex, prevUsage, nextUsage, subresource);
}

void CommandBuffer::setTextureVolumeBarrier(TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage,
	const TextureVolumeInfo& vol)
{
	TextureSubresourceInfo subresource;
	setTextureBarrier(tex, prevUsage, nextUsage, subresource);
}

void CommandBuffer::setTextureBarrier(TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage,
	const TextureSubresourceInfo& subresource)
{
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_barrier;

		Cmd(GLenum barrier)
			: m_barrier(barrier)
		{
		}

		Error operator()(GlState&)
		{
			glMemoryBarrier(m_barrier);
			return Error::NONE;
		}
	};

	const TextureUsageBit usage = nextUsage;
	GLenum e = 0;

	if(!!(usage & TextureUsageBit::SAMPLED_ALL))
	{
		e |= GL_TEXTURE_FETCH_BARRIER_BIT;
	}

	if(!!(usage & TextureUsageBit::IMAGE_ALL))
	{
		e |= GL_SHADER_IMAGE_ACCESS_BARRIER_BIT;
	}

	if(!!(usage & TextureUsageBit::TRANSFER_DESTINATION))
	{
		e |= GL_TEXTURE_UPDATE_BARRIER_BIT;
	}

	if(!!(usage & TextureUsageBit::FRAMEBUFFER_ATTACHMENT_READ_WRITE))
	{
		e |= GL_FRAMEBUFFER_BARRIER_BIT;
	}

	if(!!(usage & TextureUsageBit::CLEAR))
	{
		// No idea
	}

	if(!!(usage & TextureUsageBit::GENERATE_MIPMAPS))
	{
		// No idea
	}

	if(e != 0)
	{
		ANKI_GL_SELF(CommandBufferImpl);
		self.pushBackNewCommand<Cmd>(e);
	}
}

void CommandBuffer::clearTextureView(TextureViewPtr texView, const ClearValue& clearValue)
{
	class ClearTextCommand final : public GlCommand
	{
	public:
		TextureViewPtr m_texView;
		ClearValue m_val;

		ClearTextCommand(TextureViewPtr texView, const ClearValue& val)
			: m_texView(texView)
			, m_val(val)
		{
		}

		Error operator()(GlState&)
		{
			const TextureViewImpl& viewImpl = static_cast<TextureViewImpl&>(*m_texView);
			const TextureImpl& texImpl = static_cast<TextureImpl&>(*viewImpl.m_tex);
			texImpl.clear(viewImpl.getSubresource(), m_val);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	ANKI_ASSERT(!self.m_state.insideRenderPass());
	self.pushBackNewCommand<ClearTextCommand>(texView, clearValue);
}

void CommandBuffer::fillBuffer(BufferPtr buff, PtrSize offset, PtrSize size, U32 value)
{
	class FillBufferCommand final : public GlCommand
	{
	public:
		BufferPtr m_buff;
		PtrSize m_offset;
		PtrSize m_size;
		U32 m_value;

		FillBufferCommand(BufferPtr buff, PtrSize offset, PtrSize size, U32 value)
			: m_buff(buff)
			, m_offset(offset)
			, m_size(size)
			, m_value(value)
		{
		}

		Error operator()(GlState&)
		{
			static_cast<BufferImpl&>(*m_buff).fill(m_offset, m_size, m_value);
			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	ANKI_ASSERT(!self.m_state.insideRenderPass());
	self.pushBackNewCommand<FillBufferCommand>(buff, offset, size, value);
}

void CommandBuffer::writeOcclusionQueryResultToBuffer(OcclusionQueryPtr query, PtrSize offset, BufferPtr buff)
{
	class WriteOcclResultToBuff final : public GlCommand
	{
	public:
		OcclusionQueryPtr m_query;
		PtrSize m_offset;
		BufferPtr m_buff;

		WriteOcclResultToBuff(OcclusionQueryPtr query, PtrSize offset, BufferPtr buff)
			: m_query(query)
			, m_offset(offset)
			, m_buff(buff)
		{
			ANKI_ASSERT((m_offset % 4) == 0);
		}

		Error operator()(GlState&)
		{
			const BufferImpl& buff = static_cast<const BufferImpl&>(*m_buff);
			ANKI_ASSERT(m_offset + 4 <= buff.getSize());

			glBindBuffer(GL_QUERY_BUFFER, buff.getGlName());
			glGetQueryObjectuiv(static_cast<const OcclusionQueryImpl&>(*m_query).getGlName(), GL_QUERY_RESULT,
				numberToPtr<GLuint*>(m_offset));
			glBindBuffer(GL_QUERY_BUFFER, 0);

			return Error::NONE;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	ANKI_ASSERT(!self.m_state.insideRenderPass());
	self.pushBackNewCommand<WriteOcclResultToBuff>(query, offset, buff);
}
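
// Push constants are emulated: the data is copied into the command and uploaded with plain glUniform*() calls,
// using the program reflection to find the location and type of each uniform.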
void CommandBuffer::setPushConstants(const void* data, U32 dataSize)
{
	class PushConstants final : public GlCommand
	{
	public:
		DynamicArrayAuto<Vec4> m_data;

		PushConstants(const void* data, U32 dataSize, const CommandBufferAllocator<F32>& alloc)
			: m_data(alloc)
		{
			m_data.create(dataSize / sizeof(Vec4));
			memcpy(&m_data[0], data, dataSize);
		}

		Error operator()(GlState& state)
		{
			const ShaderProgramImplReflection& refl =
				static_cast<ShaderProgramImpl&>(*state.m_crntProg).getReflection();
			ANKI_ASSERT(refl.m_uniformDataSize == m_data.getSizeInBytes());

			const Bool transpose = true;

			for(const ShaderProgramImplReflection::Uniform& uni : refl.m_uniforms)
			{
				const U8* data = reinterpret_cast<const U8*>(&m_data[0]) + uni.m_pushConstantOffset;
				const U count = uni.m_arrSize;
				const GLint loc = uni.m_location;

				switch(uni.m_type)
				{
				case ShaderVariableDataType::VEC4:
					glUniform4fv(loc, count, reinterpret_cast<const GLfloat*>(data));
					break;
				case ShaderVariableDataType::IVEC4:
					glUniform4iv(loc, count, reinterpret_cast<const GLint*>(data));
					break;
				case ShaderVariableDataType::UVEC4:
					glUniform4uiv(loc, count, reinterpret_cast<const GLuint*>(data));
					break;
				case ShaderVariableDataType::MAT4:
					glUniformMatrix4fv(loc, count, transpose, reinterpret_cast<const GLfloat*>(data));
					break;
				case ShaderVariableDataType::MAT3:
				{
					// Remove the padding
					ANKI_ASSERT(count == 1 && "TODO");
					const Mat3x4* m34 = reinterpret_cast<const Mat3x4*>(data);
					Mat3 m3(m34->getRotationPart());
					glUniformMatrix3fv(loc, count, transpose, reinterpret_cast<const GLfloat*>(&m3));
					break;
				}
				default:
					ANKI_ASSERT(!"TODO");
				}
			}

			return Error::NONE;
		}
	};

	ANKI_ASSERT(data);
	ANKI_ASSERT(dataSize);
	ANKI_ASSERT(dataSize % 16 == 0);

	ANKI_GL_SELF(CommandBufferImpl);
	self.pushBackNewCommand<PushConstants>(data, dataSize, self.m_alloc);
}

void CommandBuffer::setRasterizationOrder(RasterizationOrder order)
{
	// Nothing for GL
}

} // end namespace anki