// Copyright (C) 2009-2023, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <AnKi/Gr/CommandBuffer.h>
#include <AnKi/Gr/gl/CommandBufferImpl.h>
#include <AnKi/Gr/GrManager.h>
#include <AnKi/Gr/gl/GrManagerImpl.h>
#include <AnKi/Gr/gl/RenderingThread.h>
#include <AnKi/Gr/gl/GlState.h>
#include <AnKi/Gr/gl/FramebufferImpl.h>
#include <AnKi/Gr/gl/OcclusionQueryImpl.h>
#include <AnKi/Gr/gl/TextureImpl.h>
#include <AnKi/Gr/gl/BufferImpl.h>
#include <AnKi/Gr/gl/SamplerImpl.h>
#include <AnKi/Gr/gl/ShaderProgramImpl.h>
#include <AnKi/Gr/gl/TextureViewImpl.h>
#include <AnKi/Core/Trace.h>

namespace anki {

CommandBuffer* CommandBuffer::newInstance(GrManager* manager, const CommandBufferInitInfo& inf)
{
    CommandBufferImpl* impl = manager->getAllocator().newInstance<CommandBufferImpl>(manager, inf.getName());
    impl->init(inf);
    return impl;
}

void CommandBuffer::flush(FencePtr* fence)
{
    ANKI_GL_SELF(CommandBufferImpl);

    if(!self.isSecondLevel())
    {
        ANKI_ASSERT(!self.m_state.insideRenderPass());
    }
    else
    {
        ANKI_ASSERT(fence == nullptr);
    }

    if(!self.isSecondLevel())
    {
        static_cast<GrManagerImpl&>(getManager())
            .getRenderingThread()
            .flushCommandBuffer(CommandBufferPtr(this), fence);
    }
}

void CommandBuffer::bindVertexBuffer(U32 binding, BufferPtr buff, PtrSize offset, PtrSize stride,
                                     VertexStepRate stepRate)
{
    class Cmd final : public GlCommand
    {
    public:
        BufferPtr m_buff;
        U32 m_binding;
        PtrSize m_offset;
        PtrSize m_stride;
        Bool m_instanced;

        Cmd(U32 binding, BufferPtr buff, PtrSize offset, PtrSize stride, Bool instanced)
            : m_buff(buff)
            , m_binding(binding)
            , m_offset(offset)
            , m_stride(stride)
            , m_instanced(instanced)
        {
        }

        Error operator()(GlState& state)
        {
            glBindVertexBuffer(m_binding, static_cast<const BufferImpl&>(*m_buff).getGlName(), m_offset, m_stride);
            glVertexBindingDivisor(m_binding, (m_instanced) ? 1 : 0);
            return Error::kNone;
        }
    };

    ANKI_ASSERT(buff);
    ANKI_ASSERT(stride > 0);
    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.bindVertexBuffer(binding, buff, offset, stride, stepRate))
    {
        self.pushBackNewCommand<Cmd>(binding, buff, offset, stride, stepRate == VertexStepRate::INSTANCE);
    }
}

void CommandBuffer::setVertexAttribute(U32 location, U32 buffBinding, Format fmt, PtrSize relativeOffset)
{
    class Cmd final : public GlCommand
    {
    public:
        U32 m_location;
        U32 m_buffBinding;
        U8 m_compSize;
        GLenum m_fmt;
        Bool m_normalized;
        PtrSize m_relativeOffset;

        Cmd(U32 location, U32 buffBinding, U8 compSize, GLenum fmt, Bool normalized, PtrSize relativeOffset)
            : m_location(location)
            , m_buffBinding(buffBinding)
            , m_compSize(compSize)
            , m_fmt(fmt)
            , m_normalized(normalized)
            , m_relativeOffset(relativeOffset)
        {
        }

        Error operator()(GlState& state)
        {
            glVertexAttribFormat(m_location, m_compSize, m_fmt, m_normalized, m_relativeOffset);
            glVertexAttribBinding(m_location, m_buffBinding);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setVertexAttribute(location, buffBinding, fmt, relativeOffset))
    {
        U compCount;
        GLenum type;
        Bool normalized;
        convertVertexFormat(fmt, compCount, type, normalized);

        self.pushBackNewCommand<Cmd>(location, buffBinding, compCount, type, normalized, relativeOffset);
    }
}

void CommandBuffer::bindIndexBuffer(BufferPtr buff, PtrSize offset, IndexType type)
{
    class Cmd final : public GlCommand
    {
    public:
        BufferPtr m_buff;

        Cmd(BufferPtr buff)
            : m_buff(buff)
        {
        }

        Error operator()(GlState& state)
        {
            glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, static_cast<const BufferImpl&>(*m_buff).getGlName());
            return Error::kNone;
        }
    };

    ANKI_ASSERT(buff);
    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.bindIndexBuffer(buff, offset, type))
    {
        self.pushBackNewCommand<Cmd>(buff);
    }
}

void CommandBuffer::setPrimitiveRestart(Bool enable)
{
    class Cmd final : public GlCommand
    {
    public:
        Bool m_enable;

        Cmd(Bool enable)
            : m_enable(enable)
        {
        }

        Error operator()(GlState& state)
        {
            if(m_enable)
            {
                glEnable(GL_PRIMITIVE_RESTART);
            }
            else
            {
                glDisable(GL_PRIMITIVE_RESTART);
            }

            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setPrimitiveRestart(enable))
    {
        self.pushBackNewCommand<Cmd>(enable);
    }
}

void CommandBuffer::setViewport(U32 minx, U32 miny, U32 width, U32 height)
{
    class ViewportCommand final : public GlCommand
    {
    public:
        Array<U32, 4> m_value;

        ViewportCommand(U32 a, U32 b, U32 c, U32 d)
        {
            m_value = {{a, b, c, d}};
        }

        Error operator()(GlState& state)
        {
            glViewport(m_value[0], m_value[1], m_value[2], m_value[3]);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setViewport(minx, miny, width, height))
    {
        self.pushBackNewCommand<ViewportCommand>(minx, miny, width, height);
    }
}

void CommandBuffer::setScissor(U32 minx, U32 miny, U32 width, U32 height)
{
    ANKI_ASSERT(minx < kMaxU32 && miny < kMaxU32);
    ANKI_ASSERT(width > 0 && height > 0);

    class ScissorCommand final : public GlCommand
    {
    public:
        Array<GLsizei, 4> m_value;

        ScissorCommand(GLsizei a, GLsizei b, GLsizei c, GLsizei d)
        {
            m_value = {{a, b, c, d}};
        }

        Error operator()(GlState& state)
        {
            if(state.m_scissor[0] != m_value[0] || state.m_scissor[1] != m_value[1] || state.m_scissor[2] != m_value[2]
               || state.m_scissor[3] != m_value[3])
            {
                state.m_scissor = m_value;
                glScissor(m_value[0], m_value[1], m_value[2], m_value[3]);
            }

            return Error::kNone;
        }
    };

    // Limit the width and height to GLsizei
    const GLsizei iwidth = (width == kMaxU32) ? kMaxI32 : width;
    const GLsizei iheight = (height == kMaxU32) ? kMaxI32 : height;
    const GLsizei iminx = minx;
    const GLsizei iminy = miny;

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setScissor(iminx, iminy, iwidth, iheight))
    {
        self.pushBackNewCommand<ScissorCommand>(iminx, iminy, iwidth, iheight);
    }
}

void CommandBuffer::setFillMode(FillMode mode)
{
    class Cmd final : public GlCommand
    {
    public:
        GLenum m_fillMode;

        Cmd(GLenum fillMode)
            : m_fillMode(fillMode)
        {
        }

        Error operator()(GlState& state)
        {
            glPolygonMode(GL_FRONT_AND_BACK, m_fillMode);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setFillMode(mode))
    {
        self.pushBackNewCommand<Cmd>(convertFillMode(mode));
    }
}

void CommandBuffer::setCullMode(FaceSelectionBit mode)
{
    class Cmd final : public GlCommand
    {
    public:
        GLenum m_mode;

        Cmd(GLenum mode)
            : m_mode(mode)
        {
        }

        Error operator()(GlState& state)
        {
            glCullFace(m_mode);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setCullMode(mode))
    {
        self.pushBackNewCommand<Cmd>(convertFaceMode(mode));
    }
}

void CommandBuffer::setPolygonOffset(F32 factor, F32 units)
{
    class Cmd final : public GlCommand
    {
    public:
        F32 m_factor;
        F32 m_units;

        Cmd(F32 factor, F32 units)
            : m_factor(factor)
            , m_units(units)
        {
        }

        Error operator()(GlState& state)
        {
            if(m_factor == 0.0 && m_units == 0.0)
            {
                glDisable(GL_POLYGON_OFFSET_FILL);
            }
            else
            {
                glEnable(GL_POLYGON_OFFSET_FILL);
                glPolygonOffset(m_factor, m_units);
            }

            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setPolygonOffset(factor, units))
    {
        self.pushBackNewCommand<Cmd>(factor, units);
    }
}

void CommandBuffer::setStencilOperations(FaceSelectionBit face, StencilOperation stencilFail,
                                         StencilOperation stencilPassDepthFail, StencilOperation stencilPassDepthPass)
{
    class Cmd final : public GlCommand
    {
    public:
        GLenum m_face;
        GLenum m_stencilFail;
        GLenum m_stencilPassDepthFail;
        GLenum m_stencilPassDepthPass;

        Cmd(GLenum face, GLenum stencilFail, GLenum stencilPassDepthFail, GLenum stencilPassDepthPass)
            : m_face(face)
            , m_stencilFail(stencilFail)
            , m_stencilPassDepthFail(stencilPassDepthFail)
            , m_stencilPassDepthPass(stencilPassDepthPass)
        {
        }

        Error operator()(GlState& state)
        {
            glStencilOpSeparate(m_face, m_stencilFail, m_stencilPassDepthFail, m_stencilPassDepthPass);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setStencilOperations(face, stencilFail, stencilPassDepthFail, stencilPassDepthPass))
    {
        self.pushBackNewCommand<Cmd>(convertFaceMode(face), convertStencilOperation(stencilFail),
                                     convertStencilOperation(stencilPassDepthFail),
                                     convertStencilOperation(stencilPassDepthPass));
    }
}

void CommandBuffer::setStencilCompareOperation(FaceSelectionBit face, CompareOperation comp)
{
    ANKI_GL_SELF(CommandBufferImpl);
    self.m_state.setStencilCompareOperation(face, comp);
}

void CommandBuffer::setStencilCompareMask(FaceSelectionBit face, U32 mask)
{
    ANKI_GL_SELF(CommandBufferImpl);
    self.m_state.setStencilCompareMask(face, mask);
}

void CommandBuffer::setStencilWriteMask(FaceSelectionBit face, U32 mask)
{
    class Cmd final : public GlCommand
    {
    public:
        GLenum m_face;
        U32 m_mask;

        Cmd(GLenum face, U32 mask)
            : m_face(face)
            , m_mask(mask)
        {
        }

        Error operator()(GlState& state)
        {
            glStencilMaskSeparate(m_face, m_mask);

            if(m_face == GL_FRONT)
            {
                state.m_stencilWriteMask[0] = m_mask;
            }
            else if(m_face == GL_BACK)
            {
                state.m_stencilWriteMask[1] = m_mask;
            }
            else
            {
                ANKI_ASSERT(m_face == GL_FRONT_AND_BACK);
                state.m_stencilWriteMask[0] = state.m_stencilWriteMask[1] = m_mask;
            }

            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setStencilWriteMask(face, mask))
    {
        self.pushBackNewCommand<Cmd>(convertFaceMode(face), mask);
    }
}

void CommandBuffer::setStencilReference(FaceSelectionBit face, U32 ref)
{
    ANKI_GL_SELF(CommandBufferImpl);
    self.m_state.setStencilReference(face, ref);
}

void CommandBuffer::setDepthWrite(Bool enable)
{
    class Cmd final : public GlCommand
    {
    public:
        Bool m_enable;

        Cmd(Bool enable)
            : m_enable(enable)
        {
        }

        Error operator()(GlState& state)
        {
            glDepthMask(m_enable);
            state.m_depthWriteMask = m_enable;
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setDepthWrite(enable))
    {
        self.pushBackNewCommand<Cmd>(enable);
    }
}

void CommandBuffer::setDepthCompareOperation(CompareOperation op)
{
    class Cmd final : public GlCommand
    {
    public:
        GLenum m_op;

        Cmd(GLenum op)
            : m_op(op)
        {
        }

        Error operator()(GlState& state)
        {
            glDepthFunc(m_op);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setDepthCompareOperation(op))
    {
        self.pushBackNewCommand<Cmd>(convertCompareOperation(op));
    }
}

void CommandBuffer::setAlphaToCoverage(Bool enable)
{
    ANKI_ASSERT(!"TODO");
}

void CommandBuffer::setColorChannelWriteMask(U32 attachment, ColorBit mask)
{
    class Cmd final : public GlCommand
    {
    public:
        U8 m_attachment;
        ColorBit m_mask;

        Cmd(U8 attachment, ColorBit mask)
            : m_attachment(attachment)
            , m_mask(mask)
        {
        }

        Error operator()(GlState& state)
        {
            const Bool r = !!(m_mask & ColorBit::RED);
            const Bool g = !!(m_mask & ColorBit::GREEN);
            const Bool b = !!(m_mask & ColorBit::BLUE);
            const Bool a = !!(m_mask & ColorBit::ALPHA);

            glColorMaski(m_attachment, r, g, b, a);
            state.m_colorWriteMasks[m_attachment] = {{r, g, b, a}};
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setColorChannelWriteMask(attachment, mask))
    {
        self.pushBackNewCommand<Cmd>(attachment, mask);
    }
}

void CommandBuffer::setBlendFactors(U32 attachment, BlendFactor srcRgb, BlendFactor dstRgb, BlendFactor srcA,
                                    BlendFactor dstA)
{
    class Cmd final : public GlCommand
    {
    public:
        U8 m_attachment;
        GLenum m_srcRgb;
        GLenum m_dstRgb;
        GLenum m_srcA;
        GLenum m_dstA;

        Cmd(U8 att, GLenum srcRgb, GLenum dstRgb, GLenum srcA, GLenum dstA)
            : m_attachment(att)
            , m_srcRgb(srcRgb)
            , m_dstRgb(dstRgb)
            , m_srcA(srcA)
            , m_dstA(dstA)
        {
        }

        Error operator()(GlState&)
        {
            glBlendFuncSeparatei(m_attachment, m_srcRgb, m_dstRgb, m_srcA, m_dstA);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setBlendFactors(attachment, srcRgb, dstRgb, srcA, dstA))
    {
        self.pushBackNewCommand<Cmd>(attachment, convertBlendFactor(srcRgb), convertBlendFactor(dstRgb),
                                     convertBlendFactor(srcA), convertBlendFactor(dstA));
    }
}

void CommandBuffer::setBlendOperation(U32 attachment, BlendOperation funcRgb, BlendOperation funcA)
{
    class Cmd final : public GlCommand
    {
    public:
        U8 m_attachment;
        GLenum m_funcRgb;
        GLenum m_funcA;

        Cmd(U8 att, GLenum funcRgb, GLenum funcA)
            : m_attachment(att)
            , m_funcRgb(funcRgb)
            , m_funcA(funcA)
        {
        }

        Error operator()(GlState&)
        {
            glBlendEquationSeparatei(m_attachment, m_funcRgb, m_funcA);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.setBlendOperation(attachment, funcRgb, funcA))
    {
        self.pushBackNewCommand<Cmd>(attachment, convertBlendOperation(funcRgb), convertBlendOperation(funcA));
    }
}
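
// The resource binding methods below flatten the (set, binding) pair into a single GL binding index,
// e.g. unit = binding + MAX_TEXTURE_BINDINGS * set for textures and samplers.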

void CommandBuffer::bindTextureAndSampler(U32 set, U32 binding, TextureViewPtr texView, SamplerPtr sampler,
                                          TextureUsageBit usage)
{
    class Cmd final : public GlCommand
    {
    public:
        U32 m_unit;
        TextureViewPtr m_texView;
        SamplerPtr m_sampler;

        Cmd(U32 unit, TextureViewPtr texView, SamplerPtr sampler)
            : m_unit(unit)
            , m_texView(texView)
            , m_sampler(sampler)
        {
        }

        Error operator()(GlState&)
        {
            glBindTextureUnit(m_unit, static_cast<const TextureViewImpl&>(*m_texView).m_view.m_glName);
            glBindSampler(m_unit, static_cast<const SamplerImpl&>(*m_sampler).getGlName());
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    ANKI_ASSERT(static_cast<const TextureViewImpl&>(*texView).m_tex->isSubresourceGoodForSampling(
        static_cast<const TextureViewImpl&>(*texView).getSubresource()));

    if(self.m_state.bindTextureViewAndSampler(set, binding, texView, sampler))
    {
        U unit = binding + MAX_TEXTURE_BINDINGS * set;
        self.pushBackNewCommand<Cmd>(unit, texView, sampler);
    }
}

void CommandBuffer::bindUniformBuffer(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
{
    class Cmd final : public GlCommand
    {
    public:
        BufferPtr m_buff;
        PtrSize m_binding;
        PtrSize m_offset;
        PtrSize m_range;

        Cmd(U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
            : m_buff(buff)
            , m_binding(binding)
            , m_offset(offset)
            , m_range(range)
        {
        }

        Error operator()(GlState&)
        {
            static_cast<const BufferImpl&>(*m_buff).bind(GL_UNIFORM_BUFFER, m_binding, m_offset, m_range);
            return Error::kNone;
        }
    };

    ANKI_ASSERT(buff);
    ANKI_ASSERT(range > 0);
    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.bindUniformBuffer(set, binding, buff, offset, range))
    {
        binding = binding + MAX_UNIFORM_BUFFER_BINDINGS * set;
        self.pushBackNewCommand<Cmd>(binding, buff, offset, range);
    }
}

void CommandBuffer::bindStorageBuffer(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
{
    class Cmd final : public GlCommand
    {
    public:
        BufferPtr m_buff;
        PtrSize m_binding;
        PtrSize m_offset;
        PtrSize m_range;

        Cmd(U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
            : m_buff(buff)
            , m_binding(binding)
            , m_offset(offset)
            , m_range(range)
        {
        }

        Error operator()(GlState&)
        {
            static_cast<const BufferImpl&>(*m_buff).bind(GL_SHADER_STORAGE_BUFFER, m_binding, m_offset, m_range);
            return Error::kNone;
        }
    };

    ANKI_ASSERT(buff);
    ANKI_ASSERT(range > 0);
    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.bindStorageBuffer(set, binding, buff, offset, range))
    {
        binding = binding + MAX_STORAGE_BUFFER_BINDINGS * set;
        self.pushBackNewCommand<Cmd>(binding, buff, offset, range);
    }
}

void CommandBuffer::bindImage(U32 set, U32 binding, TextureViewPtr img)
{
    class Cmd final : public GlCommand
    {
    public:
        TextureViewPtr m_img;
        U16 m_unit;

        Cmd(U32 unit, TextureViewPtr img)
            : m_img(img)
            , m_unit(unit)
        {
        }

        Error operator()(GlState&)
        {
            const TextureViewImpl& view = static_cast<const TextureViewImpl&>(*m_img);
            glBindImageTexture(m_unit, view.m_view.m_glName, 0, GL_TRUE, 0, GL_READ_WRITE,
                               static_cast<const TextureImpl&>(*view.m_tex).m_internalFormat);
            return Error::kNone;
        }
    };

    ANKI_ASSERT(img);
    ANKI_GL_SELF(CommandBufferImpl);
    ANKI_ASSERT(static_cast<const TextureViewImpl&>(*img).m_tex->isSubresourceGoodForImageLoadStore(
        static_cast<const TextureViewImpl&>(*img).getSubresource()));

    if(self.m_state.bindImage(set, binding, img))
    {
        binding = binding + set * MAX_IMAGE_BINDINGS;
        self.pushBackNewCommand<Cmd>(binding, img);
    }
}

void CommandBuffer::bindTextureBuffer(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range, Format fmt)
{
    class Cmd final : public GlCommand
    {
    public:
        U32 m_set;
        U32 m_binding;
        BufferPtr m_buff;
        PtrSize m_offset;
        PtrSize m_range;
        GLenum m_fmt;

        Cmd(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range, GLenum fmt)
            : m_set(set)
            , m_binding(binding)
            , m_buff(buff)
            , m_offset(offset)
            , m_range(range)
            , m_fmt(fmt)
        {
        }

        Error operator()(GlState& state)
        {
            ANKI_ASSERT(m_offset + m_range <= m_buff->getSize());
            const GLuint tex = state.m_texBuffTextures[m_set][m_binding];
            glTextureBufferRange(tex, m_fmt, static_cast<const BufferImpl&>(*m_buff).getGlName(), m_offset, m_range);
            return Error::kNone;
        }
    };

    Bool compressed;
    GLenum format;
    GLenum internalFormat;
    GLenum type;
    DepthStencilAspectBit dsAspect;
    convertTextureInformation(fmt, compressed, format, internalFormat, type, dsAspect);
    (void)compressed;
    (void)format;
    (void)type;
    (void)dsAspect;

    ANKI_GL_SELF(CommandBufferImpl);
    self.pushBackNewCommand<Cmd>(set, binding, buff, offset, range, internalFormat);
}

void CommandBuffer::bindShaderProgram(ShaderProgramPtr prog)
{
    class Cmd final : public GlCommand
    {
    public:
        ShaderProgramPtr m_prog;

        Cmd(const ShaderProgramPtr& prog)
            : m_prog(prog)
        {
        }

        Error operator()(GlState& state)
        {
            state.m_crntProg = m_prog;
            glUseProgram(static_cast<const ShaderProgramImpl&>(*m_prog).getGlName());
            return Error::kNone;
        }
    };

    ANKI_ASSERT(prog);
    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.bindShaderProgram(prog))
    {
        self.pushBackNewCommand<Cmd>(prog);
    }
    else
    {
        ANKI_TRACE_INC_COUNTER(GL_PROGS_SKIPPED, 1);
    }
}

void CommandBuffer::beginRenderPass(FramebufferPtr fb,
                                    const Array<TextureUsageBit, kMaxColorRenderTargets>& colorAttachmentUsages,
                                    TextureUsageBit depthStencilAttachmentUsage, U32 minx, U32 miny, U32 width,
                                    U32 height)
{
    class BindFramebufferCommand final : public GlCommand
    {
    public:
        FramebufferPtr m_fb;
        Array<U32, 4> m_renderArea;

        BindFramebufferCommand(FramebufferPtr fb, U32 minx, U32 miny, U32 width, U32 height)
            : m_fb(fb)
            , m_renderArea{{minx, miny, width, height}}
        {
        }

        Error operator()(GlState& state)
        {
            static_cast<const FramebufferImpl&>(*m_fb).bind(state, m_renderArea[0], m_renderArea[1], m_renderArea[2],
                                                            m_renderArea[3]);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);

    if(self.m_state.beginRenderPass(fb))
    {
        self.pushBackNewCommand<BindFramebufferCommand>(fb, minx, miny, width, height);
    }
}

void CommandBuffer::endRenderPass()
{
    class Command final : public GlCommand
    {
    public:
        const FramebufferImpl* m_fb;

        Command(const FramebufferImpl* fb)
            : m_fb(fb)
        {
            ANKI_ASSERT(fb);
        }

        Error operator()(GlState&)
        {
            m_fb->endRenderPass();
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    self.pushBackNewCommand<Command>(self.m_state.m_fb);
    self.m_state.endRenderPass();
}
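
// drawElements converts firstIndex from an index count to a byte offset into the currently bound
// index buffer (using the index type and offset recorded by bindIndexBuffer) before recording the draw.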

void CommandBuffer::drawElements(PrimitiveTopology topology, U32 count, U32 instanceCount, U32 firstIndex,
                                 U32 baseVertex, U32 baseInstance)
{
    class Cmd final : public GlCommand
    {
    public:
        GLenum m_topology;
        GLenum m_indexType;
        DrawElementsIndirectInfo m_info;

        Cmd(GLenum topology, GLenum indexType, const DrawElementsIndirectInfo& info)
            : m_topology(topology)
            , m_indexType(indexType)
            , m_info(info)
        {
        }

        Error operator()(GlState&)
        {
            glDrawElementsInstancedBaseVertexBaseInstance(
                m_topology, m_info.m_count, m_indexType, numberToPtr<void*>(m_info.m_firstIndex),
                m_info.m_instanceCount, m_info.m_baseVertex, m_info.m_baseInstance);

            ANKI_TRACE_INC_COUNTER(GR_DRAWCALLS, 1);
            ANKI_TRACE_INC_COUNTER(GR_VERTICES, m_info.m_instanceCount * m_info.m_count);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    self.m_state.checkIndexedDracall();
    self.flushDrawcall(*this);

    U idxBytes;
    if(self.m_state.m_idx.m_indexType == GL_UNSIGNED_SHORT)
    {
        idxBytes = sizeof(U16);
    }
    else
    {
        ANKI_ASSERT(self.m_state.m_idx.m_indexType == GL_UNSIGNED_INT);
        idxBytes = sizeof(U32);
    }

    firstIndex = firstIndex * idxBytes + self.m_state.m_idx.m_offset;

    DrawElementsIndirectInfo info(count, instanceCount, firstIndex, baseVertex, baseInstance);
    self.pushBackNewCommand<Cmd>(convertPrimitiveTopology(topology), self.m_state.m_idx.m_indexType, info);
}

void CommandBuffer::drawArrays(PrimitiveTopology topology, U32 count, U32 instanceCount, U32 first, U32 baseInstance)
{
    class DrawArraysCommand final : public GlCommand
    {
    public:
        GLenum m_topology;
        DrawArraysIndirectInfo m_info;

        DrawArraysCommand(GLenum topology, const DrawArraysIndirectInfo& info)
            : m_topology(topology)
            , m_info(info)
        {
        }

        Error operator()(GlState& state)
        {
            glDrawArraysInstancedBaseInstance(m_topology, m_info.m_first, m_info.m_count, m_info.m_instanceCount,
                                              m_info.m_baseInstance);

            ANKI_TRACE_INC_COUNTER(GR_DRAWCALLS, 1);
            ANKI_TRACE_INC_COUNTER(GR_VERTICES, m_info.m_instanceCount * m_info.m_count);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    self.m_state.checkNonIndexedDrawcall();
    self.flushDrawcall(*this);

    DrawArraysIndirectInfo info(count, instanceCount, first, baseInstance);
    self.pushBackNewCommand<DrawArraysCommand>(convertPrimitiveTopology(topology), info);
}

void CommandBuffer::drawElementsIndirect(PrimitiveTopology topology, U32 drawCount, PtrSize offset,
                                         BufferPtr indirectBuff)
{
    class DrawElementsIndirectCommand final : public GlCommand
    {
    public:
        GLenum m_topology;
        GLenum m_indexType;
        U32 m_drawCount;
        PtrSize m_offset;
        BufferPtr m_buff;

        DrawElementsIndirectCommand(GLenum topology, GLenum indexType, U32 drawCount, PtrSize offset, BufferPtr buff)
            : m_topology(topology)
            , m_indexType(indexType)
            , m_drawCount(drawCount)
            , m_offset(offset)
            , m_buff(buff)
        {
            ANKI_ASSERT(drawCount > 0);
            ANKI_ASSERT((m_offset % 4) == 0);
        }

        Error operator()(GlState&)
        {
            const BufferImpl& buff = static_cast<const BufferImpl&>(*m_buff);
            ANKI_ASSERT(m_offset + sizeof(DrawElementsIndirectInfo) * m_drawCount <= buff.getSize());

            glBindBuffer(GL_DRAW_INDIRECT_BUFFER, buff.getGlName());
            glMultiDrawElementsIndirect(m_topology, m_indexType, numberToPtr<void*>(m_offset), m_drawCount,
                                        sizeof(DrawElementsIndirectInfo));
            glBindBuffer(GL_DRAW_INDIRECT_BUFFER, 0);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    self.m_state.checkIndexedDracall();
    self.flushDrawcall(*this);
    self.pushBackNewCommand<DrawElementsIndirectCommand>(
        convertPrimitiveTopology(topology), self.m_state.m_idx.m_indexType, drawCount, offset, indirectBuff);
}

void CommandBuffer::drawArraysIndirect(PrimitiveTopology topology, U32 drawCount, PtrSize offset,
                                       BufferPtr indirectBuff)
{
    class DrawArraysIndirectCommand final : public GlCommand
    {
    public:
        GLenum m_topology;
        U32 m_drawCount;
        PtrSize m_offset;
        BufferPtr m_buff;

        DrawArraysIndirectCommand(GLenum topology, U32 drawCount, PtrSize offset, BufferPtr buff)
            : m_topology(topology)
            , m_drawCount(drawCount)
            , m_offset(offset)
            , m_buff(buff)
        {
            ANKI_ASSERT(drawCount > 0);
            ANKI_ASSERT((m_offset % 4) == 0);
        }

        Error operator()(GlState& state)
        {
            const BufferImpl& buff = static_cast<const BufferImpl&>(*m_buff);
            ANKI_ASSERT(m_offset + sizeof(DrawArraysIndirectInfo) * m_drawCount <= buff.getSize());

            glBindBuffer(GL_DRAW_INDIRECT_BUFFER, buff.getGlName());
            glMultiDrawArraysIndirect(m_topology, numberToPtr<void*>(m_offset), m_drawCount,
                                      sizeof(DrawArraysIndirectInfo));
            glBindBuffer(GL_DRAW_INDIRECT_BUFFER, 0);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    self.m_state.checkNonIndexedDrawcall();
    self.flushDrawcall(*this);
    self.pushBackNewCommand<DrawArraysIndirectCommand>(convertPrimitiveTopology(topology), drawCount, offset,
                                                       indirectBuff);
}

void CommandBuffer::dispatchCompute(U32 groupCountX, U32 groupCountY, U32 groupCountZ)
{
    class DispatchCommand final : public GlCommand
    {
    public:
        Array<U32, 3> m_size;

        DispatchCommand(U32 x, U32 y, U32 z)
            : m_size({{x, y, z}})
        {
        }

        Error operator()(GlState&)
        {
            glDispatchCompute(m_size[0], m_size[1], m_size[2]);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    ANKI_ASSERT(!!(self.m_flags & CommandBufferFlag::kComputeWork));
    self.m_state.checkDispatch();
    self.pushBackNewCommand<DispatchCommand>(groupCountX, groupCountY, groupCountZ);
}

void CommandBuffer::resetOcclusionQuery(OcclusionQueryPtr query)
{
    // Nothing for GL
}

void CommandBuffer::beginOcclusionQuery(OcclusionQueryPtr query)
{
    class OqBeginCommand final : public GlCommand
    {
    public:
        OcclusionQueryPtr m_handle;

        OqBeginCommand(const OcclusionQueryPtr& handle)
            : m_handle(handle)
        {
        }

        Error operator()(GlState&)
        {
            static_cast<OcclusionQueryImpl&>(*m_handle).begin();
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    self.pushBackNewCommand<OqBeginCommand>(query);
}

void CommandBuffer::endOcclusionQuery(OcclusionQueryPtr query)
{
    class OqEndCommand final : public GlCommand
    {
    public:
        OcclusionQueryPtr m_handle;

        OqEndCommand(const OcclusionQueryPtr& handle)
            : m_handle(handle)
        {
        }

        Error operator()(GlState&)
        {
            static_cast<OcclusionQueryImpl&>(*m_handle).end();
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    self.pushBackNewCommand<OqEndCommand>(query);
}

void CommandBuffer::copyBufferToTextureView(BufferPtr buff, PtrSize offset, PtrSize range, TextureViewPtr texView)
{
    class TexSurfUploadCommand final : public GlCommand
    {
    public:
        BufferPtr m_buff;
        PtrSize m_offset;
        PtrSize m_range;
        TextureViewPtr m_texView;

        TexSurfUploadCommand(BufferPtr buff, PtrSize offset, PtrSize range, TextureViewPtr texView)
            : m_buff(buff)
            , m_offset(offset)
            , m_range(range)
            , m_texView(texView)
        {
        }

        Error operator()(GlState&)
        {
            const TextureViewImpl& viewImpl = static_cast<TextureViewImpl&>(*m_texView);
            const TextureImpl& texImpl = static_cast<TextureImpl&>(*viewImpl.m_tex);
            texImpl.copyFromBuffer(viewImpl.getSubresource(), static_cast<const BufferImpl&>(*m_buff).getGlName(),
                                   m_offset, m_range);
            return Error::kNone;
        }
    };

    ANKI_ASSERT(texView);
    ANKI_ASSERT(buff);
    ANKI_ASSERT(range > 0);

    ANKI_GL_SELF(CommandBufferImpl);
    ANKI_ASSERT(!self.m_state.insideRenderPass());
    self.pushBackNewCommand<TexSurfUploadCommand>(buff, offset, range, texView);
}

void CommandBuffer::copyBufferToBuffer(BufferPtr src, PtrSize srcOffset, BufferPtr dst, PtrSize dstOffset,
                                       PtrSize range)
{
    class Cmd final : public GlCommand
    {
    public:
        BufferPtr m_src;
        PtrSize m_srcOffset;
        BufferPtr m_dst;
        PtrSize m_dstOffset;
        PtrSize m_range;

        Cmd(BufferPtr src, PtrSize srcOffset, BufferPtr dst, PtrSize dstOffset, PtrSize range)
            : m_src(src)
            , m_srcOffset(srcOffset)
            , m_dst(dst)
            , m_dstOffset(dstOffset)
            , m_range(range)
        {
        }

        Error operator()(GlState& state)
        {
            static_cast<BufferImpl&>(*m_dst).write(static_cast<const BufferImpl&>(*m_src).getGlName(), m_srcOffset,
                                                   m_dstOffset, m_range);
            return Error::kNone;
        }
    };

    ANKI_ASSERT(src);
    ANKI_ASSERT(dst);
    ANKI_ASSERT(range > 0);

    ANKI_GL_SELF(CommandBufferImpl);
    ANKI_ASSERT(!self.m_state.insideRenderPass());
    self.pushBackNewCommand<Cmd>(src, srcOffset, dst, dstOffset, range);
}

void CommandBuffer::generateMipmaps2d(TextureViewPtr texView)
{
    class GenMipsCommand final : public GlCommand
    {
    public:
        TextureViewPtr m_texView;

        GenMipsCommand(const TextureViewPtr& view)
            : m_texView(view)
        {
        }

        Error operator()(GlState&)
        {
            const TextureViewImpl& viewImpl = static_cast<TextureViewImpl&>(*m_texView);
            const TextureImpl& texImpl = static_cast<TextureImpl&>(*viewImpl.m_tex);
            texImpl.generateMipmaps2d(viewImpl);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    ANKI_ASSERT(!self.m_state.insideRenderPass());
    self.pushBackNewCommand<GenMipsCommand>(texView);
}

void CommandBuffer::generateMipmaps3d(TextureViewPtr tex)
{
    ANKI_ASSERT(!"TODO");
}

void CommandBuffer::pushSecondLevelCommandBuffer(CommandBufferPtr cmdb)
{
    class ExecCmdbCommand final : public GlCommand
    {
    public:
        CommandBufferPtr m_cmdb;

        ExecCmdbCommand(const CommandBufferPtr& cmdb)
            : m_cmdb(cmdb)
        {
        }

        Error operator()(GlState&)
        {
            ANKI_TRACE_SCOPED_EVENT(GL_2ND_LEVEL_CMD_BUFFER);
            return static_cast<CommandBufferImpl&>(*m_cmdb).executeAllCommands();
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    self.m_state.m_lastSecondLevelCmdb = static_cast<CommandBufferImpl*>(cmdb.get());
    self.pushBackNewCommand<ExecCmdbCommand>(cmdb);
}

Bool CommandBuffer::isEmpty() const
{
    ANKI_GL_SELF_CONST(CommandBufferImpl);
    return self.isEmpty();
}

void CommandBuffer::blitTextureViews(TextureViewPtr srcView, TextureViewPtr destView)
{
    ANKI_ASSERT(!"TODO");
}
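
// setBufferBarrier maps the combined previous/next buffer usage onto a glMemoryBarrier() bitmask.
// GL memory barriers are global, so the buffer, offset and size arguments are not used here.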

void CommandBuffer::setBufferBarrier(BufferPtr buff, BufferUsageBit prevUsage, BufferUsageBit nextUsage, PtrSize offset,
                                     PtrSize size)
{
    class SetBufferMemBarrierCommand final : public GlCommand
    {
    public:
        GLenum m_barrier;

        SetBufferMemBarrierCommand(GLenum barrier)
            : m_barrier(barrier)
        {
        }

        Error operator()(GlState&)
        {
            glMemoryBarrier(m_barrier);
            return Error::kNone;
        }
    };

    GLenum d = GL_NONE;
    BufferUsageBit all = prevUsage | nextUsage;

    if(!!(all & BufferUsageBit::UNIFORM_ALL))
    {
        d |= GL_UNIFORM_BARRIER_BIT;
    }

    if(!!(all & BufferUsageBit::STORAGE_ALL))
    {
        d |= GL_SHADER_STORAGE_BARRIER_BIT;
    }

    if(!!(all & BufferUsageBit::kIndex))
    {
        d |= GL_ELEMENT_ARRAY_BARRIER_BIT;
    }

    if(!!(all & BufferUsageBit::kVertex))
    {
        d |= GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT;
    }

    if(!!(all & BufferUsageBit::INDIRECT_ALL))
    {
        d |= GL_COMMAND_BARRIER_BIT;
    }

    if(!!(all
          & (BufferUsageBit::FILL | BufferUsageBit::BUFFER_UPLOAD_SOURCE | BufferUsageBit::BUFFER_UPLOAD_DESTINATION)))
    {
        d |= GL_BUFFER_UPDATE_BARRIER_BIT;
    }

    if(!!(all & BufferUsageBit::QUERY_RESULT))
    {
        d |= GL_QUERY_BUFFER_BARRIER_BIT;
    }

    ANKI_ASSERT(d);
    ANKI_GL_SELF(CommandBufferImpl);
    self.pushBackNewCommand<SetBufferMemBarrierCommand>(d);
}
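
// The surface and volume variants ignore the given subresource and forward to the generic texture
// barrier, since the resulting glMemoryBarrier() applies globally anyway.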

void CommandBuffer::setTextureSurfaceBarrier(TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage,
                                             const TextureSurfaceInfo& surf)
{
    TextureSubresourceInfo subresource;
    setTextureBarrier(tex, prevUsage, nextUsage, subresource);
}

void CommandBuffer::setTextureVolumeBarrier(TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage,
                                            const TextureVolumeInfo& vol)
{
    TextureSubresourceInfo subresource;
    setTextureBarrier(tex, prevUsage, nextUsage, subresource);
}

void CommandBuffer::setTextureBarrier(TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage,
                                      const TextureSubresourceInfo& subresource)
{
    class Cmd final : public GlCommand
    {
    public:
        GLenum m_barrier;

        Cmd(GLenum barrier)
            : m_barrier(barrier)
        {
        }

        Error operator()(GlState&)
        {
            glMemoryBarrier(m_barrier);
            return Error::kNone;
        }
    };

    const TextureUsageBit usage = nextUsage;
    GLenum e = 0;

    if(!!(usage & TextureUsageBit::SAMPLED_ALL))
    {
        e |= GL_TEXTURE_FETCH_BARRIER_BIT;
    }

    if(!!(usage & TextureUsageBit::IMAGE_ALL))
    {
        e |= GL_SHADER_IMAGE_ACCESS_BARRIER_BIT;
    }

    if(!!(usage & TextureUsageBit::kTransferDestination))
    {
        e |= GL_TEXTURE_UPDATE_BARRIER_BIT;
    }

    if(!!(usage & TextureUsageBit::FRAMEBUFFER_ATTACHMENT_READ_WRITE))
    {
        e |= GL_FRAMEBUFFER_BARRIER_BIT;
    }

    if(!!(usage & TextureUsageBit::CLEAR))
    {
        // No idea
    }

    if(!!(usage & TextureUsageBit::kGenerateMipmaps))
    {
        // No idea
    }

    if(e != 0)
    {
        ANKI_GL_SELF(CommandBufferImpl);
        self.pushBackNewCommand<Cmd>(e);
    }
}

void CommandBuffer::clearTextureView(TextureViewPtr texView, const ClearValue& clearValue)
{
    class ClearTextCommand final : public GlCommand
    {
    public:
        TextureViewPtr m_texView;
        ClearValue m_val;

        ClearTextCommand(TextureViewPtr texView, const ClearValue& val)
            : m_texView(texView)
            , m_val(val)
        {
        }

        Error operator()(GlState&)
        {
            const TextureViewImpl& viewImpl = static_cast<TextureViewImpl&>(*m_texView);
            const TextureImpl& texImpl = static_cast<TextureImpl&>(*viewImpl.m_tex);
            texImpl.clear(viewImpl.getSubresource(), m_val);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    ANKI_ASSERT(!self.m_state.insideRenderPass());
    self.pushBackNewCommand<ClearTextCommand>(texView, clearValue);
}

void CommandBuffer::fillBuffer(BufferPtr buff, PtrSize offset, PtrSize size, U32 value)
{
    class FillBufferCommand final : public GlCommand
    {
    public:
        BufferPtr m_buff;
        PtrSize m_offset;
        PtrSize m_size;
        U32 m_value;

        FillBufferCommand(BufferPtr buff, PtrSize offset, PtrSize size, U32 value)
            : m_buff(buff)
            , m_offset(offset)
            , m_size(size)
            , m_value(value)
        {
        }

        Error operator()(GlState&)
        {
            static_cast<BufferImpl&>(*m_buff).fill(m_offset, m_size, m_value);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    ANKI_ASSERT(!self.m_state.insideRenderPass());
    self.pushBackNewCommand<FillBufferCommand>(buff, offset, size, value);
}

void CommandBuffer::writeOcclusionQueryResultToBuffer(OcclusionQueryPtr query, PtrSize offset, BufferPtr buff)
{
    class WriteOcclResultToBuff final : public GlCommand
    {
    public:
        OcclusionQueryPtr m_query;
        PtrSize m_offset;
        BufferPtr m_buff;

        WriteOcclResultToBuff(OcclusionQueryPtr query, PtrSize offset, BufferPtr buff)
            : m_query(query)
            , m_offset(offset)
            , m_buff(buff)
        {
            ANKI_ASSERT((m_offset % 4) == 0);
        }

        Error operator()(GlState&)
        {
            const BufferImpl& buff = static_cast<const BufferImpl&>(*m_buff);
            ANKI_ASSERT(m_offset + 4 <= buff.getSize());

            glBindBuffer(GL_QUERY_BUFFER, buff.getGlName());
            glGetQueryObjectuiv(static_cast<const OcclusionQueryImpl&>(*m_query).getGlName(), GL_QUERY_RESULT,
                                numberToPtr<GLuint*>(m_offset));
            glBindBuffer(GL_QUERY_BUFFER, 0);
            return Error::kNone;
        }
    };

    ANKI_GL_SELF(CommandBufferImpl);
    ANKI_ASSERT(!self.m_state.insideRenderPass());
    self.pushBackNewCommand<WriteOcclResultToBuff>(query, offset, buff);
}
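
// GL has no native push constants. setPushConstants emulates them by copying the data into the command
// and replaying it with glUniform* calls, using the bound program's reflection to locate each uniform.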

void CommandBuffer::setPushConstants(const void* data, U32 dataSize)
{
    class PushConstants final : public GlCommand
    {
    public:
        DynamicArrayRaii<Vec4> m_data;

        PushConstants(const void* data, U32 dataSize, const CommandBufferAllocator<F32>& alloc)
            : m_data(alloc)
        {
            m_data.create(dataSize / sizeof(Vec4));
            memcpy(&m_data[0], data, dataSize);
        }

        Error operator()(GlState& state)
        {
            const ShaderProgramImplReflection& refl =
                static_cast<ShaderProgramImpl&>(*state.m_crntProg).getReflection();
            ANKI_ASSERT(refl.m_uniformDataSize == m_data.getSizeInBytes());

            const Bool transpose = true;
            for(const ShaderProgramImplReflection::Uniform& uni : refl.m_uniforms)
            {
                const U8* data = reinterpret_cast<const U8*>(&m_data[0]) + uni.m_pushConstantOffset;
                const U count = uni.m_arrSize;
                const GLint loc = uni.m_location;

                switch(uni.m_type)
                {
                case ShaderVariableDataType::VEC4:
                    glUniform4fv(loc, count, reinterpret_cast<const GLfloat*>(data));
                    break;
                case ShaderVariableDataType::IVEC4:
                    glUniform4iv(loc, count, reinterpret_cast<const GLint*>(data));
                    break;
                case ShaderVariableDataType::UVEC4:
                    glUniform4uiv(loc, count, reinterpret_cast<const GLuint*>(data));
                    break;
                case ShaderVariableDataType::MAT4:
                    glUniformMatrix4fv(loc, count, transpose, reinterpret_cast<const GLfloat*>(data));
                    break;
                case ShaderVariableDataType::MAT3:
                {
                    // Remove the padding
                    ANKI_ASSERT(count == 1 && "TODO");
                    const Mat3x4* m34 = reinterpret_cast<const Mat3x4*>(data);
                    Mat3 m3(m34->getRotationPart());
                    glUniformMatrix3fv(loc, count, transpose, reinterpret_cast<const GLfloat*>(&m3));
                    break;
                }
                default:
                    ANKI_ASSERT(!"TODO");
                }
            }

            return Error::kNone;
        }
    };

    ANKI_ASSERT(data);
    ANKI_ASSERT(dataSize);
    ANKI_ASSERT(dataSize % 16 == 0);

    ANKI_GL_SELF(CommandBufferImpl);
    self.pushBackNewCommand<PushConstants>(data, dataSize, self.m_alloc);
}

void CommandBuffer::setRasterizationOrder(RasterizationOrder order)
{
    // Nothing for GL
}

} // end namespace anki