CommandBuffer.cpp 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566
  1. // Copyright (C) 2009-2023, Panagiotis Christopoulos Charitos and contributors.
  2. // All rights reserved.
  3. // Code licensed under the BSD License.
  4. // http://www.anki3d.org/LICENSE
  5. #include <AnKi/Gr/CommandBuffer.h>
  6. #include <AnKi/Gr/gl/CommandBufferImpl.h>
  7. #include <AnKi/Gr/GrManager.h>
  8. #include <AnKi/Gr/gl/GrManagerImpl.h>
  9. #include <AnKi/Gr/gl/RenderingThread.h>
  10. #include <AnKi/Gr/gl/GlState.h>
  11. #include <AnKi/Gr/gl/FramebufferImpl.h>
  12. #include <AnKi/Gr/gl/OcclusionQueryImpl.h>
  13. #include <AnKi/Gr/gl/TextureImpl.h>
  14. #include <AnKi/Gr/gl/BufferImpl.h>
  15. #include <AnKi/Gr/gl/SamplerImpl.h>
  16. #include <AnKi/Gr/gl/ShaderProgramImpl.h>
  17. #include <AnKi/Gr/gl/TextureViewImpl.h>
  18. #include <AnKi/Core/Trace.h>
  19. namespace anki {
// Factory for the GL command buffer. Allocates the implementation through the manager's
// allocator, initializes it from the init info and returns it as the public interface type.
CommandBuffer* CommandBuffer::newInstance(GrManager* manager, const CommandBufferInitInfo& inf)
{
	CommandBufferImpl* impl = manager->getAllocator().newInstance<CommandBufferImpl>(manager, inf.getName());
	impl->init(inf);
	return impl;
}
  26. void CommandBuffer::flush(FencePtr* fence)
  27. {
  28. ANKI_GL_SELF(CommandBufferImpl);
  29. if(!self.isSecondLevel())
  30. {
  31. ANKI_ASSERT(!self.m_state.insideRenderPass());
  32. }
  33. else
  34. {
  35. ANKI_ASSERT(fence == nullptr);
  36. }
  37. if(!self.isSecondLevel())
  38. {
  39. static_cast<GrManagerImpl&>(getManager()).getRenderingThread().flushCommandBuffer(CommandBufferPtr(this), fence);
  40. }
  41. }
// Record a vertex buffer binding. The GL calls run later on the rendering thread.
void CommandBuffer::bindVertexBuffer(U32 binding, BufferPtr buff, PtrSize offset, PtrSize stride, VertexStepRate stepRate)
{
	// Deferred command: binds the buffer to the vertex binding point and sets its divisor.
	class Cmd final : public GlCommand
	{
	public:
		BufferPtr m_buff;
		U32 m_binding;
		PtrSize m_offset;
		PtrSize m_stride;
		Bool m_instanced;

		Cmd(U32 binding, BufferPtr buff, PtrSize offset, PtrSize stride, Bool instanced)
			: m_buff(buff)
			, m_binding(binding)
			, m_offset(offset)
			, m_stride(stride)
			, m_instanced(instanced)
		{
		}

		Error operator()(GlState& state)
		{
			glBindVertexBuffer(m_binding, static_cast<const BufferImpl&>(*m_buff).getGlName(), m_offset, m_stride);
			// Divisor 1: attribute advances once per instance. Divisor 0: once per vertex.
			glVertexBindingDivisor(m_binding, (m_instanced) ? 1 : 0);
			return Error::kNone;
		}
	};

	ANKI_ASSERT(buff);
	ANKI_ASSERT(stride > 0);
	ANKI_GL_SELF(CommandBufferImpl);
	// The client-side cache returns true only when this binding actually changed.
	if(self.m_state.bindVertexBuffer(binding, buff, offset, stride, stepRate))
	{
		// NOTE(review): any step rate other than INSTANCE maps to divisor 0 — confirm VertexStepRate
		// has no other per-N-instances values.
		self.pushBackNewCommand<Cmd>(binding, buff, offset, stride, stepRate == VertexStepRate::INSTANCE);
	}
}
// Record a vertex attribute format and its association with a vertex buffer binding point.
void CommandBuffer::setVertexAttribute(U32 location, U32 buffBinding, Format fmt, PtrSize relativeOffset)
{
	// Deferred command: sets the attribute format and maps it onto a binding slot.
	class Cmd final : public GlCommand
	{
	public:
		U32 m_location;
		U32 m_buffBinding;
		U8 m_compSize; // Component count (1-4), narrowed from U — fits by GL spec limits.
		GLenum m_fmt;
		Bool m_normalized;
		PtrSize m_relativeOffset;

		Cmd(U32 location, U32 buffBinding, U8 compSize, GLenum fmt, Bool normalized, PtrSize relativeOffset)
			: m_location(location)
			, m_buffBinding(buffBinding)
			, m_compSize(compSize)
			, m_fmt(fmt)
			, m_normalized(normalized)
			, m_relativeOffset(relativeOffset)
		{
		}

		Error operator()(GlState& state)
		{
			glVertexAttribFormat(m_location, m_compSize, m_fmt, m_normalized, m_relativeOffset);
			glVertexAttribBinding(m_location, m_buffBinding);
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.setVertexAttribute(location, buffBinding, fmt, relativeOffset))
	{
		// Translate the engine format to GL component count/type/normalization.
		U compCount;
		GLenum type;
		Bool normalized;
		convertVertexFormat(fmt, compCount, type, normalized);
		self.pushBackNewCommand<Cmd>(location, buffBinding, compCount, type, normalized, relativeOffset);
	}
}
// Record an index buffer binding. Note that only the buffer bind is deferred; the offset and
// index type are kept in the client-side cache (m_state.m_idx) and consumed by drawElements.
void CommandBuffer::bindIndexBuffer(BufferPtr buff, PtrSize offset, IndexType type)
{
	class Cmd final : public GlCommand
	{
	public:
		BufferPtr m_buff;

		Cmd(BufferPtr buff)
			: m_buff(buff)
		{
		}

		Error operator()(GlState& state)
		{
			glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, static_cast<const BufferImpl&>(*m_buff).getGlName());
			return Error::kNone;
		}
	};

	ANKI_ASSERT(buff);
	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.bindIndexBuffer(buff, offset, type))
	{
		self.pushBackNewCommand<Cmd>(buff);
	}
}
  135. void CommandBuffer::setPrimitiveRestart(Bool enable)
  136. {
  137. class Cmd final : public GlCommand
  138. {
  139. public:
  140. Bool m_enable;
  141. Cmd(Bool enable)
  142. : m_enable(enable)
  143. {
  144. }
  145. Error operator()(GlState& state)
  146. {
  147. if(m_enable)
  148. {
  149. glEnable(GL_PRIMITIVE_RESTART);
  150. }
  151. else
  152. {
  153. glDisable(GL_PRIMITIVE_RESTART);
  154. }
  155. return Error::kNone;
  156. }
  157. };
  158. ANKI_GL_SELF(CommandBufferImpl);
  159. if(self.m_state.setPrimitiveRestart(enable))
  160. {
  161. self.pushBackNewCommand<Cmd>(enable);
  162. }
  163. }
  164. void CommandBuffer::setViewport(U32 minx, U32 miny, U32 width, U32 height)
  165. {
  166. class ViewportCommand final : public GlCommand
  167. {
  168. public:
  169. Array<U32, 4> m_value;
  170. ViewportCommand(U32 a, U32 b, U32 c, U32 d)
  171. {
  172. m_value = {{a, b, c, d}};
  173. }
  174. Error operator()(GlState& state)
  175. {
  176. glViewport(m_value[0], m_value[1], m_value[2], m_value[3]);
  177. return Error::kNone;
  178. }
  179. };
  180. ANKI_GL_SELF(CommandBufferImpl);
  181. if(self.m_state.setViewport(minx, miny, width, height))
  182. {
  183. self.pushBackNewCommand<ViewportCommand>(minx, miny, width, height);
  184. }
  185. }
// Set the scissor rectangle. kMaxU32 extents are treated as "unbounded" and clamped to the
// largest GLsizei. The command itself also checks the server-side cache in GlState.
void CommandBuffer::setScissor(U32 minx, U32 miny, U32 width, U32 height)
{
	ANKI_ASSERT(minx < kMaxU32 && miny < kMaxU32);
	ANKI_ASSERT(width > 0 && height > 0);

	class ScissorCommand final : public GlCommand
	{
	public:
		Array<GLsizei, 4> m_value;

		ScissorCommand(GLsizei a, GLsizei b, GLsizei c, GLsizei d)
		{
			m_value = {{a, b, c, d}};
		}

		Error operator()(GlState& state)
		{
			// Double-check against the GL-thread cache to avoid redundant glScissor calls.
			if(state.m_scissor[0] != m_value[0] || state.m_scissor[1] != m_value[1] || state.m_scissor[2] != m_value[2]
				|| state.m_scissor[3] != m_value[3])
			{
				state.m_scissor = m_value;
				glScissor(m_value[0], m_value[1], m_value[2], m_value[3]);
			}
			return Error::kNone;
		}
	};

	// Limit the width and height to GLsizei
	// NOTE(review): only the exact kMaxU32 sentinel is clamped; values in (kMaxI32, kMaxU32)
	// would still wrap negative in the U32->GLsizei conversion — presumably callers never pass those.
	const GLsizei iwidth = (width == kMaxU32) ? kMaxI32 : width;
	const GLsizei iheight = (height == kMaxU32) ? kMaxI32 : height;
	const GLsizei iminx = minx;
	const GLsizei iminy = miny;

	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.setScissor(iminx, iminy, iwidth, iheight))
	{
		self.pushBackNewCommand<ScissorCommand>(iminx, iminy, iwidth, iheight);
	}
}
  220. void CommandBuffer::setFillMode(FillMode mode)
  221. {
  222. class Cmd final : public GlCommand
  223. {
  224. public:
  225. GLenum m_fillMode;
  226. Cmd(GLenum fillMode)
  227. : m_fillMode(fillMode)
  228. {
  229. }
  230. Error operator()(GlState& state)
  231. {
  232. glPolygonMode(GL_FRONT_AND_BACK, m_fillMode);
  233. return Error::kNone;
  234. }
  235. };
  236. ANKI_GL_SELF(CommandBufferImpl);
  237. if(self.m_state.setFillMode(mode))
  238. {
  239. self.pushBackNewCommand<Cmd>(convertFillMode(mode));
  240. }
  241. }
  242. void CommandBuffer::setCullMode(FaceSelectionBit mode)
  243. {
  244. class Cmd final : public GlCommand
  245. {
  246. public:
  247. GLenum m_mode;
  248. Cmd(GLenum mode)
  249. : m_mode(mode)
  250. {
  251. }
  252. Error operator()(GlState& state)
  253. {
  254. glCullFace(m_mode);
  255. return Error::kNone;
  256. }
  257. };
  258. ANKI_GL_SELF(CommandBufferImpl);
  259. if(self.m_state.setCullMode(mode))
  260. {
  261. self.pushBackNewCommand<Cmd>(convertFaceMode(mode));
  262. }
  263. }
// Set depth-bias (polygon offset). A (0, 0) pair disables the feature entirely instead of
// programming a zero offset.
void CommandBuffer::setPolygonOffset(F32 factor, F32 units)
{
	class Cmd final : public GlCommand
	{
	public:
		F32 m_factor;
		F32 m_units;

		Cmd(F32 factor, F32 units)
			: m_factor(factor)
			, m_units(units)
		{
		}

		Error operator()(GlState& state)
		{
			// Zero offset means "off": disable the GL cap rather than applying a no-op bias.
			if(m_factor == 0.0 && m_units == 0.0)
			{
				glDisable(GL_POLYGON_OFFSET_FILL);
			}
			else
			{
				glEnable(GL_POLYGON_OFFSET_FILL);
				glPolygonOffset(m_factor, m_units);
			}
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.setPolygonOffset(factor, units))
	{
		self.pushBackNewCommand<Cmd>(factor, units);
	}
}
// Set the three stencil operations (fail / depth-fail / pass) for the selected face(s).
void CommandBuffer::setStencilOperations(FaceSelectionBit face, StencilOperation stencilFail, StencilOperation stencilPassDepthFail,
	StencilOperation stencilPassDepthPass)
{
	// Deferred glStencilOpSeparate command; enums are pre-converted to GL on the recording thread.
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_face;
		GLenum m_stencilFail;
		GLenum m_stencilPassDepthFail;
		GLenum m_stencilPassDepthPass;

		Cmd(GLenum face, GLenum stencilFail, GLenum stencilPassDepthFail, GLenum stencilPassDepthPass)
			: m_face(face)
			, m_stencilFail(stencilFail)
			, m_stencilPassDepthFail(stencilPassDepthFail)
			, m_stencilPassDepthPass(stencilPassDepthPass)
		{
		}

		Error operator()(GlState& state)
		{
			glStencilOpSeparate(m_face, m_stencilFail, m_stencilPassDepthFail, m_stencilPassDepthPass);
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.setStencilOperations(face, stencilFail, stencilPassDepthFail, stencilPassDepthPass))
	{
		self.pushBackNewCommand<Cmd>(convertFaceMode(face), convertStencilOperation(stencilFail), convertStencilOperation(stencilPassDepthFail),
			convertStencilOperation(stencilPassDepthPass));
	}
}
// Cache the stencil compare op. No GL command is recorded here — presumably the compare op,
// reference and compare mask are folded into one glStencilFuncSeparate at draw time (see
// flushDrawcall in the impl) — confirm against CommandBufferImpl.
void CommandBuffer::setStencilCompareOperation(FaceSelectionBit face, CompareOperation comp)
{
	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.setStencilCompareOperation(face, comp);
}
// Cache the stencil compare mask. Like setStencilCompareOperation, this records no GL command;
// the value is presumably consumed lazily at draw time — confirm against CommandBufferImpl.
void CommandBuffer::setStencilCompareMask(FaceSelectionBit face, U32 mask)
{
	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.setStencilCompareMask(face, mask);
}
// Set the stencil write mask. Unlike the compare state, this is applied eagerly with
// glStencilMaskSeparate, and the GL-thread cache is updated so later code can read it back.
void CommandBuffer::setStencilWriteMask(FaceSelectionBit face, U32 mask)
{
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_face;
		U32 m_mask;

		Cmd(GLenum face, U32 mask)
			: m_face(face)
			, m_mask(mask)
		{
		}

		Error operator()(GlState& state)
		{
			glStencilMaskSeparate(m_face, m_mask);

			// Mirror the new mask into the per-face cache ([0]=front, [1]=back).
			if(m_face == GL_FRONT)
			{
				state.m_stencilWriteMask[0] = m_mask;
			}
			else if(m_face == GL_BACK)
			{
				state.m_stencilWriteMask[1] = m_mask;
			}
			else
			{
				ANKI_ASSERT(m_face == GL_FRONT_AND_BACK);
				state.m_stencilWriteMask[0] = state.m_stencilWriteMask[1] = m_mask;
			}
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.setStencilWriteMask(face, mask))
	{
		self.pushBackNewCommand<Cmd>(convertFaceMode(face), mask);
	}
}
// Cache the stencil reference value. Records no GL command; presumably consumed together with
// the compare op/mask at draw time — confirm against CommandBufferImpl.
void CommandBuffer::setStencilReference(FaceSelectionBit face, U32 ref)
{
	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.setStencilReference(face, ref);
}
  378. void CommandBuffer::setDepthWrite(Bool enable)
  379. {
  380. class Cmd final : public GlCommand
  381. {
  382. public:
  383. Bool m_enable;
  384. Cmd(Bool enable)
  385. : m_enable(enable)
  386. {
  387. }
  388. Error operator()(GlState& state)
  389. {
  390. glDepthMask(m_enable);
  391. state.m_depthWriteMask = m_enable;
  392. return Error::kNone;
  393. }
  394. };
  395. ANKI_GL_SELF(CommandBufferImpl);
  396. if(self.m_state.setDepthWrite(enable))
  397. {
  398. self.pushBackNewCommand<Cmd>(enable);
  399. }
  400. }
  401. void CommandBuffer::setDepthCompareOperation(CompareOperation op)
  402. {
  403. class Cmd final : public GlCommand
  404. {
  405. public:
  406. GLenum m_op;
  407. Cmd(GLenum op)
  408. : m_op(op)
  409. {
  410. }
  411. Error operator()(GlState& state)
  412. {
  413. glDepthFunc(m_op);
  414. return Error::kNone;
  415. }
  416. };
  417. ANKI_GL_SELF(CommandBufferImpl);
  418. if(self.m_state.setDepthCompareOperation(op))
  419. {
  420. self.pushBackNewCommand<Cmd>(convertCompareOperation(op));
  421. }
  422. }
// Not implemented in the GL backend. Trips an assertion in debug builds; presumably a silent
// no-op when assertions are compiled out — callers must not rely on this state taking effect.
void CommandBuffer::setAlphaToCoverage(Bool enable)
{
	ANKI_ASSERT(!"TODO");
}
// Set the per-attachment color channel write mask (glColorMaski) and mirror it into the
// GL-thread cache.
void CommandBuffer::setColorChannelWriteMask(U32 attachment, ColorBit mask)
{
	class Cmd final : public GlCommand
	{
	public:
		U8 m_attachment;
		ColorBit m_mask;

		Cmd(U8 attachment, ColorBit mask)
			: m_attachment(attachment)
			, m_mask(mask)
		{
		}

		Error operator()(GlState& state)
		{
			// Unpack the bitmask into the four channel booleans GL expects.
			const Bool r = !!(m_mask & ColorBit::RED);
			const Bool g = !!(m_mask & ColorBit::GREEN);
			const Bool b = !!(m_mask & ColorBit::BLUE);
			const Bool a = !!(m_mask & ColorBit::ALPHA);
			glColorMaski(m_attachment, r, g, b, a);
			state.m_colorWriteMasks[m_attachment] = {{r, g, b, a}};
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.setColorChannelWriteMask(attachment, mask))
	{
		self.pushBackNewCommand<Cmd>(attachment, mask);
	}
}
// Set per-attachment blend factors, with separate RGB and alpha factors
// (glBlendFuncSeparatei).
void CommandBuffer::setBlendFactors(U32 attachment, BlendFactor srcRgb, BlendFactor dstRgb, BlendFactor srcA, BlendFactor dstA)
{
	class Cmd final : public GlCommand
	{
	public:
		U8 m_attachment;
		GLenum m_srcRgb;
		GLenum m_dstRgb;
		GLenum m_srcA;
		GLenum m_dstA;

		Cmd(U8 att, GLenum srcRgb, GLenum dstRgb, GLenum srcA, GLenum dstA)
			: m_attachment(att)
			, m_srcRgb(srcRgb)
			, m_dstRgb(dstRgb)
			, m_srcA(srcA)
			, m_dstA(dstA)
		{
		}

		Error operator()(GlState&)
		{
			glBlendFuncSeparatei(m_attachment, m_srcRgb, m_dstRgb, m_srcA, m_dstA);
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.setBlendFactors(attachment, srcRgb, dstRgb, srcA, dstA))
	{
		self.pushBackNewCommand<Cmd>(attachment, convertBlendFactor(srcRgb), convertBlendFactor(dstRgb), convertBlendFactor(srcA),
			convertBlendFactor(dstA));
	}
}
// Set per-attachment blend equations, separately for RGB and alpha (glBlendEquationSeparatei).
void CommandBuffer::setBlendOperation(U32 attachment, BlendOperation funcRgb, BlendOperation funcA)
{
	class Cmd final : public GlCommand
	{
	public:
		U8 m_attachment;
		GLenum m_funcRgb;
		GLenum m_funcA;

		Cmd(U8 att, GLenum funcRgb, GLenum funcA)
			: m_attachment(att)
			, m_funcRgb(funcRgb)
			, m_funcA(funcA)
		{
		}

		Error operator()(GlState&)
		{
			glBlendEquationSeparatei(m_attachment, m_funcRgb, m_funcA);
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.setBlendOperation(attachment, funcRgb, funcA))
	{
		self.pushBackNewCommand<Cmd>(attachment, convertBlendOperation(funcRgb), convertBlendOperation(funcA));
	}
}
// Bind a texture view together with a sampler. GL has no descriptor sets, so (set, binding) is
// flattened into a single texture unit: unit = binding + MAX_TEXTURE_BINDINGS * set.
// The 'usage' parameter is unused here — GL needs no layout transitions.
void CommandBuffer::bindTextureAndSampler(U32 set, U32 binding, TextureViewPtr texView, SamplerPtr sampler, TextureUsageBit usage)
{
	class Cmd final : public GlCommand
	{
	public:
		U32 m_unit;
		TextureViewPtr m_texView;
		SamplerPtr m_sampler;

		Cmd(U32 unit, TextureViewPtr texView, SamplerPtr sampler)
			: m_unit(unit)
			, m_texView(texView)
			, m_sampler(sampler)
		{
		}

		Error operator()(GlState&)
		{
			glBindTextureUnit(m_unit, static_cast<const TextureViewImpl&>(*m_texView).m_view.m_glName);
			glBindSampler(m_unit, static_cast<const SamplerImpl&>(*m_sampler).getGlName());
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	// The view's subresource must be something that can legally be sampled.
	ANKI_ASSERT(static_cast<const TextureViewImpl&>(*texView).m_tex->isSubresourceGoodForSampling(
		static_cast<const TextureViewImpl&>(*texView).getSubresource()));
	if(self.m_state.bindTextureViewAndSampler(set, binding, texView, sampler))
	{
		U unit = binding + MAX_TEXTURE_BINDINGS * set;
		self.pushBackNewCommand<Cmd>(unit, texView, sampler);
	}
}
// Bind a range of a buffer as a UBO. (set, binding) is flattened into a single GL binding
// index: binding + MAX_UNIFORM_BUFFER_BINDINGS * set.
void CommandBuffer::bindUniformBuffer(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
{
	class Cmd final : public GlCommand
	{
	public:
		BufferPtr m_buff;
		PtrSize m_binding;
		PtrSize m_offset;
		PtrSize m_range;

		Cmd(U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
			: m_buff(buff)
			, m_binding(binding)
			, m_offset(offset)
			, m_range(range)
		{
		}

		Error operator()(GlState&)
		{
			static_cast<const BufferImpl&>(*m_buff).bind(GL_UNIFORM_BUFFER, m_binding, m_offset, m_range);
			return Error::kNone;
		}
	};

	ANKI_ASSERT(buff);
	ANKI_ASSERT(range > 0);
	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.bindUniformBuffer(set, binding, buff, offset, range))
	{
		binding = binding + MAX_UNIFORM_BUFFER_BINDINGS * set;
		self.pushBackNewCommand<Cmd>(binding, buff, offset, range);
	}
}
// Bind a range of a buffer as an SSBO. Mirrors bindUniformBuffer but targets
// GL_SHADER_STORAGE_BUFFER and flattens with MAX_STORAGE_BUFFER_BINDINGS.
void CommandBuffer::bindStorageBuffer(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
{
	class Cmd final : public GlCommand
	{
	public:
		BufferPtr m_buff;
		PtrSize m_binding;
		PtrSize m_offset;
		PtrSize m_range;

		Cmd(U32 binding, BufferPtr buff, PtrSize offset, PtrSize range)
			: m_buff(buff)
			, m_binding(binding)
			, m_offset(offset)
			, m_range(range)
		{
		}

		Error operator()(GlState&)
		{
			static_cast<const BufferImpl&>(*m_buff).bind(GL_SHADER_STORAGE_BUFFER, m_binding, m_offset, m_range);
			return Error::kNone;
		}
	};

	ANKI_ASSERT(buff);
	ANKI_ASSERT(range > 0);
	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.bindStorageBuffer(set, binding, buff, offset, range))
	{
		binding = binding + MAX_STORAGE_BUFFER_BINDINGS * set;
		self.pushBackNewCommand<Cmd>(binding, buff, offset, range);
	}
}
// Bind a texture view for image load/store. (set, binding) is flattened with
// MAX_IMAGE_BINDINGS. The bind is always level 0, layered (GL_TRUE) and GL_READ_WRITE.
void CommandBuffer::bindImage(U32 set, U32 binding, TextureViewPtr img)
{
	class Cmd final : public GlCommand
	{
	public:
		TextureViewPtr m_img;
		U16 m_unit;

		Cmd(U32 unit, TextureViewPtr img)
			: m_img(img)
			, m_unit(unit)
		{
		}

		Error operator()(GlState&)
		{
			const TextureViewImpl& view = static_cast<const TextureViewImpl&>(*m_img);
			// level=0, layered=GL_TRUE: the whole view is bound; the internal format comes
			// from the underlying texture.
			glBindImageTexture(m_unit, view.m_view.m_glName, 0, GL_TRUE, 0, GL_READ_WRITE,
				static_cast<const TextureImpl&>(*view.m_tex).m_internalFormat);
			return Error::kNone;
		}
	};

	ANKI_ASSERT(img);
	ANKI_GL_SELF(CommandBufferImpl);
	// The view's subresource must be valid for image load/store access.
	ANKI_ASSERT(static_cast<const TextureViewImpl&>(*img).m_tex->isSubresourceGoodForImageLoadStore(
		static_cast<const TextureViewImpl&>(*img).getSubresource()));
	if(self.m_state.bindImage(set, binding, img))
	{
		binding = binding + set * MAX_IMAGE_BINDINGS;
		self.pushBackNewCommand<Cmd>(binding, img);
	}
}
// Attach a buffer range to a pre-created texture-buffer texture (kept per (set, binding) in
// GlState::m_texBuffTextures). NOTE(review): unlike the other bind* methods this records a
// command unconditionally — there is no state-cache filter; confirm whether that is intentional.
void CommandBuffer::bindTextureBuffer(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range, Format fmt)
{
	class Cmd final : public GlCommand
	{
	public:
		U32 m_set;
		U32 m_binding;
		BufferPtr m_buff;
		PtrSize m_offset;
		PtrSize m_range;
		GLenum m_fmt;

		Cmd(U32 set, U32 binding, BufferPtr buff, PtrSize offset, PtrSize range, GLenum fmt)
			: m_set(set)
			, m_binding(binding)
			, m_buff(buff)
			, m_offset(offset)
			, m_range(range)
			, m_fmt(fmt)
		{
		}

		Error operator()(GlState& state)
		{
			ANKI_ASSERT(m_offset + m_range <= m_buff->getSize());
			const GLuint tex = state.m_texBuffTextures[m_set][m_binding];
			glTextureBufferRange(tex, m_fmt, static_cast<const BufferImpl&>(*m_buff).getGlName(), m_offset, m_range);
			return Error::kNone;
		}
	};

	// Only the GL internal format is needed; the other conversion outputs are discarded.
	Bool compressed;
	GLenum format;
	GLenum internalFormat;
	GLenum type;
	DepthStencilAspectBit dsAspect;
	convertTextureInformation(fmt, compressed, format, internalFormat, type, dsAspect);
	(void)compressed;
	(void)format;
	(void)type;
	(void)dsAspect;

	ANKI_GL_SELF(CommandBufferImpl);
	self.pushBackNewCommand<Cmd>(set, binding, buff, offset, range, internalFormat);
}
// Bind a shader program (glUseProgram). Redundant binds are filtered by the client-side cache
// and counted in the GL_PROGS_SKIPPED trace counter.
void CommandBuffer::bindShaderProgram(ShaderProgramPtr prog)
{
	class Cmd final : public GlCommand
	{
	public:
		ShaderProgramPtr m_prog;

		Cmd(const ShaderProgramPtr& prog)
			: m_prog(prog)
		{
		}

		Error operator()(GlState& state)
		{
			// Keep the current program in the GL-thread state so other commands can reach it.
			state.m_crntProg = m_prog;
			glUseProgram(static_cast<const ShaderProgramImpl&>(*m_prog).getGlName());
			return Error::kNone;
		}
	};

	ANKI_ASSERT(prog);
	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.bindShaderProgram(prog))
	{
		self.pushBackNewCommand<Cmd>(prog);
	}
	else
	{
		ANKI_TRACE_INC_COUNTER(GL_PROGS_SKIPPED, 1);
	}
}
// Begin a render pass by binding the framebuffer with the given render area. The attachment
// usage parameters are unused in this GL backend (no image layout transitions needed).
void CommandBuffer::beginRenderPass(FramebufferPtr fb, const Array<TextureUsageBit, kMaxColorRenderTargets>& colorAttachmentUsages,
	TextureUsageBit depthStencilAttachmentUsage, U32 minx, U32 miny, U32 width, U32 height)
{
	class BindFramebufferCommand final : public GlCommand
	{
	public:
		FramebufferPtr m_fb;
		Array<U32, 4> m_renderArea; // {minx, miny, width, height}

		BindFramebufferCommand(FramebufferPtr fb, U32 minx, U32 miny, U32 width, U32 height)
			: m_fb(fb)
			, m_renderArea{{minx, miny, width, height}}
		{
		}

		Error operator()(GlState& state)
		{
			static_cast<const FramebufferImpl&>(*m_fb).bind(state, m_renderArea[0], m_renderArea[1], m_renderArea[2], m_renderArea[3]);
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	if(self.m_state.beginRenderPass(fb))
	{
		self.pushBackNewCommand<BindFramebufferCommand>(fb, minx, miny, width, height);
	}
}
// End the current render pass. Captures a raw pointer to the state's framebuffer BEFORE
// clearing it; the framebuffer object is kept alive elsewhere — presumably by the state or a
// previously recorded command holding a FramebufferPtr — confirm the lifetime.
void CommandBuffer::endRenderPass()
{
	class Command final : public GlCommand
	{
	public:
		const FramebufferImpl* m_fb;

		Command(const FramebufferImpl* fb)
			: m_fb(fb)
		{
			ANKI_ASSERT(fb);
		}

		Error operator()(GlState&)
		{
			m_fb->endRenderPass();
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	// Push first, then clear the cached render-pass state.
	self.pushBackNewCommand<Command>(self.m_state.m_fb);
	self.m_state.endRenderPass();
}
// Record an indexed, instanced draw. firstIndex is converted from index units to a byte offset
// (including the index buffer's bind offset) because GL takes a pointer-sized byte offset.
void CommandBuffer::drawElements(PrimitiveTopology topology, U32 count, U32 instanceCount, U32 firstIndex, U32 baseVertex, U32 baseInstance)
{
	class Cmd final : public GlCommand
	{
	public:
		GLenum m_topology;
		GLenum m_indexType;
		DrawElementsIndirectInfo m_info;

		Cmd(GLenum topology, GLenum indexType, const DrawElementsIndirectInfo& info)
			: m_topology(topology)
			, m_indexType(indexType)
			, m_info(info)
		{
		}

		Error operator()(GlState&)
		{
			// m_firstIndex is already a byte offset into the bound element array buffer.
			glDrawElementsInstancedBaseVertexBaseInstance(m_topology, m_info.m_count, m_indexType, numberToPtr<void*>(m_info.m_firstIndex),
				m_info.m_instanceCount, m_info.m_baseVertex, m_info.m_baseInstance);
			ANKI_TRACE_INC_COUNTER(GR_DRAWCALLS, 1);
			ANKI_TRACE_INC_COUNTER(GR_VERTICES, m_info.m_instanceCount * m_info.m_count);
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.checkIndexedDracall(); // (sic) helper name is misspelled where it is declared
	self.flushDrawcall(*this); // Flush lazily-cached state (e.g. stencil func) before the draw

	// Index size follows the cached index type set by bindIndexBuffer.
	U idxBytes;
	if(self.m_state.m_idx.m_indexType == GL_UNSIGNED_SHORT)
	{
		idxBytes = sizeof(U16);
	}
	else
	{
		ANKI_ASSERT(self.m_state.m_idx.m_indexType == GL_UNSIGNED_INT);
		idxBytes = sizeof(U32);
	}
	firstIndex = firstIndex * idxBytes + self.m_state.m_idx.m_offset;

	DrawElementsIndirectInfo info(count, instanceCount, firstIndex, baseVertex, baseInstance);
	self.pushBackNewCommand<Cmd>(convertPrimitiveTopology(topology), self.m_state.m_idx.m_indexType, info);
}
// Record a non-indexed, instanced draw (glDrawArraysInstancedBaseInstance).
void CommandBuffer::drawArrays(PrimitiveTopology topology, U32 count, U32 instanceCount, U32 first, U32 baseInstance)
{
	class DrawArraysCommand final : public GlCommand
	{
	public:
		GLenum m_topology;
		DrawArraysIndirectInfo m_info;

		DrawArraysCommand(GLenum topology, const DrawArraysIndirectInfo& info)
			: m_topology(topology)
			, m_info(info)
		{
		}

		Error operator()(GlState& state)
		{
			glDrawArraysInstancedBaseInstance(m_topology, m_info.m_first, m_info.m_count, m_info.m_instanceCount, m_info.m_baseInstance);
			ANKI_TRACE_INC_COUNTER(GR_DRAWCALLS, 1);
			ANKI_TRACE_INC_COUNTER(GR_VERTICES, m_info.m_instanceCount * m_info.m_count);
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.checkNonIndexedDrawcall();
	self.flushDrawcall(*this); // Flush lazily-cached state before the draw

	DrawArraysIndirectInfo info(count, instanceCount, first, baseInstance);
	self.pushBackNewCommand<DrawArraysCommand>(convertPrimitiveTopology(topology), info);
}
// Record a multi-draw indexed indirect call. 'offset' is a byte offset into indirectBuff and
// must be 4-byte aligned; the buffer holds drawCount tightly-packed DrawElementsIndirectInfo.
void CommandBuffer::drawElementsIndirect(PrimitiveTopology topology, U32 drawCount, PtrSize offset, BufferPtr indirectBuff)
{
	class DrawElementsIndirectCommand final : public GlCommand
	{
	public:
		GLenum m_topology;
		GLenum m_indexType;
		U32 m_drawCount;
		PtrSize m_offset;
		BufferPtr m_buff;

		DrawElementsIndirectCommand(GLenum topology, GLenum indexType, U32 drawCount, PtrSize offset, BufferPtr buff)
			: m_topology(topology)
			, m_indexType(indexType)
			, m_drawCount(drawCount)
			, m_offset(offset)
			, m_buff(buff)
		{
			ANKI_ASSERT(drawCount > 0);
			ANKI_ASSERT((m_offset % 4) == 0); // GL requires word alignment for indirect offsets
		}

		Error operator()(GlState&)
		{
			const BufferImpl& buff = static_cast<const BufferImpl&>(*m_buff);
			// All the indirect structs must fit inside the buffer.
			ANKI_ASSERT(m_offset + sizeof(DrawElementsIndirectInfo) * m_drawCount <= buff.getSize());
			glBindBuffer(GL_DRAW_INDIRECT_BUFFER, buff.getGlName());
			glMultiDrawElementsIndirect(m_topology, m_indexType, numberToPtr<void*>(m_offset), m_drawCount, sizeof(DrawElementsIndirectInfo));
			glBindBuffer(GL_DRAW_INDIRECT_BUFFER, 0);
			return Error::kNone;
		}
	};

	ANKI_GL_SELF(CommandBufferImpl);
	self.m_state.checkIndexedDracall(); // (sic) helper name is misspelled where it is declared
	self.flushDrawcall(*this); // Flush lazily-cached state before the draw
	self.pushBackNewCommand<DrawElementsIndirectCommand>(convertPrimitiveTopology(topology), self.m_state.m_idx.m_indexType, drawCount, offset,
		indirectBuff);
}
  852. void CommandBuffer::drawArraysIndirect(PrimitiveTopology topology, U32 drawCount, PtrSize offset, BufferPtr indirectBuff)
  853. {
  854. class DrawArraysIndirectCommand final : public GlCommand
  855. {
  856. public:
  857. GLenum m_topology;
  858. U32 m_drawCount;
  859. PtrSize m_offset;
  860. BufferPtr m_buff;
  861. DrawArraysIndirectCommand(GLenum topology, U32 drawCount, PtrSize offset, BufferPtr buff)
  862. : m_topology(topology)
  863. , m_drawCount(drawCount)
  864. , m_offset(offset)
  865. , m_buff(buff)
  866. {
  867. ANKI_ASSERT(drawCount > 0);
  868. ANKI_ASSERT((m_offset % 4) == 0);
  869. }
  870. Error operator()(GlState& state)
  871. {
  872. const BufferImpl& buff = static_cast<const BufferImpl&>(*m_buff);
  873. ANKI_ASSERT(m_offset + sizeof(DrawArraysIndirectInfo) * m_drawCount <= buff.getSize());
  874. glBindBuffer(GL_DRAW_INDIRECT_BUFFER, buff.getGlName());
  875. glMultiDrawArraysIndirect(m_topology, numberToPtr<void*>(m_offset), m_drawCount, sizeof(DrawArraysIndirectInfo));
  876. glBindBuffer(GL_DRAW_INDIRECT_BUFFER, 0);
  877. return Error::kNone;
  878. }
  879. };
  880. ANKI_GL_SELF(CommandBufferImpl);
  881. self.m_state.checkNonIndexedDrawcall();
  882. self.flushDrawcall(*this);
  883. self.pushBackNewCommand<DrawArraysIndirectCommand>(convertPrimitiveTopology(topology), drawCount, offset, indirectBuff);
  884. }
  885. void CommandBuffer::dispatchCompute(U32 groupCountX, U32 groupCountY, U32 groupCountZ)
  886. {
  887. class DispatchCommand final : public GlCommand
  888. {
  889. public:
  890. Array<U32, 3> m_size;
  891. DispatchCommand(U32 x, U32 y, U32 z)
  892. : m_size({{x, y, z}})
  893. {
  894. }
  895. Error operator()(GlState&)
  896. {
  897. glDispatchCompute(m_size[0], m_size[1], m_size[2]);
  898. return Error::kNone;
  899. }
  900. };
  901. ANKI_GL_SELF(CommandBufferImpl);
  902. ANKI_ASSERT(!!(self.m_flags & CommandBufferFlag::kComputeWork));
  903. self.m_state.checkDispatch();
  904. self.pushBackNewCommand<DispatchCommand>(groupCountX, groupCountY, groupCountZ);
  905. }
void CommandBuffer::resetOcclusionQuery(OcclusionQueryPtr query)
{
// GL query objects don't need an explicit reset step (beginning a query re-arms it),
// so this is intentionally a no-op in the GL backend.
// Nothing for GL
}
  910. void CommandBuffer::beginOcclusionQuery(OcclusionQueryPtr query)
  911. {
  912. class OqBeginCommand final : public GlCommand
  913. {
  914. public:
  915. OcclusionQueryPtr m_handle;
  916. OqBeginCommand(const OcclusionQueryPtr& handle)
  917. : m_handle(handle)
  918. {
  919. }
  920. Error operator()(GlState&)
  921. {
  922. static_cast<OcclusionQueryImpl&>(*m_handle).begin();
  923. return Error::kNone;
  924. }
  925. };
  926. ANKI_GL_SELF(CommandBufferImpl);
  927. self.pushBackNewCommand<OqBeginCommand>(query);
  928. }
  929. void CommandBuffer::endOcclusionQuery(OcclusionQueryPtr query)
  930. {
  931. class OqEndCommand final : public GlCommand
  932. {
  933. public:
  934. OcclusionQueryPtr m_handle;
  935. OqEndCommand(const OcclusionQueryPtr& handle)
  936. : m_handle(handle)
  937. {
  938. }
  939. Error operator()(GlState&)
  940. {
  941. static_cast<OcclusionQueryImpl&>(*m_handle).end();
  942. return Error::kNone;
  943. }
  944. };
  945. ANKI_GL_SELF(CommandBufferImpl);
  946. self.pushBackNewCommand<OqEndCommand>(query);
  947. }
  948. void CommandBuffer::copyBufferToTextureView(BufferPtr buff, PtrSize offset, PtrSize range, TextureViewPtr texView)
  949. {
  950. class TexSurfUploadCommand final : public GlCommand
  951. {
  952. public:
  953. BufferPtr m_buff;
  954. PtrSize m_offset;
  955. PtrSize m_range;
  956. TextureViewPtr m_texView;
  957. TexSurfUploadCommand(BufferPtr buff, PtrSize offset, PtrSize range, TextureViewPtr texView)
  958. : m_buff(buff)
  959. , m_offset(offset)
  960. , m_range(range)
  961. , m_texView(texView)
  962. {
  963. }
  964. Error operator()(GlState&)
  965. {
  966. const TextureViewImpl& viewImpl = static_cast<TextureViewImpl&>(*m_texView);
  967. const TextureImpl& texImpl = static_cast<TextureImpl&>(*viewImpl.m_tex);
  968. texImpl.copyFromBuffer(viewImpl.getSubresource(), static_cast<const BufferImpl&>(*m_buff).getGlName(), m_offset, m_range);
  969. return Error::kNone;
  970. }
  971. };
  972. ANKI_ASSERT(texView);
  973. ANKI_ASSERT(buff);
  974. ANKI_ASSERT(range > 0);
  975. ANKI_GL_SELF(CommandBufferImpl);
  976. ANKI_ASSERT(!self.m_state.insideRenderPass());
  977. self.pushBackNewCommand<TexSurfUploadCommand>(buff, offset, range, texView);
  978. }
  979. void CommandBuffer::copyBufferToBuffer(BufferPtr src, PtrSize srcOffset, BufferPtr dst, PtrSize dstOffset, PtrSize range)
  980. {
  981. class Cmd final : public GlCommand
  982. {
  983. public:
  984. BufferPtr m_src;
  985. PtrSize m_srcOffset;
  986. BufferPtr m_dst;
  987. PtrSize m_dstOffset;
  988. PtrSize m_range;
  989. Cmd(BufferPtr src, PtrSize srcOffset, BufferPtr dst, PtrSize dstOffset, PtrSize range)
  990. : m_src(src)
  991. , m_srcOffset(srcOffset)
  992. , m_dst(dst)
  993. , m_dstOffset(dstOffset)
  994. , m_range(range)
  995. {
  996. }
  997. Error operator()(GlState& state)
  998. {
  999. static_cast<BufferImpl&>(*m_dst).write(static_cast<const BufferImpl&>(*m_src).getGlName(), m_srcOffset, m_dstOffset, m_range);
  1000. return Error::kNone;
  1001. }
  1002. };
  1003. ANKI_ASSERT(src);
  1004. ANKI_ASSERT(dst);
  1005. ANKI_ASSERT(range > 0);
  1006. ANKI_GL_SELF(CommandBufferImpl);
  1007. ANKI_ASSERT(!self.m_state.insideRenderPass());
  1008. self.pushBackNewCommand<Cmd>(src, srcOffset, dst, dstOffset, range);
  1009. }
  1010. void CommandBuffer::generateMipmaps2d(TextureViewPtr texView)
  1011. {
  1012. class GenMipsCommand final : public GlCommand
  1013. {
  1014. public:
  1015. TextureViewPtr m_texView;
  1016. GenMipsCommand(const TextureViewPtr& view)
  1017. : m_texView(view)
  1018. {
  1019. }
  1020. Error operator()(GlState&)
  1021. {
  1022. const TextureViewImpl& viewImpl = static_cast<TextureViewImpl&>(*m_texView);
  1023. const TextureImpl& texImpl = static_cast<TextureImpl&>(*viewImpl.m_tex);
  1024. texImpl.generateMipmaps2d(viewImpl);
  1025. return Error::kNone;
  1026. }
  1027. };
  1028. ANKI_GL_SELF(CommandBufferImpl);
  1029. ANKI_ASSERT(!self.m_state.insideRenderPass());
  1030. self.pushBackNewCommand<GenMipsCommand>(texView);
  1031. }
  1032. void CommandBuffer::generateMipmaps3d(TextureViewPtr tex)
  1033. {
  1034. ANKI_ASSERT(!!"TODO");
  1035. }
  1036. void CommandBuffer::pushSecondLevelCommandBuffer(CommandBufferPtr cmdb)
  1037. {
  1038. class ExecCmdbCommand final : public GlCommand
  1039. {
  1040. public:
  1041. CommandBufferPtr m_cmdb;
  1042. ExecCmdbCommand(const CommandBufferPtr& cmdb)
  1043. : m_cmdb(cmdb)
  1044. {
  1045. }
  1046. Error operator()(GlState&)
  1047. {
  1048. ANKI_TRACE_SCOPED_EVENT(GL_2ND_LEVEL_CMD_BUFFER);
  1049. return static_cast<CommandBufferImpl&>(*m_cmdb).executeAllCommands();
  1050. }
  1051. };
  1052. ANKI_GL_SELF(CommandBufferImpl);
  1053. self.m_state.m_lastSecondLevelCmdb = static_cast<CommandBufferImpl*>(cmdb.get());
  1054. self.pushBackNewCommand<ExecCmdbCommand>(cmdb);
  1055. }
Bool CommandBuffer::isEmpty() const
{
// Forwards to the GL implementation's own isEmpty().
ANKI_GL_SELF_CONST(CommandBufferImpl);
return self.isEmpty();
}
void CommandBuffer::blitTextureViews(TextureViewPtr srcView, TextureViewPtr destView)
{
// Unimplemented for the GL backend; the assertion fires if this is ever called.
ANKI_ASSERT(!"TODO");
}
  1065. void CommandBuffer::setBufferBarrier(BufferPtr buff, BufferUsageBit prevUsage, BufferUsageBit nextUsage, PtrSize offset, PtrSize size)
  1066. {
  1067. class SetBufferMemBarrierCommand final : public GlCommand
  1068. {
  1069. public:
  1070. GLenum m_barrier;
  1071. SetBufferMemBarrierCommand(GLenum barrier)
  1072. : m_barrier(barrier)
  1073. {
  1074. }
  1075. Error operator()(GlState&)
  1076. {
  1077. glMemoryBarrier(m_barrier);
  1078. return Error::kNone;
  1079. }
  1080. };
  1081. GLenum d = GL_NONE;
  1082. BufferUsageBit all = prevUsage | nextUsage;
  1083. if(!!(all & BufferUsageBit::UNIFORM_ALL))
  1084. {
  1085. d |= GL_UNIFORM_BARRIER_BIT;
  1086. }
  1087. if(!!(all & BufferUsageBit::STORAGE_ALL))
  1088. {
  1089. d |= GL_SHADER_STORAGE_BARRIER_BIT;
  1090. }
  1091. if(!!(all & BufferUsageBit::kIndex))
  1092. {
  1093. d |= GL_ELEMENT_ARRAY_BARRIER_BIT;
  1094. }
  1095. if(!!(all & BufferUsageBit::kVertex))
  1096. {
  1097. d |= GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT;
  1098. }
  1099. if(!!(all & BufferUsageBit::INDIRECT_ALL))
  1100. {
  1101. d |= GL_COMMAND_BARRIER_BIT;
  1102. }
  1103. if(!!(all & (BufferUsageBit::FILL | BufferUsageBit::BUFFER_UPLOAD_SOURCE | BufferUsageBit::BUFFER_UPLOAD_DESTINATION)))
  1104. {
  1105. d |= GL_BUFFER_UPDATE_BARRIER_BIT;
  1106. }
  1107. if(!!(all & BufferUsageBit::QUERY_RESULT))
  1108. {
  1109. d |= GL_QUERY_BUFFER_BARRIER_BIT;
  1110. }
  1111. ANKI_ASSERT(d);
  1112. ANKI_GL_SELF(CommandBufferImpl);
  1113. self.pushBackNewCommand<SetBufferMemBarrierCommand>(d);
  1114. }
void CommandBuffer::setTextureSurfaceBarrier(TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage, const TextureSurfaceInfo& surf)
{
// The specific surface is ignored here: a default-constructed subresource is passed down and
// setTextureBarrier() issues a global glMemoryBarrier anyway.
TextureSubresourceInfo subresource;
setTextureBarrier(tex, prevUsage, nextUsage, subresource);
}
void CommandBuffer::setTextureVolumeBarrier(TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage, const TextureVolumeInfo& vol)
{
// The specific volume is ignored here: a default-constructed subresource is passed down and
// setTextureBarrier() issues a global glMemoryBarrier anyway.
TextureSubresourceInfo subresource;
setTextureBarrier(tex, prevUsage, nextUsage, subresource);
}
  1125. void CommandBuffer::setTextureBarrier(TexturePtr tex, TextureUsageBit prevUsage, TextureUsageBit nextUsage, const TextureSubresourceInfo& subresource)
  1126. {
  1127. class Cmd final : public GlCommand
  1128. {
  1129. public:
  1130. GLenum m_barrier;
  1131. Cmd(GLenum barrier)
  1132. : m_barrier(barrier)
  1133. {
  1134. }
  1135. Error operator()(GlState&)
  1136. {
  1137. glMemoryBarrier(m_barrier);
  1138. return Error::kNone;
  1139. }
  1140. };
  1141. const TextureUsageBit usage = nextUsage;
  1142. GLenum e = 0;
  1143. if(!!(usage & TextureUsageBit::SAMPLED_ALL))
  1144. {
  1145. e |= GL_TEXTURE_FETCH_BARRIER_BIT;
  1146. }
  1147. if(!!(usage & TextureUsageBit::IMAGE_ALL))
  1148. {
  1149. e |= GL_SHADER_IMAGE_ACCESS_BARRIER_BIT;
  1150. }
  1151. if(!!(usage & TextureUsageBit::kTransferDestination))
  1152. {
  1153. e |= GL_TEXTURE_UPDATE_BARRIER_BIT;
  1154. }
  1155. if(!!(usage & TextureUsageBit::FRAMEBUFFER_ATTACHMENT_READ_WRITE))
  1156. {
  1157. e |= GL_FRAMEBUFFER_BARRIER_BIT;
  1158. }
  1159. if(!!(usage & TextureUsageBit::CLEAR))
  1160. {
  1161. // No idea
  1162. }
  1163. if(!!(usage & TextureUsageBit::kGenerateMipmaps))
  1164. {
  1165. // No idea
  1166. }
  1167. if(e != 0)
  1168. {
  1169. ANKI_GL_SELF(CommandBufferImpl);
  1170. self.pushBackNewCommand<Cmd>(e);
  1171. }
  1172. }
  1173. void CommandBuffer::clearTextureView(TextureViewPtr texView, const ClearValue& clearValue)
  1174. {
  1175. class ClearTextCommand final : public GlCommand
  1176. {
  1177. public:
  1178. TextureViewPtr m_texView;
  1179. ClearValue m_val;
  1180. ClearTextCommand(TextureViewPtr texView, const ClearValue& val)
  1181. : m_texView(texView)
  1182. , m_val(val)
  1183. {
  1184. }
  1185. Error operator()(GlState&)
  1186. {
  1187. const TextureViewImpl& viewImpl = static_cast<TextureViewImpl&>(*m_texView);
  1188. const TextureImpl& texImpl = static_cast<TextureImpl&>(*viewImpl.m_tex);
  1189. texImpl.clear(viewImpl.getSubresource(), m_val);
  1190. return Error::kNone;
  1191. }
  1192. };
  1193. ANKI_GL_SELF(CommandBufferImpl);
  1194. ANKI_ASSERT(!self.m_state.insideRenderPass());
  1195. self.pushBackNewCommand<ClearTextCommand>(texView, clearValue);
  1196. }
  1197. void CommandBuffer::fillBuffer(BufferPtr buff, PtrSize offset, PtrSize size, U32 value)
  1198. {
  1199. class FillBufferCommand final : public GlCommand
  1200. {
  1201. public:
  1202. BufferPtr m_buff;
  1203. PtrSize m_offset;
  1204. PtrSize m_size;
  1205. U32 m_value;
  1206. FillBufferCommand(BufferPtr buff, PtrSize offset, PtrSize size, U32 value)
  1207. : m_buff(buff)
  1208. , m_offset(offset)
  1209. , m_size(size)
  1210. , m_value(value)
  1211. {
  1212. }
  1213. Error operator()(GlState&)
  1214. {
  1215. static_cast<BufferImpl&>(*m_buff).fill(m_offset, m_size, m_value);
  1216. return Error::kNone;
  1217. }
  1218. };
  1219. ANKI_GL_SELF(CommandBufferImpl);
  1220. ANKI_ASSERT(!self.m_state.insideRenderPass());
  1221. self.pushBackNewCommand<FillBufferCommand>(buff, offset, size, value);
  1222. }
  1223. void CommandBuffer::writeOcclusionQueryResultToBuffer(OcclusionQueryPtr query, PtrSize offset, BufferPtr buff)
  1224. {
  1225. class WriteOcclResultToBuff final : public GlCommand
  1226. {
  1227. public:
  1228. OcclusionQueryPtr m_query;
  1229. PtrSize m_offset;
  1230. BufferPtr m_buff;
  1231. WriteOcclResultToBuff(OcclusionQueryPtr query, PtrSize offset, BufferPtr buff)
  1232. : m_query(query)
  1233. , m_offset(offset)
  1234. , m_buff(buff)
  1235. {
  1236. ANKI_ASSERT((m_offset % 4) == 0);
  1237. }
  1238. Error operator()(GlState&)
  1239. {
  1240. const BufferImpl& buff = static_cast<const BufferImpl&>(*m_buff);
  1241. ANKI_ASSERT(m_offset + 4 <= buff.getSize());
  1242. glBindBuffer(GL_QUERY_BUFFER, buff.getGlName());
  1243. glGetQueryObjectuiv(static_cast<const OcclusionQueryImpl&>(*m_query).getGlName(), GL_QUERY_RESULT, numberToPtr<GLuint*>(m_offset));
  1244. glBindBuffer(GL_QUERY_BUFFER, 0);
  1245. return Error::kNone;
  1246. }
  1247. };
  1248. ANKI_GL_SELF(CommandBufferImpl);
  1249. ANKI_ASSERT(!self.m_state.insideRenderPass());
  1250. self.pushBackNewCommand<WriteOcclResultToBuff>(query, offset, buff);
  1251. }
void CommandBuffer::setPushConstants(const void* data, U32 dataSize)
{
// Emulates push constants on GL: the blob is copied into the command now and applied at
// execution time as plain uniforms, driven by the reflection of the then-current program.
class PushConstants final : public GlCommand
{
public:
DynamicArrayRaii<Vec4> m_data; // Owned snapshot of the push-constant blob, in Vec4 units.
PushConstants(const void* data, U32 dataSize, const CommandBufferAllocator<F32>& alloc)
: m_data(alloc)
{
// dataSize is asserted to be a multiple of 16 by the caller, so this divides evenly.
m_data.create(dataSize / sizeof(Vec4));
memcpy(&m_data[0], data, dataSize);
}
Error operator()(GlState& state)
{
// The snapshot must exactly match the program's uniform block size.
const ShaderProgramImplReflection& refl = static_cast<ShaderProgramImpl&>(*state.m_crntProg).getReflection();
ANKI_ASSERT(refl.m_uniformDataSize == m_data.getSizeInBytes());
const Bool transpose = true;
// Walk every reflected uniform and upload its slice of the blob.
for(const ShaderProgramImplReflection::Uniform& uni : refl.m_uniforms)
{
const U8* data = reinterpret_cast<const U8*>(&m_data[0]) + uni.m_pushConstantOffset;
const U count = uni.m_arrSize;
const GLint loc = uni.m_location;
switch(uni.m_type)
{
case ShaderVariableDataType::VEC4:
glUniform4fv(loc, count, reinterpret_cast<const GLfloat*>(data));
break;
case ShaderVariableDataType::IVEC4:
glUniform4iv(loc, count, reinterpret_cast<const GLint*>(data));
break;
case ShaderVariableDataType::UVEC4:
glUniform4uiv(loc, count, reinterpret_cast<const GLuint*>(data));
break;
case ShaderVariableDataType::MAT4:
glUniformMatrix4fv(loc, count, transpose, reinterpret_cast<const GLfloat*>(data));
break;
case ShaderVariableDataType::MAT3:
{
// Remove the padding: the blob stores MAT3 as a padded Mat3x4, but GL expects a tight Mat3.
ANKI_ASSERT(count == 1 && "TODO");
const Mat3x4* m34 = reinterpret_cast<const Mat3x4*>(data);
Mat3 m3(m34->getRotationPart());
glUniformMatrix3fv(loc, count, transpose, reinterpret_cast<const GLfloat*>(&m3));
break;
}
default:
// Remaining ShaderVariableDataType values are not handled yet.
ANKI_ASSERT(!"TODO");
}
}
return Error::kNone;
}
};
ANKI_ASSERT(data);
ANKI_ASSERT(dataSize);
ANKI_ASSERT(dataSize % 16 == 0); // Blob must be a whole number of Vec4s
ANKI_GL_SELF(CommandBufferImpl);
self.pushBackNewCommand<PushConstants>(data, dataSize, self.m_alloc);
}
void CommandBuffer::setRasterizationOrder(RasterizationOrder order)
{
// Rasterization-order hints have no GL equivalent in this backend; intentionally a no-op.
// Nothing for GL
}
  1314. } // end namespace anki