RenderGraph.inl.h

// Copyright (C) 2009-2021, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <AnKi/Gr/RenderGraph.h>

namespace anki {

inline void RenderPassWorkContext::bindAccelerationStructure(U32 set, U32 binding, AccelerationStructureHandle handle)
{
	m_commandBuffer->bindAccelerationStructure(set, binding, m_rgraph->getAs(handle));
}

inline void RenderPassWorkContext::getBufferState(BufferHandle handle, BufferPtr& buff) const
{
	buff = m_rgraph->getBuffer(handle);
}

inline void RenderPassWorkContext::getRenderTargetState(RenderTargetHandle handle,
														 const TextureSubresourceInfo& subresource,
														 TexturePtr& tex) const
{
	TextureUsageBit usage;
	m_rgraph->getCrntUsage(handle, m_batchIdx, subresource, usage);
	tex = m_rgraph->getTexture(handle);
}

inline TexturePtr RenderPassWorkContext::getTexture(RenderTargetHandle handle) const
{
	return m_rgraph->getTexture(handle);
}

// If the dependency refers to the whole texture (m_mipmapCount == MAX_U32), expand its subresource to cover every
// mip, face and layer of the render target it points to.
inline void RenderPassDescriptionBase::fixSubresource(RenderPassDependency& dep) const
{
	ANKI_ASSERT(dep.m_type == RenderPassDependency::Type::TEXTURE);

	TextureSubresourceInfo& subresource = dep.m_texture.m_subresource;
	const Bool wholeTexture = subresource.m_mipmapCount == MAX_U32;
	const RenderGraphDescription::RT& rt = m_descr->m_renderTargets[dep.m_texture.m_handle.m_idx];

	if(wholeTexture)
	{
		ANKI_ASSERT(subresource.m_firstFace == 0);
		ANKI_ASSERT(subresource.m_firstMipmap == 0);
		ANKI_ASSERT(subresource.m_firstLayer == 0);

		if(rt.m_importedTex)
		{
			subresource.m_faceCount = textureTypeIsCube(rt.m_importedTex->getTextureType()) ? 6 : 1;
			subresource.m_mipmapCount = rt.m_importedTex->getMipmapCount();
			subresource.m_layerCount = rt.m_importedTex->getLayerCount();
		}
		else
		{
			subresource.m_faceCount = textureTypeIsCube(rt.m_initInfo.m_type) ? 6 : 1;
			subresource.m_mipmapCount = rt.m_initInfo.m_mipmapCount;
			subresource.m_layerCount = rt.m_initInfo.m_layerCount;
		}
	}

	ANKI_ASSERT(dep.m_texture.m_subresource.m_firstMipmap + dep.m_texture.m_subresource.m_mipmapCount
				<= ((rt.m_importedTex) ? rt.m_importedTex->getMipmapCount() : rt.m_initInfo.m_mipmapCount));
}

inline void RenderPassDescriptionBase::validateDep(const RenderPassDependency& dep)
{
	// Make sure the requested usage is compatible with the pass type (graphics vs compute)
	if(dep.m_type == RenderPassDependency::Type::TEXTURE)
	{
		const TextureUsageBit usage = dep.m_texture.m_usage;
		(void)usage;

		if(m_type == Type::GRAPHICS)
		{
			ANKI_ASSERT(!(usage & TextureUsageBit::ALL_COMPUTE));
		}
		else
		{
			ANKI_ASSERT(!(usage & TextureUsageBit::ALL_GRAPHICS));
		}

		ANKI_ASSERT(!!(usage & TextureUsageBit::ALL_READ) || !!(usage & TextureUsageBit::ALL_WRITE));
	}
	else if(dep.m_type == RenderPassDependency::Type::BUFFER)
	{
		const BufferUsageBit usage = dep.m_buffer.m_usage;
		(void)usage;

		if(m_type == Type::GRAPHICS)
		{
			ANKI_ASSERT(!(usage & BufferUsageBit::ALL_COMPUTE));
		}
		else
		{
			ANKI_ASSERT(!(usage & BufferUsageBit::ALL_GRAPHICS));
		}

		ANKI_ASSERT(!!(usage & BufferUsageBit::ALL_READ) || !!(usage & BufferUsageBit::ALL_WRITE));
	}
	else
	{
		ANKI_ASSERT(dep.m_type == RenderPassDependency::Type::ACCELERATION_STRUCTURE);

		if(m_type == Type::GRAPHICS)
		{
			ANKI_ASSERT(!(dep.m_as.m_usage & ~AccelerationStructureUsageBit::ALL_GRAPHICS));
		}
		else
		{
			ANKI_ASSERT(!(dep.m_as.m_usage & AccelerationStructureUsageBit::ALL_GRAPHICS));
		}
	}
}

inline void RenderPassDescriptionBase::newDependency(const RenderPassDependency& dep)
{
	validateDep(dep);

	if(dep.m_type == RenderPassDependency::Type::TEXTURE)
	{
		m_rtDeps.emplaceBack(m_alloc, dep);
		fixSubresource(m_rtDeps.getBack());

		if(!!(dep.m_texture.m_usage & TextureUsageBit::ALL_READ))
		{
			m_readRtMask.set(dep.m_texture.m_handle.m_idx);
		}

		if(!!(dep.m_texture.m_usage & TextureUsageBit::ALL_WRITE))
		{
			m_writeRtMask.set(dep.m_texture.m_handle.m_idx);
		}

		// Try to derive the render target's usage from this dependency
		m_descr->m_renderTargets[dep.m_texture.m_handle.m_idx].m_usageDerivedByDeps |= dep.m_texture.m_usage;
	}
	else if(dep.m_type == RenderPassDependency::Type::BUFFER)
	{
		m_buffDeps.emplaceBack(m_alloc, dep);

		if(!!(dep.m_buffer.m_usage & BufferUsageBit::ALL_READ))
		{
			m_readBuffMask.set(dep.m_buffer.m_handle.m_idx);
		}

		if(!!(dep.m_buffer.m_usage & BufferUsageBit::ALL_WRITE))
		{
			m_writeBuffMask.set(dep.m_buffer.m_handle.m_idx);
		}
	}
	else
	{
		ANKI_ASSERT(dep.m_type == RenderPassDependency::Type::ACCELERATION_STRUCTURE);
		m_asDeps.emplaceBack(m_alloc, dep);

		if(!!(dep.m_as.m_usage & AccelerationStructureUsageBit::ALL_READ))
		{
			m_readAsMask.set(dep.m_as.m_handle.m_idx);
		}

		if(!!(dep.m_as.m_usage & AccelerationStructureUsageBit::ALL_WRITE))
		{
			m_writeAsMask.set(dep.m_as.m_handle.m_idx);
		}
	}
}

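// Illustrative sketch (not part of this file): how a pass would typically register its dependencies. The
// RenderPassDependency constructor arguments, the specific usage enumerators and the handle names below are
// assumptions made for the example; the real definitions live in RenderGraph.h.
//
//   GraphicsRenderPassDescription& pass = descr.newGraphicsRenderPass("GBuffer");
//   pass.newDependency(RenderPassDependency(albedoRtHandle, TextureUsageBit::FRAMEBUFFER_ATTACHMENT_WRITE));
//   pass.newDependency(RenderPassDependency(uniformsBuffHandle, BufferUsageBit::UNIFORM_FRAGMENT));
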
inline void GraphicsRenderPassDescription::setFramebufferInfo(
	const FramebufferDescription& fbInfo, std::initializer_list<RenderTargetHandle> colorRenderTargetHandles,
	RenderTargetHandle depthStencilRenderTargetHandle, U32 minx, U32 miny, U32 maxx, U32 maxy)
{
	Array<RenderTargetHandle, MAX_COLOR_ATTACHMENTS> rts;
	U32 count = 0;
	for(const RenderTargetHandle& h : colorRenderTargetHandles)
	{
		rts[count++] = h;
	}

	setFramebufferInfo(fbInfo, ConstWeakArray<RenderTargetHandle>(&rts[0], count), depthStencilRenderTargetHandle,
					   minx, miny, maxx, maxy);
}

inline void GraphicsRenderPassDescription::setFramebufferInfo(
	const FramebufferDescription& fbInfo, ConstWeakArray<RenderTargetHandle> colorRenderTargetHandles,
	RenderTargetHandle depthStencilRenderTargetHandle, U32 minx, U32 miny, U32 maxx, U32 maxy)
{
#if ANKI_ENABLE_ASSERTIONS
	ANKI_ASSERT(fbInfo.isBacked() && "Forgot to call GraphicsRenderPassFramebufferInfo::bake");

	for(U32 i = 0; i < colorRenderTargetHandles.getSize(); ++i)
	{
		if(i >= fbInfo.m_colorAttachmentCount)
		{
			ANKI_ASSERT(!colorRenderTargetHandles[i].isValid());
		}
		else
		{
			ANKI_ASSERT(colorRenderTargetHandles[i].isValid());
		}
	}

	if(!fbInfo.m_depthStencilAttachment.m_aspect)
	{
		ANKI_ASSERT(!depthStencilRenderTargetHandle.isValid());
	}
	else
	{
		ANKI_ASSERT(depthStencilRenderTargetHandle.isValid());
	}
#endif

	m_fbDescr = fbInfo;
	memcpy(m_rtHandles.getBegin(), colorRenderTargetHandles.getBegin(), colorRenderTargetHandles.getSizeInBytes());
	m_rtHandles[MAX_COLOR_ATTACHMENTS] = depthStencilRenderTargetHandle;
	m_fbRenderArea = {minx, miny, maxx, maxy};
}

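// Illustrative sketch: pairing a baked FramebufferDescription with this pass' render targets. Only
// m_colorAttachmentCount, bake() (referenced by the assertion above) and the setFramebufferInfo() arguments come from
// the surrounding code; the attachment setup, handle names and dimensions are placeholders. A default-constructed
// RenderTargetHandle is assumed to be invalid, matching the depth/stencil assertion.
//
//   FramebufferDescription fbDescr;
//   fbDescr.m_colorAttachmentCount = 1;
//   // ... fill the color attachment (load/store ops, clear value) ...
//   fbDescr.bake();
//   pass.setFramebufferInfo(fbDescr, {albedoRtHandle}, RenderTargetHandle(), 0, 0, width, height);
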
inline RenderGraphDescription::~RenderGraphDescription()
{
	for(RenderPassDescriptionBase* pass : m_passes)
	{
		m_alloc.deleteInstance(pass);
	}

	m_passes.destroy(m_alloc);
	m_renderTargets.destroy(m_alloc);
	m_buffers.destroy(m_alloc);
	m_as.destroy(m_alloc);
}

inline GraphicsRenderPassDescription& RenderGraphDescription::newGraphicsRenderPass(CString name)
{
	GraphicsRenderPassDescription* pass = m_alloc.newInstance<GraphicsRenderPassDescription>(this);
	pass->m_alloc = m_alloc;
	pass->setName(name);
	m_passes.emplaceBack(m_alloc, pass);
	return *pass;
}

inline ComputeRenderPassDescription& RenderGraphDescription::newComputeRenderPass(CString name)
{
	ComputeRenderPassDescription* pass = m_alloc.newInstance<ComputeRenderPassDescription>(this);
	pass->m_alloc = m_alloc;
	pass->setName(name);
	m_passes.emplaceBack(m_alloc, pass);
	return *pass;
}

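// Illustrative sketch: creating passes on a RenderGraphDescription. The constructor argument, the pass names and
// setWork() are assumptions taken from the wider RenderGraph API, not from this file.
//
//   RenderGraphDescription descr(alloc);
//   GraphicsRenderPassDescription& gpass = descr.newGraphicsRenderPass("GBuffer");
//   ComputeRenderPassDescription& cpass = descr.newComputeRenderPass("SSAO");
//   // Each pass then declares its dependencies (newDependency) and its work callback (setWork).
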
inline RenderTargetHandle RenderGraphDescription::importRenderTarget(TexturePtr tex, TextureUsageBit usage)
{
	for(const RT& rt : m_renderTargets)
	{
		(void)rt;
		ANKI_ASSERT(rt.m_importedTex != tex && "Already imported");
	}

	RT& rt = *m_renderTargets.emplaceBack(m_alloc);
	rt.m_importedTex = tex;
	rt.m_importedLastKnownUsage = usage;
	rt.m_usageDerivedByDeps = TextureUsageBit::NONE;
	rt.setName(tex->getName());

	RenderTargetHandle out;
	out.m_idx = m_renderTargets.getSize() - 1;
	return out;
}

inline RenderTargetHandle RenderGraphDescription::importRenderTarget(TexturePtr tex)
{
	RenderTargetHandle out = importRenderTarget(tex, TextureUsageBit::NONE);
	m_renderTargets.getBack().m_importedAndUndefinedUsage = true;
	return out;
}

inline RenderTargetHandle RenderGraphDescription::newRenderTarget(const RenderTargetDescription& initInf)
{
	ANKI_ASSERT(initInf.m_hash && "Forgot to call RenderTargetDescription::bake");
	ANKI_ASSERT(initInf.m_usage == TextureUsageBit::NONE
				&& "No need to supply the usage. The render graph will find it");

	RT& rt = *m_renderTargets.emplaceBack(m_alloc);
	rt.m_initInfo = initInf;
	rt.m_hash = initInf.m_hash;
	rt.m_importedLastKnownUsage = TextureUsageBit::NONE;
	rt.m_usageDerivedByDeps = TextureUsageBit::NONE;
	rt.setName(initInf.getName());

	RenderTargetHandle out;
	out.m_idx = m_renderTargets.getSize() - 1;
	return out;
}

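// Illustrative sketch: the two ways render targets enter the graph. importRenderTarget() wraps an existing texture
// (with or without a known current usage), while newRenderTarget() creates a transient target from a baked
// RenderTargetDescription and lets the graph derive the usage from the declared dependencies. The name-taking
// RenderTargetDescription constructor and the variable names are assumptions.
//
//   RenderTargetHandle presentRtHandle = descr.importRenderTarget(swapchainTex, TextureUsageBit::NONE);
//
//   RenderTargetDescription rtDescr("GBufferAlbedo");
//   // ... fill size/format, leave m_usage as TextureUsageBit::NONE ...
//   rtDescr.bake();
//   RenderTargetHandle albedoRtHandle = descr.newRenderTarget(rtDescr);
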
inline BufferHandle RenderGraphDescription::importBuffer(BufferPtr buff, BufferUsageBit usage, PtrSize offset,
														  PtrSize range)
{
	// Checks
	if(range == MAX_PTR_SIZE)
	{
		ANKI_ASSERT(offset < buff->getSize());
	}
	else
	{
		ANKI_ASSERT((offset + range) <= buff->getSize());
	}

	for(const Buffer& bb : m_buffers)
	{
		(void)bb;
		ANKI_ASSERT((bb.m_importedBuff != buff || !bufferRangeOverlaps(bb.m_offset, bb.m_range, offset, range))
					&& "Range already imported");
	}

	Buffer& b = *m_buffers.emplaceBack(m_alloc);
	b.setName(buff->getName());
	b.m_usage = usage;
	b.m_importedBuff = buff;
	b.m_offset = offset;
	b.m_range = range;

	BufferHandle out;
	out.m_idx = m_buffers.getSize() - 1;
	return out;
}

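// Illustrative sketch: importing a sub-range of an existing GPU buffer. Judging by the checks above, MAX_PTR_SIZE
// acts as a "rest of the buffer from offset" sentinel, and overlapping ranges of the same buffer may not be imported
// twice. The buffer and usage variable names are hypothetical.
//
//   BufferHandle indirectArgsHandle = descr.importBuffer(indirectArgsBuff, crntBuffUsage, 0, MAX_PTR_SIZE);
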
inline AccelerationStructureHandle
RenderGraphDescription::importAccelerationStructure(AccelerationStructurePtr as, AccelerationStructureUsageBit usage)
{
	for(const AS& a : m_as)
	{
		(void)a;
		ANKI_ASSERT(a.m_importedAs != as && "Already imported");
	}

	AS& a = *m_as.emplaceBack(m_alloc);
	a.setName(as->getName());
	a.m_importedAs = as;
	a.m_usage = usage;

	AccelerationStructureHandle handle;
	handle.m_idx = m_as.getSize() - 1;
	return handle;
}

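// Illustrative sketch: importing a top-level acceleration structure so that ray tracing passes can declare
// dependencies on it. The variable names and the usage value are hypothetical.
//
//   AccelerationStructureHandle tlasHandle = descr.importAccelerationStructure(tlas, crntTlasUsage);
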
} // end namespace anki