Ir.cpp

// Copyright (C) 2009-2016, Panagiotis Christopoulos Charitos.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <anki/renderer/Ir.h>
#include <anki/renderer/Is.h>
#include <anki/renderer/Pps.h>
#include <anki/core/Config.h>
#include <anki/scene/SceneNode.h>
#include <anki/scene/Visibility.h>
#include <anki/scene/FrustumComponent.h>
#include <anki/scene/ReflectionProbeComponent.h>
#include <anki/core/Trace.h>

namespace anki
{

//==============================================================================
// Misc                                                                        =
//==============================================================================
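
// CPU-side mirrors of the structs that end up in the GPU storage buffers.
// The trailing padding keeps IrShaderReflectionProbe at a 16-byte multiple
// (assuming an std140/std430-style layout on the shader side).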
struct IrShaderReflectionProbe
{
    Vec3 m_pos;
    F32 m_radiusSq;
    F32 m_cubemapIndex;
    U32 _m_padding[3];
};

struct IrShaderCluster
{
    U32 m_indexOffset;
    U32 m_probeCount;
};

static const U MAX_PROBES_PER_CLUSTER = 16;

/// Store the probe radius for sorting the indices.
class ClusterDataIndex
{
public:
    U32 m_index = 0;
    F32 m_probeRadius = 0.0;
};
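
// Per-cluster scratch data filled by the binning threads. m_probeCount is
// atomic because several threads may bin probes into the same cluster; the
// "% MAX_PROBES_PER_CLUSTER" used below makes the slot index wrap instead of
// running past the fixed-size array when too many probes land in one cluster.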
class IrClusterData
{
public:
    Atomic<U32> m_probeCount = {0};
    Array<ClusterDataIndex, MAX_PROBES_PER_CLUSTER> m_probeIds;

    Bool operator==(const IrClusterData& b) const
    {
        const U probeCount = m_probeCount.load() % MAX_PROBES_PER_CLUSTER;
        const U bProbeCount = b.m_probeCount.load() % MAX_PROBES_PER_CLUSTER;

        if(probeCount != bProbeCount)
        {
            return false;
        }

        if(probeCount > 0)
        {
            if(memcmp(&m_probeIds[0],
                   &b.m_probeIds[0],
                   sizeof(m_probeIds[0]) * probeCount)
                != 0)
            {
                return false;
            }
        }

        return true;
    }

    /// Sort the indices from the smallest probe to the biggest.
    void sort()
    {
        const U probeCount = m_probeCount.load() % MAX_PROBES_PER_CLUSTER;
        if(probeCount > 1)
        {
            std::sort(m_probeIds.getBegin(),
                m_probeIds.getBegin() + probeCount,
                [](const ClusterDataIndex& a, const ClusterDataIndex& b) {
                    ANKI_ASSERT(a.m_probeRadius > 0.0 && b.m_probeRadius > 0.0);
                    return a.m_probeRadius < b.m_probeRadius;
                });
        }
    }
};

/// Context for the whole run.
class IrRunContext
{
public:
    Ir* m_ir ANKI_DBG_NULLIFY_PTR;
    DArray<IrClusterData> m_clusterData;
    SArray<IrShaderCluster> m_clusters;
    SArray<U32> m_indices;
    Atomic<U32> m_indexCount = {0};
    VisibilityTestResults* m_visRez ANKI_DBG_NULLIFY_PTR;

    /// An atomic that helps allocate the index buffer.
    Atomic<U32> m_probeIndicesAllocate = {0};

    /// Same as m_probeIndicesAllocate but for the cluster buffer.
    Atomic<U32> m_clustersAllocate = {0};

    StackAllocator<U8> m_alloc;

    ~IrRunContext()
    {
        // Deallocate. Watch the order
        m_clusterData.destroy(m_alloc);
    }
};

/// Thread specific context.
class IrTaskContext
{
public:
    ClustererTestResult m_clustererTestResult;
    SceneNode* m_node ANKI_DBG_NULLIFY_PTR;
};

/// Task that bins the probes and writes them to the GPU buffers.
class IrTask : public ThreadPool::Task
{
public:
    IrRunContext* m_ctx ANKI_DBG_NULLIFY_PTR;

    Error operator()(U32 threadId, PtrSize threadsCount) override
    {
        m_ctx->m_ir->binProbes(threadId, threadsCount, *m_ctx);
        return ErrorCode::NONE;
    }
};

//==============================================================================
// Ir                                                                          =
//==============================================================================

//==============================================================================
Ir::Ir(Renderer* r)
    : RenderingPass(r)
    , m_barrier(r->getThreadPool().getThreadsCount())
{
}

//==============================================================================
Ir::~Ir()
{
    m_cacheEntries.destroy(getAllocator());
}

//==============================================================================
Error Ir::init(const ConfigSet& config)
{
    ANKI_LOGI("Initializing IR (Image Reflections)");

    m_fbSize = config.getNumber("ir.rendererSize");
    if(m_fbSize < Renderer::TILE_SIZE)
    {
        ANKI_LOGE("Too low ir.rendererSize");
        return ErrorCode::USER_DATA;
    }

    m_cubemapArrSize = config.getNumber("ir.cubemapTextureArraySize");
    if(m_cubemapArrSize < 2)
    {
        ANKI_LOGE("Too low ir.cubemapTextureArraySize");
        return ErrorCode::USER_DATA;
    }

    m_cacheEntries.create(getAllocator(), m_cubemapArrSize);

    // Init the renderer
    Config nestedRConfig;
    nestedRConfig.set("dbg.enabled", false);
    nestedRConfig.set("is.sm.bilinearEnabled", true);
    nestedRConfig.set("is.groundLightEnabled", false);
    nestedRConfig.set("is.sm.enabled", false);
    nestedRConfig.set("is.sm.maxLights", 8);
    nestedRConfig.set("is.sm.poissonEnabled", false);
    nestedRConfig.set("is.sm.resolution", 16);
    nestedRConfig.set("lf.maxFlares", 8);
    nestedRConfig.set("pps.enabled", false);
    nestedRConfig.set("renderingQuality", 1.0);
    nestedRConfig.set("clusterSizeZ", 16);
    nestedRConfig.set("width", m_fbSize);
    nestedRConfig.set("height", m_fbSize);
    nestedRConfig.set("lodDistance", 10.0);
    nestedRConfig.set("samples", 1);
    nestedRConfig.set("ir.enabled", false); // Very important to disable that
    nestedRConfig.set("sslr.enabled", false); // And that

    ANKI_CHECK(m_nestedR.init(&m_r->getThreadPool(),
        &m_r->getResourceManager(),
        &m_r->getGrManager(),
        m_r->getAllocator(),
        m_r->getFrameAllocator(),
        nestedRConfig,
        m_r->getGlobalTimestampPtr()));

    // Init the textures
    TextureInitInfo texinit;

    texinit.m_width = m_fbSize;
    texinit.m_height = m_fbSize;
    texinit.m_depth = m_cubemapArrSize;
    texinit.m_type = TextureType::CUBE_ARRAY;
    texinit.m_format = Is::RT_PIXEL_FORMAT;
    texinit.m_mipmapsCount = MAX_U8;
    texinit.m_samples = 1;
    texinit.m_sampling.m_minMagFilter = SamplingFilter::LINEAR;
    texinit.m_sampling.m_mipmapFilter = SamplingFilter::LINEAR;

    m_envCubemapArr = getGrManager().newInstance<Texture>(texinit);

    texinit.m_width = IRRADIANCE_SIZE;
    texinit.m_height = IRRADIANCE_SIZE;
    m_irradianceCubemapArr = getGrManager().newInstance<Texture>(texinit);

    m_cubemapArrMipCount = computeMaxMipmapCount(m_fbSize, m_fbSize);

    // Create irradiance stuff
    ANKI_CHECK(initIrradiance());

    // Load split sum integration LUT
    ANKI_CHECK(getResourceManager().loadResource(
        "engine_data/SplitSumIntegration.ankitex", m_integrationLut));

    SamplerInitInfo sinit;
    sinit.m_minMagFilter = SamplingFilter::LINEAR;
    sinit.m_mipmapFilter = SamplingFilter::BASE;
    sinit.m_minLod = 0.0;
    sinit.m_maxLod = 1.0;
    sinit.m_repeat = false;
    m_integrationLutSampler = getGrManager().newInstance<Sampler>(sinit);

    // Init the resource group
    ResourceGroupInitInfo rcInit;
    rcInit.m_textures[0].m_texture = m_envCubemapArr;
    rcInit.m_textures[1].m_texture = m_irradianceCubemapArr;
    rcInit.m_textures[2].m_texture = m_integrationLut->getGrTexture();
    rcInit.m_textures[2].m_sampler = m_integrationLutSampler;

    rcInit.m_storageBuffers[0].m_dynamic = true;
    rcInit.m_storageBuffers[1].m_dynamic = true;
    rcInit.m_storageBuffers[2].m_dynamic = true;

    m_rcGroup = getGrManager().newInstance<ResourceGroup>(rcInit);

    return ErrorCode::NONE;
}

//==============================================================================
Error Ir::initIrradiance()
{
    // Create the shader
    StringAuto pps(getFrameAllocator());
    pps.sprintf("#define CUBEMAP_SIZE %u\n", IRRADIANCE_SIZE);

    ANKI_CHECK(getResourceManager().loadResourceToCache(m_computeIrradianceFrag,
        "shaders/Irradiance.frag.glsl",
        pps.toCString(),
        "r_ir_"));

    // Create the ppline
    ColorStateInfo colorInf;
    colorInf.m_attachmentCount = 1;
    colorInf.m_attachments[0].m_format = Is::RT_PIXEL_FORMAT;

    m_r->createDrawQuadPipeline(m_computeIrradianceFrag->getGrShader(),
        colorInf,
        m_computeIrradiancePpline);

    // Create the resources
    ResourceGroupInitInfo rcInit;
    rcInit.m_uniformBuffers[0].m_dynamic = true;
    rcInit.m_textures[0].m_texture = m_envCubemapArr;
    m_computeIrradianceResources =
        getGrManager().newInstance<ResourceGroup>(rcInit);

    return ErrorCode::NONE;
}

//==============================================================================
Error Ir::run(CommandBufferPtr cmdb)
{
    ANKI_TRACE_START_EVENT(RENDER_IR);
    FrustumComponent& frc = m_r->getActiveFrustumComponent();
    VisibilityTestResults& visRez = frc.getVisibilityTestResults();

    if(visRez.getCount(VisibilityGroupType::REFLECTION_PROBES)
        > m_cubemapArrSize)
    {
        ANKI_LOGW("Increase the ir.cubemapTextureArraySize");
    }

    //
    // Perform some initialization
    //
    IrRunContext ctx;

    ctx.m_visRez = &visRez;
    ctx.m_ir = this;
    ctx.m_alloc = getFrameAllocator();

    // Allocate temp CPU mem
    ctx.m_clusterData.create(
        getFrameAllocator(), m_r->getClusterer().getClusterCount());

    //
    // Render and populate probes GPU mem
    //

    // Probes GPU mem
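    // The per-frame storage buffer is laid out as: one Mat3x4 with the
    // inverse view rotation, one Vec4 packing the near plane and the
    // clusterer magic value, then one IrShaderReflectionProbe per visible
    // probe.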
    void* data = getGrManager().allocateFrameHostVisibleMemory(
        sizeof(IrShaderReflectionProbe)
                * visRez.getCount(VisibilityGroupType::REFLECTION_PROBES)
            + sizeof(Mat3x4)
            + sizeof(Vec4),
        BufferUsage::STORAGE,
        m_probesToken);

    Mat3x4* invViewRotation = static_cast<Mat3x4*>(data);
    *invViewRotation =
        Mat3x4(frc.getViewMatrix().getInverse().getRotationPart());

    Vec4* nearClusterDivisor = reinterpret_cast<Vec4*>(invViewRotation + 1);
    nearClusterDivisor->x() = frc.getFrustum().getNear();
    nearClusterDivisor->y() = m_r->getClusterer().getShaderMagicValue();
    nearClusterDivisor->z() = 0.0;
    nearClusterDivisor->w() = 0.0;

    SArray<IrShaderReflectionProbe> probes(
        reinterpret_cast<IrShaderReflectionProbe*>(nearClusterDivisor + 1),
        visRez.getCount(VisibilityGroupType::REFLECTION_PROBES));

    // Render some of the probes
    const VisibleNode* it =
        visRez.getBegin(VisibilityGroupType::REFLECTION_PROBES);
    const VisibleNode* end =
        visRez.getEnd(VisibilityGroupType::REFLECTION_PROBES);

    U probeIdx = 0;
    while(it != end)
    {
        // Write and render probe
        ANKI_CHECK(writeProbeAndRender(*it->m_node, probes[probeIdx], cmdb));

        ++it;
        ++probeIdx;
    }
    ANKI_ASSERT(
        probeIdx == visRez.getCount(VisibilityGroupType::REFLECTION_PROBES));

    //
    // Start the jobs that can run in parallel
    //
    ThreadPool& threadPool = m_r->getThreadPool();
    Array<IrTask, ThreadPool::MAX_THREADS> tasks;
    for(U i = 0; i < threadPool.getThreadsCount(); i++)
    {
        tasks[i].m_ctx = &ctx;
        threadPool.assignNewTask(i, &tasks[i]);
    }

    // Sync
    ANKI_CHECK(threadPool.waitForAllThreadsToFinish());

    // Bye
    ANKI_TRACE_STOP_EVENT(RENDER_IR);
    return ErrorCode::NONE;
}

//==============================================================================
void Ir::binProbes(U32 threadId, PtrSize threadsCount, IrRunContext& ctx)
{
    ANKI_TRACE_START_EVENT(RENDER_IR);
    IrTaskContext task;

    //
    // Bin the probes
    //
    PtrSize start, end;
    ThreadPool::Task::choseStartEnd(threadId,
        threadsCount,
        ctx.m_visRez->getCount(VisibilityGroupType::REFLECTION_PROBES),
        start,
        end);

    // Init clusterer test result for this thread
    if(start < end)
    {
        m_r->getClusterer().initTestResults(
            getFrameAllocator(), task.m_clustererTestResult);
    }

    for(auto i = start; i < end; i++)
    {
        VisibleNode* vnode =
            ctx.m_visRez->getBegin(VisibilityGroupType::REFLECTION_PROBES) + i;
        SceneNode& node = *vnode->m_node;

        task.m_node = &node;

        // Bin it to temp clusters
        binProbe(i, ctx, task);
    }

    //
    // Write the clusters
    //

    // Allocate the cluster buffer. First come first served
    U who = ctx.m_clustersAllocate.fetchAdd(1);
    if(who == 0)
    {
        void* mem = getGrManager().allocateFrameHostVisibleMemory(
            m_r->getClusterer().getClusterCount() * sizeof(IrShaderCluster),
            BufferUsage::STORAGE,
            m_clustersToken);

        ctx.m_clusters =
            SArray<IrShaderCluster>(static_cast<IrShaderCluster*>(mem),
                m_r->getClusterer().getClusterCount());
    }

    // Use the same trick to allocate the indices
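    // The barrier below makes sure every thread has finished binning, so
    // ctx.m_indexCount holds its final upper bound before one thread
    // allocates the index buffer. The second barrier further down guarantees
    // both GPU allocations are visible to all threads before they start
    // writing clusters and indices.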
    ANKI_TRACE_STOP_EVENT(RENDER_IR);
    m_barrier.wait();
    ANKI_TRACE_START_EVENT(RENDER_IR);

    who = ctx.m_probeIndicesAllocate.fetchAdd(1);
    if(who == 0)
    {
        // Set it to zero in order to reuse it
        U indexCount = ctx.m_indexCount.exchange(0);
        if(indexCount > 0)
        {
            void* mem = getGrManager().allocateFrameHostVisibleMemory(
                indexCount * sizeof(U32), BufferUsage::STORAGE, m_indicesToken);

            ctx.m_indices = SArray<U32>(static_cast<U32*>(mem), indexCount);
        }
        else
        {
            m_indicesToken.markUnused();
        }
    }

    // Sync
    ANKI_TRACE_STOP_EVENT(RENDER_IR);
    m_barrier.wait();
    ANKI_TRACE_START_EVENT(RENDER_IR);

    ThreadPool::Task::choseStartEnd(threadId,
        threadsCount,
        m_r->getClusterer().getClusterCount(),
        start,
        end);

    for(auto i = start; i < end; i++)
    {
        Bool hasPrevCluster = (i != start);
        writeIndicesAndCluster(i, hasPrevCluster, ctx);
    }

    ANKI_TRACE_STOP_EVENT(RENDER_IR);
}

//==============================================================================
Error Ir::writeProbeAndRender(
    SceneNode& node, IrShaderReflectionProbe& probe, CommandBufferPtr cmdb)
{
    const FrustumComponent& frc = m_r->getActiveFrustumComponent();
    ReflectionProbeComponent& reflc =
        node.getComponent<ReflectionProbeComponent>();

    Bool render = false;
    U entry;
    findCacheEntry(node, entry, render);

    // Write shader var
    probe.m_pos = (frc.getViewMatrix() * reflc.getPosition().xyz1()).xyz();
    probe.m_radiusSq = reflc.getRadius() * reflc.getRadius();
    probe.m_cubemapIndex = entry;

    if(reflc.getMarkedForRendering())
    {
        reflc.setMarkedForRendering(false);
        ANKI_CHECK(renderReflection(node, reflc, entry, cmdb));
    }

    // If the probe needs to be rendered, mark it for the next frame
    if(render)
    {
        reflc.setMarkedForRendering(true);
    }

    return ErrorCode::NONE;
}

//==============================================================================
void Ir::binProbe(U probeIdx, IrRunContext& ctx, IrTaskContext& task) const
{
    const SpatialComponent& sp = task.m_node->getComponent<SpatialComponent>();
    const ReflectionProbeComponent& reflc =
        task.m_node->getComponent<ReflectionProbeComponent>();

    // Perform the expensive tests
    m_r->getClusterer().bin(sp.getSpatialCollisionShape(),
        sp.getAabb(),
        task.m_clustererTestResult);

    // Bin to the correct tiles
    auto it = task.m_clustererTestResult.getClustersBegin();
    auto end = task.m_clustererTestResult.getClustersEnd();
    for(; it != end; ++it)
    {
        U x = (*it)[0];
        U y = (*it)[1];
        U z = (*it)[2];
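
        // Flatten the (x, y, z) cluster coordinates to the linear cluster
        // index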
        U i = m_r->getClusterer().getClusterCountX()
                * (z * m_r->getClusterer().getClusterCountY() + y)
            + x;

        auto& cluster = ctx.m_clusterData[i];

        i = cluster.m_probeCount.fetchAdd(1) % MAX_PROBES_PER_CLUSTER;
        cluster.m_probeIds[i].m_index = probeIdx;
        cluster.m_probeIds[i].m_probeRadius = reflc.getRadius();
    }

    ctx.m_indexCount.fetchAdd(task.m_clustererTestResult.getClusterCount());
}

//==============================================================================
void Ir::writeIndicesAndCluster(
    U clusterIdx, Bool hasPrevCluster, IrRunContext& ctx)
{
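    // Write the final IrShaderCluster and its probe indices. Consecutive
    // clusters that ended up with an identical probe set reuse the previous
    // cluster's index range instead of appending duplicate indices.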
    IrClusterData& cdata = ctx.m_clusterData[clusterIdx];
    IrShaderCluster& cluster = ctx.m_clusters[clusterIdx];

    const U probeCount = cdata.m_probeCount.load() % MAX_PROBES_PER_CLUSTER;
    if(probeCount > 0)
    {
        // Sort to satisfy the probe hierarchy
        cdata.sort();

        // Check if the cdata is the same as the previous cluster's
        if(hasPrevCluster && cdata == ctx.m_clusterData[clusterIdx - 1])
        {
            // Same data
            cluster = ctx.m_clusters[clusterIdx - 1];
        }
        else
        {
            // Have to store the indices
            U idx = ctx.m_indexCount.fetchAdd(probeCount);

            cluster.m_indexOffset = idx;
            cluster.m_probeCount = probeCount;
            for(U j = 0; j < probeCount; ++j)
            {
                ctx.m_indices[idx] = cdata.m_probeIds[j].m_index;
                ++idx;
            }
        }
    }
    else
    {
        cluster.m_indexOffset = 0;
        cluster.m_probeCount = 0;
    }
}

//==============================================================================
Error Ir::renderReflection(SceneNode& node,
    ReflectionProbeComponent& reflc,
    U cubemapIdx,
    CommandBufferPtr cmdb)
{
    ANKI_TRACE_INC_COUNTER(RENDERER_REFLECTIONS, 1);

    // Render cubemap
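    // Each cache entry occupies 6 consecutive layers of the cube array, so
    // face f of entry e lives at layer 6 * e + f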
    for(U i = 0; i < 6; ++i)
    {
        // Render
        ANKI_CHECK(m_nestedR.render(node, i, cmdb));

        // Copy env texture
        cmdb->copyTextureToTexture(m_nestedR.getIs().getRt(),
            0,
            0,
            m_envCubemapArr,
            6 * cubemapIdx + i,
            0);

        // Gen mips of env tex
        cmdb->generateMipmaps(m_envCubemapArr, 6 * cubemapIdx + i);
    }

    // Compute irradiance
    cmdb->setViewport(0, 0, IRRADIANCE_SIZE, IRRADIANCE_SIZE);
    for(U i = 0; i < 6; ++i)
    {
        DynamicBufferInfo dinf;
        UVec4* faceIdxArrayIdx =
            static_cast<UVec4*>(getGrManager().allocateFrameHostVisibleMemory(
                sizeof(UVec4), BufferUsage::UNIFORM, dinf.m_uniformBuffers[0]));
        faceIdxArrayIdx->x() = i;
        faceIdxArrayIdx->y() = cubemapIdx;

        cmdb->bindResourceGroup(m_computeIrradianceResources, 0, &dinf);

        FramebufferInitInfo fbinit;
        fbinit.m_colorAttachmentsCount = 1;
        fbinit.m_colorAttachments[0].m_texture = m_irradianceCubemapArr;
        fbinit.m_colorAttachments[0].m_arrayIndex = cubemapIdx;
        fbinit.m_colorAttachments[0].m_faceIndex = i;
        fbinit.m_colorAttachments[0].m_format = Is::RT_PIXEL_FORMAT;
        fbinit.m_colorAttachments[0].m_loadOperation =
            AttachmentLoadOperation::DONT_CARE;
        FramebufferPtr fb = getGrManager().newInstance<Framebuffer>(fbinit);

        cmdb->bindFramebuffer(fb);
        cmdb->bindPipeline(m_computeIrradiancePpline);

        m_r->drawQuad(cmdb);

        cmdb->generateMipmaps(m_irradianceCubemapArr, 6 * cubemapIdx + i);
    }

    return ErrorCode::NONE;
}

//==============================================================================
void Ir::findCacheEntry(SceneNode& node, U& entry, Bool& render)
{
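    // Cache lookup: reuse the entry already assigned to this node if there is
    // one, otherwise grab an empty entry, otherwise evict the least recently
    // used one. A probe is (re)rendered whenever it gets a new entry.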
    CacheEntry* it = m_cacheEntries.getBegin();
    const CacheEntry* const end = m_cacheEntries.getEnd();

    CacheEntry* candidate = nullptr;
    CacheEntry* empty = nullptr;
    CacheEntry* kick = nullptr;
    Timestamp kickTime = MAX_TIMESTAMP;

    while(it != end)
    {
        if(it->m_node == &node)
        {
            // Already there
            candidate = it;
            break;
        }
        else if(empty == nullptr && it->m_node == nullptr)
        {
            // Found empty
            empty = it;
        }
        else if(it->m_timestamp < kickTime)
        {
            // Found one to kick
            kick = it;
            kickTime = it->m_timestamp;
        }

        ++it;
    }

    if(candidate)
    {
        // Update timestamp
        candidate->m_timestamp = m_r->getFrameCount();
        it = candidate;
        render = false;
    }
    else if(empty)
    {
        ANKI_ASSERT(empty->m_node == nullptr);
        empty->m_node = &node;
        empty->m_timestamp = m_r->getFrameCount();

        it = empty;
        render = true;
    }
    else if(kick)
    {
        kick->m_node = &node;
        kick->m_timestamp = m_r->getFrameCount();

        it = kick;
        render = true;
    }
    else
    {
        ANKI_ASSERT(0);
    }

    entry = it - m_cacheEntries.getBegin();
}

} // end namespace anki