BsRendererView.cpp

//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsRendererView.h"
#include "BsCamera.h"
#include "BsRenderable.h"
#include "BsMaterial.h"
#include "BsShader.h"
#include "BsRendererUtility.h"
#include "BsLightRendering.h"
#include "BsGpuParamsSet.h"
#include "BsRendererScene.h"

namespace bs { namespace ct
{
	PerCameraParamDef gPerCameraParamDef;
	SkyboxParamDef gSkyboxParamDef;
	ShaderVariation SkyboxMat::VAR_Texture = ShaderVariation({
		ShaderVariation::Param("SOLID_COLOR", false)
	});

	ShaderVariation SkyboxMat::VAR_Color = ShaderVariation({
		ShaderVariation::Param("SOLID_COLOR", true)
	});

	SkyboxMat::SkyboxMat()
	{
		SPtr<GpuParams> params = mParamsSet->getGpuParams();

		if(params->hasTexture(GPT_FRAGMENT_PROGRAM, "gSkyTex"))
			params->getTextureParam(GPT_FRAGMENT_PROGRAM, "gSkyTex", mSkyTextureParam);

		mParamBuffer = gSkyboxParamDef.createBuffer();

		if(params->hasParamBlock(GPT_FRAGMENT_PROGRAM, "Params"))
			mParamsSet->setParamBlockBuffer("Params", mParamBuffer, true);
	}

	void SkyboxMat::_initVariations(ShaderVariations& variations)
	{
		variations.add(VAR_Color);
		variations.add(VAR_Texture);
	}

	void SkyboxMat::bind(const SPtr<GpuParamBlockBuffer>& perCamera)
	{
		mParamsSet->setParamBlockBuffer("PerCamera", perCamera, true);

		gRendererUtility().setPass(mMaterial, 0);
	}

	void SkyboxMat::setParams(const SPtr<Texture>& texture, const Color& solidColor)
	{
		mSkyTextureParam.set(texture, TextureSurface(1, 1, 0, 0));
		gSkyboxParamDef.gClearColor.set(mParamBuffer, solidColor);
		mParamBuffer->flushToGPU();

		gRendererUtility().setPassParams(mParamsSet);
	}

	SkyboxMat* SkyboxMat::getVariation(bool color)
	{
		if (color)
			return get(VAR_Color);

		return get(VAR_Texture);
	}
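	// Usage sketch (hypothetical call site, not part of this file): pick the variation based on whether a sky
	// texture is available, then bind the pass and set its parameters before drawing the skybox geometry.
	// The names skyTexture, clearColor and perCameraParamBuffer are illustrative assumptions.
	//
	//     SkyboxMat* skyboxMat = SkyboxMat::getVariation(skyTexture == nullptr); // true selects the solid-color variant
	//     skyboxMat->bind(perCameraParamBuffer);
	//     skyboxMat->setParams(skyTexture, clearColor);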
	RendererViewProperties::RendererViewProperties(const RENDERER_VIEW_DESC& src)
		:RendererViewData(src)
	{
		viewProjTransform = src.projTransform * src.viewTransform;

		target = src.target.target;
		viewRect = src.target.viewRect;
		nrmViewRect = src.target.nrmViewRect;
		numSamples = src.target.numSamples;

		clearFlags = src.target.clearFlags;
		clearColor = src.target.clearColor;
		clearDepthValue = src.target.clearDepthValue;
		clearStencilValue = src.target.clearStencilValue;
	}

	RendererView::RendererView()
		:mRenderSettingsHash(0)
	{
		mParamBuffer = gPerCameraParamDef.createBuffer();
	}

	RendererView::RendererView(const RENDERER_VIEW_DESC& desc)
		: mProperties(desc), mTargetDesc(desc.target), mCamera(desc.sceneCamera), mRenderSettingsHash(0)
	{
		mParamBuffer = gPerCameraParamDef.createBuffer();

		mProperties.prevViewProjTransform = mProperties.viewProjTransform;

		setStateReductionMode(desc.stateReduction);
	}
	void RendererView::setStateReductionMode(StateReduction reductionMode)
	{
		mOpaqueQueue = bs_shared_ptr_new<RenderQueue>(reductionMode);

		StateReduction transparentStateReduction = reductionMode;
		if (transparentStateReduction == StateReduction::Material)
			transparentStateReduction = StateReduction::Distance; // Transparent objects MUST be sorted by distance

		mTransparentQueue = bs_shared_ptr_new<RenderQueue>(transparentStateReduction);
	}
	void RendererView::setRenderSettings(const SPtr<RenderSettings>& settings)
	{
		if (mRenderSettings == nullptr)
			mRenderSettings = bs_shared_ptr_new<RenderSettings>();

		if (settings != nullptr)
			*mRenderSettings = *settings;

		mRenderSettingsHash++;

		// Update compositor hierarchy
		mCompositor.build(*this, RCNodeFinalResolve::getNodeId());
	}

	void RendererView::setTransform(const Vector3& origin, const Vector3& direction, const Matrix4& view,
		const Matrix4& proj, const ConvexVolume& worldFrustum)
	{
		mProperties.viewOrigin = origin;
		mProperties.viewDirection = direction;
		mProperties.viewTransform = view;
		mProperties.projTransform = proj;
		mProperties.cullFrustum = worldFrustum;
		mProperties.viewProjTransform = proj * view;
	}

	void RendererView::setView(const RENDERER_VIEW_DESC& desc)
	{
		mCamera = desc.sceneCamera;
		mProperties = desc;
		mTargetDesc = desc.target;

		setStateReductionMode(desc.stateReduction);
	}

	void RendererView::beginFrame()
	{
	}

	void RendererView::endFrame()
	{
		// Save view-projection matrix to use for temporal filtering
		mProperties.prevViewProjTransform = mProperties.viewProjTransform;

		mOpaqueQueue->clear();
		mTransparentQueue->clear();
	}
	void RendererView::determineVisible(const Vector<RendererObject*>& renderables, const Vector<CullInfo>& cullInfos,
		Vector<bool>* visibility)
	{
		mVisibility.renderables.clear();
		mVisibility.renderables.resize(renderables.size(), false);

		if (mRenderSettings->overlayOnly)
			return;

		calculateVisibility(cullInfos, mVisibility.renderables);

		// Update per-object param buffers and queue render elements
		for(UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
		{
			if (!mVisibility.renderables[i])
				continue;

			const AABox& boundingBox = cullInfos[i].bounds.getBox();
			float distanceToCamera = (mProperties.viewOrigin - boundingBox.getCenter()).length();

			for (auto& renderElem : renderables[i]->elements)
			{
				// Note: I could keep opaque and transparent renderables in two separate arrays, so I don't need to do the
				// check here
				bool isTransparent = (renderElem.material->getShader()->getFlags() & (UINT32)ShaderFlags::Transparent) != 0;

				if (isTransparent)
					mTransparentQueue->add(&renderElem, distanceToCamera);
				else
					mOpaqueQueue->add(&renderElem, distanceToCamera);
			}
		}

		if(visibility != nullptr)
		{
			for (UINT32 i = 0; i < (UINT32)renderables.size(); i++)
			{
				bool visible = (*visibility)[i];
				(*visibility)[i] = visible || mVisibility.renderables[i];
			}
		}

		mOpaqueQueue->sort();
		mTransparentQueue->sort();
	}
	void RendererView::determineVisible(const Vector<RendererLight>& lights, const Vector<Sphere>& bounds,
		LightType lightType, Vector<bool>* visibility)
	{
		// Special case for directional lights, they're always visible
		if(lightType == LightType::Directional)
		{
			if (visibility)
				visibility->assign(lights.size(), true);

			return;
		}

		Vector<bool>* perViewVisibility;
		if(lightType == LightType::Radial)
		{
			mVisibility.radialLights.clear();
			mVisibility.radialLights.resize(lights.size(), false);

			perViewVisibility = &mVisibility.radialLights;
		}
		else // Spot
		{
			mVisibility.spotLights.clear();
			mVisibility.spotLights.resize(lights.size(), false);

			perViewVisibility = &mVisibility.spotLights;
		}

		if (mRenderSettings->overlayOnly)
			return;

		calculateVisibility(bounds, *perViewVisibility);

		if(visibility != nullptr)
		{
			for (UINT32 i = 0; i < (UINT32)lights.size(); i++)
			{
				bool visible = (*visibility)[i];
				(*visibility)[i] = visible || (*perViewVisibility)[i];
			}
		}
	}
	void RendererView::calculateVisibility(const Vector<CullInfo>& cullInfos, Vector<bool>& visibility) const
	{
		UINT64 cameraLayers = mProperties.visibleLayers;
		const ConvexVolume& worldFrustum = mProperties.cullFrustum;

		for (UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
		{
			if ((cullInfos[i].layer & cameraLayers) == 0)
				continue;

			// Do frustum culling
			// Note: This is bound to be a bottleneck at some point. When it is, ensure that intersect methods use vector
			// operations, as it is trivial to update them. Also consider spatial partitioning.
			const Sphere& boundingSphere = cullInfos[i].bounds.getSphere();
			if (worldFrustum.intersects(boundingSphere))
			{
				// More precise with the box
				const AABox& boundingBox = cullInfos[i].bounds.getBox();

				if (worldFrustum.intersects(boundingBox))
					visibility[i] = true;
			}
		}
	}

	void RendererView::calculateVisibility(const Vector<Sphere>& bounds, Vector<bool>& visibility) const
	{
		const ConvexVolume& worldFrustum = mProperties.cullFrustum;

		for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
		{
			if (worldFrustum.intersects(bounds[i]))
				visibility[i] = true;
		}
	}

	void RendererView::calculateVisibility(const Vector<AABox>& bounds, Vector<bool>& visibility) const
	{
		const ConvexVolume& worldFrustum = mProperties.cullFrustum;

		for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
		{
			if (worldFrustum.intersects(bounds[i]))
				visibility[i] = true;
		}
	}
	Vector2 RendererView::getDeviceZToViewZ(const Matrix4& projMatrix)
	{
		// Returns a set of values that will transform depth buffer values (in range [0, 1]) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is the [2, 2], B the [2, 3] and C the [3, 2] element of the projection matrix (the only ones that matter for
		// our depth value). The hardware will also automatically divide the z value with w to get the depth, therefore
		// the final formula is:
		// depth = (Az + B) / (C * z)
		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)
		// However some APIs will also do a transformation on the depth values before storing them to the texture
		// (e.g. OpenGL will transform from [-1, 1] to [0, 1]). We need to reverse that as well. Therefore the final
		// formula is:
		// z = B / ((depth * (maxDepth - minDepth) + minDepth) * C - A)
		// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth + minDepth/(maxDepth - minDepth) - A/((maxDepth - minDepth) * C)) * B/((maxDepth - minDepth) * C)
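		// Shader-side usage implied by the format above (sketch; gDeviceZToWorldZ is the per-camera parameter this
		// value is written to in updatePerViewBuffer()):
		//     float viewZ = (1.0f / (deviceZ + gDeviceZToWorldZ.y)) * gDeviceZToWorldZ.x;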
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		float depthRange = rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue();
		float minDepth = rapiInfo.getMinimumDepthInputValue();

		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;

		if (c != 0.0f)
		{
			output.x = b / (depthRange * c);
			output.y = minDepth / depthRange - a / (depthRange * c);
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			output.x = b / -depthRange;
			output.y = minDepth / depthRange - a / -depthRange;
		}

		return output;
	}
	Vector2 RendererView::getNDCZToViewZ(const Matrix4& projMatrix)
	{
		// Returns a set of values that will transform depth buffer values (e.g. [0, 1] in DX, [-1, 1] in GL) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is the [2, 2], B the [2, 3] and C the [3, 2] element of the projection matrix (the only ones that matter for
		// our depth value). The hardware will also automatically divide the z value with w to get the depth, therefore
		// the final formula is:
		// depth = (Az + B) / (C * z)
		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)
		// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth - A/C) * B/C
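		// Shader-side usage implied by the format above (sketch; gNDCZToWorldZ is the per-camera parameter this
		// value is written to in updatePerViewBuffer()):
		//     float viewZ = (1.0f / (ndcZ + gNDCZToWorldZ.y)) * gNDCZToWorldZ.x;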
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;

		if (c != 0.0f)
		{
			output.x = b / c;
			output.y = -a / c;
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			output.x = -b;
			output.y = a;
		}

		return output;
	}
	Vector2 RendererView::getNDCZToDeviceZ()
	{
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();
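		// The returned pair maps NDC depth to device (depth buffer) depth as "(ndcZ + y) * x": for example, with an
		// OpenGL-style [-1, 1] NDC range this yields (ndcZ + 1) * 0.5, while for a [0, 1] range it is an identity
		// mapping. The exact shader-side expression is an assumption derived from the values computed below.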
		Vector2 ndcZToDeviceZ;
		ndcZToDeviceZ.x = 1.0f / (rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue());
		ndcZToDeviceZ.y = -rapiInfo.getMinimumDepthInputValue();

		return ndcZToDeviceZ;
	}
	void RendererView::updatePerViewBuffer()
	{
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
		Matrix4 invViewProj = viewProj.inverse();

		gPerCameraParamDef.gMatProj.set(mParamBuffer, mProperties.projTransform);
		gPerCameraParamDef.gMatView.set(mParamBuffer, mProperties.viewTransform);
		gPerCameraParamDef.gMatViewProj.set(mParamBuffer, viewProj);
		gPerCameraParamDef.gMatInvViewProj.set(mParamBuffer, invViewProj); // Note: Calculate inverses separately (better precision possibly)
		gPerCameraParamDef.gMatInvProj.set(mParamBuffer, mProperties.projTransform.inverse());

		// Construct a special inverse view-projection matrix that has the projection entries that affect z and w
		// eliminated. Used to transform a vector (clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in clip space
		// and view_z/view_w are in view space, into world space.

		// Only projects z/w coordinates (cancels out with the inverse matrix below)
		Matrix4 projZ = Matrix4::IDENTITY;
		projZ[2][2] = mProperties.projTransform[2][2];
		projZ[2][3] = mProperties.projTransform[2][3];
		projZ[3][2] = mProperties.projTransform[3][2];
		projZ[3][3] = 0.0f;
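		// For reference (derived from the entries above): projZ * (clip_x, clip_y, view_z, view_w) =
		// (clip_x, clip_y, A * view_z + B * view_w, C * view_z), with A/B/C the same projection matrix elements
		// described in getDeviceZToViewZ(). Multiplying by invViewProj then takes that clip-space position into
		// world space.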
		gPerCameraParamDef.gMatScreenToWorld.set(mParamBuffer, invViewProj * projZ);
		gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, mProperties.prevViewProjTransform * invViewProj);
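		// Note: gNDCToPrevNDC maps a current-frame NDC position into the previous frame's NDC space (unproject with
		// invViewProj, then re-project with prevViewProjTransform, the matrix saved in endFrame() for temporal
		// filtering).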
		gPerCameraParamDef.gViewDir.set(mParamBuffer, mProperties.viewDirection);
		gPerCameraParamDef.gViewOrigin.set(mParamBuffer, mProperties.viewOrigin);
		gPerCameraParamDef.gDeviceZToWorldZ.set(mParamBuffer, getDeviceZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToWorldZ.set(mParamBuffer, getNDCZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToDeviceZ.set(mParamBuffer, getNDCZToDeviceZ());

		Vector2 nearFar(mProperties.nearPlane, mProperties.farPlane);
		gPerCameraParamDef.gNearFar.set(mParamBuffer, nearFar);

		const Rect2I& viewRect = mTargetDesc.viewRect;

		Vector4I viewportRect;
		viewportRect[0] = viewRect.x;
		viewportRect[1] = viewRect.y;
		viewportRect[2] = viewRect.width;
		viewportRect[3] = viewRect.height;

		gPerCameraParamDef.gViewportRectangle.set(mParamBuffer, viewportRect);

		float halfWidth = viewRect.width * 0.5f;
		float halfHeight = viewRect.height * 0.5f;

		float rtWidth = mTargetDesc.targetWidth != 0 ? (float)mTargetDesc.targetWidth : 20.0f;
		float rtHeight = mTargetDesc.targetHeight != 0 ? (float)mTargetDesc.targetHeight : 20.0f;

		Vector4 clipToUVScaleOffset;
		clipToUVScaleOffset.x = halfWidth / rtWidth;
		clipToUVScaleOffset.y = -halfHeight / rtHeight;
		clipToUVScaleOffset.z = viewRect.x / rtWidth + (halfWidth + rapiInfo.getHorizontalTexelOffset()) / rtWidth;
		clipToUVScaleOffset.w = viewRect.y / rtHeight + (halfHeight + rapiInfo.getVerticalTexelOffset()) / rtHeight;

		// Either of these flips the Y axis, but if they're both true they cancel out
		if (rapiInfo.isFlagSet(RenderAPIFeatureFlag::UVYAxisUp) ^ rapiInfo.isFlagSet(RenderAPIFeatureFlag::NDCYAxisDown))
			clipToUVScaleOffset.y = -clipToUVScaleOffset.y;

		gPerCameraParamDef.gClipToUVScaleOffset.set(mParamBuffer, clipToUVScaleOffset);
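		// Assumed shader-side usage of the scale/offset pair (sketch, inferred from the parameter name and the
		// half-size math above; the actual shader code is outside this file):
		//     float2 uv = clipPos.xy * gClipToUVScaleOffset.xy + gClipToUVScaleOffset.zw;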
		if (!mRenderSettings->enableLighting)
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 100.0f);
		else
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 0.0f);
	}
	void RendererView::updateLightGrid(const VisibleLightData& visibleLightData,
		const VisibleReflProbeData& visibleReflProbeData)
	{
		mLightGrid.updateGrid(*this, visibleLightData, visibleReflProbeData, !mRenderSettings->enableLighting);
	}

	RendererViewGroup::RendererViewGroup()
		:mShadowRenderer(1024)
	{ }

	RendererViewGroup::RendererViewGroup(RendererView** views, UINT32 numViews, UINT32 shadowMapSize)
		:mShadowRenderer(shadowMapSize)
	{
		setViews(views, numViews);
	}

	void RendererViewGroup::setViews(RendererView** views, UINT32 numViews)
	{
		mViews.clear();

		for (UINT32 i = 0; i < numViews; i++)
			mViews.push_back(views[i]);
	}
	void RendererViewGroup::determineVisibility(const SceneInfo& sceneInfo)
	{
		UINT32 numViews = (UINT32)mViews.size();

		// Early exit if no views render scene geometry
		bool allViewsOverlay = true;
		for (UINT32 i = 0; i < numViews; i++)
		{
			if (!mViews[i]->getRenderSettings().overlayOnly)
			{
				allViewsOverlay = false;
				break;
			}
		}

		if (allViewsOverlay)
			return;

		// Generate render queues per camera
		mVisibility.renderables.resize(sceneInfo.renderables.size(), false);
		mVisibility.renderables.assign(sceneInfo.renderables.size(), false);

		for(UINT32 i = 0; i < numViews; i++)
			mViews[i]->determineVisible(sceneInfo.renderables, sceneInfo.renderableCullInfos, &mVisibility.renderables);

		// Calculate light visibility for all views
		UINT32 numRadialLights = (UINT32)sceneInfo.radialLights.size();
		mVisibility.radialLights.resize(numRadialLights, false);
		mVisibility.radialLights.assign(numRadialLights, false);

		UINT32 numSpotLights = (UINT32)sceneInfo.spotLights.size();
		mVisibility.spotLights.resize(numSpotLights, false);
		mVisibility.spotLights.assign(numSpotLights, false);

		for (UINT32 i = 0; i < numViews; i++)
		{
			if (mViews[i]->getRenderSettings().overlayOnly)
				continue;

			mViews[i]->determineVisible(sceneInfo.radialLights, sceneInfo.radialLightWorldBounds, LightType::Radial,
				&mVisibility.radialLights);

			mViews[i]->determineVisible(sceneInfo.spotLights, sceneInfo.spotLightWorldBounds, LightType::Spot,
				&mVisibility.spotLights);
		}

		// Calculate refl. probe visibility for all views
		UINT32 numProbes = (UINT32)sceneInfo.reflProbes.size();
		mVisibility.reflProbes.resize(numProbes, false);
		mVisibility.reflProbes.assign(numProbes, false);

		// Note: Per-view visibility for refl. probes currently isn't calculated
		for (UINT32 i = 0; i < numViews; i++)
		{
			const auto& viewProps = mViews[i]->getProperties();

			// Don't recursively render reflection probes when generating reflection probe maps
			if (viewProps.renderingReflections)
				continue;

			mViews[i]->calculateVisibility(sceneInfo.reflProbeWorldBounds, mVisibility.reflProbes);
		}

		// Organize light and refl. probe visibility information in a more GPU friendly manner
		// Note: I'm determining light and refl. probe visibility for the entire group. It might be more performance
		// efficient to do it per view. Additionally I'm using a single GPU buffer to hold their information, which is
		// then updated when each view group is rendered. It might be better to keep one buffer reserved per-view.
		mVisibleLightData.update(sceneInfo, *this);
		mVisibleReflProbeData.update(sceneInfo, *this);

		for (UINT32 i = 0; i < numViews; i++)
		{
			if (mViews[i]->getRenderSettings().overlayOnly)
				continue;

			mViews[i]->updateLightGrid(mVisibleLightData, mVisibleReflProbeData);
		}
	}
}}