BsRendererView.cpp
//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsRendererView.h"
#include "Renderer/BsCamera.h"
#include "Renderer/BsRenderable.h"
#include "Material/BsMaterial.h"
#include "Material/BsShader.h"
#include "Renderer/BsRendererUtility.h"
#include "BsLightRendering.h"
#include "Material/BsGpuParamsSet.h"
#include "BsRendererScene.h"
#include "BsRenderBeast.h"

namespace bs { namespace ct
{
	PerCameraParamDef gPerCameraParamDef;
	SkyboxParamDef gSkyboxParamDef;

	SkyboxMat::SkyboxMat()
	{
		if(mParams->hasTexture(GPT_FRAGMENT_PROGRAM, "gSkyTex"))
			mParams->getTextureParam(GPT_FRAGMENT_PROGRAM, "gSkyTex", mSkyTextureParam);

		mParamBuffer = gSkyboxParamDef.createBuffer();

		if(mParams->hasParamBlock(GPT_FRAGMENT_PROGRAM, "Params"))
			mParams->setParamBlockBuffer("Params", mParamBuffer);
	}

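	// Binds the skybox material for rendering: sets the per-camera parameter block, the sky texture (when the
	// variation samples one) and the solid color that the texture-less variation clears to.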
	void SkyboxMat::bind(const SPtr<GpuParamBlockBuffer>& perCamera, const SPtr<Texture>& texture, const Color& solidColor)
	{
		mParams->setParamBlockBuffer("PerCamera", perCamera);

		mSkyTextureParam.set(texture);

		gSkyboxParamDef.gClearColor.set(mParamBuffer, solidColor);
		mParamBuffer->flushToGPU();

		RendererMaterial::bind();
	}

	SkyboxMat* SkyboxMat::getVariation(bool color)
	{
		if (color)
			return get(getVariation<true>());

		return get(getVariation<false>());
	}

	RendererViewData::RendererViewData()
		:encodeDepth(false), depthEncodeNear(0.0f), depthEncodeFar(0.0f)
	{
	}

	RendererViewProperties::RendererViewProperties(const RENDERER_VIEW_DESC& src)
		:RendererViewData(src), frameIdx(0)
	{
		viewProjTransform = src.projTransform * src.viewTransform;

		target = src.target.target;
		viewRect = src.target.viewRect;
		nrmViewRect = src.target.nrmViewRect;
		numSamples = src.target.numSamples;

		clearFlags = src.target.clearFlags;
		clearColor = src.target.clearColor;
		clearDepthValue = src.target.clearDepthValue;
		clearStencilValue = src.target.clearStencilValue;
	}

	RendererView::RendererView()
		: mCamera(nullptr), mRenderSettingsHash(0), mViewIdx(-1)
	{
		mParamBuffer = gPerCameraParamDef.createBuffer();
	}

	RendererView::RendererView(const RENDERER_VIEW_DESC& desc)
		: mProperties(desc), mTargetDesc(desc.target), mCamera(desc.sceneCamera), mRenderSettingsHash(0), mViewIdx(-1)
	{
		mParamBuffer = gPerCameraParamDef.createBuffer();
		mProperties.prevViewProjTransform = mProperties.viewProjTransform;

		setStateReductionMode(desc.stateReduction);
	}

	void RendererView::setStateReductionMode(StateReduction reductionMode)
	{
		mOpaqueQueue = bs_shared_ptr_new<RenderQueue>(reductionMode);

		StateReduction transparentStateReduction = reductionMode;
		if (transparentStateReduction == StateReduction::Material)
			transparentStateReduction = StateReduction::Distance; // Transparent objects MUST be sorted by distance

		mTransparentQueue = bs_shared_ptr_new<RenderQueue>(transparentStateReduction);
	}

	void RendererView::setRenderSettings(const SPtr<RenderSettings>& settings)
	{
		if (mRenderSettings == nullptr)
			mRenderSettings = bs_shared_ptr_new<RenderSettings>();

		if (settings != nullptr)
			*mRenderSettings = *settings;

		mRenderSettingsHash++;

		// Update compositor hierarchy (Note: This needs to be called even when only the viewport size or other target
		// information changes, but we're currently calling it here because all such calls are followed by
		// setRenderSettings.)
		mCompositor.build(*this, RCNodeFinalResolve::getNodeId());
	}

	void RendererView::setTransform(const Vector3& origin, const Vector3& direction, const Matrix4& view,
		const Matrix4& proj, const ConvexVolume& worldFrustum)
	{
		mProperties.viewOrigin = origin;
		mProperties.viewDirection = direction;
		mProperties.viewTransform = view;
		mProperties.projTransform = proj;
		mProperties.cullFrustum = worldFrustum;
		mProperties.viewProjTransform = proj * view;
	}

	void RendererView::setView(const RENDERER_VIEW_DESC& desc)
	{
		mCamera = desc.sceneCamera;
		mProperties = desc;
		mProperties.viewProjTransform = desc.projTransform * desc.viewTransform;
		mProperties.prevViewProjTransform = Matrix4::IDENTITY;
		mTargetDesc = desc.target;

		setStateReductionMode(desc.stateReduction);
	}

	void RendererView::beginFrame()
	{
		// Note: Inverse view-projection can be cached, it doesn't change every frame
		Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
		Matrix4 invViewProj = viewProj.inverse();
		Matrix4 NDCToPrevNDC = mProperties.prevViewProjTransform * invViewProj;

		gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, NDCToPrevNDC);
	}

	void RendererView::endFrame()
	{
		// Save view-projection matrix to use for temporal filtering
		mProperties.prevViewProjTransform = mProperties.viewProjTransform;

		// Advance per-view frame index. This is used primarily by temporal rendering effects, and pausing the frame index
		// allows you to freeze the current rendering as is, without temporal artifacts.
		mProperties.frameIdx++;

		mOpaqueQueue->clear();
		mTransparentQueue->clear();
	}

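	// Determines which of the provided renderables are visible from this view: frustum-culls them, computes their
	// distance from the view origin (used for sorting) and adds the visible render elements to the opaque or
	// transparent queue. The per-view results are also merged into the optional output visibility array.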
	void RendererView::determineVisible(const Vector<RendererObject*>& renderables, const Vector<CullInfo>& cullInfos,
		Vector<bool>* visibility)
	{
		mVisibility.renderables.clear();
		mVisibility.renderables.resize(renderables.size(), false);

		if (mRenderSettings->overlayOnly)
			return;

		calculateVisibility(cullInfos, mVisibility.renderables);

		// Update per-object param buffers and queue render elements
		for(UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
		{
			if (!mVisibility.renderables[i])
				continue;

			const AABox& boundingBox = cullInfos[i].bounds.getBox();
			float distanceToCamera = (mProperties.viewOrigin - boundingBox.getCenter()).length();

			for (auto& renderElem : renderables[i]->elements)
			{
				// Note: I could keep opaque and transparent renderables in two separate arrays, so I don't need to do the
				// check here
				bool isTransparent = (renderElem.material->getShader()->getFlags() & (UINT32)ShaderFlags::Transparent) != 0;

				if (isTransparent)
					mTransparentQueue->add(&renderElem, distanceToCamera);
				else
					mOpaqueQueue->add(&renderElem, distanceToCamera);
			}
		}

		if(visibility != nullptr)
		{
			for (UINT32 i = 0; i < (UINT32)renderables.size(); i++)
			{
				bool visible = (*visibility)[i];

				(*visibility)[i] = visible || mVisibility.renderables[i];
			}
		}

		mOpaqueQueue->sort();
		mTransparentQueue->sort();
	}

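	// Determines which of the provided lights are visible from this view. Directional lights are always considered
	// visible, while radial and spot lights are frustum-culled against their bounding spheres. The per-view results
	// are merged into the optional output visibility array.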
	void RendererView::determineVisible(const Vector<RendererLight>& lights, const Vector<Sphere>& bounds,
		LightType lightType, Vector<bool>* visibility)
	{
		// Special case for directional lights, they're always visible
		if(lightType == LightType::Directional)
		{
			if (visibility)
				visibility->assign(lights.size(), true);

			return;
		}

		Vector<bool>* perViewVisibility;
		if(lightType == LightType::Radial)
		{
			mVisibility.radialLights.clear();
			mVisibility.radialLights.resize(lights.size(), false);

			perViewVisibility = &mVisibility.radialLights;
		}
		else // Spot
		{
			mVisibility.spotLights.clear();
			mVisibility.spotLights.resize(lights.size(), false);

			perViewVisibility = &mVisibility.spotLights;
		}

		if (mRenderSettings->overlayOnly)
			return;

		calculateVisibility(bounds, *perViewVisibility);

		if(visibility != nullptr)
		{
			for (UINT32 i = 0; i < (UINT32)lights.size(); i++)
			{
				bool visible = (*visibility)[i];

				(*visibility)[i] = visible || (*perViewVisibility)[i];
			}
		}
	}

	void RendererView::calculateVisibility(const Vector<CullInfo>& cullInfos, Vector<bool>& visibility) const
	{
		UINT64 cameraLayers = mProperties.visibleLayers;
		const ConvexVolume& worldFrustum = mProperties.cullFrustum;

		for (UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
		{
			if ((cullInfos[i].layer & cameraLayers) == 0)
				continue;

			// Do frustum culling
			// Note: This is bound to be a bottleneck at some point. When it is, ensure that intersect methods use vector
			// operations, as it is trivial to update them. Also consider spatial partitioning.
			const Sphere& boundingSphere = cullInfos[i].bounds.getSphere();
			if (worldFrustum.intersects(boundingSphere))
			{
				// More precise check with the box
				const AABox& boundingBox = cullInfos[i].bounds.getBox();
				if (worldFrustum.intersects(boundingBox))
					visibility[i] = true;
			}
		}
	}

	void RendererView::calculateVisibility(const Vector<Sphere>& bounds, Vector<bool>& visibility) const
	{
		const ConvexVolume& worldFrustum = mProperties.cullFrustum;

		for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
		{
			if (worldFrustum.intersects(bounds[i]))
				visibility[i] = true;
		}
	}

	void RendererView::calculateVisibility(const Vector<AABox>& bounds, Vector<bool>& visibility) const
	{
		const ConvexVolume& worldFrustum = mProperties.cullFrustum;

		for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
		{
			if (worldFrustum.intersects(bounds[i]))
				visibility[i] = true;
		}
	}

	Vector2 RendererView::getDeviceZToViewZ(const Matrix4& projMatrix)
	{
		// Returns a set of values that will transform depth buffer values (in range [0, 1]) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is the [2, 2], B the [2, 3] and C the [3, 2] element of the projection matrix (the only ones that matter for
		// our depth value). The hardware will also automatically divide the z value by w to get the depth, therefore the
		// final formula is:
		// depth = (Az + B) / (C * z)

		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)

		// However some APIs will also do a transformation on the depth values before storing them to the texture
		// (e.g. OpenGL will transform from [-1, 1] to [0, 1]). We need to reverse that as well, therefore the final
		// formula is:
		// z = B / ((depth * (maxDepth - minDepth) + minDepth) * C - A)

		// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth + minDepth/(maxDepth - minDepth) - A/((maxDepth - minDepth) * C)) * B/((maxDepth - minDepth) * C)
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		float depthRange = rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue();
		float minDepth = rapiInfo.getMinimumDepthInputValue();

		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;
		if (c != 0.0f)
		{
			output.x = b / (depthRange * c);
			output.y = minDepth / depthRange - a / (depthRange * c);
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			output.x = b / -depthRange;
			output.y = minDepth / depthRange - a / -depthRange;
		}

		return output;
	}

	Vector2 RendererView::getNDCZToViewZ(const Matrix4& projMatrix)
	{
		// Returns a set of values that will transform depth buffer values (e.g. [0, 1] in DX, [-1, 1] in GL) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is the [2, 2], B the [2, 3] and C the [3, 2] element of the projection matrix (the only ones that matter for
		// our depth value). The hardware will also automatically divide the z value by w to get the depth, therefore the
		// final formula is:
		// depth = (Az + B) / (C * z)

		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)

		// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth - A/C) * B/C
		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;
		if (c != 0.0f)
		{
			output.x = b / c;
			output.y = -a / c;
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			output.x = -b;
			output.y = a;
		}

		return output;
	}

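	// Returns a scale (x) and offset (y) that map an NDC z value into the [0, 1] device depth range, as in
	// deviceZ = (ndcZ + y) * x. For APIs whose NDC depth range is already [0, 1] this yields (1, 0).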
	Vector2 RendererView::getNDCZToDeviceZ()
	{
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		Vector2 ndcZToDeviceZ;
		ndcZToDeviceZ.x = 1.0f / (rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue());
		ndcZToDeviceZ.y = -rapiInfo.getMinimumDepthInputValue();

		return ndcZToDeviceZ;
	}

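	// Inverts a projection matrix. When the matrix has the common sparse form checked below, i.e.
	//
	//   [ a  0  s  0 ]
	//   [ 0  b  t  0 ]
	//   [ 0  0  c  d ]
	//   [ 0  0 -1  0 ]
	//
	// the inverse is built analytically from calculations done in double precision (to reduce depth error); otherwise
	// the general-purpose inverse is used.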
	Matrix4 invertProjectionMatrix(const Matrix4& mat)
	{
		// Try to solve the most common case using high precision calculations, in order to reduce depth error
		if(mat[0][1] == 0.0f && mat[0][3] == 0.0f &&
			mat[1][0] == 0.0f && mat[1][3] == 0.0f &&
			mat[2][0] == 0.0f && mat[2][1] == 0.0f &&
			mat[3][0] == 0.0f && mat[3][1] == 0.0f &&
			mat[3][2] == -1.0f && mat[3][3] == 0.0f)
		{
			double a = mat[0][0];
			double b = mat[1][1];
			double c = mat[2][2];
			double d = mat[2][3];
			double s = mat[0][2];
			double t = mat[1][2];

			return Matrix4(
				(float)(1.0/a), 0.0f, 0.0f, (float)(-s/a),
				0.0f, (float)(1.0/b), 0.0f, (float)(-t/b),
				0.0f, 0.0f, 0.0f, -1.0f,
				0.0f, 0.0f, (float)(1.0/d), (float)(c/d)
			);
		}
		else
		{
			return mat.inverse();
		}
	}

	void RendererView::updatePerViewBuffer()
	{
		Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
		Matrix4 invProj = invertProjectionMatrix(mProperties.projTransform);
		Matrix4 invView = mProperties.viewTransform.inverseAffine();
		Matrix4 invViewProj = invView * invProj;

		gPerCameraParamDef.gMatProj.set(mParamBuffer, mProperties.projTransform);
		gPerCameraParamDef.gMatView.set(mParamBuffer, mProperties.viewTransform);
		gPerCameraParamDef.gMatViewProj.set(mParamBuffer, viewProj);
		gPerCameraParamDef.gMatInvViewProj.set(mParamBuffer, invViewProj);
		gPerCameraParamDef.gMatInvProj.set(mParamBuffer, invProj);

		// Construct a special inverse view-projection matrix that has the projection entries that affect z and w
		// eliminated. Used to transform a vector (clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in clip space,
		// and view_z/view_w in view space, into world space.

		// Only projects z/w coordinates (cancels out with the inverse matrix below)
		Matrix4 projZ = Matrix4::IDENTITY;
		projZ[2][2] = mProperties.projTransform[2][2];
		projZ[2][3] = mProperties.projTransform[2][3];
		projZ[3][2] = mProperties.projTransform[3][2];
		projZ[3][3] = 0.0f;

		Matrix4 NDCToPrevNDC = mProperties.prevViewProjTransform * invViewProj;

		gPerCameraParamDef.gMatScreenToWorld.set(mParamBuffer, invViewProj * projZ);
		gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, NDCToPrevNDC);
		gPerCameraParamDef.gViewDir.set(mParamBuffer, mProperties.viewDirection);
		gPerCameraParamDef.gViewOrigin.set(mParamBuffer, mProperties.viewOrigin);
		gPerCameraParamDef.gDeviceZToWorldZ.set(mParamBuffer, getDeviceZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToWorldZ.set(mParamBuffer, getNDCZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToDeviceZ.set(mParamBuffer, getNDCZToDeviceZ());

		Vector2 nearFar(mProperties.nearPlane, mProperties.farPlane);
		gPerCameraParamDef.gNearFar.set(mParamBuffer, nearFar);

		const Rect2I& viewRect = mTargetDesc.viewRect;

		Vector4I viewportRect;
		viewportRect[0] = viewRect.x;
		viewportRect[1] = viewRect.y;
		viewportRect[2] = viewRect.width;
		viewportRect[3] = viewRect.height;

		gPerCameraParamDef.gViewportRectangle.set(mParamBuffer, viewportRect);

		Vector4 ndcToUV = getNDCToUV();
		gPerCameraParamDef.gClipToUVScaleOffset.set(mParamBuffer, ndcToUV);

		if (!mRenderSettings->enableLighting)
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 100.0f);
		else
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 0.0f);
	}

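	// Returns a scale (x, y) and offset (z, w) that map NDC x/y coordinates into UV coordinates within the render
	// target, i.e. uv = ndc.xy * (x, y) + (z, w). Accounts for the viewport rectangle, the texel offset reported by
	// the render API and the direction of the UV/NDC Y axes.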
	Vector4 RendererView::getNDCToUV() const
	{
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		const Rect2I& viewRect = mTargetDesc.viewRect;
		float halfWidth = viewRect.width * 0.5f;
		float halfHeight = viewRect.height * 0.5f;

		float rtWidth = mTargetDesc.targetWidth != 0 ? (float)mTargetDesc.targetWidth : 20.0f;
		float rtHeight = mTargetDesc.targetHeight != 0 ? (float)mTargetDesc.targetHeight : 20.0f;

		Vector4 ndcToUV;
		ndcToUV.x = halfWidth / rtWidth;
		ndcToUV.y = -halfHeight / rtHeight;
		ndcToUV.z = viewRect.x / rtWidth + (halfWidth + rapiInfo.getHorizontalTexelOffset()) / rtWidth;
		ndcToUV.w = viewRect.y / rtHeight + (halfHeight + rapiInfo.getVerticalTexelOffset()) / rtHeight;

		// Either of these flips the Y axis, but if they're both true they cancel out
		if (rapiInfo.isFlagSet(RenderAPIFeatureFlag::UVYAxisUp) ^ rapiInfo.isFlagSet(RenderAPIFeatureFlag::NDCYAxisDown))
			ndcToUV.y = -ndcToUV.y;

		return ndcToUV;
	}

	void RendererView::updateLightGrid(const VisibleLightData& visibleLightData,
		const VisibleReflProbeData& visibleReflProbeData)
	{
		mLightGrid.updateGrid(*this, visibleLightData, visibleReflProbeData, !mRenderSettings->enableLighting);
	}

	RendererViewGroup::RendererViewGroup()
		:mShadowRenderer(2048)
	{ }

	RendererViewGroup::RendererViewGroup(RendererView** views, UINT32 numViews, UINT32 shadowMapSize)
		:mShadowRenderer(shadowMapSize)
	{
		setViews(views, numViews);
	}

	void RendererViewGroup::setViews(RendererView** views, UINT32 numViews)
	{
		mViews.clear();

		for (UINT32 i = 0; i < numViews; i++)
		{
			mViews.push_back(views[i]);
			views[i]->_setViewIdx(i);
		}
	}

	void RendererViewGroup::determineVisibility(const SceneInfo& sceneInfo)
	{
		UINT32 numViews = (UINT32)mViews.size();

		// Early exit if no views render scene geometry
		bool allViewsOverlay = true;
		for (UINT32 i = 0; i < numViews; i++)
		{
			if (!mViews[i]->getRenderSettings().overlayOnly)
			{
				allViewsOverlay = false;
				break;
			}
		}

		if (allViewsOverlay)
			return;

		// Generate render queues per camera
		mVisibility.renderables.resize(sceneInfo.renderables.size(), false);
		mVisibility.renderables.assign(sceneInfo.renderables.size(), false);

		for(UINT32 i = 0; i < numViews; i++)
			mViews[i]->determineVisible(sceneInfo.renderables, sceneInfo.renderableCullInfos, &mVisibility.renderables);

		// Calculate light visibility for all views
		UINT32 numRadialLights = (UINT32)sceneInfo.radialLights.size();
		mVisibility.radialLights.resize(numRadialLights, false);
		mVisibility.radialLights.assign(numRadialLights, false);

		UINT32 numSpotLights = (UINT32)sceneInfo.spotLights.size();
		mVisibility.spotLights.resize(numSpotLights, false);
		mVisibility.spotLights.assign(numSpotLights, false);

		for (UINT32 i = 0; i < numViews; i++)
		{
			if (mViews[i]->getRenderSettings().overlayOnly)
				continue;

			mViews[i]->determineVisible(sceneInfo.radialLights, sceneInfo.radialLightWorldBounds, LightType::Radial,
				&mVisibility.radialLights);

			mViews[i]->determineVisible(sceneInfo.spotLights, sceneInfo.spotLightWorldBounds, LightType::Spot,
				&mVisibility.spotLights);
		}

		// Calculate refl. probe visibility for all views
		UINT32 numProbes = (UINT32)sceneInfo.reflProbes.size();
		mVisibility.reflProbes.resize(numProbes, false);
		mVisibility.reflProbes.assign(numProbes, false);

		// Note: Per-view visibility for refl. probes currently isn't calculated
		for (UINT32 i = 0; i < numViews; i++)
		{
			const auto& viewProps = mViews[i]->getProperties();

			// Don't recursively render reflection probes when generating reflection probe maps
			if (viewProps.capturingReflections)
				continue;

			mViews[i]->calculateVisibility(sceneInfo.reflProbeWorldBounds, mVisibility.reflProbes);
		}

		// Organize light and refl. probe visibility information in a more GPU friendly manner
		// Note: I'm determining light and refl. probe visibility for the entire group. It might be more performance
		// efficient to do it per view. Additionally I'm using a single GPU buffer to hold their information, which is
		// then updated when each view group is rendered. It might be better to keep one buffer reserved per-view.
		mVisibleLightData.update(sceneInfo, *this);
		mVisibleReflProbeData.update(sceneInfo, *this);

		bool supportsClusteredForward = gRenderBeast()->getFeatureSet() == RenderBeastFeatureSet::Desktop;
		if(supportsClusteredForward)
		{
			for (UINT32 i = 0; i < numViews; i++)
			{
				if (mViews[i]->getRenderSettings().overlayOnly)
					continue;

				mViews[i]->updateLightGrid(mVisibleLightData, mVisibleReflProbeData);
			}
		}
	}
}}