// BsRendererView.cpp
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsRendererView.h"
  4. #include "Renderer/BsCamera.h"
  5. #include "Renderer/BsRenderable.h"
  6. #include "Material/BsMaterial.h"
  7. #include "Material/BsShader.h"
  8. #include "Renderer/BsRendererUtility.h"
  9. #include "BsLightRendering.h"
  10. #include "Material/BsGpuParamsSet.h"
  11. #include "BsRendererScene.h"
  12. namespace bs { namespace ct
  13. {
// Parameter block layout definitions shared by the views/materials in this file.
PerCameraParamDef gPerCameraParamDef;
SkyboxParamDef gSkyboxParamDef;

// Skybox variation that samples a sky texture (SOLID_COLOR disabled).
ShaderVariation SkyboxMat::VAR_Texture = ShaderVariation({
	ShaderVariation::Param("SOLID_COLOR", false)
});

// Skybox variation that outputs a single solid color (SOLID_COLOR enabled).
ShaderVariation SkyboxMat::VAR_Color = ShaderVariation({
	ShaderVariation::Param("SOLID_COLOR", true)
});
SkyboxMat::SkyboxMat()
{
	SPtr<GpuParams> params = mParamsSet->getGpuParams();

	// The sky texture parameter may not exist in every variation (e.g. the solid-color one),
	// so look it up defensively before grabbing a handle to it.
	if(params->hasTexture(GPT_FRAGMENT_PROGRAM, "gSkyTex"))
		params->getTextureParam(GPT_FRAGMENT_PROGRAM, "gSkyTex", mSkyTextureParam);

	mParamBuffer = gSkyboxParamDef.createBuffer();

	// The "Params" block is likewise variation dependent — bind our buffer only if the
	// compiled shader actually declares it.
	if(params->hasParamBlock(GPT_FRAGMENT_PROGRAM, "Params"))
		mParamsSet->setParamBlockBuffer("Params", mParamBuffer, true);
}
  31. void SkyboxMat::_initVariations(ShaderVariations& variations)
  32. {
  33. variations.add(VAR_Color);
  34. variations.add(VAR_Texture);
  35. }
void SkyboxMat::bind(const SPtr<GpuParamBlockBuffer>& perCamera)
{
	// Hook up the shared per-camera parameter block, then make this material's
	// first (and only relevant) pass active on the render API.
	mParamsSet->setParamBlockBuffer("PerCamera", perCamera, true);

	gRendererUtility().setPass(mMaterial, 0);
}
void SkyboxMat::setParams(const SPtr<Texture>& texture, const Color& solidColor)
{
	// Bind the sky texture. NOTE(review): the TextureSurface(1, 1, 0, 0) argument meaning
	// (mip/face selection) isn't visible from this file — confirm against the TextureSurface
	// declaration before changing it.
	mSkyTextureParam.set(texture, TextureSurface(1, 1, 0, 0));

	// Upload the solid color (consumed by the SOLID_COLOR variation) and flush so the GPU
	// sees the new value before the pass parameters are bound.
	gSkyboxParamDef.gClearColor.set(mParamBuffer, solidColor);
	mParamBuffer->flushToGPU();

	gRendererUtility().setPassParams(mParamsSet);
}
  48. SkyboxMat* SkyboxMat::getVariation(bool color)
  49. {
  50. if (color)
  51. return get(VAR_Color);
  52. return get(VAR_Texture);
  53. }
// Default-constructs view data with depth encoding disabled and a zeroed encode range.
RendererViewData::RendererViewData()
	:encodeDepth(false), depthEncodeNear(0.0f), depthEncodeFar(0.0f)
{
}
  58. RendererViewProperties::RendererViewProperties(const RENDERER_VIEW_DESC& src)
  59. :RendererViewData(src), frameIdx(0)
  60. {
  61. viewProjTransform = src.projTransform * src.viewTransform;
  62. target = src.target.target;
  63. viewRect = src.target.viewRect;
  64. nrmViewRect = src.target.nrmViewRect;
  65. numSamples = src.target.numSamples;
  66. clearFlags = src.target.clearFlags;
  67. clearColor = src.target.clearColor;
  68. clearDepthValue = src.target.clearDepthValue;
  69. clearStencilValue = src.target.clearStencilValue;
  70. }
// Creates a view not yet associated with any camera or target.
// NOTE(review): if mViewIdx is unsigned, the -1 wraps to the max value — presumably an
// intentional "unassigned" sentinel until _setViewIdx() is called; confirm the member type.
RendererView::RendererView()
	: mCamera(nullptr), mRenderSettingsHash(0), mViewIdx(-1)
{
	mParamBuffer = gPerCameraParamDef.createBuffer();
}
// Creates a view initialized from the provided descriptor.
RendererView::RendererView(const RENDERER_VIEW_DESC& desc)
	: mProperties(desc), mTargetDesc(desc.target), mCamera(desc.sceneCamera), mRenderSettingsHash(0), mViewIdx(-1)
{
	mParamBuffer = gPerCameraParamDef.createBuffer();

	// No previous frame exists yet, so seed the temporal history with the current transform
	// (yields an identity NDC-to-previous-NDC mapping on the first frame).
	mProperties.prevViewProjTransform = mProperties.viewProjTransform;

	setStateReductionMode(desc.stateReduction);
}
  83. void RendererView::setStateReductionMode(StateReduction reductionMode)
  84. {
  85. mOpaqueQueue = bs_shared_ptr_new<RenderQueue>(reductionMode);
  86. StateReduction transparentStateReduction = reductionMode;
  87. if (transparentStateReduction == StateReduction::Material)
  88. transparentStateReduction = StateReduction::Distance; // Transparent object MUST be sorted by distance
  89. mTransparentQueue = bs_shared_ptr_new<RenderQueue>(transparentStateReduction);
  90. }
void RendererView::setRenderSettings(const SPtr<RenderSettings>& settings)
{
	// Lazily allocate our own settings object on first use.
	if (mRenderSettings == nullptr)
		mRenderSettings = bs_shared_ptr_new<RenderSettings>();

	// Copy by value so the caller's object can be modified or freed independently.
	if (settings != nullptr)
		*mRenderSettings = *settings;

	// Bump the hash so anything caching state derived from the settings gets invalidated.
	mRenderSettingsHash++;

	// Update compositor hierarchy. (Note: This needs to be called even when only the viewport
	// size or other target information changes, but we're currently calling it here because
	// all such calls are followed by setRenderSettings.)
	mCompositor.build(*this, RCNodeFinalResolve::getNodeId());
}
  102. void RendererView::setTransform(const Vector3& origin, const Vector3& direction, const Matrix4& view,
  103. const Matrix4& proj, const ConvexVolume& worldFrustum)
  104. {
  105. mProperties.viewOrigin = origin;
  106. mProperties.viewDirection = direction;
  107. mProperties.viewTransform = view;
  108. mProperties.projTransform = proj;
  109. mProperties.cullFrustum = worldFrustum;
  110. mProperties.viewProjTransform = proj * view;
  111. }
// Reinitializes the entire view from a descriptor, resetting temporal history.
void RendererView::setView(const RENDERER_VIEW_DESC& desc)
{
	mCamera = desc.sceneCamera;
	mProperties = desc;
	mProperties.viewProjTransform = desc.projTransform * desc.viewTransform;

	// Previous frame's transform no longer applies to the new view — reset the history.
	mProperties.prevViewProjTransform = Matrix4::IDENTITY;
	mTargetDesc = desc.target;

	setStateReductionMode(desc.stateReduction);
}
void RendererView::beginFrame()
{
	// Note: inverse view-projection can be cached, it doesn't change every frame
	Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
	Matrix4 invViewProj = viewProj.inverse();

	// Matrix mapping this frame's NDC into the previous frame's NDC — uploaded to the
	// per-camera block for use by temporal effects (see endFrame(), which saves the history).
	Matrix4 NDCToPrevNDC = mProperties.prevViewProjTransform * invViewProj;

	gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, NDCToPrevNDC);
}
  129. void RendererView::endFrame()
  130. {
  131. // Save view-projection matrix to use for temporal filtering
  132. mProperties.prevViewProjTransform = mProperties.viewProjTransform;
  133. // Advance per-view frame index. This is used primarily by temporal rendering effects, and pausing the frame index
  134. // allows you to freeze the current rendering as is, without temporal artifacts.
  135. mProperties.frameIdx++;
  136. mOpaqueQueue->clear();
  137. mTransparentQueue->clear();
  138. }
/**
 * Determines which of the provided renderables are visible from this view, queues their render
 * elements into the opaque/transparent queues, and optionally ORs the per-view result into a
 * caller-provided aggregate visibility array.
 */
void RendererView::determineVisible(const Vector<RendererObject*>& renderables, const Vector<CullInfo>& cullInfos,
	Vector<bool>* visibility)
{
	// Reset per-view visibility: one entry per renderable, initially invisible.
	mVisibility.renderables.clear();
	mVisibility.renderables.resize(renderables.size(), false);

	// Overlay-only views render no scene geometry.
	if (mRenderSettings->overlayOnly)
		return;

	calculateVisibility(cullInfos, mVisibility.renderables);

	// Update per-object param buffers and queue render elements
	for(UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
	{
		if (!mVisibility.renderables[i])
			continue;

		// Distance from the camera to the bounds center, used as the queue sort key.
		const AABox& boundingBox = cullInfos[i].bounds.getBox();
		float distanceToCamera = (mProperties.viewOrigin - boundingBox.getCenter()).length();

		for (auto& renderElem : renderables[i]->elements)
		{
			// Note: I could keep opaque and transparent renderables in two separate arrays, so I don't need to do the
			// check here
			bool isTransparent = (renderElem.material->getShader()->getFlags() & (UINT32)ShaderFlags::Transparent) != 0;

			if (isTransparent)
				mTransparentQueue->add(&renderElem, distanceToCamera);
			else
				mOpaqueQueue->add(&renderElem, distanceToCamera);
		}
	}

	// Merge into the aggregate visibility array (e.g. group-wide visibility).
	// Assumes the output array has at least renderables.size() entries.
	if(visibility != nullptr)
	{
		for (UINT32 i = 0; i < (UINT32)renderables.size(); i++)
		{
			bool visible = (*visibility)[i];
			(*visibility)[i] = visible || mVisibility.renderables[i];
		}
	}

	mOpaqueQueue->sort();
	mTransparentQueue->sort();
}
/**
 * Determines which of the provided lights (all of the given type) are visible from this view,
 * and optionally ORs the result into a caller-provided aggregate visibility array.
 */
void RendererView::determineVisible(const Vector<RendererLight>& lights, const Vector<Sphere>& bounds,
	LightType lightType, Vector<bool>* visibility)
{
	// Special case for directional lights, they're always visible
	if(lightType == LightType::Directional)
	{
		if (visibility)
			visibility->assign(lights.size(), true);

		return;
	}

	// Pick (and reset) the per-view visibility array matching the light type.
	Vector<bool>* perViewVisibility;
	if(lightType == LightType::Radial)
	{
		mVisibility.radialLights.clear();
		mVisibility.radialLights.resize(lights.size(), false);

		perViewVisibility = &mVisibility.radialLights;
	}
	else // Spot
	{
		mVisibility.spotLights.clear();
		mVisibility.spotLights.resize(lights.size(), false);

		perViewVisibility = &mVisibility.spotLights;
	}

	// Overlay-only views render no scene lighting.
	if (mRenderSettings->overlayOnly)
		return;

	calculateVisibility(bounds, *perViewVisibility);

	// Merge into the aggregate array. Assumes it has at least lights.size() entries.
	if(visibility != nullptr)
	{
		for (UINT32 i = 0; i < (UINT32)lights.size(); i++)
		{
			bool visible = (*visibility)[i];
			(*visibility)[i] = visible || (*perViewVisibility)[i];
		}
	}
}
  211. void RendererView::calculateVisibility(const Vector<CullInfo>& cullInfos, Vector<bool>& visibility) const
  212. {
  213. UINT64 cameraLayers = mProperties.visibleLayers;
  214. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  215. for (UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
  216. {
  217. if ((cullInfos[i].layer & cameraLayers) == 0)
  218. continue;
  219. // Do frustum culling
  220. // Note: This is bound to be a bottleneck at some point. When it is ensure that intersect methods use vector
  221. // operations, as it is trivial to update them. Also consider spatial partitioning.
  222. const Sphere& boundingSphere = cullInfos[i].bounds.getSphere();
  223. if (worldFrustum.intersects(boundingSphere))
  224. {
  225. // More precise with the box
  226. const AABox& boundingBox = cullInfos[i].bounds.getBox();
  227. if (worldFrustum.intersects(boundingBox))
  228. visibility[i] = true;
  229. }
  230. }
  231. }
  232. void RendererView::calculateVisibility(const Vector<Sphere>& bounds, Vector<bool>& visibility) const
  233. {
  234. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  235. for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
  236. {
  237. if (worldFrustum.intersects(bounds[i]))
  238. visibility[i] = true;
  239. }
  240. }
  241. void RendererView::calculateVisibility(const Vector<AABox>& bounds, Vector<bool>& visibility) const
  242. {
  243. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  244. for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
  245. {
  246. if (worldFrustum.intersects(bounds[i]))
  247. visibility[i] = true;
  248. }
  249. }
/** Computes (x, y) such that viewZ = x / (deviceZ + y), for use in shaders. */
Vector2 RendererView::getDeviceZToViewZ(const Matrix4& projMatrix)
{
	// Returns a set of values that will transform depth buffer values (in range [0, 1]) to a distance
	// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
	// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
	// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
	// value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
	// formula is:
	// depth = (Az + B) / (C * z)

	// To get the z coordinate back we simply do the opposite:
	// z = B / (depth * C - A)

	// However some APIs will also do a transformation on the depth values before storing them to the texture
	// (e.g. OpenGL will transform from [-1, 1] to [0, 1]). And we need to reverse that as well. Therefore the final
	// formula is:
	// z = B / ((depth * (maxDepth - minDepth) + minDepth) * C - A)

	// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
	// z = 1.0f / (depth + minDepth/(maxDepth - minDepth) - A/((maxDepth - minDepth) * C)) * B/((maxDepth - minDepth) * C)

	RenderAPI& rapi = RenderAPI::instance();
	const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

	// API-specific depth input range (e.g. [0, 1] for DX, [-1, 1] for GL).
	float depthRange = rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue();
	float minDepth = rapiInfo.getMinimumDepthInputValue();

	float a = projMatrix[2][2];
	float b = projMatrix[2][3];
	float c = projMatrix[3][2];

	Vector2 output;

	if (c != 0.0f)
	{
		// Perspective projection (w depends on z).
		output.x = b / (depthRange * c);
		output.y = minDepth / depthRange - a / (depthRange * c);
	}
	else // Orthographic, assuming viewing towards negative Z
	{
		output.x = b / -depthRange;
		output.y = minDepth / depthRange - a / -depthRange;
	}

	return output;
}
/** Computes (x, y) such that viewZ = x / (ndcZ + y) — like getDeviceZToViewZ() but without the API depth-range remap. */
Vector2 RendererView::getNDCZToViewZ(const Matrix4& projMatrix)
{
	// Returns a set of values that will transform depth buffer values (e.g. [0, 1] in DX, [-1, 1] in GL) to a distance
	// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
	// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
	// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
	// value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
	// formula is:
	// depth = (Az + B) / (C * z)

	// To get the z coordinate back we simply do the opposite:
	// z = B / (depth * C - A)

	// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
	// z = 1.0f / (depth - A/C) * B/C

	RenderAPI& rapi = RenderAPI::instance();
	const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

	float a = projMatrix[2][2];
	float b = projMatrix[2][3];
	float c = projMatrix[3][2];

	Vector2 output;

	if (c != 0.0f)
	{
		// Perspective projection (w depends on z).
		output.x = b / c;
		output.y = -a / c;
	}
	else // Orthographic, assuming viewing towards negative Z
	{
		output.x = -b;
		output.y = a;
	}

	return output;
}
  318. Vector2 RendererView::getNDCZToDeviceZ()
  319. {
  320. RenderAPI& rapi = RenderAPI::instance();
  321. const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();
  322. Vector2 ndcZToDeviceZ;
  323. ndcZToDeviceZ.x = 1.0f / (rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue());
  324. ndcZToDeviceZ.y = -rapiInfo.getMinimumDepthInputValue();
  325. return ndcZToDeviceZ;
  326. }
/** Populates the per-camera GPU parameter buffer from the view's current properties. */
void RendererView::updatePerViewBuffer()
{
	Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
	Matrix4 invViewProj = viewProj.inverse();

	gPerCameraParamDef.gMatProj.set(mParamBuffer, mProperties.projTransform);
	gPerCameraParamDef.gMatView.set(mParamBuffer, mProperties.viewTransform);
	gPerCameraParamDef.gMatViewProj.set(mParamBuffer, viewProj);
	gPerCameraParamDef.gMatInvViewProj.set(mParamBuffer, invViewProj); // Note: Calculate inverses separately (better precision possibly)
	gPerCameraParamDef.gMatInvProj.set(mParamBuffer, mProperties.projTransform.inverse());

	// Construct a special inverse view-projection matrix that had projection entries that effect z and w eliminated.
	// Used to transform a vector(clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in clip space, and
	// view_z/view_w in view space, into world space.

	// Only projects z/w coordinates (cancels out with the inverse matrix below)
	Matrix4 projZ = Matrix4::IDENTITY;
	projZ[2][2] = mProperties.projTransform[2][2];
	projZ[2][3] = mProperties.projTransform[2][3];
	projZ[3][2] = mProperties.projTransform[3][2];
	projZ[3][3] = 0.0f;

	// Maps current-frame NDC into previous-frame NDC, for temporal effects.
	Matrix4 NDCToPrevNDC = mProperties.prevViewProjTransform * invViewProj;

	gPerCameraParamDef.gMatScreenToWorld.set(mParamBuffer, invViewProj * projZ);
	gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, NDCToPrevNDC);
	gPerCameraParamDef.gViewDir.set(mParamBuffer, mProperties.viewDirection);
	gPerCameraParamDef.gViewOrigin.set(mParamBuffer, mProperties.viewOrigin);
	gPerCameraParamDef.gDeviceZToWorldZ.set(mParamBuffer, getDeviceZToViewZ(mProperties.projTransform));
	gPerCameraParamDef.gNDCZToWorldZ.set(mParamBuffer, getNDCZToViewZ(mProperties.projTransform));
	gPerCameraParamDef.gNDCZToDeviceZ.set(mParamBuffer, getNDCZToDeviceZ());

	// Camera clip planes.
	Vector2 nearFar(mProperties.nearPlane, mProperties.farPlane);
	gPerCameraParamDef.gNearFar.set(mParamBuffer, nearFar);

	// Viewport rectangle, in pixels: (x, y, width, height).
	const Rect2I& viewRect = mTargetDesc.viewRect;

	Vector4I viewportRect;
	viewportRect[0] = viewRect.x;
	viewportRect[1] = viewRect.y;
	viewportRect[2] = viewRect.width;
	viewportRect[3] = viewRect.height;

	gPerCameraParamDef.gViewportRectangle.set(mParamBuffer, viewportRect);

	// Scale/offset that maps clip (NDC) coordinates to render-target UV.
	Vector4 ndcToUV = getNDCToUV();
	gPerCameraParamDef.gClipToUVScaleOffset.set(mParamBuffer, ndcToUV);

	// With lighting disabled the shader receives a large ambient factor instead.
	// NOTE(review): 100.0f appears to be an arbitrary "fully lit" boost — confirm against the
	// shader that consumes gAmbientFactor.
	if (!mRenderSettings->enableLighting)
		gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 100.0f);
	else
		gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 0.0f);
}
/**
 * Computes a scale (xy) and offset (zw) that maps NDC xy coordinates onto the view's render
 * target UVs, accounting for viewport placement, API texel offsets and Y-axis direction.
 */
Vector4 RendererView::getNDCToUV() const
{
	RenderAPI& rapi = RenderAPI::instance();
	const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();
	const Rect2I& viewRect = mTargetDesc.viewRect;

	float halfWidth = viewRect.width * 0.5f;
	float halfHeight = viewRect.height * 0.5f;

	// NOTE(review): the 20.0f fallback when the target dimensions are unavailable looks like an
	// arbitrary non-zero placeholder to avoid division by zero — confirm its origin.
	float rtWidth = mTargetDesc.targetWidth != 0 ? (float)mTargetDesc.targetWidth : 20.0f;
	float rtHeight = mTargetDesc.targetHeight != 0 ? (float)mTargetDesc.targetHeight : 20.0f;

	Vector4 ndcToUV;
	ndcToUV.x = halfWidth / rtWidth;
	ndcToUV.y = -halfHeight / rtHeight;
	ndcToUV.z = viewRect.x / rtWidth + (halfWidth + rapiInfo.getHorizontalTexelOffset()) / rtWidth;
	ndcToUV.w = viewRect.y / rtHeight + (halfHeight + rapiInfo.getVerticalTexelOffset()) / rtHeight;

	// Either of these flips the Y axis, but if they're both true they cancel out
	if (rapiInfo.isFlagSet(RenderAPIFeatureFlag::UVYAxisUp) ^ rapiInfo.isFlagSet(RenderAPIFeatureFlag::NDCYAxisDown))
		ndcToUV.y = -ndcToUV.y;

	return ndcToUV;
}
// Rebuilds this view's light grid from the currently visible lights and reflection probes.
// The final argument passes whether lighting is disabled for this view.
void RendererView::updateLightGrid(const VisibleLightData& visibleLightData,
	const VisibleReflProbeData& visibleReflProbeData)
{
	mLightGrid.updateGrid(*this, visibleLightData, visibleReflProbeData, !mRenderSettings->enableLighting);
}
// Constructs an empty view group using a default shadow map size of 1024.
RendererViewGroup::RendererViewGroup()
	:mShadowRenderer(1024)
{ }
// Constructs a view group over the provided views, using the specified shadow map size.
RendererViewGroup::RendererViewGroup(RendererView** views, UINT32 numViews, UINT32 shadowMapSize)
	:mShadowRenderer(shadowMapSize)
{
	setViews(views, numViews);
}
  401. void RendererViewGroup::setViews(RendererView** views, UINT32 numViews)
  402. {
  403. mViews.clear();
  404. for (UINT32 i = 0; i < numViews; i++)
  405. {
  406. mViews.push_back(views[i]);
  407. views[i]->_setViewIdx(i);
  408. }
  409. }
  410. void RendererViewGroup::determineVisibility(const SceneInfo& sceneInfo)
  411. {
  412. UINT32 numViews = (UINT32)mViews.size();
  413. // Early exit if no views render scene geometry
  414. bool allViewsOverlay = false;
  415. for (UINT32 i = 0; i < numViews; i++)
  416. {
  417. if (!mViews[i]->getRenderSettings().overlayOnly)
  418. {
  419. allViewsOverlay = false;
  420. break;
  421. }
  422. }
  423. if (allViewsOverlay)
  424. return;
  425. // Generate render queues per camera
  426. mVisibility.renderables.resize(sceneInfo.renderables.size(), false);
  427. mVisibility.renderables.assign(sceneInfo.renderables.size(), false);
  428. for(UINT32 i = 0; i < numViews; i++)
  429. mViews[i]->determineVisible(sceneInfo.renderables, sceneInfo.renderableCullInfos, &mVisibility.renderables);
  430. // Calculate light visibility for all views
  431. UINT32 numRadialLights = (UINT32)sceneInfo.radialLights.size();
  432. mVisibility.radialLights.resize(numRadialLights, false);
  433. mVisibility.radialLights.assign(numRadialLights, false);
  434. UINT32 numSpotLights = (UINT32)sceneInfo.spotLights.size();
  435. mVisibility.spotLights.resize(numSpotLights, false);
  436. mVisibility.spotLights.assign(numSpotLights, false);
  437. for (UINT32 i = 0; i < numViews; i++)
  438. {
  439. if (mViews[i]->getRenderSettings().overlayOnly)
  440. continue;
  441. mViews[i]->determineVisible(sceneInfo.radialLights, sceneInfo.radialLightWorldBounds, LightType::Radial,
  442. &mVisibility.radialLights);
  443. mViews[i]->determineVisible(sceneInfo.spotLights, sceneInfo.spotLightWorldBounds, LightType::Spot,
  444. &mVisibility.spotLights);
  445. }
  446. // Calculate refl. probe visibility for all views
  447. UINT32 numProbes = (UINT32)sceneInfo.reflProbes.size();
  448. mVisibility.reflProbes.resize(numProbes, false);
  449. mVisibility.reflProbes.assign(numProbes, false);
  450. // Note: Per-view visibility for refl. probes currently isn't calculated
  451. for (UINT32 i = 0; i < numViews; i++)
  452. {
  453. const auto& viewProps = mViews[i]->getProperties();
  454. // Don't recursively render reflection probes when generating reflection probe maps
  455. if (viewProps.renderingReflections)
  456. continue;
  457. mViews[i]->calculateVisibility(sceneInfo.reflProbeWorldBounds, mVisibility.reflProbes);
  458. }
  459. // Organize light and refl. probe visibility infomation in a more GPU friendly manner
  460. // Note: I'm determining light and refl. probe visibility for the entire group. It might be more performance
  461. // efficient to do it per view. Additionally I'm using a single GPU buffer to hold their information, which is
  462. // then updated when each view group is rendered. It might be better to keep one buffer reserved per-view.
  463. mVisibleLightData.update(sceneInfo, *this);
  464. mVisibleReflProbeData.update(sceneInfo, *this);
  465. for (UINT32 i = 0; i < numViews; i++)
  466. {
  467. if (mViews[i]->getRenderSettings().overlayOnly)
  468. continue;
  469. mViews[i]->updateLightGrid(mVisibleLightData, mVisibleReflProbeData);
  470. }
  471. }
  472. }}