// BsRendererView.cpp
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsRendererView.h"
  4. #include "Renderer/BsCamera.h"
  5. #include "Renderer/BsRenderable.h"
  6. #include "Material/BsMaterial.h"
  7. #include "Material/BsShader.h"
  8. #include "Renderer/BsRendererUtility.h"
  9. #include "BsLightRendering.h"
  10. #include "Material/BsGpuParamsSet.h"
  11. #include "BsRendererScene.h"
  12. namespace bs { namespace ct
  13. {
	// Global definitions of the per-camera and skybox GPU parameter blocks, used to allocate and write their buffers
	PerCameraParamDef gPerCameraParamDef;
	SkyboxParamDef gSkyboxParamDef;
  16. SkyboxMat::SkyboxMat()
  17. {
  18. if(mParams->hasTexture(GPT_FRAGMENT_PROGRAM, "gSkyTex"))
  19. mParams->getTextureParam(GPT_FRAGMENT_PROGRAM, "gSkyTex", mSkyTextureParam);
  20. mParamBuffer = gSkyboxParamDef.createBuffer();
  21. if(mParams->hasParamBlock(GPT_FRAGMENT_PROGRAM, "Params"))
  22. mParams->setParamBlockBuffer("Params", mParamBuffer);
  23. }
  24. void SkyboxMat::bind(const SPtr<GpuParamBlockBuffer>& perCamera, const SPtr<Texture>& texture, const Color& solidColor)
  25. {
  26. mParams->setParamBlockBuffer("PerCamera", perCamera);
  27. mSkyTextureParam.set(texture);
  28. gSkyboxParamDef.gClearColor.set(mParamBuffer, solidColor);
  29. mParamBuffer->flushToGPU();
  30. RendererMaterial::bind();
  31. }
  32. SkyboxMat* SkyboxMat::getVariation(bool color)
  33. {
  34. if (color)
  35. return get(getVariation<true>());
  36. return get(getVariation<false>());
  37. }
  38. RendererViewData::RendererViewData()
  39. :encodeDepth(false), depthEncodeNear(0.0f), depthEncodeFar(0.0f)
  40. {
  41. }
  42. RendererViewProperties::RendererViewProperties(const RENDERER_VIEW_DESC& src)
  43. :RendererViewData(src), frameIdx(0)
  44. {
  45. viewProjTransform = src.projTransform * src.viewTransform;
  46. target = src.target.target;
  47. viewRect = src.target.viewRect;
  48. nrmViewRect = src.target.nrmViewRect;
  49. numSamples = src.target.numSamples;
  50. clearFlags = src.target.clearFlags;
  51. clearColor = src.target.clearColor;
  52. clearDepthValue = src.target.clearDepthValue;
  53. clearStencilValue = src.target.clearStencilValue;
  54. }
  55. RendererView::RendererView()
  56. : mCamera(nullptr), mRenderSettingsHash(0), mViewIdx(-1)
  57. {
  58. mParamBuffer = gPerCameraParamDef.createBuffer();
  59. }
  60. RendererView::RendererView(const RENDERER_VIEW_DESC& desc)
  61. : mProperties(desc), mTargetDesc(desc.target), mCamera(desc.sceneCamera), mRenderSettingsHash(0), mViewIdx(-1)
  62. {
  63. mParamBuffer = gPerCameraParamDef.createBuffer();
  64. mProperties.prevViewProjTransform = mProperties.viewProjTransform;
  65. setStateReductionMode(desc.stateReduction);
  66. }
  67. void RendererView::setStateReductionMode(StateReduction reductionMode)
  68. {
  69. mOpaqueQueue = bs_shared_ptr_new<RenderQueue>(reductionMode);
  70. StateReduction transparentStateReduction = reductionMode;
  71. if (transparentStateReduction == StateReduction::Material)
  72. transparentStateReduction = StateReduction::Distance; // Transparent object MUST be sorted by distance
  73. mTransparentQueue = bs_shared_ptr_new<RenderQueue>(transparentStateReduction);
  74. }
  75. void RendererView::setRenderSettings(const SPtr<RenderSettings>& settings)
  76. {
  77. if (mRenderSettings == nullptr)
  78. mRenderSettings = bs_shared_ptr_new<RenderSettings>();
  79. if (settings != nullptr)
  80. *mRenderSettings = *settings;
  81. mRenderSettingsHash++;
  82. // Update compositor hierarchy (Note: Needs to be called even when viewport size (or other information) changes,
  83. // but we're currently calling it here as all such calls are followed by setRenderSettings.
  84. mCompositor.build(*this, RCNodeFinalResolve::getNodeId());
  85. }
  86. void RendererView::setTransform(const Vector3& origin, const Vector3& direction, const Matrix4& view,
  87. const Matrix4& proj, const ConvexVolume& worldFrustum)
  88. {
  89. mProperties.viewOrigin = origin;
  90. mProperties.viewDirection = direction;
  91. mProperties.viewTransform = view;
  92. mProperties.projTransform = proj;
  93. mProperties.cullFrustum = worldFrustum;
  94. mProperties.viewProjTransform = proj * view;
  95. }
  96. void RendererView::setView(const RENDERER_VIEW_DESC& desc)
  97. {
  98. mCamera = desc.sceneCamera;
  99. mProperties = desc;
  100. mProperties.viewProjTransform = desc.projTransform * desc.viewTransform;
  101. mProperties.prevViewProjTransform = Matrix4::IDENTITY;
  102. mTargetDesc = desc.target;
  103. setStateReductionMode(desc.stateReduction);
  104. }
  105. void RendererView::beginFrame()
  106. {
  107. // Note: inverse view-projection can be cached, it doesn't change every frame
  108. Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
  109. Matrix4 invViewProj = viewProj.inverse();
  110. Matrix4 NDCToPrevNDC = mProperties.prevViewProjTransform * invViewProj;
  111. gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, NDCToPrevNDC);
  112. }
  113. void RendererView::endFrame()
  114. {
  115. // Save view-projection matrix to use for temporal filtering
  116. mProperties.prevViewProjTransform = mProperties.viewProjTransform;
  117. // Advance per-view frame index. This is used primarily by temporal rendering effects, and pausing the frame index
  118. // allows you to freeze the current rendering as is, without temporal artifacts.
  119. mProperties.frameIdx++;
  120. mOpaqueQueue->clear();
  121. mTransparentQueue->clear();
  122. }
  123. void RendererView::determineVisible(const Vector<RendererObject*>& renderables, const Vector<CullInfo>& cullInfos,
  124. Vector<bool>* visibility)
  125. {
  126. mVisibility.renderables.clear();
  127. mVisibility.renderables.resize(renderables.size(), false);
  128. if (mRenderSettings->overlayOnly)
  129. return;
  130. calculateVisibility(cullInfos, mVisibility.renderables);
  131. // Update per-object param buffers and queue render elements
  132. for(UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
  133. {
  134. if (!mVisibility.renderables[i])
  135. continue;
  136. const AABox& boundingBox = cullInfos[i].bounds.getBox();
  137. float distanceToCamera = (mProperties.viewOrigin - boundingBox.getCenter()).length();
  138. for (auto& renderElem : renderables[i]->elements)
  139. {
  140. // Note: I could keep opaque and transparent renderables in two separate arrays, so I don't need to do the
  141. // check here
  142. bool isTransparent = (renderElem.material->getShader()->getFlags() & (UINT32)ShaderFlags::Transparent) != 0;
  143. if (isTransparent)
  144. mTransparentQueue->add(&renderElem, distanceToCamera);
  145. else
  146. mOpaqueQueue->add(&renderElem, distanceToCamera);
  147. }
  148. }
  149. if(visibility != nullptr)
  150. {
  151. for (UINT32 i = 0; i < (UINT32)renderables.size(); i++)
  152. {
  153. bool visible = (*visibility)[i];
  154. (*visibility)[i] = visible || mVisibility.renderables[i];
  155. }
  156. }
  157. mOpaqueQueue->sort();
  158. mTransparentQueue->sort();
  159. }
  160. void RendererView::determineVisible(const Vector<RendererLight>& lights, const Vector<Sphere>& bounds,
  161. LightType lightType, Vector<bool>* visibility)
  162. {
  163. // Special case for directional lights, they're always visible
  164. if(lightType == LightType::Directional)
  165. {
  166. if (visibility)
  167. visibility->assign(lights.size(), true);
  168. return;
  169. }
  170. Vector<bool>* perViewVisibility;
  171. if(lightType == LightType::Radial)
  172. {
  173. mVisibility.radialLights.clear();
  174. mVisibility.radialLights.resize(lights.size(), false);
  175. perViewVisibility = &mVisibility.radialLights;
  176. }
  177. else // Spot
  178. {
  179. mVisibility.spotLights.clear();
  180. mVisibility.spotLights.resize(lights.size(), false);
  181. perViewVisibility = &mVisibility.spotLights;
  182. }
  183. if (mRenderSettings->overlayOnly)
  184. return;
  185. calculateVisibility(bounds, *perViewVisibility);
  186. if(visibility != nullptr)
  187. {
  188. for (UINT32 i = 0; i < (UINT32)lights.size(); i++)
  189. {
  190. bool visible = (*visibility)[i];
  191. (*visibility)[i] = visible || (*perViewVisibility)[i];
  192. }
  193. }
  194. }
  195. void RendererView::calculateVisibility(const Vector<CullInfo>& cullInfos, Vector<bool>& visibility) const
  196. {
  197. UINT64 cameraLayers = mProperties.visibleLayers;
  198. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  199. for (UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
  200. {
  201. if ((cullInfos[i].layer & cameraLayers) == 0)
  202. continue;
  203. // Do frustum culling
  204. // Note: This is bound to be a bottleneck at some point. When it is ensure that intersect methods use vector
  205. // operations, as it is trivial to update them. Also consider spatial partitioning.
  206. const Sphere& boundingSphere = cullInfos[i].bounds.getSphere();
  207. if (worldFrustum.intersects(boundingSphere))
  208. {
  209. // More precise with the box
  210. const AABox& boundingBox = cullInfos[i].bounds.getBox();
  211. if (worldFrustum.intersects(boundingBox))
  212. visibility[i] = true;
  213. }
  214. }
  215. }
  216. void RendererView::calculateVisibility(const Vector<Sphere>& bounds, Vector<bool>& visibility) const
  217. {
  218. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  219. for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
  220. {
  221. if (worldFrustum.intersects(bounds[i]))
  222. visibility[i] = true;
  223. }
  224. }
  225. void RendererView::calculateVisibility(const Vector<AABox>& bounds, Vector<bool>& visibility) const
  226. {
  227. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  228. for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
  229. {
  230. if (worldFrustum.intersects(bounds[i]))
  231. visibility[i] = true;
  232. }
  233. }
	Vector2 RendererView::getDeviceZToViewZ(const Matrix4& projMatrix)
	{
		// Returns a set of values that transform depth buffer values (in range [0, 1]) to a distance in view space.
		// This involves applying the inverse projection transform to the depth value. Multiplying a vector with the
		// projection matrix yields [clipX, clipY, Az + B, C * z], where clipX/clipY are irrelevant here. A is the
		// [2][2], B the [2][3] and C the [3][2] element of the projection matrix (the only ones that matter for the
		// depth value). The hardware also divides z by w to produce the depth, therefore the final formula is:
		// depth = (Az + B) / (C * z)
		// To recover the z coordinate we simply invert:
		// z = B / (depth * C - A)
		// However some APIs also transform depth values before storing them to the texture (e.g. OpenGL transforms
		// from [-1, 1] to [0, 1]), and we need to reverse that as well. Therefore the final formula is:
		// z = B / ((depth * (maxDepth - minDepth) + minDepth) * C - A)
		// Then we reorganize it to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth + minDepth/(maxDepth - minDepth) - A/((maxDepth - minDepth) * C)) * B/((maxDepth - minDepth) * C)
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		// API-specific depth input range (e.g. [0, 1] on DX, [-1, 1] on GL)
		float depthRange = rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue();
		float minDepth = rapiInfo.getMinimumDepthInputValue();

		// The projection matrix elements relevant to depth (see derivation above)
		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;
		if (c != 0.0f)
		{
			output.x = b / (depthRange * c);
			output.y = minDepth / depthRange - a / (depthRange * c);
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			output.x = b / -depthRange;
			output.y = minDepth / depthRange - a / -depthRange;
		}

		return output;
	}
  271. Vector2 RendererView::getNDCZToViewZ(const Matrix4& projMatrix)
  272. {
  273. // Returns a set of values that will transform depth buffer values (e.g. [0, 1] in DX, [-1, 1] in GL) to a distance
  274. // in view space. This involes applying the inverse projection transform to the depth value. When you multiply
  275. // a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
  276. // A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
  277. // value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
  278. // formula is:
  279. // depth = (Az + B) / (C * z)
  280. // To get the z coordinate back we simply do the opposite:
  281. // z = B / (depth * C - A)
  282. // Are we reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
  283. // z = 1.0f / (depth - A/C) * B/C
  284. float a = projMatrix[2][2];
  285. float b = projMatrix[2][3];
  286. float c = projMatrix[3][2];
  287. Vector2 output;
  288. if (c != 0.0f)
  289. {
  290. output.x = b / c;
  291. output.y = -a / c;
  292. }
  293. else // Ortographic, assuming viewing towards negative Z
  294. {
  295. output.x = -b;
  296. output.y = a;
  297. }
  298. return output;
  299. }
  300. Vector2 RendererView::getNDCZToDeviceZ()
  301. {
  302. RenderAPI& rapi = RenderAPI::instance();
  303. const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();
  304. Vector2 ndcZToDeviceZ;
  305. ndcZToDeviceZ.x = 1.0f / (rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue());
  306. ndcZToDeviceZ.y = -rapiInfo.getMinimumDepthInputValue();
  307. return ndcZToDeviceZ;
  308. }
  309. Matrix4 invertProjectionMatrix(const Matrix4& mat)
  310. {
  311. // Try to solve the most common case using high percision calculations, in order to reduce depth error
  312. if(mat[0][1] == 0.0f && mat[0][3] == 0.0f &&
  313. mat[1][0] == 0.0f && mat[1][3] == 0.0f &&
  314. mat[2][0] == 0.0f && mat[2][1] == 0.0f &&
  315. mat[3][0] == 0.0f && mat[3][1] == 0.0f &&
  316. mat[3][2] == -1.0f && mat[3][3] == 0.0f)
  317. {
  318. double a = mat[0][0];
  319. double b = mat[1][1];
  320. double c = mat[2][2];
  321. double d = mat[2][3];
  322. double s = mat[0][2];
  323. double t = mat[1][2];
  324. return Matrix4(
  325. (float)(1.0/a), 0.0f, 0.0f, (float)(-s/a),
  326. 0.0f, (float)(1.0/b), 0.0f, (float)(-t/b),
  327. 0.0f, 0.0f, 0.0f, -1.0f,
  328. 0.0f, 0.0f, (float)(1.0/d), (float)(c/d)
  329. );
  330. }
  331. else
  332. {
  333. return mat.inverse();
  334. }
  335. }
	void RendererView::updatePerViewBuffer()
	{
		// Derive the combined and inverse transforms from the current view/projection matrices
		Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
		Matrix4 invProj = invertProjectionMatrix(mProperties.projTransform);
		Matrix4 invView = mProperties.viewTransform.inverseAffine();
		Matrix4 invViewProj = invView * invProj;

		gPerCameraParamDef.gMatProj.set(mParamBuffer, mProperties.projTransform);
		gPerCameraParamDef.gMatView.set(mParamBuffer, mProperties.viewTransform);
		gPerCameraParamDef.gMatViewProj.set(mParamBuffer, viewProj);
		gPerCameraParamDef.gMatInvViewProj.set(mParamBuffer, invViewProj);
		gPerCameraParamDef.gMatInvProj.set(mParamBuffer, invProj);

		// Construct a special inverse view-projection matrix that has the projection entries affecting z and w
		// eliminated. Used to transform a vector (clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in
		// clip space and view_z/view_w in view space, into world space.

		// Only projects z/w coordinates (cancels out with the inverse matrix below)
		Matrix4 projZ = Matrix4::IDENTITY;
		projZ[2][2] = mProperties.projTransform[2][2];
		projZ[2][3] = mProperties.projTransform[2][3];
		projZ[3][2] = mProperties.projTransform[3][2];
		projZ[3][3] = 0.0f;

		// Maps current-frame NDC to previous-frame NDC, for temporal effects
		Matrix4 NDCToPrevNDC = mProperties.prevViewProjTransform * invViewProj;

		gPerCameraParamDef.gMatScreenToWorld.set(mParamBuffer, invViewProj * projZ);
		gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, NDCToPrevNDC);
		gPerCameraParamDef.gViewDir.set(mParamBuffer, mProperties.viewDirection);
		gPerCameraParamDef.gViewOrigin.set(mParamBuffer, mProperties.viewOrigin);

		// Depth reconstruction helpers (see getDeviceZToViewZ/getNDCZToViewZ for the derivations)
		gPerCameraParamDef.gDeviceZToWorldZ.set(mParamBuffer, getDeviceZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToWorldZ.set(mParamBuffer, getNDCZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToDeviceZ.set(mParamBuffer, getNDCZToDeviceZ());

		Vector2 nearFar(mProperties.nearPlane, mProperties.farPlane);
		gPerCameraParamDef.gNearFar.set(mParamBuffer, nearFar);

		// Viewport rectangle as (x, y, width, height)
		const Rect2I& viewRect = mTargetDesc.viewRect;
		Vector4I viewportRect;
		viewportRect[0] = viewRect.x;
		viewportRect[1] = viewRect.y;
		viewportRect[2] = viewRect.width;
		viewportRect[3] = viewRect.height;

		gPerCameraParamDef.gViewportRectangle.set(mParamBuffer, viewportRect);

		Vector4 ndcToUV = getNDCToUV();
		gPerCameraParamDef.gClipToUVScaleOffset.set(mParamBuffer, ndcToUV);

		// When lighting is disabled, use full ambient contribution instead
		if (!mRenderSettings->enableLighting)
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 100.0f);
		else
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 0.0f);
	}
  380. Vector4 RendererView::getNDCToUV() const
  381. {
  382. RenderAPI& rapi = RenderAPI::instance();
  383. const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();
  384. const Rect2I& viewRect = mTargetDesc.viewRect;
  385. float halfWidth = viewRect.width * 0.5f;
  386. float halfHeight = viewRect.height * 0.5f;
  387. float rtWidth = mTargetDesc.targetWidth != 0 ? (float)mTargetDesc.targetWidth : 20.0f;
  388. float rtHeight = mTargetDesc.targetHeight != 0 ? (float)mTargetDesc.targetHeight : 20.0f;
  389. Vector4 ndcToUV;
  390. ndcToUV.x = halfWidth / rtWidth;
  391. ndcToUV.y = -halfHeight / rtHeight;
  392. ndcToUV.z = viewRect.x / rtWidth + (halfWidth + rapiInfo.getHorizontalTexelOffset()) / rtWidth;
  393. ndcToUV.w = viewRect.y / rtHeight + (halfHeight + rapiInfo.getVerticalTexelOffset()) / rtHeight;
  394. // Either of these flips the Y axis, but if they're both true they cancel out
  395. if (rapiInfo.isFlagSet(RenderAPIFeatureFlag::UVYAxisUp) ^ rapiInfo.isFlagSet(RenderAPIFeatureFlag::NDCYAxisDown))
  396. ndcToUV.y = -ndcToUV.y;
  397. return ndcToUV;
  398. }
  399. void RendererView::updateLightGrid(const VisibleLightData& visibleLightData,
  400. const VisibleReflProbeData& visibleReflProbeData)
  401. {
  402. mLightGrid.updateGrid(*this, visibleLightData, visibleReflProbeData, !mRenderSettings->enableLighting);
  403. }
  404. RendererViewGroup::RendererViewGroup()
  405. :mShadowRenderer(1024)
  406. { }
  407. RendererViewGroup::RendererViewGroup(RendererView** views, UINT32 numViews, UINT32 shadowMapSize)
  408. :mShadowRenderer(shadowMapSize)
  409. {
  410. setViews(views, numViews);
  411. }
  412. void RendererViewGroup::setViews(RendererView** views, UINT32 numViews)
  413. {
  414. mViews.clear();
  415. for (UINT32 i = 0; i < numViews; i++)
  416. {
  417. mViews.push_back(views[i]);
  418. views[i]->_setViewIdx(i);
  419. }
  420. }
  421. void RendererViewGroup::determineVisibility(const SceneInfo& sceneInfo)
  422. {
  423. UINT32 numViews = (UINT32)mViews.size();
  424. // Early exit if no views render scene geometry
  425. bool allViewsOverlay = false;
  426. for (UINT32 i = 0; i < numViews; i++)
  427. {
  428. if (!mViews[i]->getRenderSettings().overlayOnly)
  429. {
  430. allViewsOverlay = false;
  431. break;
  432. }
  433. }
  434. if (allViewsOverlay)
  435. return;
  436. // Generate render queues per camera
  437. mVisibility.renderables.resize(sceneInfo.renderables.size(), false);
  438. mVisibility.renderables.assign(sceneInfo.renderables.size(), false);
  439. for(UINT32 i = 0; i < numViews; i++)
  440. mViews[i]->determineVisible(sceneInfo.renderables, sceneInfo.renderableCullInfos, &mVisibility.renderables);
  441. // Calculate light visibility for all views
  442. UINT32 numRadialLights = (UINT32)sceneInfo.radialLights.size();
  443. mVisibility.radialLights.resize(numRadialLights, false);
  444. mVisibility.radialLights.assign(numRadialLights, false);
  445. UINT32 numSpotLights = (UINT32)sceneInfo.spotLights.size();
  446. mVisibility.spotLights.resize(numSpotLights, false);
  447. mVisibility.spotLights.assign(numSpotLights, false);
  448. for (UINT32 i = 0; i < numViews; i++)
  449. {
  450. if (mViews[i]->getRenderSettings().overlayOnly)
  451. continue;
  452. mViews[i]->determineVisible(sceneInfo.radialLights, sceneInfo.radialLightWorldBounds, LightType::Radial,
  453. &mVisibility.radialLights);
  454. mViews[i]->determineVisible(sceneInfo.spotLights, sceneInfo.spotLightWorldBounds, LightType::Spot,
  455. &mVisibility.spotLights);
  456. }
  457. // Calculate refl. probe visibility for all views
  458. UINT32 numProbes = (UINT32)sceneInfo.reflProbes.size();
  459. mVisibility.reflProbes.resize(numProbes, false);
  460. mVisibility.reflProbes.assign(numProbes, false);
  461. // Note: Per-view visibility for refl. probes currently isn't calculated
  462. for (UINT32 i = 0; i < numViews; i++)
  463. {
  464. const auto& viewProps = mViews[i]->getProperties();
  465. // Don't recursively render reflection probes when generating reflection probe maps
  466. if (viewProps.capturingReflections)
  467. continue;
  468. mViews[i]->calculateVisibility(sceneInfo.reflProbeWorldBounds, mVisibility.reflProbes);
  469. }
  470. // Organize light and refl. probe visibility infomation in a more GPU friendly manner
  471. // Note: I'm determining light and refl. probe visibility for the entire group. It might be more performance
  472. // efficient to do it per view. Additionally I'm using a single GPU buffer to hold their information, which is
  473. // then updated when each view group is rendered. It might be better to keep one buffer reserved per-view.
  474. mVisibleLightData.update(sceneInfo, *this);
  475. mVisibleReflProbeData.update(sceneInfo, *this);
  476. for (UINT32 i = 0; i < numViews; i++)
  477. {
  478. if (mViews[i]->getRenderSettings().overlayOnly)
  479. continue;
  480. mViews[i]->updateLightGrid(mVisibleLightData, mVisibleReflProbeData);
  481. }
  482. }
  483. }}