//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsRendererView.h"
#include "Renderer/BsCamera.h"
#include "Renderer/BsRenderable.h"
#include "Material/BsMaterial.h"
#include "Material/BsShader.h"
#include "Renderer/BsRendererUtility.h"
#include "BsLightRendering.h"
#include "Material/BsGpuParamsSet.h"
#include "BsRendererScene.h"

namespace bs { namespace ct
{
	PerCameraParamDef gPerCameraParamDef;
	SkyboxParamDef gSkyboxParamDef;

	ShaderVariation SkyboxMat::VAR_Texture = ShaderVariation({
		ShaderVariation::Param("SOLID_COLOR", false)
	});

	ShaderVariation SkyboxMat::VAR_Color = ShaderVariation({
		ShaderVariation::Param("SOLID_COLOR", true)
	});

	SkyboxMat::SkyboxMat()
	{
		SPtr<GpuParams> params = mParamsSet->getGpuParams();

		if(params->hasTexture(GPT_FRAGMENT_PROGRAM, "gSkyTex"))
			params->getTextureParam(GPT_FRAGMENT_PROGRAM, "gSkyTex", mSkyTextureParam);

		mParamBuffer = gSkyboxParamDef.createBuffer();

		if(params->hasParamBlock(GPT_FRAGMENT_PROGRAM, "Params"))
			mParamsSet->setParamBlockBuffer("Params", mParamBuffer, true);
	}

	void SkyboxMat::_initVariations(ShaderVariations& variations)
	{
		variations.add(VAR_Color);
		variations.add(VAR_Texture);
	}

	void SkyboxMat::bind(const SPtr<GpuParamBlockBuffer>& perCamera)
	{
		mParamsSet->setParamBlockBuffer("PerCamera", perCamera, true);

		gRendererUtility().setPass(mMaterial, 0);
	}

	void SkyboxMat::setParams(const SPtr<Texture>& texture, const Color& solidColor)
	{
		mSkyTextureParam.set(texture, TextureSurface(0, 1, 0, 0));
		gSkyboxParamDef.gClearColor.set(mParamBuffer, solidColor);
		mParamBuffer->flushToGPU();

		gRendererUtility().setPassParams(mParamsSet);
	}

	SkyboxMat* SkyboxMat::getVariation(bool color)
	{
		if (color)
			return get(VAR_Color);

		return get(VAR_Texture);
	}
	RendererViewData::RendererViewData()
		:encodeDepth(false), depthEncodeNear(0.0f), depthEncodeFar(0.0f)
	{
	}

	RendererViewProperties::RendererViewProperties(const RENDERER_VIEW_DESC& src)
		:RendererViewData(src), frameIdx(0)
	{
		viewProjTransform = src.projTransform * src.viewTransform;

		target = src.target.target;
		viewRect = src.target.viewRect;
		nrmViewRect = src.target.nrmViewRect;
		numSamples = src.target.numSamples;

		clearFlags = src.target.clearFlags;
		clearColor = src.target.clearColor;
		clearDepthValue = src.target.clearDepthValue;
		clearStencilValue = src.target.clearStencilValue;
	}

	RendererView::RendererView()
		: mCamera(nullptr), mRenderSettingsHash(0), mViewIdx(-1)
	{
		mParamBuffer = gPerCameraParamDef.createBuffer();
	}

	RendererView::RendererView(const RENDERER_VIEW_DESC& desc)
		: mProperties(desc), mTargetDesc(desc.target), mCamera(desc.sceneCamera), mRenderSettingsHash(0), mViewIdx(-1)
	{
		mParamBuffer = gPerCameraParamDef.createBuffer();
		mProperties.prevViewProjTransform = mProperties.viewProjTransform;

		setStateReductionMode(desc.stateReduction);
	}
	void RendererView::setStateReductionMode(StateReduction reductionMode)
	{
		mOpaqueQueue = bs_shared_ptr_new<RenderQueue>(reductionMode);

		StateReduction transparentStateReduction = reductionMode;
		if (transparentStateReduction == StateReduction::Material)
			transparentStateReduction = StateReduction::Distance; // Transparent objects MUST be sorted by distance

		mTransparentQueue = bs_shared_ptr_new<RenderQueue>(transparentStateReduction);
	}

	void RendererView::setRenderSettings(const SPtr<RenderSettings>& settings)
	{
		if (mRenderSettings == nullptr)
			mRenderSettings = bs_shared_ptr_new<RenderSettings>();

		if (settings != nullptr)
			*mRenderSettings = *settings;

		mRenderSettingsHash++;

		// Update compositor hierarchy (Note: This needs to be called even when the viewport size or other target
		// information changes, but currently all such changes are followed by a setRenderSettings() call, so we
		// handle it here.)
		mCompositor.build(*this, RCNodeFinalResolve::getNodeId());
	}
	void RendererView::setTransform(const Vector3& origin, const Vector3& direction, const Matrix4& view,
		const Matrix4& proj, const ConvexVolume& worldFrustum)
	{
		mProperties.viewOrigin = origin;
		mProperties.viewDirection = direction;
		mProperties.viewTransform = view;
		mProperties.projTransform = proj;
		mProperties.cullFrustum = worldFrustum;
		mProperties.viewProjTransform = proj * view;
	}

	void RendererView::setView(const RENDERER_VIEW_DESC& desc)
	{
		mCamera = desc.sceneCamera;
		mProperties = desc;
		mProperties.viewProjTransform = desc.projTransform * desc.viewTransform;
		mProperties.prevViewProjTransform = Matrix4::IDENTITY;
		mTargetDesc = desc.target;

		setStateReductionMode(desc.stateReduction);
	}
	void RendererView::beginFrame()
	{
		// Note: inverse view-projection can be cached, it doesn't change every frame
		Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
		Matrix4 invViewProj = viewProj.inverse();

		Matrix4 NDCToPrevNDC = mProperties.prevViewProjTransform * invViewProj;
		gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, NDCToPrevNDC);
	}

	void RendererView::endFrame()
	{
		// Save view-projection matrix to use for temporal filtering
		mProperties.prevViewProjTransform = mProperties.viewProjTransform;

		// Advance per-view frame index. This is used primarily by temporal rendering effects, and pausing the frame index
		// allows you to freeze the current rendering as is, without temporal artifacts.
		mProperties.frameIdx++;

		mOpaqueQueue->clear();
		mTransparentQueue->clear();
	}
	void RendererView::determineVisible(const Vector<RendererObject*>& renderables, const Vector<CullInfo>& cullInfos,
		Vector<bool>* visibility)
	{
		mVisibility.renderables.clear();
		mVisibility.renderables.resize(renderables.size(), false);

		if (mRenderSettings->overlayOnly)
			return;

		calculateVisibility(cullInfos, mVisibility.renderables);

		// Update per-object param buffers and queue render elements
		for(UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
		{
			if (!mVisibility.renderables[i])
				continue;

			const AABox& boundingBox = cullInfos[i].bounds.getBox();
			float distanceToCamera = (mProperties.viewOrigin - boundingBox.getCenter()).length();

			for (auto& renderElem : renderables[i]->elements)
			{
				// Note: I could keep opaque and transparent renderables in two separate arrays, so I don't need to do the
				// check here
				bool isTransparent = (renderElem.material->getShader()->getFlags() & (UINT32)ShaderFlags::Transparent) != 0;

				if (isTransparent)
					mTransparentQueue->add(&renderElem, distanceToCamera);
				else
					mOpaqueQueue->add(&renderElem, distanceToCamera);
			}
		}

		if(visibility != nullptr)
		{
			for (UINT32 i = 0; i < (UINT32)renderables.size(); i++)
			{
				bool visible = (*visibility)[i];
				(*visibility)[i] = visible || mVisibility.renderables[i];
			}
		}

		mOpaqueQueue->sort();
		mTransparentQueue->sort();
	}
	void RendererView::determineVisible(const Vector<RendererLight>& lights, const Vector<Sphere>& bounds,
		LightType lightType, Vector<bool>* visibility)
	{
		// Special case for directional lights, they're always visible
		if(lightType == LightType::Directional)
		{
			if (visibility)
				visibility->assign(lights.size(), true);

			return;
		}

		Vector<bool>* perViewVisibility;
		if(lightType == LightType::Radial)
		{
			mVisibility.radialLights.clear();
			mVisibility.radialLights.resize(lights.size(), false);

			perViewVisibility = &mVisibility.radialLights;
		}
		else // Spot
		{
			mVisibility.spotLights.clear();
			mVisibility.spotLights.resize(lights.size(), false);

			perViewVisibility = &mVisibility.spotLights;
		}

		if (mRenderSettings->overlayOnly)
			return;

		calculateVisibility(bounds, *perViewVisibility);

		if(visibility != nullptr)
		{
			for (UINT32 i = 0; i < (UINT32)lights.size(); i++)
			{
				bool visible = (*visibility)[i];
				(*visibility)[i] = visible || (*perViewVisibility)[i];
			}
		}
	}
	void RendererView::calculateVisibility(const Vector<CullInfo>& cullInfos, Vector<bool>& visibility) const
	{
		UINT64 cameraLayers = mProperties.visibleLayers;
		const ConvexVolume& worldFrustum = mProperties.cullFrustum;

		for (UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
		{
			if ((cullInfos[i].layer & cameraLayers) == 0)
				continue;

			// Do frustum culling
			// Note: This is bound to be a bottleneck at some point. When it is, ensure that the intersect methods use
			// vector operations, as it is trivial to update them. Also consider spatial partitioning.
			const Sphere& boundingSphere = cullInfos[i].bounds.getSphere();
			if (worldFrustum.intersects(boundingSphere))
			{
				// More precise with the box
				const AABox& boundingBox = cullInfos[i].bounds.getBox();

				if (worldFrustum.intersects(boundingBox))
					visibility[i] = true;
			}
		}
	}
	void RendererView::calculateVisibility(const Vector<Sphere>& bounds, Vector<bool>& visibility) const
	{
		const ConvexVolume& worldFrustum = mProperties.cullFrustum;

		for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
		{
			if (worldFrustum.intersects(bounds[i]))
				visibility[i] = true;
		}
	}

	void RendererView::calculateVisibility(const Vector<AABox>& bounds, Vector<bool>& visibility) const
	{
		const ConvexVolume& worldFrustum = mProperties.cullFrustum;

		for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
		{
			if (worldFrustum.intersects(bounds[i]))
				visibility[i] = true;
		}
	}
	Vector2 RendererView::getDeviceZToViewZ(const Matrix4& projMatrix)
	{
		// Returns a set of values that will transform depth buffer values (in range [0, 1]) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
		// value). The hardware will also automatically divide the z value by w to get the depth, therefore the final
		// formula is:
		// depth = (Az + B) / (C * z)
		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)
		// However some APIs will also do a transformation on the depth values before storing them to the texture
		// (e.g. OpenGL will transform from [-1, 1] to [0, 1]). And we need to reverse that as well. Therefore the final
		// formula is:
		// z = B / ((depth * (maxDepth - minDepth) + minDepth) * C - A)
		// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth + minDepth/(maxDepth - minDepth) - A/((maxDepth - minDepth) * C)) * B/((maxDepth - minDepth) * C)
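		// Sanity check (an illustrative note, not part of the original derivation): with a D3D-style depth range where
		// minDepth = 0 and maxDepth = 1, the expression above collapses to z = 1.0f / (depth - A/C) * B/C, which is
		// exactly the mapping computed by getNDCZToViewZ() below, as expected since device and NDC depth coincide there.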
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		float depthRange = rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue();
		float minDepth = rapiInfo.getMinimumDepthInputValue();

		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;
		if (c != 0.0f)
		{
			output.x = b / (depthRange * c);
			output.y = minDepth / depthRange - a / (depthRange * c);
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			output.x = b / -depthRange;
			output.y = minDepth / depthRange - a / -depthRange;
		}

		return output;
	}
	Vector2 RendererView::getNDCZToViewZ(const Matrix4& projMatrix)
	{
		// Returns a set of values that will transform depth buffer values (e.g. [0, 1] in DX, [-1, 1] in GL) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
		// value). The hardware will also automatically divide the z value by w to get the depth, therefore the final
		// formula is:
		// depth = (Az + B) / (C * z)
		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)
		// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth - A/C) * B/C
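		// Worked example (an added illustration; it assumes a GL-style perspective projection with A = -(f+n)/(f-n),
		// B = -2fn/(f-n) and C = -1, which is not necessarily the convention of every projection passed in here):
		// at the near plane the NDC depth is -1, giving z = B / (-C - A) = -n, and at the far plane (NDC depth +1)
		// z = B / (C - A) = -f, i.e. view-space depths along the negative Z viewing direction, as expected.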
		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;
		if (c != 0.0f)
		{
			output.x = b / c;
			output.y = -a / c;
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			output.x = -b;
			output.y = a;
		}

		return output;
	}
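	// Descriptive note (added for clarity; the exact shader-side use is an assumption based on how the values are
	// computed): the pair returned below maps NDC depth to normalized device depth as deviceZ = (ndcZ + y) * x. For
	// example, with an OpenGL-style depth input range of [-1, 1] this yields x = 0.5 and y = 1, i.e.
	// deviceZ = (ndcZ + 1) * 0.5, while with a [0, 1] range it is simply the identity mapping.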
	Vector2 RendererView::getNDCZToDeviceZ()
	{
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		Vector2 ndcZToDeviceZ;
		ndcZToDeviceZ.x = 1.0f / (rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue());
		ndcZToDeviceZ.y = -rapiInfo.getMinimumDepthInputValue();

		return ndcZToDeviceZ;
	}
	Matrix4 invertProjectionMatrix(const Matrix4& mat)
	{
		// Try to solve the most common case using high precision calculations, in order to reduce depth error
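		// For reference (a descriptive comment added here; the layout is inferred from the checks below), the fast
		// path expects a right-handed perspective matrix of the form
		//   [ a 0 s 0 ]
		//   [ 0 b t 0 ]
		//   [ 0 0 c d ]
		//   [ 0 0 -1 0 ]
		// whose inverse can be written in closed form in double precision, avoiding the error a generic 4x4 inverse
		// would accumulate in the depth-related entries.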
		if(mat[0][1] == 0.0f && mat[0][3] == 0.0f &&
			mat[1][0] == 0.0f && mat[1][3] == 0.0f &&
			mat[2][0] == 0.0f && mat[2][1] == 0.0f &&
			mat[3][0] == 0.0f && mat[3][1] == 0.0f &&
			mat[3][2] == -1.0f && mat[3][3] == 0.0f)
		{
			double a = mat[0][0];
			double b = mat[1][1];
			double c = mat[2][2];
			double d = mat[2][3];
			double s = mat[0][2];
			double t = mat[1][2];

			return Matrix4(
				(float)(1.0/a), 0.0f, 0.0f, (float)(-s/a),
				0.0f, (float)(1.0/b), 0.0f, (float)(-t/b),
				0.0f, 0.0f, 0.0f, -1.0f,
				0.0f, 0.0f, (float)(1.0/d), (float)(c/d)
			);
		}
		else
		{
			return mat.inverse();
		}
	}
	void RendererView::updatePerViewBuffer()
	{
		Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
		Matrix4 invProj = invertProjectionMatrix(mProperties.projTransform);
		Matrix4 invView = mProperties.viewTransform.inverseAffine();
		Matrix4 invViewProj = invView * invProj;

		gPerCameraParamDef.gMatProj.set(mParamBuffer, mProperties.projTransform);
		gPerCameraParamDef.gMatView.set(mParamBuffer, mProperties.viewTransform);
		gPerCameraParamDef.gMatViewProj.set(mParamBuffer, viewProj);
		gPerCameraParamDef.gMatInvViewProj.set(mParamBuffer, invViewProj);
		gPerCameraParamDef.gMatInvProj.set(mParamBuffer, invProj);

		// Construct a special inverse view-projection matrix that had the projection entries that affect z and w
		// eliminated. Used to transform a vector (clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in clip
		// space, and view_z/view_w in view space, into world space.

		// Only projects z/w coordinates (cancels out with the inverse matrix below)
		Matrix4 projZ = Matrix4::IDENTITY;
		projZ[2][2] = mProperties.projTransform[2][2];
		projZ[2][3] = mProperties.projTransform[2][3];
		projZ[3][2] = mProperties.projTransform[3][2];
		projZ[3][3] = 0.0f;

		Matrix4 NDCToPrevNDC = mProperties.prevViewProjTransform * invViewProj;

		gPerCameraParamDef.gMatScreenToWorld.set(mParamBuffer, invViewProj * projZ);
		gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, NDCToPrevNDC);
		gPerCameraParamDef.gViewDir.set(mParamBuffer, mProperties.viewDirection);
		gPerCameraParamDef.gViewOrigin.set(mParamBuffer, mProperties.viewOrigin);
		gPerCameraParamDef.gDeviceZToWorldZ.set(mParamBuffer, getDeviceZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToWorldZ.set(mParamBuffer, getNDCZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToDeviceZ.set(mParamBuffer, getNDCZToDeviceZ());

		Vector2 nearFar(mProperties.nearPlane, mProperties.farPlane);
		gPerCameraParamDef.gNearFar.set(mParamBuffer, nearFar);

		const Rect2I& viewRect = mTargetDesc.viewRect;
		Vector4I viewportRect;
		viewportRect[0] = viewRect.x;
		viewportRect[1] = viewRect.y;
		viewportRect[2] = viewRect.width;
		viewportRect[3] = viewRect.height;

		gPerCameraParamDef.gViewportRectangle.set(mParamBuffer, viewportRect);

		Vector4 ndcToUV = getNDCToUV();
		gPerCameraParamDef.gClipToUVScaleOffset.set(mParamBuffer, ndcToUV);

		if (!mRenderSettings->enableLighting)
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 100.0f);
		else
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 0.0f);
	}
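	// Descriptive note (added for clarity; the shader-side formula is an assumption based on how the values are built
	// below): the vector returned by getNDCToUV() is a scale/offset pair applied as uv = ndc.xy * ndcToUV.xy + ndcToUV.zw.
	// For a viewport covering the whole render target with zero texel offsets it evaluates to (0.5, -0.5, 0.5, 0.5),
	// i.e. uv.x = ndcX * 0.5 + 0.5 and uv.y = ndcY * -0.5 + 0.5, mapping [-1, 1] NDC to [0, 1] UV with Y flipped.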
	Vector4 RendererView::getNDCToUV() const
	{
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		const Rect2I& viewRect = mTargetDesc.viewRect;

		float halfWidth = viewRect.width * 0.5f;
		float halfHeight = viewRect.height * 0.5f;

		float rtWidth = mTargetDesc.targetWidth != 0 ? (float)mTargetDesc.targetWidth : 20.0f;
		float rtHeight = mTargetDesc.targetHeight != 0 ? (float)mTargetDesc.targetHeight : 20.0f;

		Vector4 ndcToUV;
		ndcToUV.x = halfWidth / rtWidth;
		ndcToUV.y = -halfHeight / rtHeight;
		ndcToUV.z = viewRect.x / rtWidth + (halfWidth + rapiInfo.getHorizontalTexelOffset()) / rtWidth;
		ndcToUV.w = viewRect.y / rtHeight + (halfHeight + rapiInfo.getVerticalTexelOffset()) / rtHeight;

		// Either of these flips the Y axis, but if they're both true they cancel out
		if (rapiInfo.isFlagSet(RenderAPIFeatureFlag::UVYAxisUp) ^ rapiInfo.isFlagSet(RenderAPIFeatureFlag::NDCYAxisDown))
			ndcToUV.y = -ndcToUV.y;

		return ndcToUV;
	}
	void RendererView::updateLightGrid(const VisibleLightData& visibleLightData,
		const VisibleReflProbeData& visibleReflProbeData)
	{
		mLightGrid.updateGrid(*this, visibleLightData, visibleReflProbeData, !mRenderSettings->enableLighting);
	}

	RendererViewGroup::RendererViewGroup()
		:mShadowRenderer(1024)
	{ }

	RendererViewGroup::RendererViewGroup(RendererView** views, UINT32 numViews, UINT32 shadowMapSize)
		:mShadowRenderer(shadowMapSize)
	{
		setViews(views, numViews);
	}

	void RendererViewGroup::setViews(RendererView** views, UINT32 numViews)
	{
		mViews.clear();

		for (UINT32 i = 0; i < numViews; i++)
		{
			mViews.push_back(views[i]);
			views[i]->_setViewIdx(i);
		}
	}
	void RendererViewGroup::determineVisibility(const SceneInfo& sceneInfo)
	{
		UINT32 numViews = (UINT32)mViews.size();

		// Early exit if no views render scene geometry
		bool allViewsOverlay = true;
		for (UINT32 i = 0; i < numViews; i++)
		{
			if (!mViews[i]->getRenderSettings().overlayOnly)
			{
				allViewsOverlay = false;
				break;
			}
		}

		if (allViewsOverlay)
			return;

		// Generate render queues per camera
		mVisibility.renderables.resize(sceneInfo.renderables.size(), false);
		mVisibility.renderables.assign(sceneInfo.renderables.size(), false);

		for(UINT32 i = 0; i < numViews; i++)
			mViews[i]->determineVisible(sceneInfo.renderables, sceneInfo.renderableCullInfos, &mVisibility.renderables);

		// Calculate light visibility for all views
		UINT32 numRadialLights = (UINT32)sceneInfo.radialLights.size();
		mVisibility.radialLights.resize(numRadialLights, false);
		mVisibility.radialLights.assign(numRadialLights, false);

		UINT32 numSpotLights = (UINT32)sceneInfo.spotLights.size();
		mVisibility.spotLights.resize(numSpotLights, false);
		mVisibility.spotLights.assign(numSpotLights, false);

		for (UINT32 i = 0; i < numViews; i++)
		{
			if (mViews[i]->getRenderSettings().overlayOnly)
				continue;

			mViews[i]->determineVisible(sceneInfo.radialLights, sceneInfo.radialLightWorldBounds, LightType::Radial,
				&mVisibility.radialLights);

			mViews[i]->determineVisible(sceneInfo.spotLights, sceneInfo.spotLightWorldBounds, LightType::Spot,
				&mVisibility.spotLights);
		}

		// Calculate refl. probe visibility for all views
		UINT32 numProbes = (UINT32)sceneInfo.reflProbes.size();
		mVisibility.reflProbes.resize(numProbes, false);
		mVisibility.reflProbes.assign(numProbes, false);

		// Note: Per-view visibility for refl. probes currently isn't calculated
		for (UINT32 i = 0; i < numViews; i++)
		{
			const auto& viewProps = mViews[i]->getProperties();

			// Don't recursively render reflection probes when generating reflection probe maps
			if (viewProps.renderingReflections)
				continue;

			mViews[i]->calculateVisibility(sceneInfo.reflProbeWorldBounds, mVisibility.reflProbes);
		}

		// Organize light and refl. probe visibility information in a more GPU-friendly manner
		// Note: I'm determining light and refl. probe visibility for the entire group. It might be more efficient
		// to do it per view. Additionally I'm using a single GPU buffer to hold their information, which is then
		// updated when each view group is rendered. It might be better to keep one buffer reserved per-view.
		mVisibleLightData.update(sceneInfo, *this);
		mVisibleReflProbeData.update(sceneInfo, *this);

		for (UINT32 i = 0; i < numViews; i++)
		{
			if (mViews[i]->getRenderSettings().overlayOnly)
				continue;

			mViews[i]->updateLightGrid(mVisibleLightData, mVisibleReflProbeData);
		}
	}
}}