BsRendererView.cpp 18 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsRendererView.h"
  4. #include "BsCamera.h"
  5. #include "BsRenderable.h"
  6. #include "BsMaterial.h"
  7. #include "BsShader.h"
  8. #include "BsRenderTargets.h"
  9. #include "BsRendererUtility.h"
  10. #include "BsLightRendering.h"
  11. #include "BsGpuParamsSet.h"
  12. #include "BsRendererScene.h"
  13. namespace bs { namespace ct
  14. {
	// Global GPU parameter-block definitions shared by all views in this translation unit.
	// Used to create and fill the per-camera and skybox constant buffers below.
	PerCameraParamDef gPerCameraParamDef;
	SkyboxParamDef gSkyboxParamDef;
	template<bool SOLID_COLOR>
	SkyboxMat<SOLID_COLOR>::SkyboxMat()
	{
		// Cache the sky texture parameter handle if this shader variation exposes it
		// (the solid-color variation may not declare a texture input).
		SPtr<GpuParams> params = mParamsSet->getGpuParams();
		if(params->hasTexture(GPT_FRAGMENT_PROGRAM, "gSkyTex"))
			params->getTextureParam(GPT_FRAGMENT_PROGRAM, "gSkyTex", mSkyTextureParam);

		// Per-material constant buffer (holds e.g. the clear color); only bound when
		// the shader actually declares a "Params" block.
		mParamBuffer = gSkyboxParamDef.createBuffer();
		if(params->hasParamBlock(GPT_FRAGMENT_PROGRAM, "Params"))
			mParamsSet->setParamBlockBuffer("Params", mParamBuffer, true);
	}
  27. template<bool SOLID_COLOR>
  28. void SkyboxMat<SOLID_COLOR>::_initDefines(ShaderDefines& defines)
  29. {
  30. if (SOLID_COLOR)
  31. defines.set("SOLID_COLOR", 1);
  32. }
	template<bool SOLID_COLOR>
	void SkyboxMat<SOLID_COLOR>::bind(const SPtr<GpuParamBlockBuffer>& perCamera)
	{
		// Hook up the caller-provided per-camera constant buffer, then make this
		// material's first (and only) pass the active one.
		mParamsSet->setParamBlockBuffer("PerCamera", perCamera, true);
		gRendererUtility().setPass(mMaterial, 0);
	}
	template<bool SOLID_COLOR>
	void SkyboxMat<SOLID_COLOR>::setParams(const SPtr<Texture>& texture, const Color& solidColor)
	{
		// NOTE(review): TextureSurface(1, 1, 0, 0) selects a specific mip/face sub-range of
		// the texture — confirm these indices are the intended ones for sampling the sky map.
		mSkyTextureParam.set(texture, TextureSurface(1, 1, 0, 0));

		// Upload the solid color (used by the SOLID_COLOR variation) and flush so the
		// GPU sees the new value before the pass parameters are bound.
		gSkyboxParamDef.gClearColor.set(mParamBuffer, solidColor);
		mParamBuffer->flushToGPU();

		gRendererUtility().setPassParams(mParamsSet);
	}
  47. RendererViewProperties::RendererViewProperties(const RENDERER_VIEW_DESC& src)
  48. :RendererViewData(src)
  49. {
  50. viewProjTransform = src.projTransform * src.viewTransform;
  51. target = src.target.target;
  52. viewRect = src.target.viewRect;
  53. nrmViewRect = src.target.nrmViewRect;
  54. numSamples = src.target.numSamples;
  55. clearFlags = src.target.clearFlags;
  56. clearColor = src.target.clearColor;
  57. clearDepthValue = src.target.clearDepthValue;
  58. clearStencilValue = src.target.clearStencilValue;
  59. }
	RendererView::RendererView()
		: mUsingGBuffer(false)
	{
		// Allocate the per-camera constant buffer; filled later by updatePerViewBuffer().
		mParamBuffer = gPerCameraParamDef.createBuffer();
	}
	RendererView::RendererView(const RENDERER_VIEW_DESC& desc)
		: mProperties(desc), mTargetDesc(desc.target), mCamera(desc.sceneCamera), mUsingGBuffer(false)
	{
		mParamBuffer = gPerCameraParamDef.createBuffer();

		// No previous frame exists yet, so seed the previous view-projection transform
		// with the current one (consumed by temporal effects; see endFrame()).
		mProperties.prevViewProjTransform = mProperties.viewProjTransform;

		setStateReductionMode(desc.stateReduction);
	}
  72. void RendererView::setStateReductionMode(StateReduction reductionMode)
  73. {
  74. mOpaqueQueue = bs_shared_ptr_new<RenderQueue>(reductionMode);
  75. StateReduction transparentStateReduction = reductionMode;
  76. if (transparentStateReduction == StateReduction::Material)
  77. transparentStateReduction = StateReduction::Distance; // Transparent object MUST be sorted by distance
  78. mTransparentQueue = bs_shared_ptr_new<RenderQueue>(transparentStateReduction);
  79. }
	void RendererView::setPostProcessSettings(const SPtr<PostProcessSettings>& ppSettings)
	{
		// Lazily allocate the settings object the first time any settings are assigned.
		if (mPostProcessInfo.settings == nullptr)
			mPostProcessInfo.settings = bs_shared_ptr_new<StandardPostProcessSettings>();

		// NOTE(review): static_pointer_cast only yields null when ppSettings itself is null.
		// If a caller ever passed a non-Standard PostProcessSettings subclass, the cast would
		// silently misinterpret it — confirm callers only pass StandardPostProcessSettings or null.
		SPtr<StandardPostProcessSettings> stdPPSettings = std::static_pointer_cast<StandardPostProcessSettings>(ppSettings);

		if (stdPPSettings != nullptr)
			*mPostProcessInfo.settings = *stdPPSettings;
		else
			*mPostProcessInfo.settings = StandardPostProcessSettings(); // Null input resets to defaults

		// Flag so dependent GPU data is rebuilt before next use.
		mPostProcessInfo.settingDirty = true;
	}
	// Updates all camera-transform-derived state of the view in one call.
	// Note: prevViewProjTransform is deliberately NOT touched here; it is advanced in endFrame().
	void RendererView::setTransform(const Vector3& origin, const Vector3& direction, const Matrix4& view,
		const Matrix4& proj, const ConvexVolume& worldFrustum)
	{
		mProperties.viewOrigin = origin;
		mProperties.viewDirection = direction;
		mProperties.viewTransform = view;
		mProperties.projTransform = proj;
		mProperties.cullFrustum = worldFrustum;

		// Keep the cached combined transform in sync (same proj * view order as the constructor).
		mProperties.viewProjTransform = proj * view;
	}
	void RendererView::setView(const RENDERER_VIEW_DESC& desc)
	{
		// Render target textures are sized to the output; drop them so they get
		// recreated at the new resolution on the next beginFrame().
		if (mTargetDesc.targetWidth != desc.target.targetWidth ||
			mTargetDesc.targetHeight != desc.target.targetHeight)
			mRenderTargets = nullptr;

		mCamera = desc.sceneCamera;
		mProperties = desc;
		mTargetDesc = desc.target;

		setStateReductionMode(desc.stateReduction);
	}
	void RendererView::beginFrame(bool useGBuffer)
	{
		if (useGBuffer)
		{
			// Render scene objects to g-buffer. (Re)create the targets when missing, or when the
			// HDR / MSAA configuration changed since they were last built. Size changes are handled
			// separately in setView(), which nulls out mRenderTargets.
			bool createGBuffer = mRenderTargets == nullptr ||
				mRenderTargets->getHDR() != mProperties.isHDR ||
				mRenderTargets->getNumSamples() != mTargetDesc.numSamples;

			if (createGBuffer)
				mRenderTargets = RenderTargets::create(mTargetDesc, mProperties.isHDR);

			mRenderTargets->prepare();
			mUsingGBuffer = true; // Remembered so endFrame() knows to clean the targets up
		}
	}
	void RendererView::endFrame()
	{
		// Save view-projection matrix to use for temporal filtering
		mProperties.prevViewProjTransform = mProperties.viewProjTransform;

		// Queues are rebuilt each frame by determineVisible().
		mOpaqueQueue->clear();
		mTransparentQueue->clear();

		// Release per-frame g-buffer resources allocated in beginFrame().
		if(mUsingGBuffer)
		{
			mRenderTargets->cleanup();
			mUsingGBuffer = false;
		}
	}
	// Computes per-renderable visibility for this view, queues the visible render elements into the
	// opaque/transparent queues, and (optionally) ORs this view's visibility into @p visibility.
	// @p visibility, when non-null, is assumed to already be sized to renderables.size() by the caller.
	void RendererView::determineVisible(const Vector<RendererObject*>& renderables, const Vector<CullInfo>& cullInfos,
		Vector<bool>* visibility)
	{
		mVisibility.renderables.clear();
		mVisibility.renderables.resize(renderables.size(), false);

		// Overlay views draw no scene geometry; leave everything invisible.
		if (mProperties.isOverlay)
			return;

		calculateVisibility(cullInfos, mVisibility.renderables);

		// Update per-object param buffers and queue render elements
		for(UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
		{
			if (!mVisibility.renderables[i])
				continue;

			// Distance from the camera to the bounds center, used as the sort key for the queues.
			const AABox& boundingBox = cullInfos[i].bounds.getBox();
			float distanceToCamera = (mProperties.viewOrigin - boundingBox.getCenter()).length();

			for (auto& renderElem : renderables[i]->elements)
			{
				// Note: I could keep opaque and transparent renderables in two separate arrays, so I don't need to do the
				// check here
				bool isTransparent = (renderElem.material->getShader()->getFlags() & (UINT32)ShaderFlags::Transparent) != 0;

				if (isTransparent)
					mTransparentQueue->add(&renderElem, distanceToCamera);
				else
					mOpaqueQueue->add(&renderElem, distanceToCamera);
			}
		}

		// Merge this view's visibility into the caller's aggregate (logical OR per entry).
		if(visibility != nullptr)
		{
			for (UINT32 i = 0; i < (UINT32)renderables.size(); i++)
			{
				bool visible = (*visibility)[i];

				(*visibility)[i] = visible || mVisibility.renderables[i];
			}
		}

		mOpaqueQueue->sort();
		mTransparentQueue->sort();
	}
	// Computes per-light visibility for this view for the given light type, and (optionally) ORs
	// the result into @p visibility. @p visibility, when non-null, is assumed to already be sized
	// to lights.size() by the caller.
	void RendererView::determineVisible(const Vector<RendererLight>& lights, const Vector<Sphere>& bounds,
		LightType lightType, Vector<bool>* visibility)
	{
		// Special case for directional lights, they're always visible
		if(lightType == LightType::Directional)
		{
			if (visibility)
				visibility->assign(lights.size(), true);

			return;
		}

		// Select (and reset) the per-view visibility vector matching the light type.
		Vector<bool>* perViewVisibility;
		if(lightType == LightType::Radial)
		{
			mVisibility.radialLights.clear();
			mVisibility.radialLights.resize(lights.size(), false);

			perViewVisibility = &mVisibility.radialLights;
		}
		else // Spot
		{
			mVisibility.spotLights.clear();
			mVisibility.spotLights.resize(lights.size(), false);

			perViewVisibility = &mVisibility.spotLights;
		}

		// Overlay views don't render lights; leave everything invisible.
		if (mProperties.isOverlay)
			return;

		calculateVisibility(bounds, *perViewVisibility);

		// Merge this view's visibility into the caller's aggregate (logical OR per entry).
		if(visibility != nullptr)
		{
			for (UINT32 i = 0; i < (UINT32)lights.size(); i++)
			{
				bool visible = (*visibility)[i];

				(*visibility)[i] = visible || (*perViewVisibility)[i];
			}
		}
	}
  209. void RendererView::calculateVisibility(const Vector<CullInfo>& cullInfos, Vector<bool>& visibility) const
  210. {
  211. UINT64 cameraLayers = mProperties.visibleLayers;
  212. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  213. for (UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
  214. {
  215. if ((cullInfos[i].layer & cameraLayers) == 0)
  216. continue;
  217. // Do frustum culling
  218. // Note: This is bound to be a bottleneck at some point. When it is ensure that intersect methods use vector
  219. // operations, as it is trivial to update them. Also consider spatial partitioning.
  220. const Sphere& boundingSphere = cullInfos[i].bounds.getSphere();
  221. if (worldFrustum.intersects(boundingSphere))
  222. {
  223. // More precise with the box
  224. const AABox& boundingBox = cullInfos[i].bounds.getBox();
  225. if (worldFrustum.intersects(boundingBox))
  226. visibility[i] = true;
  227. }
  228. }
  229. }
  230. void RendererView::calculateVisibility(const Vector<Sphere>& bounds, Vector<bool>& visibility) const
  231. {
  232. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  233. for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
  234. {
  235. if (worldFrustum.intersects(bounds[i]))
  236. visibility[i] = true;
  237. }
  238. }
  239. void RendererView::calculateVisibility(const Vector<AABox>& bounds, Vector<bool>& visibility) const
  240. {
  241. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  242. for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
  243. {
  244. if (worldFrustum.intersects(bounds[i]))
  245. visibility[i] = true;
  246. }
  247. }
	Vector2 RendererView::getDeviceZToViewZ(const Matrix4& projMatrix)
	{
		// Returns a set of values that will transform depth buffer values (in range [0, 1]) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
		// value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
		// formula is:
		// depth = (Az + B) / (C * z)

		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)

		// However some APIs will also do a transformation on the depth values before storing them to the texture
		// (e.g. OpenGL will transform from [-1, 1] to [0, 1]). And we need to reverse that as well. Therefore the final
		// formula is:
		// z = B / ((depth * (maxDepth - minDepth) + minDepth) * C - A)

		// Then we reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth + minDepth/(maxDepth - minDepth) - A/((maxDepth - minDepth) * C)) * B/((maxDepth - minDepth) * C)

		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		float depthRange = rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue();
		float minDepth = rapiInfo.getMinimumDepthInputValue();

		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;

		if (c != 0.0f)
		{
			// Perspective projection: C is non-zero, use the full formula derived above.
			output.x = b / (depthRange * c);
			output.y = minDepth / depthRange - a / (depthRange * c);
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			// With C == 0 there is no perspective divide; -1 takes C's place in the formula.
			output.x = b / -depthRange;
			output.y = minDepth / depthRange - a / -depthRange;
		}

		return output;
	}
	Vector2 RendererView::getNDCZToViewZ(const Matrix4& projMatrix)
	{
		// Returns a set of values that will transform depth buffer values (e.g. [0, 1] in DX, [-1, 1] in GL) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
		// value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
		// formula is:
		// depth = (Az + B) / (C * z)

		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)

		// Then we reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth - A/C) * B/C

		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;

		if (c != 0.0f)
		{
			// Perspective projection: C is non-zero, use the full formula derived above.
			output.x = b / c;
			output.y = -a / c;
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			// With C == 0 there is no perspective divide; -1 takes C's place in the formula.
			output.x = -b;
			output.y = a;
		}

		return output;
	}
  316. Vector2 RendererView::getNDCZToDeviceZ()
  317. {
  318. RenderAPI& rapi = RenderAPI::instance();
  319. const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();
  320. Vector2 ndcZToDeviceZ;
  321. ndcZToDeviceZ.x = 1.0f / (rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue());
  322. ndcZToDeviceZ.y = -rapiInfo.getMinimumDepthInputValue();
  323. return ndcZToDeviceZ;
  324. }
	// Fills the per-camera GPU constant buffer with all view-dependent data: transforms, depth
	// conversion factors, viewport rectangle and clip-to-UV mapping.
	void RendererView::updatePerViewBuffer()
	{
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
		Matrix4 invViewProj = viewProj.inverse();

		gPerCameraParamDef.gMatProj.set(mParamBuffer, mProperties.projTransform);
		gPerCameraParamDef.gMatView.set(mParamBuffer, mProperties.viewTransform);
		gPerCameraParamDef.gMatViewProj.set(mParamBuffer, viewProj);
		gPerCameraParamDef.gMatInvViewProj.set(mParamBuffer, invViewProj); // Note: Calculate inverses separately (better precision possibly)
		gPerCameraParamDef.gMatInvProj.set(mParamBuffer, mProperties.projTransform.inverse());

		// Construct a special inverse view-projection matrix that had projection entries that effect z and w eliminated.
		// Used to transform a vector(clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in clip space, and
		// view_z/view_w in view space, into world space.

		// Only projects z/w coordinates (cancels out with the inverse matrix below)
		Matrix4 projZ = Matrix4::IDENTITY;
		projZ[2][2] = mProperties.projTransform[2][2];
		projZ[2][3] = mProperties.projTransform[2][3];
		projZ[3][2] = mProperties.projTransform[3][2];
		projZ[3][3] = 0.0f;

		gPerCameraParamDef.gMatScreenToWorld.set(mParamBuffer, invViewProj * projZ);
		// Maps current-frame NDC to previous-frame NDC (used for temporal reprojection).
		gPerCameraParamDef.gNDCToPrevNDC.set(mParamBuffer, mProperties.prevViewProjTransform * invViewProj);
		gPerCameraParamDef.gViewDir.set(mParamBuffer, mProperties.viewDirection);
		gPerCameraParamDef.gViewOrigin.set(mParamBuffer, mProperties.viewOrigin);
		gPerCameraParamDef.gDeviceZToWorldZ.set(mParamBuffer, getDeviceZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToWorldZ.set(mParamBuffer, getNDCZToViewZ(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToDeviceZ.set(mParamBuffer, getNDCZToDeviceZ());

		Vector2 nearFar(mProperties.nearPlane, mProperties.farPlane);
		gPerCameraParamDef.gNearFar.set(mParamBuffer, nearFar);

		// Viewport rectangle packed as (x, y, width, height).
		const Rect2I& viewRect = mTargetDesc.viewRect;
		Vector4I viewportRect;
		viewportRect[0] = viewRect.x;
		viewportRect[1] = viewRect.y;
		viewportRect[2] = viewRect.width;
		viewportRect[3] = viewRect.height;

		gPerCameraParamDef.gViewportRectangle.set(mParamBuffer, viewportRect);

		// Scale/offset mapping clip-space XY onto the viewport's UV sub-rectangle, including the
		// API-specific half-texel offset.
		float halfWidth = viewRect.width * 0.5f;
		float halfHeight = viewRect.height * 0.5f;

		// NOTE(review): 20.0f is an arbitrary fallback for an unset target size — confirm intent.
		float rtWidth = mTargetDesc.targetWidth != 0 ? (float)mTargetDesc.targetWidth : 20.0f;
		float rtHeight = mTargetDesc.targetHeight != 0 ? (float)mTargetDesc.targetHeight : 20.0f;

		Vector4 clipToUVScaleOffset;
		clipToUVScaleOffset.x = halfWidth / rtWidth;
		clipToUVScaleOffset.y = -halfHeight / rtHeight;
		clipToUVScaleOffset.z = viewRect.x / rtWidth + (halfWidth + rapiInfo.getHorizontalTexelOffset()) / rtWidth;
		clipToUVScaleOffset.w = viewRect.y / rtHeight + (halfHeight + rapiInfo.getVerticalTexelOffset()) / rtHeight;

		// Either of these flips the Y axis, but if they're both true they cancel out
		if (rapiInfo.isFlagSet(RenderAPIFeatureFlag::UVYAxisUp) ^ rapiInfo.isFlagSet(RenderAPIFeatureFlag::NDCYAxisDown))
			clipToUVScaleOffset.y = -clipToUVScaleOffset.y;

		gPerCameraParamDef.gClipToUVScaleOffset.set(mParamBuffer, clipToUVScaleOffset);

		// NOTE(review): 100.0f acts as an "unlit" ambient boost when lighting is disabled — confirm
		// this magic value matches what the shader expects.
		if (mProperties.noLighting)
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 100.0f);
		else
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 0.0f);
	}
	// Explicit instantiations for the two supported skybox variations: solid-color and textured.
	template class SkyboxMat<true>;
	template class SkyboxMat<false>;
	// Constructs the group from an array of view pointers; delegates to setViews().
	RendererViewGroup::RendererViewGroup(RendererView** views, UINT32 numViews)
	{
		setViews(views, numViews);
	}
  385. void RendererViewGroup::setViews(RendererView** views, UINT32 numViews)
  386. {
  387. mViews.clear();
  388. for (UINT32 i = 0; i < numViews; i++)
  389. mViews.push_back(views[i]);
  390. }
  391. void RendererViewGroup::determineVisibility(const SceneInfo& sceneInfo)
  392. {
  393. UINT32 numViews = (UINT32)mViews.size();
  394. // Generate render queues per camera
  395. mVisibility.renderables.resize(sceneInfo.renderables.size(), false);
  396. mVisibility.renderables.assign(sceneInfo.renderables.size(), false);
  397. for(UINT32 i = 0; i < numViews; i++)
  398. mViews[i]->determineVisible(sceneInfo.renderables, sceneInfo.renderableCullInfos, &mVisibility.renderables);
  399. // Calculate light visibility for all views
  400. UINT32 numRadialLights = (UINT32)sceneInfo.radialLights.size();
  401. mVisibility.radialLights.resize(numRadialLights, false);
  402. mVisibility.radialLights.assign(numRadialLights, false);
  403. UINT32 numSpotLights = (UINT32)sceneInfo.spotLights.size();
  404. mVisibility.spotLights.resize(numSpotLights, false);
  405. mVisibility.spotLights.assign(numSpotLights, false);
  406. for (UINT32 i = 0; i < numViews; i++)
  407. {
  408. mViews[i]->determineVisible(sceneInfo.radialLights, sceneInfo.radialLightWorldBounds, LightType::Radial,
  409. &mVisibility.radialLights);
  410. mViews[i]->determineVisible(sceneInfo.spotLights, sceneInfo.spotLightWorldBounds, LightType::Spot,
  411. &mVisibility.spotLights);
  412. }
  413. // Calculate refl. probe visibility for all views
  414. UINT32 numProbes = (UINT32)sceneInfo.reflProbes.size();
  415. mVisibility.reflProbes.resize(numProbes, false);
  416. mVisibility.reflProbes.assign(numProbes, false);
  417. // Note: Per-view visibility for refl. probes currently isn't calculated
  418. for (UINT32 i = 0; i < numViews; i++)
  419. {
  420. const auto& viewProps = mViews[i]->getProperties();
  421. // Don't recursively render reflection probes when generating reflection probe maps
  422. if (viewProps.renderingReflections)
  423. continue;
  424. mViews[i]->calculateVisibility(sceneInfo.reflProbeWorldBounds, mVisibility.reflProbes);
  425. }
  426. }
  427. }}