// BsRendererCamera.cpp
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsRendererCamera.h"
  4. #include "BsCamera.h"
  5. #include "BsRenderable.h"
  6. #include "BsMaterial.h"
  7. #include "BsShader.h"
  8. #include "BsRenderTargets.h"
  9. namespace bs
  10. {
  11. PerCameraParamDef gPerCameraParamDef;
  12. RendererCamera::RendererCamera()
  13. :mCamera(nullptr), mUsingRenderTargets(false)
  14. {
  15. mParamBuffer = gPerCameraParamDef.createBuffer();
  16. }
  17. RendererCamera::RendererCamera(const CameraCore* camera, StateReduction reductionMode)
  18. :mCamera(camera), mUsingRenderTargets(false)
  19. {
  20. mParamBuffer = gPerCameraParamDef.createBuffer();
  21. update(reductionMode);
  22. }
  23. void RendererCamera::update(StateReduction reductionMode)
  24. {
  25. mOpaqueQueue = bs_shared_ptr_new<RenderQueue>(reductionMode);
  26. StateReduction transparentStateReduction = reductionMode;
  27. if (transparentStateReduction == StateReduction::Material)
  28. transparentStateReduction = StateReduction::Distance; // Transparent object MUST be sorted by distance
  29. mTransparentQueue = bs_shared_ptr_new<RenderQueue>(transparentStateReduction);
  30. updatePP();
  31. }
  32. void RendererCamera::updatePP()
  33. {
  34. if (mPostProcessInfo.settings == nullptr)
  35. mPostProcessInfo.settings = bs_shared_ptr_new<StandardPostProcessSettings>();
  36. SPtr<StandardPostProcessSettings> ppSettings = std::static_pointer_cast<StandardPostProcessSettings>(mCamera->getPostProcessSettings());
  37. if (ppSettings != nullptr)
  38. *mPostProcessInfo.settings = *ppSettings;
  39. else
  40. *mPostProcessInfo.settings = StandardPostProcessSettings();
  41. mPostProcessInfo.settingDirty = true;
  42. }
  43. void RendererCamera::beginRendering(bool useGBuffer)
  44. {
  45. if (useGBuffer)
  46. {
  47. SPtr<ViewportCore> viewport = mCamera->getViewport();
  48. bool useHDR = mCamera->getFlags().isSet(CameraFlag::HDR);
  49. UINT32 msaaCount = mCamera->getMSAACount();
  50. // Render scene objects to g-buffer
  51. bool createGBuffer = mRenderTargets == nullptr ||
  52. mRenderTargets->getHDR() != useHDR ||
  53. mRenderTargets->getNumSamples() != msaaCount;
  54. if (createGBuffer)
  55. mRenderTargets = RenderTargets::create(viewport, useHDR, msaaCount);
  56. mRenderTargets->allocate();
  57. mUsingRenderTargets = true;
  58. }
  59. }
  60. void RendererCamera::endRendering()
  61. {
  62. mOpaqueQueue->clear();
  63. mTransparentQueue->clear();
  64. if(mUsingRenderTargets)
  65. {
  66. mRenderTargets->release();
  67. mUsingRenderTargets = false;
  68. }
  69. }
  70. void RendererCamera::determineVisible(const Vector<RendererObject*>& renderables, const Vector<Bounds>& renderableBounds,
  71. Vector<bool>& visibility)
  72. {
  73. mVisibility.clear();
  74. mVisibility.resize(renderables.size(), false);
  75. bool isOverlayCamera = mCamera->getFlags().isSet(CameraFlag::Overlay);
  76. if (isOverlayCamera)
  77. return;
  78. UINT64 cameraLayers = mCamera->getLayers();
  79. ConvexVolume worldFrustum = mCamera->getWorldFrustum();
  80. // Update per-object param buffers and queue render elements
  81. for(UINT32 i = 0; i < (UINT32)renderables.size(); i++)
  82. {
  83. RenderableCore* renderable = renderables[i]->renderable;
  84. UINT32 rendererId = renderable->getRendererId();
  85. if ((renderable->getLayer() & cameraLayers) == 0)
  86. continue;
  87. // Do frustum culling
  88. // Note: This is bound to be a bottleneck at some point. When it is ensure that intersect methods use vector
  89. // operations, as it is trivial to update them. Also consider spatial partitioning.
  90. const Sphere& boundingSphere = renderableBounds[rendererId].getSphere();
  91. if (worldFrustum.intersects(boundingSphere))
  92. {
  93. // More precise with the box
  94. const AABox& boundingBox = renderableBounds[rendererId].getBox();
  95. if (worldFrustum.intersects(boundingBox))
  96. {
  97. visibility[i] = true;
  98. mVisibility[i] = true;
  99. float distanceToCamera = (mCamera->getPosition() - boundingBox.getCenter()).length();
  100. for (auto& renderElem : renderables[i]->elements)
  101. {
  102. bool isTransparent = (renderElem.material->getShader()->getFlags() & (UINT32)ShaderFlags::Transparent) != 0;
  103. if (isTransparent)
  104. mTransparentQueue->add(&renderElem, distanceToCamera);
  105. else
  106. mOpaqueQueue->add(&renderElem, distanceToCamera);
  107. }
  108. }
  109. }
  110. }
  111. mOpaqueQueue->sort();
  112. mTransparentQueue->sort();
  113. }
  114. Vector2 RendererCamera::getDeviceZTransform(const Matrix4& projMatrix) const
  115. {
  116. // Returns a set of values that will transform depth buffer values (e.g. [0, 1] in DX, [-1, 1] in GL) to a distance
  117. // in world space. This involes applying the inverse projection transform to the depth value. When you multiply
  118. // a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
  119. // A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
  120. // value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
  121. // formula is:
  122. // depth = (Az + B) / (C * z)
  123. // To get the z coordinate back we simply do the opposite:
  124. // z = B / (depth * C - A)
  125. // However some APIs will also do a transformation on the depth values before storing them to the texture
  126. // (e.g. OpenGL will transform from [-1, 1] to [0, 1]). And we need to reverse that as well. Therefore the final
  127. // formula is:
  128. // z = B / ((depth * (maxDepth - minDepth) + minDepth) * C - A)
  129. // Are we reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
  130. // z = 1.0f / (depth + minDepth/(maxDepth - minDepth) - A/((maxDepth - minDepth) * C)) * B/((maxDepth - minDepth) * C)
  131. RenderAPICore& rapi = RenderAPICore::instance();
  132. const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();
  133. float depthRange = rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue();
  134. float minDepth = rapiInfo.getMinimumDepthInputValue();
  135. float a = projMatrix[2][2];
  136. float b = projMatrix[2][3];
  137. float c = projMatrix[3][2];
  138. Vector2 output;
  139. output.x = b / (depthRange * c);
  140. output.y = minDepth / depthRange - a / (depthRange * c);
  141. return output;
  142. }
  143. void RendererCamera::updatePerCameraBuffer()
  144. {
  145. Matrix4 proj = mCamera->getProjectionMatrixRS();
  146. Matrix4 view = mCamera->getViewMatrix();
  147. Matrix4 viewProj = proj * view;
  148. Matrix4 invViewProj = viewProj.inverse();
  149. gPerCameraParamDef.gMatProj.set(mParamBuffer, proj);
  150. gPerCameraParamDef.gMatView.set(mParamBuffer, view);
  151. gPerCameraParamDef.gMatViewProj.set(mParamBuffer, viewProj);
  152. gPerCameraParamDef.gMatInvViewProj.set(mParamBuffer, invViewProj); // Note: Calculate inverses separately (better precision possibly)
  153. gPerCameraParamDef.gMatInvProj.set(mParamBuffer, proj.inverse());
  154. // Construct a special inverse view-projection matrix that had projection entries that affect z and w eliminated.
  155. // Used to transform a vector(clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in clip space, and
  156. // view_z/view_w in view space, into world space.
  157. // Only projects z/w coordinates
  158. Matrix4 projZ = Matrix4::IDENTITY;
  159. projZ[2][2] = proj[2][2];
  160. projZ[2][3] = proj[2][3];
  161. projZ[3][2] = proj[3][2];
  162. projZ[3][3] = 0.0f;
  163. gPerCameraParamDef.gMatScreenToWorld.set(mParamBuffer, invViewProj * projZ);
  164. gPerCameraParamDef.gViewDir.set(mParamBuffer, mCamera->getForward());
  165. gPerCameraParamDef.gViewOrigin.set(mParamBuffer, mCamera->getPosition());
  166. gPerCameraParamDef.gDeviceZToWorldZ.set(mParamBuffer, getDeviceZTransform(proj));
  167. SPtr<ViewportCore> viewport = mCamera->getViewport();
  168. SPtr<RenderTargetCore> rt = viewport->getTarget();
  169. float halfWidth = viewport->getWidth() * 0.5f;
  170. float halfHeight = viewport->getHeight() * 0.5f;
  171. float rtWidth;
  172. float rtHeight;
  173. if(rt != nullptr)
  174. {
  175. rtWidth = (float)rt->getProperties().getWidth();
  176. rtHeight = (float)rt->getProperties().getHeight();
  177. }
  178. else
  179. {
  180. rtWidth = 20.0f;
  181. rtHeight = 20.0f;
  182. }
  183. RenderAPICore& rapi = RenderAPICore::instance();
  184. const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();
  185. Vector4 clipToUVScaleOffset;
  186. clipToUVScaleOffset.x = halfWidth / rtWidth;
  187. clipToUVScaleOffset.y = -halfHeight / rtHeight;
  188. clipToUVScaleOffset.z = viewport->getX() / rtWidth + (halfWidth + rapiInfo.getHorizontalTexelOffset()) / rtWidth;
  189. clipToUVScaleOffset.w = viewport->getY() / rtHeight + (halfHeight + rapiInfo.getVerticalTexelOffset()) / rtHeight;
  190. if (!rapiInfo.getNDCYAxisDown())
  191. clipToUVScaleOffset.y = -clipToUVScaleOffset.y;
  192. gPerCameraParamDef.gClipToUVScaleOffset.set(mParamBuffer, clipToUVScaleOffset);
  193. }
  194. }