// BsRendererCamera.cpp
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsRendererCamera.h"
  4. #include "BsCamera.h"
  5. #include "BsRenderable.h"
  6. #include "BsMaterial.h"
  7. #include "BsShader.h"
  8. #include "BsRenderTargets.h"
  9. #include "BsRendererUtility.h"
  10. #include "BsGpuParamsSet.h"
  11. namespace bs { namespace ct
  12. {
// Shared parameter-block definitions used to create/fill the per-camera and
// skybox GPU param buffers in this translation unit.
PerCameraParamDef gPerCameraParamDef;
SkyboxParamDef gSkyboxParamDef;
  15. template<bool SOLID_COLOR>
  16. SkyboxMat<SOLID_COLOR>::SkyboxMat()
  17. {
  18. SPtr<GpuParams> params = mParamsSet->getGpuParams();
  19. if(params->hasTexture(GPT_FRAGMENT_PROGRAM, "gSkyTex"))
  20. params->getTextureParam(GPT_FRAGMENT_PROGRAM, "gSkyTex", mSkyTextureParam);
  21. mParamBuffer = gSkyboxParamDef.createBuffer();
  22. if(params->hasParamBlock(GPT_FRAGMENT_PROGRAM, "Params"))
  23. mParamsSet->setParamBlockBuffer("Params", mParamBuffer, true);
  24. }
template<bool SOLID_COLOR>
void SkyboxMat<SOLID_COLOR>::_initDefines(ShaderDefines& defines)
{
	// Enable the shader's solid-color variant via a preprocessor define when the
	// template parameter requests it; the textured variant adds no defines.
	if (SOLID_COLOR)
		defines.set("SOLID_COLOR", 1);
}
  31. template<bool SOLID_COLOR>
  32. void SkyboxMat<SOLID_COLOR>::bind(const SPtr<GpuParamBlockBuffer>& perCamera)
  33. {
  34. mParamsSet->setParamBlockBuffer("PerCamera", perCamera, true);
  35. gRendererUtility().setPass(mMaterial, 0);
  36. }
  37. template<bool SOLID_COLOR>
  38. void SkyboxMat<SOLID_COLOR>::setParams(const SPtr<Texture>& texture, const Color& solidColor)
  39. {
  40. mSkyTextureParam.set(texture, TextureSurface(1, 1, 0, 0));
  41. gSkyboxParamDef.gClearColor.set(mParamBuffer, solidColor);
  42. mParamBuffer->flushToGPU();
  43. gRendererUtility().setPassParams(mParamsSet);
  44. }
  45. RendererCamera::RendererCamera()
  46. : mUsingGBuffer(false)
  47. {
  48. mParamBuffer = gPerCameraParamDef.createBuffer();
  49. }
  50. RendererCamera::RendererCamera(const RENDERER_VIEW_DESC& desc)
  51. : mViewDesc(desc), mUsingGBuffer(false)
  52. {
  53. mParamBuffer = gPerCameraParamDef.createBuffer();
  54. setStateReductionMode(desc.stateReduction);
  55. }
  56. void RendererCamera::setStateReductionMode(StateReduction reductionMode)
  57. {
  58. mOpaqueQueue = bs_shared_ptr_new<RenderQueue>(reductionMode);
  59. StateReduction transparentStateReduction = reductionMode;
  60. if (transparentStateReduction == StateReduction::Material)
  61. transparentStateReduction = StateReduction::Distance; // Transparent object MUST be sorted by distance
  62. mTransparentQueue = bs_shared_ptr_new<RenderQueue>(transparentStateReduction);
  63. }
  64. void RendererCamera::setPostProcessSettings(const SPtr<PostProcessSettings>& ppSettings)
  65. {
  66. if (mPostProcessInfo.settings == nullptr)
  67. mPostProcessInfo.settings = bs_shared_ptr_new<StandardPostProcessSettings>();
  68. SPtr<StandardPostProcessSettings> stdPPSettings = std::static_pointer_cast<StandardPostProcessSettings>(ppSettings);
  69. if (stdPPSettings != nullptr)
  70. *mPostProcessInfo.settings = *stdPPSettings;
  71. else
  72. *mPostProcessInfo.settings = StandardPostProcessSettings();
  73. mPostProcessInfo.settingDirty = true;
  74. }
void RendererCamera::setTransform(const Vector3& origin, const Vector3& direction, const Matrix4& view,
	const Matrix4& proj, const ConvexVolume& worldFrustum)
{
	// Updates the cached view description only; the GPU param buffer is not
	// touched here (see updatePerViewBuffer()).
	mViewDesc.viewOrigin = origin;
	mViewDesc.viewDirection = direction;
	mViewDesc.viewTransform = view;
	mViewDesc.projTransform = proj;
	mViewDesc.cullFrustum = worldFrustum;
}
  84. void RendererCamera::setView(const RENDERER_VIEW_DESC& desc)
  85. {
  86. if (mViewDesc.target.targetWidth != desc.target.targetWidth ||
  87. mViewDesc.target.targetHeight != desc.target.targetHeight)
  88. mRenderTargets = nullptr;
  89. mViewDesc = desc;
  90. setStateReductionMode(desc.stateReduction);
  91. }
  92. void RendererCamera::beginRendering(bool useGBuffer)
  93. {
  94. if (useGBuffer)
  95. {
  96. // Render scene objects to g-buffer
  97. bool createGBuffer = mRenderTargets == nullptr ||
  98. mRenderTargets->getHDR() != mViewDesc.isHDR ||
  99. mRenderTargets->getNumSamples() != mViewDesc.target.numSamples;
  100. if (createGBuffer)
  101. mRenderTargets = RenderTargets::create(mViewDesc.target, mViewDesc.isHDR);
  102. mRenderTargets->allocate();
  103. mUsingGBuffer = true;
  104. }
  105. }
  106. void RendererCamera::endRendering()
  107. {
  108. mOpaqueQueue->clear();
  109. mTransparentQueue->clear();
  110. if(mUsingGBuffer)
  111. {
  112. mRenderTargets->release();
  113. mUsingGBuffer = false;
  114. }
  115. }
  116. void RendererCamera::determineVisible(const Vector<RendererObject*>& renderables, const Vector<CullInfo>& cullInfos,
  117. Vector<bool>* visibility)
  118. {
  119. mVisibility.renderables.clear();
  120. mVisibility.renderables.resize(renderables.size(), false);
  121. if (mViewDesc.isOverlay)
  122. return;
  123. calculateVisibility(cullInfos, mVisibility.renderables);
  124. // Update per-object param buffers and queue render elements
  125. for(UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
  126. {
  127. if (!mVisibility.renderables[i])
  128. continue;
  129. const AABox& boundingBox = cullInfos[i].bounds.getBox();
  130. float distanceToCamera = (mViewDesc.viewOrigin - boundingBox.getCenter()).length();
  131. for (auto& renderElem : renderables[i]->elements)
  132. {
  133. // Note: I could keep opaque and transparent renderables in two separate arrays, so I don't need to do the
  134. // check here
  135. bool isTransparent = (renderElem.material->getShader()->getFlags() & (UINT32)ShaderFlags::Transparent) != 0;
  136. if (isTransparent)
  137. mTransparentQueue->add(&renderElem, distanceToCamera);
  138. else
  139. mOpaqueQueue->add(&renderElem, distanceToCamera);
  140. }
  141. }
  142. if(visibility != nullptr)
  143. {
  144. for (UINT32 i = 0; i < (UINT32)renderables.size(); i++)
  145. {
  146. bool visible = (*visibility)[i];
  147. (*visibility)[i] = visible || mVisibility.renderables[i];
  148. }
  149. }
  150. mOpaqueQueue->sort();
  151. mTransparentQueue->sort();
  152. }
  153. void RendererCamera::calculateVisibility(const Vector<CullInfo>& cullInfos, Vector<bool>& visibility) const
  154. {
  155. UINT64 cameraLayers = mViewDesc.visibleLayers;
  156. const ConvexVolume& worldFrustum = mViewDesc.cullFrustum;
  157. for (UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
  158. {
  159. if ((cullInfos[i].layer & cameraLayers) == 0)
  160. continue;
  161. // Do frustum culling
  162. // Note: This is bound to be a bottleneck at some point. When it is ensure that intersect methods use vector
  163. // operations, as it is trivial to update them. Also consider spatial partitioning.
  164. const Sphere& boundingSphere = cullInfos[i].bounds.getSphere();
  165. if (worldFrustum.intersects(boundingSphere))
  166. {
  167. // More precise with the box
  168. const AABox& boundingBox = cullInfos[i].bounds.getBox();
  169. if (worldFrustum.intersects(boundingBox))
  170. visibility[i] = true;
  171. }
  172. }
  173. }
  174. void RendererCamera::calculateVisibility(const Vector<Sphere>& bounds, Vector<bool>& visibility) const
  175. {
  176. const ConvexVolume& worldFrustum = mViewDesc.cullFrustum;
  177. for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
  178. {
  179. if (worldFrustum.intersects(bounds[i]))
  180. visibility[i] = true;
  181. }
  182. }
Vector2 RendererCamera::getDeviceZTransform(const Matrix4& projMatrix) const
{
	// Returns a set of values that will transform depth buffer values (in range [0, 1]) to a distance
	// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
	// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
	// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
	// value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
	// formula is:
	// depth = (Az + B) / (C * z)
	// To get the z coordinate back we simply do the opposite:
	// z = B / (depth * C - A)
	// However some APIs will also do a transformation on the depth values before storing them to the texture
	// (e.g. OpenGL will transform from [-1, 1] to [0, 1]). And we need to reverse that as well. Therefore the final
	// formula is:
	// z = B / ((depth * (maxDepth - minDepth) + minDepth) * C - A)
	// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
	// z = 1.0f / (depth + minDepth/(maxDepth - minDepth) - A/((maxDepth - minDepth) * C)) * B/((maxDepth - minDepth) * C)
	RenderAPI& rapi = RenderAPI::instance();
	const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

	// API-specific depth input range (e.g. [0, 1] for DX, [-1, 1] for GL)
	float depthRange = rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue();
	float minDepth = rapiInfo.getMinimumDepthInputValue();

	float a = projMatrix[2][2];
	float b = projMatrix[2][3];
	float c = projMatrix[3][2];

	Vector2 output;
	// c != 0 indicates a perspective projection (w depends on z)
	if (c != 0.0f)
	{
		output.x = b / (depthRange * c);
		output.y = minDepth / depthRange - a / (depthRange * c);
	}
	else // Orthographic, assuming viewing towards negative Z
	{
		output.x = b / -depthRange;
		output.y = minDepth / depthRange - a / -depthRange;
	}

	return output;
}
Vector2 RendererCamera::getNDCZTransform(const Matrix4& projMatrix) const
{
	// Returns a set of values that will transform depth buffer values (e.g. [0, 1] in DX, [-1, 1] in GL) to a distance
	// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
	// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
	// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
	// value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
	// formula is:
	// depth = (Az + B) / (C * z)
	// To get the z coordinate back we simply do the opposite:
	// z = B / (depth * C - A)
	// We reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
	// z = 1.0f / (depth - A/C) * B/C
	RenderAPI& rapi = RenderAPI::instance();
	const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

	float a = projMatrix[2][2];
	float b = projMatrix[2][3];
	float c = projMatrix[3][2];

	Vector2 output;
	// c != 0 indicates a perspective projection (w depends on z)
	if (c != 0.0f)
	{
		output.x = b / c;
		output.y = -a / c;
	}
	else // Orthographic, assuming viewing towards negative Z
	{
		output.x = -b;
		output.y = a;
	}

	return output;
}
void RendererCamera::updatePerViewBuffer()
{
	// Fills the per-camera GPU parameter block from the current view description.
	Matrix4 viewProj = mViewDesc.projTransform * mViewDesc.viewTransform;
	Matrix4 invViewProj = viewProj.inverse();

	gPerCameraParamDef.gMatProj.set(mParamBuffer, mViewDesc.projTransform);
	gPerCameraParamDef.gMatView.set(mParamBuffer, mViewDesc.viewTransform);
	gPerCameraParamDef.gMatViewProj.set(mParamBuffer, viewProj);
	gPerCameraParamDef.gMatInvViewProj.set(mParamBuffer, invViewProj); // Note: Calculate inverses separately (better precision possibly)
	gPerCameraParamDef.gMatInvProj.set(mParamBuffer, mViewDesc.projTransform.inverse());

	// Construct a special inverse view-projection matrix that had projection entries that affect z and w eliminated.
	// Used to transform a vector(clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in clip space, and
	// view_z/view_w in view space, into world space.

	// Only projects z/w coordinates (x/y pass through the identity rows unchanged)
	Matrix4 projZ = Matrix4::IDENTITY;
	projZ[2][2] = mViewDesc.projTransform[2][2];
	projZ[2][3] = mViewDesc.projTransform[2][3];
	projZ[3][2] = mViewDesc.projTransform[3][2];
	projZ[3][3] = 0.0f;

	gPerCameraParamDef.gMatScreenToWorld.set(mParamBuffer, invViewProj * projZ);
	gPerCameraParamDef.gViewDir.set(mParamBuffer, mViewDesc.viewDirection);
	gPerCameraParamDef.gViewOrigin.set(mParamBuffer, mViewDesc.viewOrigin);
	gPerCameraParamDef.gDeviceZToWorldZ.set(mParamBuffer, getDeviceZTransform(mViewDesc.projTransform));
	gPerCameraParamDef.gNDCZToWorldZ.set(mParamBuffer, getNDCZTransform(mViewDesc.projTransform));

	Vector2 nearFar(mViewDesc.nearPlane, mViewDesc.farPlane);
	gPerCameraParamDef.gNearFar.set(mParamBuffer, nearFar);

	// Viewport rectangle packed as (x, y, width, height)
	const Rect2I& viewRect = mViewDesc.target.viewRect;
	Vector4I viewportRect;
	viewportRect[0] = viewRect.x;
	viewportRect[1] = viewRect.y;
	viewportRect[2] = viewRect.width;
	viewportRect[3] = viewRect.height;

	gPerCameraParamDef.gViewportRectangle.set(mParamBuffer, viewportRect);

	float halfWidth = viewRect.width * 0.5f;
	float halfHeight = viewRect.height * 0.5f;

	// Fall back to an arbitrary non-zero size when the target size is unset, avoiding division by zero below
	float rtWidth = mViewDesc.target.targetWidth != 0 ? (float)mViewDesc.target.targetWidth : 20.0f;
	float rtHeight = mViewDesc.target.targetHeight != 0 ? (float)mViewDesc.target.targetHeight : 20.0f;

	RenderAPI& rapi = RenderAPI::instance();
	const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

	// Scale/offset mapping clip-space coordinates into the viewport's UV range,
	// including API-specific half-texel offsets
	Vector4 clipToUVScaleOffset;
	clipToUVScaleOffset.x = halfWidth / rtWidth;
	clipToUVScaleOffset.y = -halfHeight / rtHeight;
	clipToUVScaleOffset.z = viewRect.x / rtWidth + (halfWidth + rapiInfo.getHorizontalTexelOffset()) / rtWidth;
	clipToUVScaleOffset.w = viewRect.y / rtHeight + (halfHeight + rapiInfo.getVerticalTexelOffset()) / rtHeight;

	// Either of these flips the Y axis, but if they're both true they cancel out
	if (rapiInfo.isFlagSet(RenderAPIFeatureFlag::UVYAxisUp) ^ rapiInfo.isFlagSet(RenderAPIFeatureFlag::NDCYAxisDown))
		clipToUVScaleOffset.y = -clipToUVScaleOffset.y;

	gPerCameraParamDef.gClipToUVScaleOffset.set(mParamBuffer, clipToUVScaleOffset);

	// noLighting views get a large ambient factor — presumably so geometry renders
	// effectively unlit; confirm against the shader's use of gAmbientFactor
	if (mViewDesc.noLighting)
		gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 100.0f);
	else
		gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 0.0f);
}
// Explicit instantiations for both skybox variants: solid color and textured
template class SkyboxMat<true>;
template class SkyboxMat<false>;
  305. }}