BsRendererView.cpp 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsRendererView.h"
  4. #include "BsCamera.h"
  5. #include "BsRenderable.h"
  6. #include "BsMaterial.h"
  7. #include "BsShader.h"
  8. #include "BsRenderTargets.h"
  9. #include "BsRendererUtility.h"
  10. #include "BsGpuParamsSet.h"
  11. namespace bs { namespace ct
  12. {
	/** Layout definition for the per-camera GPU parameter block, shared by all views. */
	PerCameraParamDef gPerCameraParamDef;

	/** Layout definition for the skybox material's GPU parameter block. */
	SkyboxParamDef gSkyboxParamDef;
  15. template<bool SOLID_COLOR>
  16. SkyboxMat<SOLID_COLOR>::SkyboxMat()
  17. {
  18. SPtr<GpuParams> params = mParamsSet->getGpuParams();
  19. if(params->hasTexture(GPT_FRAGMENT_PROGRAM, "gSkyTex"))
  20. params->getTextureParam(GPT_FRAGMENT_PROGRAM, "gSkyTex", mSkyTextureParam);
  21. mParamBuffer = gSkyboxParamDef.createBuffer();
  22. if(params->hasParamBlock(GPT_FRAGMENT_PROGRAM, "Params"))
  23. mParamsSet->setParamBlockBuffer("Params", mParamBuffer, true);
  24. }
	template<bool SOLID_COLOR>
	void SkyboxMat<SOLID_COLOR>::_initDefines(ShaderDefines& defines)
	{
		// Compile the solid-color shader variation when this instantiation requests it
		if (SOLID_COLOR)
			defines.set("SOLID_COLOR", 1);
	}
	template<bool SOLID_COLOR>
	void SkyboxMat<SOLID_COLOR>::bind(const SPtr<GpuParamBlockBuffer>& perCamera)
	{
		// Hook up the caller-provided per-camera constants, then make this material's
		// first (and only) pass the active GPU pipeline state.
		mParamsSet->setParamBlockBuffer("PerCamera", perCamera, true);
		gRendererUtility().setPass(mMaterial, 0);
	}
	template<bool SOLID_COLOR>
	void SkyboxMat<SOLID_COLOR>::setParams(const SPtr<Texture>& texture, const Color& solidColor)
	{
		// NOTE(review): TextureSurface(1, 1, 0, 0) appears to select mip level 1 rather
		// than the top-level mip — confirm this is intentional (vs. (0, 1, 0, 0)).
		mSkyTextureParam.set(texture, TextureSurface(1, 1, 0, 0));

		// Clear color is only consumed by the SOLID_COLOR variation; writing it is harmless otherwise
		gSkyboxParamDef.gClearColor.set(mParamBuffer, solidColor);
		mParamBuffer->flushToGPU();

		// Bind the freshly updated parameters to the currently set pass
		gRendererUtility().setPassParams(mParamsSet);
	}
  45. RendererViewProperties::RendererViewProperties(const RENDERER_VIEW_DESC& src)
  46. :RendererViewData(src)
  47. {
  48. viewProjTransform = src.projTransform * src.viewTransform;
  49. target = src.target.target;
  50. viewRect = src.target.viewRect;
  51. nrmViewRect = src.target.nrmViewRect;
  52. numSamples = src.target.numSamples;
  53. clearFlags = src.target.clearFlags;
  54. clearColor = src.target.clearColor;
  55. clearDepthValue = src.target.clearDepthValue;
  56. clearStencilValue = src.target.clearStencilValue;
  57. }
	RendererView::RendererView()
		: mUsingGBuffer(false)
	{
		// Allocate the GPU buffer backing the per-camera shader parameter block
		mParamBuffer = gPerCameraParamDef.createBuffer();
	}
	RendererView::RendererView(const RENDERER_VIEW_DESC& desc)
		: mProperties(desc), mTargetDesc(desc.target), mCamera(desc.sceneCamera), mUsingGBuffer(false)
	{
		// Allocate the GPU buffer backing the per-camera shader parameter block
		mParamBuffer = gPerCameraParamDef.createBuffer();

		// Build the render queues with the requested state-reduction policy
		setStateReductionMode(desc.stateReduction);
	}
  69. void RendererView::setStateReductionMode(StateReduction reductionMode)
  70. {
  71. mOpaqueQueue = bs_shared_ptr_new<RenderQueue>(reductionMode);
  72. StateReduction transparentStateReduction = reductionMode;
  73. if (transparentStateReduction == StateReduction::Material)
  74. transparentStateReduction = StateReduction::Distance; // Transparent object MUST be sorted by distance
  75. mTransparentQueue = bs_shared_ptr_new<RenderQueue>(transparentStateReduction);
  76. }
  77. void RendererView::setPostProcessSettings(const SPtr<PostProcessSettings>& ppSettings)
  78. {
  79. if (mPostProcessInfo.settings == nullptr)
  80. mPostProcessInfo.settings = bs_shared_ptr_new<StandardPostProcessSettings>();
  81. SPtr<StandardPostProcessSettings> stdPPSettings = std::static_pointer_cast<StandardPostProcessSettings>(ppSettings);
  82. if (stdPPSettings != nullptr)
  83. *mPostProcessInfo.settings = *stdPPSettings;
  84. else
  85. *mPostProcessInfo.settings = StandardPostProcessSettings();
  86. mPostProcessInfo.settingDirty = true;
  87. }
  88. void RendererView::setTransform(const Vector3& origin, const Vector3& direction, const Matrix4& view,
  89. const Matrix4& proj, const ConvexVolume& worldFrustum)
  90. {
  91. mProperties.viewOrigin = origin;
  92. mProperties.viewDirection = direction;
  93. mProperties.viewTransform = view;
  94. mProperties.projTransform = proj;
  95. mProperties.cullFrustum = worldFrustum;
  96. mProperties.viewProjTransform = proj * view;
  97. }
	void RendererView::setView(const RENDERER_VIEW_DESC& desc)
	{
		// If the target resolution changed, the intermediate render textures are stale;
		// drop them so beginRendering() recreates them at the new size. (Must be checked
		// before mTargetDesc is overwritten below.)
		if (mTargetDesc.targetWidth != desc.target.targetWidth ||
			mTargetDesc.targetHeight != desc.target.targetHeight)
			mRenderTargets = nullptr;

		mCamera = desc.sceneCamera;
		mProperties = desc;
		mTargetDesc = desc.target;

		// Rebuild render queues with the new state-reduction policy
		setStateReductionMode(desc.stateReduction);
	}
	void RendererView::beginRendering(bool useGBuffer)
	{
		if (useGBuffer)
		{
			// Render scene objects to g-buffer. Recreate the targets if none exist yet,
			// or if the HDR / multisample settings no longer match this view.
			// (Resolution changes are handled by setView(), which nulls mRenderTargets.)
			bool createGBuffer = mRenderTargets == nullptr ||
				mRenderTargets->getHDR() != mProperties.isHDR ||
				mRenderTargets->getNumSamples() != mTargetDesc.numSamples;

			if (createGBuffer)
				mRenderTargets = RenderTargets::create(mTargetDesc, mProperties.isHDR);

			// Acquire/bind the targets for this frame; released again in endRendering()
			mRenderTargets->prepare();
			mUsingGBuffer = true;
		}
	}
	void RendererView::endRendering()
	{
		// Queues are rebuilt from scratch every frame by determineVisible()
		mOpaqueQueue->clear();
		mTransparentQueue->clear();

		// Release the intermediate targets acquired by beginRendering()
		if(mUsingGBuffer)
		{
			mRenderTargets->cleanup();
			mUsingGBuffer = false;
		}
	}
	void RendererView::determineVisible(const Vector<RendererObject*>& renderables, const Vector<CullInfo>& cullInfos,
		Vector<bool>* visibility)
	{
		// Reset per-view visibility, one entry per renderable.
		// NOTE(review): assumes renderables and cullInfos are parallel arrays of equal
		// size (indexing below relies on it) — confirm with the caller.
		mVisibility.renderables.clear();
		mVisibility.renderables.resize(renderables.size(), false);

		// Overlay views render no scene geometry
		if (mProperties.isOverlay)
			return;

		calculateVisibility(cullInfos, mVisibility.renderables);

		// Update per-object param buffers and queue render elements
		for(UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
		{
			if (!mVisibility.renderables[i])
				continue;

			// Distance from the camera to the bounds center is the queue sort key
			const AABox& boundingBox = cullInfos[i].bounds.getBox();
			float distanceToCamera = (mProperties.viewOrigin - boundingBox.getCenter()).length();

			for (auto& renderElem : renderables[i]->elements)
			{
				// Note: I could keep opaque and transparent renderables in two separate arrays, so I don't need to do the
				// check here
				bool isTransparent = (renderElem.material->getShader()->getFlags() & (UINT32)ShaderFlags::Transparent) != 0;

				if (isTransparent)
					mTransparentQueue->add(&renderElem, distanceToCamera);
				else
					mOpaqueQueue->add(&renderElem, distanceToCamera);
			}
		}

		// Merge this view's results into the caller-provided aggregate visibility (logical OR)
		if(visibility != nullptr)
		{
			for (UINT32 i = 0; i < (UINT32)renderables.size(); i++)
			{
				bool visible = (*visibility)[i];

				(*visibility)[i] = visible || mVisibility.renderables[i];
			}
		}

		mOpaqueQueue->sort();
		mTransparentQueue->sort();
	}
  169. void RendererView::calculateVisibility(const Vector<CullInfo>& cullInfos, Vector<bool>& visibility) const
  170. {
  171. UINT64 cameraLayers = mProperties.visibleLayers;
  172. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  173. for (UINT32 i = 0; i < (UINT32)cullInfos.size(); i++)
  174. {
  175. if ((cullInfos[i].layer & cameraLayers) == 0)
  176. continue;
  177. // Do frustum culling
  178. // Note: This is bound to be a bottleneck at some point. When it is ensure that intersect methods use vector
  179. // operations, as it is trivial to update them. Also consider spatial partitioning.
  180. const Sphere& boundingSphere = cullInfos[i].bounds.getSphere();
  181. if (worldFrustum.intersects(boundingSphere))
  182. {
  183. // More precise with the box
  184. const AABox& boundingBox = cullInfos[i].bounds.getBox();
  185. if (worldFrustum.intersects(boundingBox))
  186. visibility[i] = true;
  187. }
  188. }
  189. }
  190. void RendererView::calculateVisibility(const Vector<Sphere>& bounds, Vector<bool>& visibility) const
  191. {
  192. const ConvexVolume& worldFrustum = mProperties.cullFrustum;
  193. for (UINT32 i = 0; i < (UINT32)bounds.size(); i++)
  194. {
  195. if (worldFrustum.intersects(bounds[i]))
  196. visibility[i] = true;
  197. }
  198. }
	Vector2 RendererView::getDeviceZTransform(const Matrix4& projMatrix) const
	{
		// Returns a set of values that will transform depth buffer values (in range [0, 1]) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
		// value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
		// formula is:
		// depth = (Az + B) / (C * z)

		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)

		// However some APIs will also do a transformation on the depth values before storing them to the texture
		// (e.g. OpenGL will transform from [-1, 1] to [0, 1]). And we need to reverse that as well. Therefore the final
		// formula is:
		// z = B / ((depth * (maxDepth - minDepth) + minDepth) * C - A)

		// Then we reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth + minDepth/(maxDepth - minDepth) - A/((maxDepth - minDepth) * C)) * B/((maxDepth - minDepth) * C)
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		// Depth range the current render API writes to the depth buffer
		float depthRange = rapiInfo.getMaximumDepthInputValue() - rapiInfo.getMinimumDepthInputValue();
		float minDepth = rapiInfo.getMinimumDepthInputValue();

		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;

		// c == 0 distinguishes an orthographic projection (no perspective divide)
		if (c != 0.0f)
		{
			output.x = b / (depthRange * c);
			output.y = minDepth / depthRange - a / (depthRange * c);
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			output.x = b / -depthRange;
			output.y = minDepth / depthRange - a / -depthRange;
		}

		return output;
	}
	Vector2 RendererView::getNDCZTransform(const Matrix4& projMatrix) const
	{
		// Returns a set of values that will transform depth buffer values (e.g. [0, 1] in DX, [-1, 1] in GL) to a distance
		// in view space. This involves applying the inverse projection transform to the depth value. When you multiply
		// a vector with the projection matrix you get [clipX, clipY, Az + B, C * z], where we don't care about clipX/clipY.
		// A is [2, 2], B is [2, 3] and C is [3, 2] elements of the projection matrix (only ones that matter for our depth
		// value). The hardware will also automatically divide the z value with w to get the depth, therefore the final
		// formula is:
		// depth = (Az + B) / (C * z)

		// To get the z coordinate back we simply do the opposite:
		// z = B / (depth * C - A)

		// Then we reorganize it because it needs to fit the "(1.0f / (depth + y)) * x" format used in the shader:
		// z = 1.0f / (depth - A/C) * B/C
		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		float a = projMatrix[2][2];
		float b = projMatrix[2][3];
		float c = projMatrix[3][2];

		Vector2 output;

		// c == 0 distinguishes an orthographic projection (no perspective divide)
		if (c != 0.0f)
		{
			output.x = b / c;
			output.y = -a / c;
		}
		else // Orthographic, assuming viewing towards negative Z
		{
			output.x = -b;
			output.y = a;
		}

		return output;
	}
	void RendererView::updatePerViewBuffer()
	{
		// Refresh every entry of the per-camera GPU parameter block from current view state.
		Matrix4 viewProj = mProperties.projTransform * mProperties.viewTransform;
		Matrix4 invViewProj = viewProj.inverse();

		gPerCameraParamDef.gMatProj.set(mParamBuffer, mProperties.projTransform);
		gPerCameraParamDef.gMatView.set(mParamBuffer, mProperties.viewTransform);
		gPerCameraParamDef.gMatViewProj.set(mParamBuffer, viewProj);
		gPerCameraParamDef.gMatInvViewProj.set(mParamBuffer, invViewProj); // Note: Calculate inverses separately (better precision possibly)
		gPerCameraParamDef.gMatInvProj.set(mParamBuffer, mProperties.projTransform.inverse());

		// Construct a special inverse view-projection matrix that had projection entries that affect z and w eliminated.
		// Used to transform a vector(clip_x, clip_y, view_z, view_w), where clip_x/clip_y are in clip space, and
		// view_z/view_w in view space, into world space.

		// Only projects z/w coordinates
		Matrix4 projZ = Matrix4::IDENTITY;
		projZ[2][2] = mProperties.projTransform[2][2];
		projZ[2][3] = mProperties.projTransform[2][3];
		projZ[3][2] = mProperties.projTransform[3][2];
		projZ[3][3] = 0.0f;

		gPerCameraParamDef.gMatScreenToWorld.set(mParamBuffer, invViewProj * projZ);
		gPerCameraParamDef.gViewDir.set(mParamBuffer, mProperties.viewDirection);
		gPerCameraParamDef.gViewOrigin.set(mParamBuffer, mProperties.viewOrigin);
		// Coefficients for reconstructing view-space Z from depth-buffer / NDC depth (see the getters)
		gPerCameraParamDef.gDeviceZToWorldZ.set(mParamBuffer, getDeviceZTransform(mProperties.projTransform));
		gPerCameraParamDef.gNDCZToWorldZ.set(mParamBuffer, getNDCZTransform(mProperties.projTransform));

		Vector2 nearFar(mProperties.nearPlane, mProperties.farPlane);
		gPerCameraParamDef.gNearFar.set(mParamBuffer, nearFar);

		// Viewport rectangle in pixels, packed as (x, y, width, height)
		const Rect2I& viewRect = mTargetDesc.viewRect;

		Vector4I viewportRect;
		viewportRect[0] = viewRect.x;
		viewportRect[1] = viewRect.y;
		viewportRect[2] = viewRect.width;
		viewportRect[3] = viewRect.height;

		gPerCameraParamDef.gViewportRectangle.set(mParamBuffer, viewportRect);

		float halfWidth = viewRect.width * 0.5f;
		float halfHeight = viewRect.height * 0.5f;

		// Fall back to an arbitrary non-zero size to avoid division by zero when no target is set
		float rtWidth = mTargetDesc.targetWidth != 0 ? (float)mTargetDesc.targetWidth : 20.0f;
		float rtHeight = mTargetDesc.targetHeight != 0 ? (float)mTargetDesc.targetHeight : 20.0f;

		RenderAPI& rapi = RenderAPI::instance();
		const RenderAPIInfo& rapiInfo = rapi.getAPIInfo();

		// Scale/offset that maps clip-space XY into the viewport's UV sub-rectangle,
		// accounting for any API-specific half-texel offset
		Vector4 clipToUVScaleOffset;
		clipToUVScaleOffset.x = halfWidth / rtWidth;
		clipToUVScaleOffset.y = -halfHeight / rtHeight;
		clipToUVScaleOffset.z = viewRect.x / rtWidth + (halfWidth + rapiInfo.getHorizontalTexelOffset()) / rtWidth;
		clipToUVScaleOffset.w = viewRect.y / rtHeight + (halfHeight + rapiInfo.getVerticalTexelOffset()) / rtHeight;

		// Either of these flips the Y axis, but if they're both true they cancel out
		if (rapiInfo.isFlagSet(RenderAPIFeatureFlag::UVYAxisUp) ^ rapiInfo.isFlagSet(RenderAPIFeatureFlag::NDCYAxisDown))
			clipToUVScaleOffset.y = -clipToUVScaleOffset.y;

		gPerCameraParamDef.gClipToUVScaleOffset.set(mParamBuffer, clipToUVScaleOffset);

		// Full ambient when lighting is disabled, none otherwise
		if (mProperties.noLighting)
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 100.0f);
		else
			gPerCameraParamDef.gAmbientFactor.set(mParamBuffer, 0.0f);
	}
	// Explicit instantiations for the two supported skybox variations
	template class SkyboxMat<true>;  // solid-color sky
	template class SkyboxMat<false>; // textured sky
  321. }}