QuadTree.cpp 60 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667166816691670167116721673167416751676167716781679168016811682168316841685168616871688168916901691169216931694169516961697169816991700170117021703170417051706170717081709171017111712171317141715171617171718171917201721172217231724172517261727172817291730173117321733173417351736173717381739174017411742174317441745174617471748174917501751175217531754175517561757175817591760176117621763
  1. // Jolt Physics Library (https://github.com/jrouwe/JoltPhysics)
  2. // SPDX-FileCopyrightText: 2021 Jorrit Rouwe
  3. // SPDX-License-Identifier: MIT
  4. #include <Jolt/Jolt.h>
  5. #include <Jolt/Physics/Collision/BroadPhase/QuadTree.h>
  6. #include <Jolt/Physics/Collision/BroadPhase/BroadPhaseQuadTree.h>
  7. #include <Jolt/Physics/Collision/RayCast.h>
  8. #include <Jolt/Physics/Collision/AABoxCast.h>
  9. #include <Jolt/Physics/Collision/CastResult.h>
  10. #include <Jolt/Physics/Collision/SortReverseAndStore.h>
  11. #include <Jolt/Physics/Body/BodyPair.h>
  12. #include <Jolt/Physics/PhysicsLock.h>
  13. #include <Jolt/Geometry/AABox4.h>
  14. #include <Jolt/Geometry/RayAABox.h>
  15. #include <Jolt/Geometry/OrientedBox.h>
  16. #include <Jolt/Core/STLLocalAllocator.h>
  17. #ifdef JPH_DUMP_BROADPHASE_TREE
  18. JPH_SUPPRESS_WARNINGS_STD_BEGIN
  19. #include <fstream>
  20. JPH_SUPPRESS_WARNINGS_STD_END
  21. #endif // JPH_DUMP_BROADPHASE_TREE
  22. JPH_NAMESPACE_BEGIN
  23. ////////////////////////////////////////////////////////////////////////////////////////////////////////
  24. // QuadTree::Node
  25. ////////////////////////////////////////////////////////////////////////////////////////////////////////
  26. QuadTree::Node::Node(bool inIsChanged) :
  27. mIsChanged(inIsChanged)
  28. {
  29. // First reset bounds
  30. Vec4 val = Vec4::sReplicate(cLargeFloat);
  31. val.StoreFloat4((Float4 *)&mBoundsMinX);
  32. val.StoreFloat4((Float4 *)&mBoundsMinY);
  33. val.StoreFloat4((Float4 *)&mBoundsMinZ);
  34. val = Vec4::sReplicate(-cLargeFloat);
  35. val.StoreFloat4((Float4 *)&mBoundsMaxX);
  36. val.StoreFloat4((Float4 *)&mBoundsMaxY);
  37. val.StoreFloat4((Float4 *)&mBoundsMaxZ);
  38. // Reset child node ids
  39. mChildNodeID[0] = NodeID::sInvalid();
  40. mChildNodeID[1] = NodeID::sInvalid();
  41. mChildNodeID[2] = NodeID::sInvalid();
  42. mChildNodeID[3] = NodeID::sInvalid();
  43. }
  44. void QuadTree::Node::GetChildBounds(int inChildIndex, AABox &outBounds) const
  45. {
  46. // Read bounding box in order min -> max
  47. outBounds.mMin = Vec3(mBoundsMinX[inChildIndex], mBoundsMinY[inChildIndex], mBoundsMinZ[inChildIndex]);
  48. outBounds.mMax = Vec3(mBoundsMaxX[inChildIndex], mBoundsMaxY[inChildIndex], mBoundsMaxZ[inChildIndex]);
  49. }
// Store the bounding box for one of the 4 children of this node.
// NOTE(review): the component write order below is deliberate — readers (GetChildBounds) load
// min before max, so by writing max first and min last a torn read yields an invalid (empty)
// box instead of a too-small one. Do not reorder these stores.
void QuadTree::Node::SetChildBounds(int inChildIndex, const AABox &inBounds)
{
	// Bounding boxes provided to the quad tree should never be larger than cLargeFloat because this may trigger overflow exceptions
	// e.g. when squaring the value while testing sphere overlaps
	JPH_ASSERT(inBounds.mMin.GetX() >= -cLargeFloat && inBounds.mMin.GetX() <= cLargeFloat
		&& inBounds.mMin.GetY() >= -cLargeFloat && inBounds.mMin.GetY() <= cLargeFloat
		&& inBounds.mMin.GetZ() >= -cLargeFloat && inBounds.mMin.GetZ() <= cLargeFloat
		&& inBounds.mMax.GetX() >= -cLargeFloat && inBounds.mMax.GetX() <= cLargeFloat
		&& inBounds.mMax.GetY() >= -cLargeFloat && inBounds.mMax.GetY() <= cLargeFloat
		&& inBounds.mMax.GetZ() >= -cLargeFloat && inBounds.mMax.GetZ() <= cLargeFloat);

	// Set max first (this keeps the bounding box invalid for reading threads)
	mBoundsMaxZ[inChildIndex] = inBounds.mMax.GetZ();
	mBoundsMaxY[inChildIndex] = inBounds.mMax.GetY();
	mBoundsMaxX[inChildIndex] = inBounds.mMax.GetX();

	// Then set min (and make box valid)
	mBoundsMinZ[inChildIndex] = inBounds.mMin.GetZ();
	mBoundsMinY[inChildIndex] = inBounds.mMin.GetY();
	mBoundsMinX[inChildIndex] = inBounds.mMin.GetX(); // Min X becomes valid last
}
// Reset the bounding box of one child to the invalid (empty) state.
// NOTE(review): mirror image of SetChildBounds — min is written first so concurrent readers
// (which load min before max) immediately see the box as invalid. Do not reorder these stores.
void QuadTree::Node::InvalidateChildBounds(int inChildIndex)
{
	// First we make the box invalid by setting the min to cLargeFloat
	mBoundsMinX[inChildIndex] = cLargeFloat; // Min X becomes invalid first
	mBoundsMinY[inChildIndex] = cLargeFloat;
	mBoundsMinZ[inChildIndex] = cLargeFloat;

	// Then we reset the max values too
	mBoundsMaxX[inChildIndex] = -cLargeFloat;
	mBoundsMaxY[inChildIndex] = -cLargeFloat;
	mBoundsMaxZ[inChildIndex] = -cLargeFloat;
}
  80. void QuadTree::Node::GetNodeBounds(AABox &outBounds) const
  81. {
  82. // Get first child bounds
  83. GetChildBounds(0, outBounds);
  84. // Encapsulate other child bounds
  85. for (int child_idx = 1; child_idx < 4; ++child_idx)
  86. {
  87. AABox tmp;
  88. GetChildBounds(child_idx, tmp);
  89. outBounds.Encapsulate(tmp);
  90. }
  91. }
  92. bool QuadTree::Node::EncapsulateChildBounds(int inChildIndex, const AABox &inBounds)
  93. {
  94. bool changed = AtomicMin(mBoundsMinX[inChildIndex], inBounds.mMin.GetX());
  95. changed |= AtomicMin(mBoundsMinY[inChildIndex], inBounds.mMin.GetY());
  96. changed |= AtomicMin(mBoundsMinZ[inChildIndex], inBounds.mMin.GetZ());
  97. changed |= AtomicMax(mBoundsMaxX[inChildIndex], inBounds.mMax.GetX());
  98. changed |= AtomicMax(mBoundsMaxY[inChildIndex], inBounds.mMax.GetY());
  99. changed |= AtomicMax(mBoundsMaxZ[inChildIndex], inBounds.mMax.GetZ());
  100. return changed;
  101. }
  102. ////////////////////////////////////////////////////////////////////////////////////////////////////////
  103. // QuadTree
  104. ////////////////////////////////////////////////////////////////////////////////////////////////////////
// An inside-out box (min > max) used to mark bounds as empty/invalid; any Encapsulate against it yields the other box
const AABox QuadTree::cInvalidBounds(Vec3::sReplicate(cLargeFloat), Vec3::sReplicate(-cLargeFloat));
  106. static inline void sQuadTreePerformanceWarning()
  107. {
  108. #ifdef JPH_ENABLE_ASSERTS
  109. static atomic<bool> triggered_report { false };
  110. bool expected = false;
  111. if (triggered_report.compare_exchange_strong(expected, true))
  112. Trace("QuadTree: Performance warning: Stack full!\n"
  113. "This must be a very deep tree. Are you batch adding bodies through BodyInterface::AddBodiesPrepare/AddBodiesFinalize?\n"
  114. "If you add lots of bodies through BodyInterface::AddBody you may need to call PhysicsSystem::OptimizeBroadPhase to rebuild the tree.");
  115. #endif
  116. }
  117. void QuadTree::GetBodyLocation(const TrackingVector &inTracking, BodyID inBodyID, uint32 &outNodeIdx, uint32 &outChildIdx) const
  118. {
  119. uint32 body_location = inTracking[inBodyID.GetIndex()].mBodyLocation;
  120. JPH_ASSERT(body_location != Tracking::cInvalidBodyLocation);
  121. outNodeIdx = body_location & 0x3fffffff;
  122. outChildIdx = body_location >> 30;
  123. JPH_ASSERT(mAllocator->Get(outNodeIdx).mChildNodeID[outChildIdx] == inBodyID, "Make sure that the body is in the node where it should be");
  124. }
  125. void QuadTree::SetBodyLocation(TrackingVector &ioTracking, BodyID inBodyID, uint32 inNodeIdx, uint32 inChildIdx) const
  126. {
  127. JPH_ASSERT(inNodeIdx <= 0x3fffffff);
  128. JPH_ASSERT(inChildIdx < 4);
  129. JPH_ASSERT(mAllocator->Get(inNodeIdx).mChildNodeID[inChildIdx] == inBodyID, "Make sure that the body is in the node where it should be");
  130. ioTracking[inBodyID.GetIndex()].mBodyLocation = inNodeIdx + (inChildIdx << 30);
  131. #ifdef JPH_ENABLE_ASSERTS
  132. uint32 v1, v2;
  133. GetBodyLocation(ioTracking, inBodyID, v1, v2);
  134. JPH_ASSERT(v1 == inNodeIdx);
  135. JPH_ASSERT(v2 == inChildIdx);
  136. #endif
  137. }
// Mark a body as no longer being stored in this tree
void QuadTree::sInvalidateBodyLocation(TrackingVector &ioTracking, BodyID inBodyID)
{
	ioTracking[inBodyID.GetIndex()].mBodyLocation = Tracking::cInvalidBodyLocation;
}
  142. QuadTree::~QuadTree()
  143. {
  144. // Get rid of any nodes that are still to be freed
  145. DiscardOldTree();
  146. // Get the current root node
  147. const RootNode &root_node = GetCurrentRoot();
  148. // Collect all bodies
  149. Allocator::Batch free_batch;
  150. Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack;
  151. node_stack.reserve(cStackSize);
  152. node_stack.push_back(root_node.GetNodeID());
  153. JPH_ASSERT(node_stack.front().IsValid());
  154. if (node_stack.front().IsNode())
  155. {
  156. do
  157. {
  158. // Process node
  159. NodeID node_id = node_stack.back();
  160. node_stack.pop_back();
  161. JPH_ASSERT(!node_id.IsBody());
  162. uint32 node_idx = node_id.GetNodeIndex();
  163. const Node &node = mAllocator->Get(node_idx);
  164. // Recurse and get all child nodes
  165. for (NodeID child_node_id : node.mChildNodeID)
  166. if (child_node_id.IsValid() && child_node_id.IsNode())
  167. node_stack.push_back(child_node_id);
  168. // Mark node to be freed
  169. mAllocator->AddObjectToBatch(free_batch, node_idx);
  170. }
  171. while (!node_stack.empty());
  172. }
  173. // Now free all nodes
  174. mAllocator->DestructObjectBatch(free_batch);
  175. }
  176. uint32 QuadTree::AllocateNode(bool inIsChanged)
  177. {
  178. uint32 index = mAllocator->ConstructObject(inIsChanged);
  179. if (index == Allocator::cInvalidObjectIndex)
  180. {
  181. // If you're running out of nodes, you're most likely adding too many individual bodies to the tree.
  182. // Because of the lock free nature of this tree, any individual body is added to the root of the tree.
  183. // This means that if you add a lot of bodies individually, you will end up with a very deep tree and you'll be
  184. // using a lot more nodes than you would if you added them in batches.
  185. // Please look at BodyInterface::AddBodiesPrepare/AddBodiesFinalize.
  186. //
  187. // If you have created a wrapper around Jolt then a possible solution is to activate a mode during loading
  188. // that queues up any bodies that need to be added. When loading is done, insert all of them as a single batch.
  189. // This could be implemented as a 'start batching' / 'end batching' call to switch in and out of that mode.
  190. // The rest of the code can then just use the regular 'add single body' call on your wrapper and doesn't need to know
  191. // if this mode is active or not.
  192. //
  193. // Calling PhysicsSystem::Update or PhysicsSystem::OptimizeBroadPhase will perform maintenance
  194. // on the tree and will make it efficient again. If you're not calling these functions and are adding a lot of bodies
  195. // you could still be running out of nodes because the tree is not being maintained. If your application is paused,
  196. // consider still calling PhysicsSystem::Update with a delta time of 0 to keep the tree in good shape.
  197. //
  198. // The system keeps track of a previous and a current tree, this allows for queries to continue using the old tree
  199. // while the new tree is being built. If you completely clean the PhysicsSystem and rebuild it from scratch, you may
  200. // want to call PhysicsSystem::OptimizeBroadPhase two times after clearing to completely get rid of any lingering nodes.
  201. //
  202. // The number of nodes that is allocated is related to the max number of bodies that is passed in PhysicsSystem::Init.
  203. // For normal situations there are plenty of nodes available. If all else fails, you can increase the number of nodes
  204. // by increasing the maximum number of bodies.
  205. Trace("QuadTree: Out of nodes!");
  206. std::abort();
  207. }
  208. return index;
  209. }
  210. void QuadTree::Init(Allocator &inAllocator)
  211. {
  212. // Store allocator
  213. mAllocator = &inAllocator;
  214. // Allocate root node
  215. mRootNode[mRootNodeIndex].mIndex = AllocateNode(false);
  216. }
  217. void QuadTree::DiscardOldTree()
  218. {
  219. // Check if there is an old tree
  220. RootNode &old_root_node = mRootNode[mRootNodeIndex ^ 1];
  221. if (old_root_node.mIndex != cInvalidNodeIndex)
  222. {
  223. // Clear the root
  224. old_root_node.mIndex = cInvalidNodeIndex;
  225. // Now free all old nodes
  226. mAllocator->DestructObjectBatch(mFreeNodeBatch);
  227. // Clear the batch
  228. mFreeNodeBatch = Allocator::Batch();
  229. }
  230. }
  231. AABox QuadTree::GetBounds() const
  232. {
  233. uint32 node_idx = GetCurrentRoot().mIndex;
  234. JPH_ASSERT(node_idx != cInvalidNodeIndex);
  235. const Node &node = mAllocator->Get(node_idx);
  236. AABox bounds;
  237. node.GetNodeBounds(bounds);
  238. return bounds;
  239. }
// First phase of a tree rebuild: collect all bodies (and unchanged subtrees) from the current
// tree and build a fresh, better-balanced tree from them. The current tree stays untouched and
// queryable; the result is handed to UpdateFinalize through outUpdateState to be swapped in.
// Nodes of the old tree that were dissolved are queued in mFreeNodeBatch until DiscardOldTree.
void QuadTree::UpdatePrepare(const BodyVector &inBodies, TrackingVector &ioTracking, UpdateState &outUpdateState, bool inFullRebuild)
{
#ifdef JPH_ENABLE_ASSERTS
	// We only read positions
	BodyAccess::Grant grant(BodyAccess::EAccess::None, BodyAccess::EAccess::Read);
#endif

	// Assert we have no nodes pending deletion, this means DiscardOldTree wasn't called yet
	JPH_ASSERT(mFreeNodeBatch.mNumObjects == 0);

	// Mark tree non-dirty
	mIsDirty = false;

	// Get the current root node
	const RootNode &root_node = GetCurrentRoot();

#ifdef JPH_DUMP_BROADPHASE_TREE
	DumpTree(root_node.GetNodeID(), StringFormat("%s_PRE", mName).c_str());
#endif

	// Assert sane data
#ifdef JPH_DEBUG
	ValidateTree(inBodies, ioTracking, root_node.mIndex, mNumBodies);
#endif

	// Create space for all body ID's (a body appears at most once, so mNumBodies is an upper bound)
	NodeID *node_ids = new NodeID [mNumBodies];
	NodeID *cur_node_id = node_ids;

	// Collect all bodies: depth-first walk that flattens changed subtrees into individual
	// NodeIDs while keeping unchanged subtrees intact as a single NodeID
	Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack;
	node_stack.reserve(cStackSize);
	node_stack.push_back(root_node.GetNodeID());
	JPH_ASSERT(node_stack.front().IsValid());
	do
	{
		// Pop node from stack
		NodeID node_id = node_stack.back();
		node_stack.pop_back();

		// Check if node is a body
		if (node_id.IsBody())
		{
			// Validate that we're still in the right layer
		#ifdef JPH_ENABLE_ASSERTS
			uint32 body_index = node_id.GetBodyID().GetIndex();
			JPH_ASSERT(ioTracking[body_index].mObjectLayer == inBodies[body_index]->GetObjectLayer());
		#endif

			// Store body
			*cur_node_id = node_id;
			++cur_node_id;
		}
		else
		{
			// Process normal node
			uint32 node_idx = node_id.GetNodeIndex();
			const Node &node = mAllocator->Get(node_idx);
			if (!node.mIsChanged && !inFullRebuild)
			{
				// Node is unchanged, treat it as a whole
				*cur_node_id = node_id;
				++cur_node_id;
			}
			else
			{
				// Node is changed, recurse and get all children
				for (NodeID child_node_id : node.mChildNodeID)
					if (child_node_id.IsValid())
						node_stack.push_back(child_node_id);

				// Mark node to be freed (deferred: queries may still be traversing the old tree)
				mAllocator->AddObjectToBatch(mFreeNodeBatch, node_idx);
			}
		}
	}
	while (!node_stack.empty());

	// Check that our book keeping matches
	uint32 num_node_ids = uint32(cur_node_id - node_ids);
	JPH_ASSERT(inFullRebuild? num_node_ids == mNumBodies : num_node_ids <= mNumBodies);

	// This will be the new root node id
	NodeID root_node_id;

	if (num_node_ids > 0)
	{
		// We mark the first 5 levels (max 1024 nodes) of the newly built tree as 'changed' so that
		// those nodes get recreated every time when we rebuild the tree. This balances the amount of
		// time we spend on rebuilding the tree ('unchanged' nodes will be put in the new tree as a whole)
		// vs the quality of the built tree.
		constexpr uint cMaxDepthMarkChanged = 5;

		// Build new tree
		AABox root_bounds;
		root_node_id = BuildTree(inBodies, ioTracking, node_ids, num_node_ids, cMaxDepthMarkChanged, root_bounds);

		if (root_node_id.IsBody())
		{
			// For a single body we need to allocate a new root node
			uint32 root_idx = AllocateNode(false);
			Node &root = mAllocator->Get(root_idx);
			root.SetChildBounds(0, root_bounds);
			root.mChildNodeID[0] = root_node_id;
			SetBodyLocation(ioTracking, root_node_id.GetBodyID(), root_idx, 0);
			root_node_id = NodeID::sFromNodeIndex(root_idx);
		}
	}
	else
	{
		// Empty tree, create root node
		uint32 root_idx = AllocateNode(false);
		root_node_id = NodeID::sFromNodeIndex(root_idx);
	}

	// Delete temporary data
	delete [] node_ids;

	// Hand the new root to UpdateFinalize, which will atomically switch the trees
	outUpdateState.mRootNodeID = root_node_id;
}
// Second phase of a tree rebuild: publish the tree built by UpdatePrepare.
// The root built in inUpdateState is stored in the inactive root slot and mRootNodeIndex is
// flipped, so new queries pick up the new tree while in-flight queries keep using the old one
// (which is only destroyed later by DiscardOldTree).
void QuadTree::UpdateFinalize([[maybe_unused]] const BodyVector &inBodies, [[maybe_unused]] const TrackingVector &inTracking, const UpdateState &inUpdateState)
{
	// Tree building is complete, now we switch the old with the new tree
	uint32 new_root_idx = mRootNodeIndex ^ 1;
	RootNode &new_root_node = mRootNode[new_root_idx];
	{
		// Note: We don't need to lock here as the old tree stays available so any queries
		// that use it can continue using it until DiscardOldTree is called. This slot
		// should be empty and unused at this moment.
		JPH_ASSERT(new_root_node.mIndex == cInvalidNodeIndex);
		new_root_node.mIndex = inUpdateState.mRootNodeID.GetNodeIndex();
	}

	// All queries that start from now on will use this new tree
	mRootNodeIndex = new_root_idx;

#ifdef JPH_DUMP_BROADPHASE_TREE
	DumpTree(new_root_node.GetNodeID(), StringFormat("%s_POST", mName).c_str());
#endif

#ifdef JPH_DEBUG
	ValidateTree(inBodies, inTracking, new_root_node.mIndex, mNumBodies);
#endif
}
  364. void QuadTree::sPartition(NodeID *ioNodeIDs, Vec3 *ioNodeCenters, int inNumber, int &outMidPoint)
  365. {
  366. // Handle trivial case
  367. if (inNumber <= 4)
  368. {
  369. outMidPoint = inNumber / 2;
  370. return;
  371. }
  372. // Calculate bounding box of box centers
  373. Vec3 center_min = Vec3::sReplicate(cLargeFloat);
  374. Vec3 center_max = Vec3::sReplicate(-cLargeFloat);
  375. for (const Vec3 *c = ioNodeCenters, *c_end = ioNodeCenters + inNumber; c < c_end; ++c)
  376. {
  377. Vec3 center = *c;
  378. center_min = Vec3::sMin(center_min, center);
  379. center_max = Vec3::sMax(center_max, center);
  380. }
  381. // Calculate split plane
  382. int dimension = (center_max - center_min).GetHighestComponentIndex();
  383. float split = 0.5f * (center_min + center_max)[dimension];
  384. // Divide bodies
  385. int start = 0, end = inNumber;
  386. while (start < end)
  387. {
  388. // Search for first element that is on the right hand side of the split plane
  389. while (start < end && ioNodeCenters[start][dimension] < split)
  390. ++start;
  391. // Search for the first element that is on the left hand side of the split plane
  392. while (start < end && ioNodeCenters[end - 1][dimension] >= split)
  393. --end;
  394. if (start < end)
  395. {
  396. // Swap the two elements
  397. std::swap(ioNodeIDs[start], ioNodeIDs[end - 1]);
  398. std::swap(ioNodeCenters[start], ioNodeCenters[end - 1]);
  399. ++start;
  400. --end;
  401. }
  402. }
  403. JPH_ASSERT(start == end);
  404. if (start > 0 && start < inNumber)
  405. {
  406. // Success!
  407. outMidPoint = start;
  408. }
  409. else
  410. {
  411. // Failed to divide bodies
  412. outMidPoint = inNumber / 2;
  413. }
  414. }
  415. void QuadTree::sPartition4(NodeID *ioNodeIDs, Vec3 *ioNodeCenters, int inBegin, int inEnd, int *outSplit)
  416. {
  417. NodeID *node_ids = ioNodeIDs + inBegin;
  418. Vec3 *node_centers = ioNodeCenters + inBegin;
  419. int number = inEnd - inBegin;
  420. // Partition entire range
  421. sPartition(node_ids, node_centers, number, outSplit[2]);
  422. // Partition lower half
  423. sPartition(node_ids, node_centers, outSplit[2], outSplit[1]);
  424. // Partition upper half
  425. sPartition(node_ids + outSplit[2], node_centers + outSplit[2], number - outSplit[2], outSplit[3]);
  426. // Convert to proper range
  427. outSplit[0] = inBegin;
  428. outSplit[1] += inBegin;
  429. outSplit[2] += inBegin;
  430. outSplit[3] += outSplit[2];
  431. outSplit[4] = inEnd;
  432. }
  433. AABox QuadTree::GetNodeOrBodyBounds(const BodyVector &inBodies, NodeID inNodeID) const
  434. {
  435. if (inNodeID.IsNode())
  436. {
  437. // It is a node
  438. uint32 node_idx = inNodeID.GetNodeIndex();
  439. const Node &node = mAllocator->Get(node_idx);
  440. AABox bounds;
  441. node.GetNodeBounds(bounds);
  442. return bounds;
  443. }
  444. else
  445. {
  446. // It is a body
  447. return inBodies[inNodeID.GetBodyID().GetIndex()]->GetWorldSpaceBounds();
  448. }
  449. }
// Build a new tree from inNumber NodeIDs (bodies and/or preserved subtrees) in ioNodeIDs.
// The input is recursively partitioned into groups of 4 around the spatial median of the
// element centers; nodes up to depth inMaxDepthMarkChanged are flagged 'changed' so they are
// rebuilt on the next update. Returns the root NodeID and writes its bounds to outBounds.
// Body locations in ioTracking are updated to point at their new parent node/child slot.
QuadTree::NodeID QuadTree::BuildTree(const BodyVector &inBodies, TrackingVector &ioTracking, NodeID *ioNodeIDs, int inNumber, uint inMaxDepthMarkChanged, AABox &outBounds)
{
	// Trivial case: No bodies in tree
	if (inNumber == 0)
	{
		outBounds = cInvalidBounds;
		return NodeID::sInvalid();
	}

	// Trivial case: When we have 1 body or node, return it
	if (inNumber == 1)
	{
		if (ioNodeIDs->IsNode())
		{
			// When returning an existing node as root, ensure that no parent has been set
			Node &node = mAllocator->Get(ioNodeIDs->GetNodeIndex());
			node.mParentNodeIndex = cInvalidNodeIndex;
		}
		outBounds = GetNodeOrBodyBounds(inBodies, *ioNodeIDs);
		return *ioNodeIDs;
	}

	// Calculate centers of all bodies that are to be inserted
	Vec3 *centers = new Vec3 [inNumber];
	JPH_ASSERT(IsAligned(centers, JPH_VECTOR_ALIGNMENT));
	Vec3 *c = centers;
	for (const NodeID *n = ioNodeIDs, *n_end = ioNodeIDs + inNumber; n < n_end; ++n, ++c)
		*c = GetNodeOrBodyBounds(inBodies, *n).GetCenter();

	// The algorithm is a recursive tree build, but to avoid the call overhead we keep track of a stack here
	struct StackEntry
	{
		uint32		mNodeIdx;		// Node index of node that is generated
		int			mChildIdx;		// Index of child that we're currently processing
		int			mSplit[5];		// Indices where the node ID's have been split to form 4 partitions
		uint32		mDepth;			// Depth of this node in the tree
		Vec3		mNodeBoundsMin;	// Bounding box of this node, accumulated while iterating over children
		Vec3		mNodeBoundsMax;
	};
	static_assert(sizeof(StackEntry) == 64); // Keep entries cache friendly; also documents the expected layout
	StackEntry stack[cStackSize / 4]; // We don't process 4 at a time in this loop but 1, so the stack can be 4x as small
	int top = 0;

	// Create root node
	stack[0].mNodeIdx = AllocateNode(inMaxDepthMarkChanged > 0);
	stack[0].mChildIdx = -1;
	stack[0].mDepth = 0;
	stack[0].mNodeBoundsMin = Vec3::sReplicate(cLargeFloat);
	stack[0].mNodeBoundsMax = Vec3::sReplicate(-cLargeFloat);
	sPartition4(ioNodeIDs, centers, 0, inNumber, stack[0].mSplit);

	for (;;)
	{
		StackEntry &cur_stack = stack[top];

		// Next child
		cur_stack.mChildIdx++;

		// Check if all children processed
		if (cur_stack.mChildIdx >= 4)
		{
			// Terminate if there's nothing left to pop
			if (top <= 0)
				break;

			// Add our bounds to our parents bounds
			StackEntry &prev_stack = stack[top - 1];
			prev_stack.mNodeBoundsMin = Vec3::sMin(prev_stack.mNodeBoundsMin, cur_stack.mNodeBoundsMin);
			prev_stack.mNodeBoundsMax = Vec3::sMax(prev_stack.mNodeBoundsMax, cur_stack.mNodeBoundsMax);

			// Store parent node
			Node &node = mAllocator->Get(cur_stack.mNodeIdx);
			node.mParentNodeIndex = prev_stack.mNodeIdx;

			// Store this node's properties in the parent node
			Node &parent_node = mAllocator->Get(prev_stack.mNodeIdx);
			parent_node.mChildNodeID[prev_stack.mChildIdx] = NodeID::sFromNodeIndex(cur_stack.mNodeIdx);
			parent_node.SetChildBounds(prev_stack.mChildIdx, AABox(cur_stack.mNodeBoundsMin, cur_stack.mNodeBoundsMax));

			// Pop entry from stack
			--top;
		}
		else
		{
			// Get low and high index to bodies to process
			int low = cur_stack.mSplit[cur_stack.mChildIdx];
			int high = cur_stack.mSplit[cur_stack.mChildIdx + 1];
			int num_bodies = high - low;

			if (num_bodies == 1)
			{
				// Partition holds a single element: store it directly as a child of the current node
				NodeID child_node_id = ioNodeIDs[low];
				AABox bounds = GetNodeOrBodyBounds(inBodies, child_node_id);

				// Update node
				Node &node = mAllocator->Get(cur_stack.mNodeIdx);
				node.mChildNodeID[cur_stack.mChildIdx] = child_node_id;
				node.SetChildBounds(cur_stack.mChildIdx, bounds);

				if (child_node_id.IsNode())
				{
					// Update parent for this node
					Node &child_node = mAllocator->Get(child_node_id.GetNodeIndex());
					child_node.mParentNodeIndex = cur_stack.mNodeIdx;
				}
				else
				{
					// Set location in tracking
					SetBodyLocation(ioTracking, child_node_id.GetBodyID(), cur_stack.mNodeIdx, cur_stack.mChildIdx);
				}

				// Encapsulate bounding box in parent
				cur_stack.mNodeBoundsMin = Vec3::sMin(cur_stack.mNodeBoundsMin, bounds.mMin);
				cur_stack.mNodeBoundsMax = Vec3::sMax(cur_stack.mNodeBoundsMax, bounds.mMax);
			}
			else if (num_bodies > 1)
			{
				// Partition still holds multiple elements: allocate a new node and recurse (via the explicit stack)
				StackEntry &new_stack = stack[++top];
				JPH_ASSERT(top < cStackSize / 4);
				uint32 next_depth = cur_stack.mDepth + 1;
				new_stack.mNodeIdx = AllocateNode(inMaxDepthMarkChanged > next_depth);
				new_stack.mChildIdx = -1;
				new_stack.mDepth = next_depth;
				new_stack.mNodeBoundsMin = Vec3::sReplicate(cLargeFloat);
				new_stack.mNodeBoundsMax = Vec3::sReplicate(-cLargeFloat);
				sPartition4(ioNodeIDs, centers, low, high, new_stack.mSplit);
			}
			// Note: num_bodies == 0 leaves the child slot invalid (as initialized by the Node constructor)
		}
	}

	// Delete temporary data
	delete [] centers;

	// Store bounding box of root
	outBounds.mMin = stack[0].mNodeBoundsMin;
	outBounds.mMax = stack[0].mNodeBoundsMax;

	// Return root
	return NodeID::sFromNodeIndex(stack[0].mNodeIdx);
}
  574. void QuadTree::MarkNodeAndParentsChanged(uint32 inNodeIndex)
  575. {
  576. uint32 node_idx = inNodeIndex;
  577. do
  578. {
  579. // If node has changed, parent will be too
  580. Node &node = mAllocator->Get(node_idx);
  581. if (node.mIsChanged)
  582. break;
  583. // Mark node as changed
  584. node.mIsChanged = true;
  585. // Get our parent
  586. node_idx = node.mParentNodeIndex;
  587. }
  588. while (node_idx != cInvalidNodeIndex);
  589. }
void QuadTree::WidenAndMarkNodeAndParentsChanged(uint32 inNodeIndex, const AABox &inNewBounds)
{
	// Walks from inNodeIndex up to the root, marking every node on the way as changed and
	// growing the bounds that each parent stores for the child we came from so that they
	// contain inNewBounds. Stops early once a parent's stored bounds already encapsulate
	// the new bounds (in which case only the changed flags still need to be propagated).
	uint32 node_idx = inNodeIndex;
	for (;;)
	{
		// Mark node as changed
		Node &node = mAllocator->Get(node_idx);
		node.mIsChanged = true;

		// Get our parent
		uint32 parent_idx = node.mParentNodeIndex;
		if (parent_idx == cInvalidNodeIndex)
			break; // Reached the root, nothing left to widen

		// Find which child of the parent we're in
		Node &parent_node = mAllocator->Get(parent_idx);
		NodeID node_id = NodeID::sFromNodeIndex(node_idx);
		int child_idx = -1;
		for (int i = 0; i < 4; ++i)
			if (parent_node.mChildNodeID[i] == node_id)
			{
				// Found one, set the node index and child index and update the bounding box too
				child_idx = i;
				break;
			}
		JPH_ASSERT(child_idx != -1, "Nodes don't get removed from the tree, we must have found it");

		// To avoid any race conditions with other threads we only enlarge bounding boxes
		if (!parent_node.EncapsulateChildBounds(child_idx, inNewBounds))
		{
			// No changes to bounding box, only marking as changed remains to be done
			if (!parent_node.mIsChanged)
				MarkNodeAndParentsChanged(parent_idx);
			break;
		}

		// Update node index
		node_idx = parent_idx;
	}
}
bool QuadTree::TryInsertLeaf(TrackingVector &ioTracking, int inNodeIndex, NodeID inLeafID, const AABox &inLeafBounds, int inLeafNumBodies)
{
	// Tries to atomically claim one of the 4 child slots of node inNodeIndex for leaf inLeafID
	// (a body or a subtree). Returns false when all 4 slots are taken so the caller can retry
	// (e.g. by creating a new root). The order of operations below matters for lock-free
	// correctness: the parent pointer is assigned before the slot is claimed, and the bounds
	// are only set after the claim succeeded, which is what makes the child visible to queries.

	// Tentatively assign the node as parent
	bool leaf_is_node = inLeafID.IsNode();
	if (leaf_is_node)
	{
		uint32 leaf_idx = inLeafID.GetNodeIndex();
		mAllocator->Get(leaf_idx).mParentNodeIndex = inNodeIndex;
	}

	// Fetch node that we're adding to
	Node &node = mAllocator->Get(inNodeIndex);

	// Find an empty child
	for (uint32 child_idx = 0; child_idx < 4; ++child_idx)
		if (node.mChildNodeID[child_idx].CompareExchange(NodeID::sInvalid(), inLeafID)) // Check if we can claim it
		{
			// We managed to add it to the node

			// If leaf was a body, we need to update its bookkeeping
			if (!leaf_is_node)
				SetBodyLocation(ioTracking, inLeafID.GetBodyID(), inNodeIndex, child_idx);

			// Now set the bounding box making the child valid for queries
			node.SetChildBounds(child_idx, inLeafBounds);

			// Widen the bounds for our parents too
			WidenAndMarkNodeAndParentsChanged(inNodeIndex, inLeafBounds);

			// Update body counter
			mNumBodies += inLeafNumBodies;

			// And we're done
			return true;
		}

	// All 4 slots were occupied (possibly claimed concurrently by other threads)
	return false;
}
bool QuadTree::TryCreateNewRoot(TrackingVector &ioTracking, atomic<uint32> &ioRootNodeIndex, NodeID inLeafID, const AABox &inLeafBounds, int inLeafNumBodies)
{
	// Builds a new root node with the old root as child 0 and inLeafID as child 1, then
	// attempts to publish it with a compare-exchange on ioRootNodeIndex. Returns false
	// when another thread swapped the root first; in that case the speculatively created
	// node is destroyed again and the caller is expected to retry.

	// Fetch old root
	uint32 root_idx = ioRootNodeIndex;
	Node &root = mAllocator->Get(root_idx);

	// Create new root, mark this new root as changed as we're not creating a very efficient tree at this point
	uint32 new_root_idx = AllocateNode(true);
	Node &new_root = mAllocator->Get(new_root_idx);

	// First child is current root, note that since the tree may be modified concurrently we cannot assume that the bounds of our child will be correct so we set a very large bounding box
	new_root.mChildNodeID[0] = NodeID::sFromNodeIndex(root_idx);
	new_root.SetChildBounds(0, AABox(Vec3::sReplicate(-cLargeFloat), Vec3::sReplicate(cLargeFloat)));

	// Second child is new leaf
	new_root.mChildNodeID[1] = inLeafID;
	new_root.SetChildBounds(1, inLeafBounds);

	// Tentatively assign new root as parent
	bool leaf_is_node = inLeafID.IsNode();
	if (leaf_is_node)
	{
		uint32 leaf_idx = inLeafID.GetNodeIndex();
		mAllocator->Get(leaf_idx).mParentNodeIndex = new_root_idx;
	}

	// Try to swap it
	if (ioRootNodeIndex.compare_exchange_strong(root_idx, new_root_idx))
	{
		// We managed to set the new root

		// If leaf was a body, we need to update its bookkeeping (it sits in child slot 1 of the new root)
		if (!leaf_is_node)
			SetBodyLocation(ioTracking, inLeafID.GetBodyID(), new_root_idx, 1);

		// Store parent node for old root
		root.mParentNodeIndex = new_root_idx;

		// Update body counter
		mNumBodies += inLeafNumBodies;

		// And we're done
		return true;
	}

	// Failed to swap, someone else must have created a new root, free our speculative node and try again
	mAllocator->DestructObject(new_root_idx);
	return false;
}
  695. void QuadTree::AddBodiesPrepare(const BodyVector &inBodies, TrackingVector &ioTracking, BodyID *ioBodyIDs, int inNumber, AddState &outState)
  696. {
  697. // Assert sane input
  698. JPH_ASSERT(ioBodyIDs != nullptr);
  699. JPH_ASSERT(inNumber > 0);
  700. #ifdef JPH_ENABLE_ASSERTS
  701. // Below we just cast the body ID's to node ID's, check here that that is valid
  702. for (const BodyID *b = ioBodyIDs, *b_end = ioBodyIDs + inNumber; b < b_end; ++b)
  703. NodeID::sFromBodyID(*b);
  704. #endif
  705. // Build subtree for the new bodies, note that we mark all nodes as 'not changed'
  706. // so they will stay together as a batch and will make the tree rebuild cheaper
  707. outState.mLeafID = BuildTree(inBodies, ioTracking, (NodeID *)ioBodyIDs, inNumber, 0, outState.mLeafBounds);
  708. #ifdef JPH_DEBUG
  709. if (outState.mLeafID.IsNode())
  710. ValidateTree(inBodies, ioTracking, outState.mLeafID.GetNodeIndex(), inNumber);
  711. #endif
  712. }
  713. void QuadTree::AddBodiesFinalize(TrackingVector &ioTracking, int inNumberBodies, const AddState &inState)
  714. {
  715. // Assert sane input
  716. JPH_ASSERT(inNumberBodies > 0);
  717. // Mark tree dirty
  718. mIsDirty = true;
  719. // Get the current root node
  720. RootNode &root_node = GetCurrentRoot();
  721. for (;;)
  722. {
  723. // Check if we can insert the body in the root
  724. if (TryInsertLeaf(ioTracking, root_node.mIndex, inState.mLeafID, inState.mLeafBounds, inNumberBodies))
  725. return;
  726. // Check if we can create a new root
  727. if (TryCreateNewRoot(ioTracking, root_node.mIndex, inState.mLeafID, inState.mLeafBounds, inNumberBodies))
  728. return;
  729. }
  730. }
  731. void QuadTree::AddBodiesAbort(TrackingVector &ioTracking, const AddState &inState)
  732. {
  733. // Collect all bodies
  734. Allocator::Batch free_batch;
  735. NodeID node_stack[cStackSize];
  736. node_stack[0] = inState.mLeafID;
  737. JPH_ASSERT(node_stack[0].IsValid());
  738. int top = 0;
  739. do
  740. {
  741. // Check if node is a body
  742. NodeID child_node_id = node_stack[top];
  743. if (child_node_id.IsBody())
  744. {
  745. // Reset location of body
  746. sInvalidateBodyLocation(ioTracking, child_node_id.GetBodyID());
  747. }
  748. else
  749. {
  750. // Process normal node
  751. uint32 node_idx = child_node_id.GetNodeIndex();
  752. const Node &node = mAllocator->Get(node_idx);
  753. for (NodeID sub_child_node_id : node.mChildNodeID)
  754. if (sub_child_node_id.IsValid())
  755. {
  756. JPH_ASSERT(top < cStackSize);
  757. node_stack[top] = sub_child_node_id;
  758. top++;
  759. }
  760. // Mark it to be freed
  761. mAllocator->AddObjectToBatch(free_batch, node_idx);
  762. }
  763. --top;
  764. }
  765. while (top >= 0);
  766. // Now free all nodes as a single batch
  767. mAllocator->DestructObjectBatch(free_batch);
  768. }
void QuadTree::RemoveBodies([[maybe_unused]] const BodyVector &inBodies, TrackingVector &ioTracking, const BodyID *ioBodyIDs, int inNumber)
{
	// Removes inNumber bodies from the tree. The three steps per body are deliberately
	// ordered: bookkeeping is reset first, then the bounds are invalidated (which hides
	// the slot from queries), and only then is the child slot freed for reuse by adds.

	// Assert sane input
	JPH_ASSERT(ioBodyIDs != nullptr);
	JPH_ASSERT(inNumber > 0);

	// Mark tree dirty
	mIsDirty = true;

	for (const BodyID *cur = ioBodyIDs, *end = ioBodyIDs + inNumber; cur < end; ++cur)
	{
		// Check if BodyID is correct
		JPH_ASSERT(inBodies[cur->GetIndex()]->GetID() == *cur, "Provided BodyID doesn't match BodyID in body manager");

		// Get location of body
		uint32 node_idx, child_idx;
		GetBodyLocation(ioTracking, *cur, node_idx, child_idx);

		// First we reset our internal bookkeeping
		sInvalidateBodyLocation(ioTracking, *cur);

		// Then we make the bounding box invalid, no queries can find this node anymore
		Node &node = mAllocator->Get(node_idx);
		node.InvalidateChildBounds(child_idx);

		// Finally we reset the child id, this makes the node available for adds again
		node.mChildNodeID[child_idx] = NodeID::sInvalid();

		// We don't need to bubble up our bounding box changes to our parents since we never make volumes smaller, only bigger
		// But we do need to mark the nodes as changed so that the tree can be rebuilt
		MarkNodeAndParentsChanged(node_idx);
	}

	mNumBodies -= inNumber;
}
void QuadTree::NotifyBodiesAABBChanged(const BodyVector &inBodies, const TrackingVector &inTracking, const BodyID *ioBodyIDs, int inNumber)
{
	// Updates the stored child bounds for bodies whose world space bounds changed.
	// Bounds are only ever grown here (via EncapsulateChildBounds), never shrunk,
	// which keeps this safe to run alongside queries (see WidenAndMarkNodeAndParentsChanged).

	// Assert sane input
	JPH_ASSERT(ioBodyIDs != nullptr);
	JPH_ASSERT(inNumber > 0);

	for (const BodyID *cur = ioBodyIDs, *end = ioBodyIDs + inNumber; cur < end; ++cur)
	{
		// Check if BodyID is correct
		const Body *body = inBodies[cur->GetIndex()];
		JPH_ASSERT(body->GetID() == *cur, "Provided BodyID doesn't match BodyID in body manager");

		// Get the new bounding box
		const AABox &new_bounds = body->GetWorldSpaceBounds();

		// Get location of body
		uint32 node_idx, child_idx;
		GetBodyLocation(inTracking, *cur, node_idx, child_idx);

		// Widen bounds for node
		Node &node = mAllocator->Get(node_idx);
		if (node.EncapsulateChildBounds(child_idx, new_bounds))
		{
			// Mark tree dirty (only when the stored bounds actually had to grow)
			mIsDirty = true;

			// If bounds changed, widen the bounds for our parents too
			WidenAndMarkNodeAndParentsChanged(node_idx, new_bounds);
		}
	}
}
template <class Visitor>
JPH_INLINE void QuadTree::WalkTree(const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking, Visitor &ioVisitor JPH_IF_TRACK_BROADPHASE_STATS(, LayerToStats &ioStats)) const
{
	// Generic depth-first traversal shared by all queries (ray cast, collide box/sphere/point, ...).
	// The Visitor decides which of the 4 children of a node to descend into (VisitNodes),
	// receives candidate bodies (VisitBody) and can cut the walk short (ShouldAbort / ShouldVisitNode).

	// Get the root
	const RootNode &root_node = GetCurrentRoot();

#ifdef JPH_TRACK_BROADPHASE_STATS
	// Start tracking stats
	int bodies_visited = 0;
	int hits_collected = 0;
	int nodes_visited = 0;
	uint64 collector_ticks = 0;

	uint64 start = GetProcessorTickCount();
#endif // JPH_TRACK_BROADPHASE_STATS

	// Explicit traversal stack; starts in a fixed-size local buffer and falls back to the heap when it has to grow
	Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack_array;
	node_stack_array.resize(cStackSize);
	NodeID *node_stack = node_stack_array.data();
	node_stack[0] = root_node.GetNodeID();
	int top = 0;
	do
	{
		// Check if node is a body
		NodeID child_node_id = node_stack[top];
		if (child_node_id.IsBody())
		{
			// Track amount of bodies visited
			JPH_IF_TRACK_BROADPHASE_STATS(++bodies_visited;)

			BodyID body_id = child_node_id.GetBodyID();
			ObjectLayer object_layer = inTracking[body_id.GetIndex()].mObjectLayer; // We're not taking a lock on the body, so it may be in the process of being removed so check if the object layer is invalid
			if (object_layer != cObjectLayerInvalid && inObjectLayerFilter.ShouldCollide(object_layer))
			{
				JPH_PROFILE("VisitBody");

				// Track amount of hits
				JPH_IF_TRACK_BROADPHASE_STATS(++hits_collected;)

				// Start track time the collector takes
				JPH_IF_TRACK_BROADPHASE_STATS(uint64 collector_start = GetProcessorTickCount();)

				// We found a body we collide with, call our visitor
				ioVisitor.VisitBody(body_id, top);

				// End track time the collector takes
				JPH_IF_TRACK_BROADPHASE_STATS(collector_ticks += GetProcessorTickCount() - collector_start;)

				// Check if we're done
				if (ioVisitor.ShouldAbort())
					break;
			}
		}
		else if (child_node_id.IsValid())
		{
			JPH_IF_TRACK_BROADPHASE_STATS(++nodes_visited;)

			// Ensure there is space on the stack (falls back to heap if there isn't)
			if (top + 4 >= (int)node_stack_array.size())
			{
				sQuadTreePerformanceWarning();
				node_stack_array.resize(node_stack_array.size() << 1);
				node_stack = node_stack_array.data();
				ioVisitor.OnStackResized(node_stack_array.size());
			}

			// Process normal node
			const Node &node = mAllocator->Get(child_node_id.GetNodeIndex());
			JPH_ASSERT(IsAligned(&node, JPH_CACHE_LINE_SIZE));

			// Load bounds of 4 children
			Vec4 bounds_minx = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinX);
			Vec4 bounds_miny = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinY);
			Vec4 bounds_minz = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinZ);
			Vec4 bounds_maxx = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxX);
			Vec4 bounds_maxy = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxY);
			Vec4 bounds_maxz = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxZ);

			// Load ids for 4 children
			UVec4 child_ids = UVec4::sLoadInt4Aligned((const uint32 *)&node.mChildNodeID[0]);

			// Check which sub nodes to visit
			int num_results = ioVisitor.VisitNodes(bounds_minx, bounds_miny, bounds_minz, bounds_maxx, bounds_maxy, bounds_maxz, child_ids, top);

			// The visitor compacted the accepted children to the front of child_ids;
			// store them starting at the current entry (which has already been consumed)
			child_ids.StoreInt4((uint32 *)&node_stack[top]);
			top += num_results;
		}

		// Fetch next node until we find one that the visitor wants to see
		do
			--top;
		while (top >= 0 && !ioVisitor.ShouldVisitNode(top));
	}
	while (top >= 0);

#ifdef JPH_TRACK_BROADPHASE_STATS
	// Calculate total time the broadphase walk took
	uint64 total_ticks = GetProcessorTickCount() - start;

	// Update stats under lock protection (slow!)
	{
		unique_lock lock(mStatsMutex);
		Stat &s = ioStats[inObjectLayerFilter.GetDescription()];
		s.mNumQueries++;
		s.mNodesVisited += nodes_visited;
		s.mBodiesVisited += bodies_visited;
		s.mHitsReported += hits_collected;
		s.mTotalTicks += total_ticks;
		s.mCollectorTicks += collector_ticks;
	}
#endif // JPH_TRACK_BROADPHASE_STATS
}
  916. void QuadTree::CastRay(const RayCast &inRay, RayCastBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  917. {
  918. class Visitor
  919. {
  920. public:
  921. /// Constructor
  922. JPH_INLINE Visitor(const RayCast &inRay, RayCastBodyCollector &ioCollector) :
  923. mOrigin(inRay.mOrigin),
  924. mInvDirection(inRay.mDirection),
  925. mCollector(ioCollector)
  926. {
  927. mFractionStack.resize(cStackSize);
  928. mFractionStack[0] = -1;
  929. }
  930. /// Returns true if further processing of the tree should be aborted
  931. JPH_INLINE bool ShouldAbort() const
  932. {
  933. return mCollector.ShouldEarlyOut();
  934. }
  935. /// Returns true if this node / body should be visited, false if no hit can be generated
  936. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  937. {
  938. return mFractionStack[inStackTop] < mCollector.GetEarlyOutFraction();
  939. }
  940. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  941. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop)
  942. {
  943. // Test the ray against 4 bounding boxes
  944. Vec4 fraction = RayAABox4(mOrigin, mInvDirection, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);
  945. // Sort so that highest values are first (we want to first process closer hits and we process stack top to bottom)
  946. return SortReverseAndStore(fraction, mCollector.GetEarlyOutFraction(), ioChildNodeIDs, &mFractionStack[inStackTop]);
  947. }
  948. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  949. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  950. {
  951. // Store potential hit with body
  952. BroadPhaseCastResult result { inBodyID, mFractionStack[inStackTop] };
  953. mCollector.AddHit(result);
  954. }
  955. /// Called when the stack is resized, this allows us to resize the fraction stack to match the new stack size
  956. JPH_INLINE void OnStackResized(size_t inNewStackSize)
  957. {
  958. mFractionStack.resize(inNewStackSize);
  959. }
  960. private:
  961. Vec3 mOrigin;
  962. RayInvDirection mInvDirection;
  963. RayCastBodyCollector & mCollector;
  964. Array<float, STLLocalAllocator<float, cStackSize>> mFractionStack;
  965. };
  966. Visitor visitor(inRay, ioCollector);
  967. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCastRayStats));
  968. }
  969. void QuadTree::CollideAABox(const AABox &inBox, CollideShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  970. {
  971. class Visitor
  972. {
  973. public:
  974. /// Constructor
  975. JPH_INLINE Visitor(const AABox &inBox, CollideShapeBodyCollector &ioCollector) :
  976. mBox(inBox),
  977. mCollector(ioCollector)
  978. {
  979. }
  980. /// Returns true if further processing of the tree should be aborted
  981. JPH_INLINE bool ShouldAbort() const
  982. {
  983. return mCollector.ShouldEarlyOut();
  984. }
  985. /// Returns true if this node / body should be visited, false if no hit can be generated
  986. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  987. {
  988. return true;
  989. }
  990. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  991. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop) const
  992. {
  993. // Test the box vs 4 boxes
  994. UVec4 hitting = AABox4VsBox(mBox, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);
  995. return CountAndSortTrues(hitting, ioChildNodeIDs);
  996. }
  997. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  998. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  999. {
  1000. // Store potential hit with body
  1001. mCollector.AddHit(inBodyID);
  1002. }
  1003. /// Called when the stack is resized
  1004. JPH_INLINE void OnStackResized([[maybe_unused]] size_t inNewStackSize) const
  1005. {
  1006. // Nothing to do
  1007. }
  1008. private:
  1009. const AABox & mBox;
  1010. CollideShapeBodyCollector & mCollector;
  1011. };
  1012. Visitor visitor(inBox, ioCollector);
  1013. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCollideAABoxStats));
  1014. }
  1015. void QuadTree::CollideSphere(Vec3Arg inCenter, float inRadius, CollideShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  1016. {
  1017. class Visitor
  1018. {
  1019. public:
  1020. /// Constructor
  1021. JPH_INLINE Visitor(Vec3Arg inCenter, float inRadius, CollideShapeBodyCollector &ioCollector) :
  1022. mCenterX(inCenter.SplatX()),
  1023. mCenterY(inCenter.SplatY()),
  1024. mCenterZ(inCenter.SplatZ()),
  1025. mRadiusSq(Vec4::sReplicate(Square(inRadius))),
  1026. mCollector(ioCollector)
  1027. {
  1028. }
  1029. /// Returns true if further processing of the tree should be aborted
  1030. JPH_INLINE bool ShouldAbort() const
  1031. {
  1032. return mCollector.ShouldEarlyOut();
  1033. }
  1034. /// Returns true if this node / body should be visited, false if no hit can be generated
  1035. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  1036. {
  1037. return true;
  1038. }
  1039. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  1040. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop) const
  1041. {
  1042. // Test 4 boxes vs sphere
  1043. UVec4 hitting = AABox4VsSphere(mCenterX, mCenterY, mCenterZ, mRadiusSq, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);
  1044. return CountAndSortTrues(hitting, ioChildNodeIDs);
  1045. }
  1046. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  1047. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  1048. {
  1049. // Store potential hit with body
  1050. mCollector.AddHit(inBodyID);
  1051. }
  1052. /// Called when the stack is resized
  1053. JPH_INLINE void OnStackResized([[maybe_unused]] size_t inNewStackSize) const
  1054. {
  1055. // Nothing to do
  1056. }
  1057. private:
  1058. Vec4 mCenterX;
  1059. Vec4 mCenterY;
  1060. Vec4 mCenterZ;
  1061. Vec4 mRadiusSq;
  1062. CollideShapeBodyCollector & mCollector;
  1063. };
  1064. Visitor visitor(inCenter, inRadius, ioCollector);
  1065. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCollideSphereStats));
  1066. }
  1067. void QuadTree::CollidePoint(Vec3Arg inPoint, CollideShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  1068. {
  1069. class Visitor
  1070. {
  1071. public:
  1072. /// Constructor
  1073. JPH_INLINE Visitor(Vec3Arg inPoint, CollideShapeBodyCollector &ioCollector) :
  1074. mPoint(inPoint),
  1075. mCollector(ioCollector)
  1076. {
  1077. }
  1078. /// Returns true if further processing of the tree should be aborted
  1079. JPH_INLINE bool ShouldAbort() const
  1080. {
  1081. return mCollector.ShouldEarlyOut();
  1082. }
  1083. /// Returns true if this node / body should be visited, false if no hit can be generated
  1084. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  1085. {
  1086. return true;
  1087. }
  1088. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  1089. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop) const
  1090. {
  1091. // Test if point overlaps with box
  1092. UVec4 hitting = AABox4VsPoint(mPoint, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);
  1093. return CountAndSortTrues(hitting, ioChildNodeIDs);
  1094. }
  1095. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  1096. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  1097. {
  1098. // Store potential hit with body
  1099. mCollector.AddHit(inBodyID);
  1100. }
  1101. /// Called when the stack is resized
  1102. JPH_INLINE void OnStackResized([[maybe_unused]] size_t inNewStackSize) const
  1103. {
  1104. // Nothing to do
  1105. }
  1106. private:
  1107. Vec3 mPoint;
  1108. CollideShapeBodyCollector & mCollector;
  1109. };
  1110. Visitor visitor(inPoint, ioCollector);
  1111. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCollidePointStats));
  1112. }
  1113. void QuadTree::CollideOrientedBox(const OrientedBox &inBox, CollideShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  1114. {
  1115. class Visitor
  1116. {
  1117. public:
  1118. /// Constructor
  1119. JPH_INLINE Visitor(const OrientedBox &inBox, CollideShapeBodyCollector &ioCollector) :
  1120. mBox(inBox),
  1121. mCollector(ioCollector)
  1122. {
  1123. }
  1124. /// Returns true if further processing of the tree should be aborted
  1125. JPH_INLINE bool ShouldAbort() const
  1126. {
  1127. return mCollector.ShouldEarlyOut();
  1128. }
  1129. /// Returns true if this node / body should be visited, false if no hit can be generated
  1130. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  1131. {
  1132. return true;
  1133. }
  1134. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  1135. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop) const
  1136. {
  1137. // Test if point overlaps with box
  1138. UVec4 hitting = AABox4VsBox(mBox, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);
  1139. return CountAndSortTrues(hitting, ioChildNodeIDs);
  1140. }
  1141. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  1142. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  1143. {
  1144. // Store potential hit with body
  1145. mCollector.AddHit(inBodyID);
  1146. }
  1147. /// Called when the stack is resized
  1148. JPH_INLINE void OnStackResized([[maybe_unused]] size_t inNewStackSize) const
  1149. {
  1150. // Nothing to do
  1151. }
  1152. private:
  1153. OrientedBox mBox;
  1154. CollideShapeBodyCollector & mCollector;
  1155. };
  1156. Visitor visitor(inBox, ioCollector);
  1157. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCollideOrientedBoxStats));
  1158. }
  1159. void QuadTree::CastAABox(const AABoxCast &inBox, CastShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  1160. {
  1161. class Visitor
  1162. {
  1163. public:
  1164. /// Constructor
  1165. JPH_INLINE Visitor(const AABoxCast &inBox, CastShapeBodyCollector &ioCollector) :
  1166. mOrigin(inBox.mBox.GetCenter()),
  1167. mExtent(inBox.mBox.GetExtent()),
  1168. mInvDirection(inBox.mDirection),
  1169. mCollector(ioCollector)
  1170. {
  1171. mFractionStack.resize(cStackSize);
  1172. mFractionStack[0] = -1;
  1173. }
  1174. /// Returns true if further processing of the tree should be aborted
  1175. JPH_INLINE bool ShouldAbort() const
  1176. {
  1177. return mCollector.ShouldEarlyOut();
  1178. }
  1179. /// Returns true if this node / body should be visited, false if no hit can be generated
  1180. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  1181. {
  1182. return mFractionStack[inStackTop] < mCollector.GetPositiveEarlyOutFraction();
  1183. }
  1184. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  1185. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop)
  1186. {
  1187. // Enlarge them by the casted aabox extents
  1188. Vec4 bounds_min_x = inBoundsMinX, bounds_min_y = inBoundsMinY, bounds_min_z = inBoundsMinZ, bounds_max_x = inBoundsMaxX, bounds_max_y = inBoundsMaxY, bounds_max_z = inBoundsMaxZ;
  1189. AABox4EnlargeWithExtent(mExtent, bounds_min_x, bounds_min_y, bounds_min_z, bounds_max_x, bounds_max_y, bounds_max_z);
  1190. // Test 4 children
  1191. Vec4 fraction = RayAABox4(mOrigin, mInvDirection, bounds_min_x, bounds_min_y, bounds_min_z, bounds_max_x, bounds_max_y, bounds_max_z);
  1192. // Sort so that highest values are first (we want to first process closer hits and we process stack top to bottom)
  1193. return SortReverseAndStore(fraction, mCollector.GetPositiveEarlyOutFraction(), ioChildNodeIDs, &mFractionStack[inStackTop]);
  1194. }
  1195. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  1196. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  1197. {
  1198. // Store potential hit with body
  1199. BroadPhaseCastResult result { inBodyID, mFractionStack[inStackTop] };
  1200. mCollector.AddHit(result);
  1201. }
  1202. /// Called when the stack is resized, this allows us to resize the fraction stack to match the new stack size
  1203. JPH_INLINE void OnStackResized(size_t inNewStackSize)
  1204. {
  1205. mFractionStack.resize(inNewStackSize);
  1206. }
  1207. private:
  1208. Vec3 mOrigin;
  1209. Vec3 mExtent;
  1210. RayInvDirection mInvDirection;
  1211. CastShapeBodyCollector & mCollector;
  1212. Array<float, STLLocalAllocator<float, cStackSize>> mFractionStack;
  1213. };
  1214. Visitor visitor(inBox, ioCollector);
  1215. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCastAABoxStats));
  1216. }
void QuadTree::FindCollidingPairs(const BodyVector &inBodies, const BodyID *inActiveBodies, int inNumActiveBodies, float inSpeculativeContactDistance, BodyPairCollector &ioPairCollector, const ObjectLayerPairFilter &inObjectLayerPairFilter) const
{
	// For every active body, walks the tree and reports each other body whose (speculatively
	// expanded) bounds overlap it, subject to the layer pair filter and pair de-duplication.

	// Note that we don't lock the tree at this point. We know that the tree is not going to be swapped or deleted while finding collision pairs due to the way the jobs are scheduled in the PhysicsSystem::Update.
	// We double check this at the end of the function.
	const RootNode &root_node = GetCurrentRoot();
	JPH_ASSERT(root_node.mIndex != cInvalidNodeIndex);

	// Assert sane input
	JPH_ASSERT(inActiveBodies != nullptr);
	JPH_ASSERT(inNumActiveBodies > 0);

	// Traversal stack shared by all bodies; starts in a fixed-size local buffer and grows on the heap when needed
	Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack_array;
	node_stack_array.resize(cStackSize);
	NodeID *node_stack = node_stack_array.data();

	// Loop over all active bodies
	for (int b1 = 0; b1 < inNumActiveBodies; ++b1)
	{
		BodyID b1_id = inActiveBodies[b1];
		const Body &body1 = *inBodies[b1_id.GetIndex()];
		JPH_ASSERT(!body1.IsStatic());

		// Expand the bounding box by the speculative contact distance
		AABox bounds1 = body1.GetWorldSpaceBounds();
		bounds1.ExpandBy(Vec3::sReplicate(inSpeculativeContactDistance));

		// Test each body with the tree
		node_stack[0] = root_node.GetNodeID();
		int top = 0;
		do
		{
			// Check if node is a body
			NodeID child_node_id = node_stack[top];
			if (child_node_id.IsBody())
			{
				// Don't collide with self
				BodyID b2_id = child_node_id.GetBodyID();
				if (b1_id != b2_id)
				{
					// Collision between dynamic pairs need to be picked up only once
					const Body &body2 = *inBodies[b2_id.GetIndex()];
					if (inObjectLayerPairFilter.ShouldCollide(body1.GetObjectLayer(), body2.GetObjectLayer())
						&& Body::sFindCollidingPairsCanCollide(body1, body2)
						&& bounds1.Overlaps(body2.GetWorldSpaceBounds())) // In the broadphase we widen the bounding box when a body moves, do a final check to see if the bounding boxes actually overlap
					{
						// Store potential hit between bodies
						ioPairCollector.AddHit({ b1_id, b2_id });
					}
				}
			}
			else if (child_node_id.IsValid())
			{
				// Process normal node
				const Node &node = mAllocator->Get(child_node_id.GetNodeIndex());
				JPH_ASSERT(IsAligned(&node, JPH_CACHE_LINE_SIZE));

				// Get bounds of 4 children
				Vec4 bounds_minx = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinX);
				Vec4 bounds_miny = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinY);
				Vec4 bounds_minz = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinZ);
				Vec4 bounds_maxx = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxX);
				Vec4 bounds_maxy = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxY);
				Vec4 bounds_maxz = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxZ);

				// Test overlap
				UVec4 overlap = AABox4VsBox(bounds1, bounds_minx, bounds_miny, bounds_minz, bounds_maxx, bounds_maxy, bounds_maxz);
				int num_results = overlap.CountTrues();
				if (num_results > 0)
				{
					// Load ids for 4 children
					UVec4 child_ids = UVec4::sLoadInt4Aligned((const uint32 *)&node.mChildNodeID[0]);

					// Sort so that overlaps are first
					child_ids = UVec4::sSort4True(overlap, child_ids);

					// Ensure there is space on the stack (falls back to heap if there isn't)
					if (top + 4 >= (int)node_stack_array.size())
					{
						sQuadTreePerformanceWarning();
						node_stack_array.resize(node_stack_array.size() << 1);
						node_stack = node_stack_array.data();
					}

					// Push them onto the stack, starting at the current entry (which has already been consumed)
					child_ids.StoreInt4((uint32 *)&node_stack[top]);
					top += num_results;
				}
			}
			--top;
		}
		while (top >= 0);
	}

	// Test that the root node was not swapped while finding collision pairs.
	// This would mean that UpdateFinalize/DiscardOldTree ran during collision detection which should not be possible due to the way the jobs are scheduled.
	JPH_ASSERT(root_node.mIndex != cInvalidNodeIndex);
	JPH_ASSERT(&root_node == &GetCurrentRoot());
}
  1304. #ifdef JPH_DEBUG
  1305. void QuadTree::ValidateTree(const BodyVector &inBodies, const TrackingVector &inTracking, uint32 inNodeIndex, uint32 inNumExpectedBodies) const
  1306. {
  1307. JPH_PROFILE_FUNCTION();
  1308. // Root should be valid
  1309. JPH_ASSERT(inNodeIndex != cInvalidNodeIndex);
  1310. // To avoid call overhead, create a stack in place
  1311. JPH_SUPPRESS_WARNING_PUSH
  1312. JPH_CLANG_SUPPRESS_WARNING("-Wunused-member-function") // The default constructor of StackEntry is unused when using Jolt's Array class but not when using std::vector
  1313. struct StackEntry
  1314. {
  1315. StackEntry() = default;
  1316. inline StackEntry(uint32 inNodeIndex, uint32 inParentNodeIndex) : mNodeIndex(inNodeIndex), mParentNodeIndex(inParentNodeIndex) { }
  1317. uint32 mNodeIndex;
  1318. uint32 mParentNodeIndex;
  1319. };
  1320. JPH_SUPPRESS_WARNING_POP
  1321. Array<StackEntry, STLLocalAllocator<StackEntry, cStackSize>> stack;
  1322. stack.reserve(cStackSize);
  1323. stack.emplace_back(inNodeIndex, cInvalidNodeIndex);
  1324. uint32 num_bodies = 0;
  1325. do
  1326. {
  1327. // Copy entry from the stack
  1328. StackEntry cur_stack = stack.back();
  1329. stack.pop_back();
  1330. // Validate parent
  1331. const Node &node = mAllocator->Get(cur_stack.mNodeIndex);
  1332. JPH_ASSERT(node.mParentNodeIndex == cur_stack.mParentNodeIndex);
  1333. // Validate that when a parent is not-changed that all of its children are also
  1334. JPH_ASSERT(cur_stack.mParentNodeIndex == cInvalidNodeIndex || mAllocator->Get(cur_stack.mParentNodeIndex).mIsChanged || !node.mIsChanged);
  1335. // Loop children
  1336. for (uint32 i = 0; i < 4; ++i)
  1337. {
  1338. NodeID child_node_id = node.mChildNodeID[i];
  1339. if (child_node_id.IsValid())
  1340. {
  1341. if (child_node_id.IsNode())
  1342. {
  1343. // Child is a node, recurse
  1344. uint32 child_idx = child_node_id.GetNodeIndex();
  1345. stack.emplace_back(child_idx, cur_stack.mNodeIndex);
  1346. // Validate that the bounding box is bigger or equal to the bounds in the tree
  1347. // Bounding box could also be invalid if all children of our child were removed
  1348. AABox child_bounds;
  1349. node.GetChildBounds(i, child_bounds);
  1350. AABox real_child_bounds;
  1351. mAllocator->Get(child_idx).GetNodeBounds(real_child_bounds);
  1352. JPH_ASSERT(child_bounds.Contains(real_child_bounds) || !real_child_bounds.IsValid());
  1353. }
  1354. else
  1355. {
  1356. // Increment number of bodies found
  1357. ++num_bodies;
  1358. // Check if tracker matches position of body
  1359. uint32 node_idx, child_idx;
  1360. GetBodyLocation(inTracking, child_node_id.GetBodyID(), node_idx, child_idx);
  1361. JPH_ASSERT(node_idx == cur_stack.mNodeIndex);
  1362. JPH_ASSERT(child_idx == i);
  1363. // Validate that the body cached bounds still match the actual bounds
  1364. const Body *body = inBodies[child_node_id.GetBodyID().GetIndex()];
  1365. body->ValidateCachedBounds();
  1366. // Validate that the node bounds are bigger or equal to the body bounds
  1367. AABox body_bounds;
  1368. node.GetChildBounds(i, body_bounds);
  1369. JPH_ASSERT(body_bounds.Contains(body->GetWorldSpaceBounds()));
  1370. }
  1371. }
  1372. }
  1373. }
  1374. while (!stack.empty());
  1375. // Check that the amount of bodies in the tree matches our counter
  1376. JPH_ASSERT(num_bodies == inNumExpectedBodies);
  1377. }
  1378. #endif
  1379. #ifdef JPH_DUMP_BROADPHASE_TREE
  1380. void QuadTree::DumpTree(const NodeID &inRoot, const char *inFileNamePrefix) const
  1381. {
  1382. // Open DOT file
  1383. std::ofstream f;
  1384. f.open(StringFormat("%s.dot", inFileNamePrefix).c_str(), std::ofstream::out | std::ofstream::trunc);
  1385. if (!f.is_open())
  1386. return;
  1387. // Write header
  1388. f << "digraph {\n";
  1389. // Iterate the entire tree
  1390. Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack;
  1391. node_stack.reserve(cStackSize);
  1392. node_stack.push_back(inRoot);
  1393. JPH_ASSERT(inRoot.IsValid());
  1394. do
  1395. {
  1396. // Check if node is a body
  1397. NodeID node_id = node_stack.back();
  1398. node_stack.pop_back();
  1399. if (node_id.IsBody())
  1400. {
  1401. // Output body
  1402. String body_id = ConvertToString(node_id.GetBodyID().GetIndex());
  1403. f << "body" << body_id << "[label = \"Body " << body_id << "\"]\n";
  1404. }
  1405. else
  1406. {
  1407. // Process normal node
  1408. uint32 node_idx = node_id.GetNodeIndex();
  1409. const Node &node = mAllocator->Get(node_idx);
  1410. // Get bounding box
  1411. AABox bounds;
  1412. node.GetNodeBounds(bounds);
  1413. // Output node
  1414. String node_str = ConvertToString(node_idx);
  1415. f << "node" << node_str << "[label = \"Node " << node_str << "\nVolume: " << ConvertToString(bounds.GetVolume()) << "\" color=" << (node.mIsChanged? "red" : "black") << "]\n";
  1416. // Recurse and get all children
  1417. for (NodeID child_node_id : node.mChildNodeID)
  1418. if (child_node_id.IsValid())
  1419. {
  1420. node_stack.push_back(child_node_id);
  1421. // Output link
  1422. f << "node" << node_str << " -> ";
  1423. if (child_node_id.IsBody())
  1424. f << "body" << ConvertToString(child_node_id.GetBodyID().GetIndex());
  1425. else
  1426. f << "node" << ConvertToString(child_node_id.GetNodeIndex());
  1427. f << "\n";
  1428. }
  1429. }
  1430. }
  1431. while (!node_stack.empty());
  1432. // Finish DOT file
  1433. f << "}\n";
  1434. f.close();
  1435. // Convert to svg file
  1436. String cmd = StringFormat("dot %s.dot -Tsvg -o %s.svg", inFileNamePrefix, inFileNamePrefix);
  1437. system(cmd.c_str());
  1438. }
  1439. #endif // JPH_DUMP_BROADPHASE_TREE
  1440. #ifdef JPH_TRACK_BROADPHASE_STATS
  1441. uint64 QuadTree::GetTicks100Pct(const LayerToStats &inLayer) const
  1442. {
  1443. uint64 total_ticks = 0;
  1444. for (const LayerToStats::value_type &kv : inLayer)
  1445. total_ticks += kv.second.mTotalTicks;
  1446. return total_ticks;
  1447. }
  1448. void QuadTree::ReportStats(const char *inName, const LayerToStats &inLayer, uint64 inTicks100Pct) const
  1449. {
  1450. for (const LayerToStats::value_type &kv : inLayer)
  1451. {
  1452. double total_pct = 100.0 * double(kv.second.mTotalTicks) / double(inTicks100Pct);
  1453. double total_pct_excl_collector = 100.0 * double(kv.second.mTotalTicks - kv.second.mCollectorTicks) / double(inTicks100Pct);
  1454. double hits_reported_vs_bodies_visited = kv.second.mBodiesVisited > 0? 100.0 * double(kv.second.mHitsReported) / double(kv.second.mBodiesVisited) : 100.0;
  1455. double hits_reported_vs_nodes_visited = kv.second.mNodesVisited > 0? double(kv.second.mHitsReported) / double(kv.second.mNodesVisited) : -1.0;
  1456. std::stringstream str;
  1457. str << inName << ", " << kv.first << ", " << mName << ", " << kv.second.mNumQueries << ", " << total_pct << ", " << total_pct_excl_collector << ", " << kv.second.mNodesVisited << ", " << kv.second.mBodiesVisited << ", " << kv.second.mHitsReported << ", " << hits_reported_vs_bodies_visited << ", " << hits_reported_vs_nodes_visited;
  1458. Trace(str.str().c_str());
  1459. }
  1460. }
  1461. uint64 QuadTree::GetTicks100Pct() const
  1462. {
  1463. uint64 total_ticks = 0;
  1464. total_ticks += GetTicks100Pct(mCastRayStats);
  1465. total_ticks += GetTicks100Pct(mCollideAABoxStats);
  1466. total_ticks += GetTicks100Pct(mCollideSphereStats);
  1467. total_ticks += GetTicks100Pct(mCollidePointStats);
  1468. total_ticks += GetTicks100Pct(mCollideOrientedBoxStats);
  1469. total_ticks += GetTicks100Pct(mCastAABoxStats);
  1470. return total_ticks;
  1471. }
  1472. void QuadTree::ReportStats(uint64 inTicks100Pct) const
  1473. {
  1474. unique_lock lock(mStatsMutex);
  1475. ReportStats("RayCast", mCastRayStats, inTicks100Pct);
  1476. ReportStats("CollideAABox", mCollideAABoxStats, inTicks100Pct);
  1477. ReportStats("CollideSphere", mCollideSphereStats, inTicks100Pct);
  1478. ReportStats("CollidePoint", mCollidePointStats, inTicks100Pct);
  1479. ReportStats("CollideOrientedBox", mCollideOrientedBoxStats, inTicks100Pct);
  1480. ReportStats("CastAABox", mCastAABoxStats, inTicks100Pct);
  1481. }
  1482. #endif // JPH_TRACK_BROADPHASE_STATS
  1483. uint QuadTree::GetMaxTreeDepth(const NodeID &inNodeID) const
  1484. {
  1485. // Reached a leaf?
  1486. if (!inNodeID.IsValid() || inNodeID.IsBody())
  1487. return 0;
  1488. // Recurse to children
  1489. uint max_depth = 0;
  1490. const Node &node = mAllocator->Get(inNodeID.GetNodeIndex());
  1491. for (NodeID child_node_id : node.mChildNodeID)
  1492. max_depth = max(max_depth, GetMaxTreeDepth(child_node_id));
  1493. return max_depth + 1;
  1494. }
  1495. JPH_NAMESPACE_END