QuadTree.cpp 60 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768
  1. // Jolt Physics Library (https://github.com/jrouwe/JoltPhysics)
  2. // SPDX-FileCopyrightText: 2021 Jorrit Rouwe
  3. // SPDX-License-Identifier: MIT
  4. #include <Jolt/Jolt.h>
  5. #include <Jolt/Physics/Collision/BroadPhase/QuadTree.h>
  6. #include <Jolt/Physics/Collision/BroadPhase/BroadPhaseQuadTree.h>
  7. #include <Jolt/Physics/Collision/RayCast.h>
  8. #include <Jolt/Physics/Collision/AABoxCast.h>
  9. #include <Jolt/Physics/Collision/CastResult.h>
  10. #include <Jolt/Physics/Collision/SortReverseAndStore.h>
  11. #include <Jolt/Physics/Body/BodyPair.h>
  12. #include <Jolt/Physics/PhysicsLock.h>
  13. #include <Jolt/Geometry/AABox4.h>
  14. #include <Jolt/Geometry/RayAABox.h>
  15. #include <Jolt/Geometry/OrientedBox.h>
  16. #include <Jolt/Core/STLLocalAllocator.h>
  17. #ifdef JPH_DUMP_BROADPHASE_TREE
  18. JPH_SUPPRESS_WARNINGS_STD_BEGIN
  19. #include <fstream>
  20. JPH_SUPPRESS_WARNINGS_STD_END
  21. #endif // JPH_DUMP_BROADPHASE_TREE
  22. JPH_NAMESPACE_BEGIN
  23. ////////////////////////////////////////////////////////////////////////////////////////////////////////
  24. // QuadTree::Node
  25. ////////////////////////////////////////////////////////////////////////////////////////////////////////
  26. QuadTree::Node::Node(bool inIsChanged) :
  27. mIsChanged(inIsChanged)
  28. {
  29. // First reset bounds
  30. Vec4 val = Vec4::sReplicate(cLargeFloat);
  31. val.StoreFloat4((Float4 *)&mBoundsMinX);
  32. val.StoreFloat4((Float4 *)&mBoundsMinY);
  33. val.StoreFloat4((Float4 *)&mBoundsMinZ);
  34. val = Vec4::sReplicate(-cLargeFloat);
  35. val.StoreFloat4((Float4 *)&mBoundsMaxX);
  36. val.StoreFloat4((Float4 *)&mBoundsMaxY);
  37. val.StoreFloat4((Float4 *)&mBoundsMaxZ);
  38. // Reset child node ids
  39. mChildNodeID[0] = NodeID::sInvalid();
  40. mChildNodeID[1] = NodeID::sInvalid();
  41. mChildNodeID[2] = NodeID::sInvalid();
  42. mChildNodeID[3] = NodeID::sInvalid();
  43. }
  44. void QuadTree::Node::GetChildBounds(int inChildIndex, AABox &outBounds) const
  45. {
  46. // Read bounding box in order min -> max
  47. outBounds.mMin = Vec3(mBoundsMinX[inChildIndex], mBoundsMinY[inChildIndex], mBoundsMinZ[inChildIndex]);
  48. outBounds.mMax = Vec3(mBoundsMaxX[inChildIndex], mBoundsMaxY[inChildIndex], mBoundsMaxZ[inChildIndex]);
  49. }
// Store the bounding box for child inChildIndex. The component stores are ordered so that
// a concurrent reader (see GetChildBounds, which reads min -> max) observes either the old
// box, the new box, or an invalid box (min > max) -- never a partially written one.
void QuadTree::Node::SetChildBounds(int inChildIndex, const AABox &inBounds)
{
	// Bounding boxes provided to the quad tree should never be larger than cLargeFloat because this may trigger overflow exceptions
	// e.g. when squaring the value while testing sphere overlaps
	JPH_ASSERT(inBounds.mMin.GetX() >= -cLargeFloat && inBounds.mMin.GetX() <= cLargeFloat
		&& inBounds.mMin.GetY() >= -cLargeFloat && inBounds.mMin.GetY() <= cLargeFloat
		&& inBounds.mMin.GetZ() >= -cLargeFloat && inBounds.mMin.GetZ() <= cLargeFloat
		&& inBounds.mMax.GetX() >= -cLargeFloat && inBounds.mMax.GetX() <= cLargeFloat
		&& inBounds.mMax.GetY() >= -cLargeFloat && inBounds.mMax.GetY() <= cLargeFloat
		&& inBounds.mMax.GetZ() >= -cLargeFloat && inBounds.mMax.GetZ() <= cLargeFloat);

	// Set max first (this keeps the bounding box invalid for reading threads)
	mBoundsMaxZ[inChildIndex] = inBounds.mMax.GetZ();
	mBoundsMaxY[inChildIndex] = inBounds.mMax.GetY();
	mBoundsMaxX[inChildIndex] = inBounds.mMax.GetX();

	// Then set min (and make box valid)
	mBoundsMinZ[inChildIndex] = inBounds.mMin.GetZ();
	mBoundsMinY[inChildIndex] = inBounds.mMin.GetY();
	mBoundsMinX[inChildIndex] = inBounds.mMin.GetX(); // Min X becomes valid last
}
// Reset child inChildIndex to the 'invalid' box (min = cLargeFloat, max = -cLargeFloat).
// Mirrors SetChildBounds: min is written first so concurrent readers see the box become
// invalid before the max values change.
void QuadTree::Node::InvalidateChildBounds(int inChildIndex)
{
	// First we make the box invalid by setting the min to cLargeFloat
	mBoundsMinX[inChildIndex] = cLargeFloat; // Min X becomes invalid first
	mBoundsMinY[inChildIndex] = cLargeFloat;
	mBoundsMinZ[inChildIndex] = cLargeFloat;

	// Then we reset the max values too
	mBoundsMaxX[inChildIndex] = -cLargeFloat;
	mBoundsMaxY[inChildIndex] = -cLargeFloat;
	mBoundsMaxZ[inChildIndex] = -cLargeFloat;
}
  80. void QuadTree::Node::GetNodeBounds(AABox &outBounds) const
  81. {
  82. // Get first child bounds
  83. GetChildBounds(0, outBounds);
  84. // Encapsulate other child bounds
  85. for (int child_idx = 1; child_idx < 4; ++child_idx)
  86. {
  87. AABox tmp;
  88. GetChildBounds(child_idx, tmp);
  89. outBounds.Encapsulate(tmp);
  90. }
  91. }
// Atomically widen the bounding box of child inChildIndex so that it includes inBounds.
// Each component is updated independently with an atomic min/max; since bounds only ever
// grow here, this is safe to run concurrently with other widening threads.
// Returns true if any component actually changed.
bool QuadTree::Node::EncapsulateChildBounds(int inChildIndex, const AABox &inBounds)
{
	bool changed = AtomicMin(mBoundsMinX[inChildIndex], inBounds.mMin.GetX());
	changed |= AtomicMin(mBoundsMinY[inChildIndex], inBounds.mMin.GetY());
	changed |= AtomicMin(mBoundsMinZ[inChildIndex], inBounds.mMin.GetZ());
	changed |= AtomicMax(mBoundsMaxX[inChildIndex], inBounds.mMax.GetX());
	changed |= AtomicMax(mBoundsMaxY[inChildIndex], inBounds.mMax.GetY());
	changed |= AtomicMax(mBoundsMaxZ[inChildIndex], inBounds.mMax.GetZ());
	return changed;
}
  102. ////////////////////////////////////////////////////////////////////////////////////////////////////////
  103. // QuadTree
  104. ////////////////////////////////////////////////////////////////////////////////////////////////////////
// An 'empty' bounding box: min > max, so it fails any overlap test and Encapsulate() with a real box simply replaces it
const AABox QuadTree::cInvalidBounds(Vec3::sReplicate(cLargeFloat), Vec3::sReplicate(-cLargeFloat));
  106. static inline void sQuadTreePerformanceWarning()
  107. {
  108. #ifdef JPH_ENABLE_ASSERTS
  109. static atomic<bool> triggered_report { false };
  110. bool expected = false;
  111. if (triggered_report.compare_exchange_strong(expected, true))
  112. Trace("QuadTree: Performance warning: Stack full!\n"
  113. "This must be a very deep tree. Are you batch adding bodies through BodyInterface::AddBodiesPrepare/AddBodiesFinalize?\n"
  114. "If you add lots of bodies through BodyInterface::AddBody you may need to call PhysicsSystem::OptimizeBroadPhase to rebuild the tree.");
  115. #endif
  116. }
  117. void QuadTree::GetBodyLocation(const TrackingVector &inTracking, BodyID inBodyID, uint32 &outNodeIdx, uint32 &outChildIdx) const
  118. {
  119. uint32 body_location = inTracking[inBodyID.GetIndex()].mBodyLocation;
  120. JPH_ASSERT(body_location != Tracking::cInvalidBodyLocation);
  121. outNodeIdx = body_location & 0x3fffffff;
  122. outChildIdx = body_location >> 30;
  123. JPH_ASSERT(mAllocator->Get(outNodeIdx).mChildNodeID[outChildIdx] == inBodyID, "Make sure that the body is in the node where it should be");
  124. }
// Record in the tracking data where a body lives in the tree: node index in the low
// 30 bits, child slot (0..3) in the high 2 bits of a single uint32.
void QuadTree::SetBodyLocation(TrackingVector &ioTracking, BodyID inBodyID, uint32 inNodeIdx, uint32 inChildIdx) const
{
	// Node index must fit in 30 bits and child index in 2 bits for the packing below
	JPH_ASSERT(inNodeIdx <= 0x3fffffff);
	JPH_ASSERT(inChildIdx < 4);
	JPH_ASSERT(mAllocator->Get(inNodeIdx).mChildNodeID[inChildIdx] == inBodyID, "Make sure that the body is in the node where it should be");
	ioTracking[inBodyID.GetIndex()].mBodyLocation = inNodeIdx + (inChildIdx << 30);

	// Verify that the encode/decode round trips
#ifdef JPH_ENABLE_ASSERTS
	uint32 v1, v2;
	GetBodyLocation(ioTracking, inBodyID, v1, v2);
	JPH_ASSERT(v1 == inNodeIdx);
	JPH_ASSERT(v2 == inChildIdx);
#endif
}
// Mark a body as no longer being stored in any node (counterpart of SetBodyLocation)
void QuadTree::sInvalidateBodyLocation(TrackingVector &ioTracking, BodyID inBodyID)
{
	ioTracking[inBodyID.GetIndex()].mBodyLocation = Tracking::cInvalidBodyLocation;
}
  142. QuadTree::~QuadTree()
  143. {
  144. // Get rid of any nodes that are still to be freed
  145. DiscardOldTree();
  146. // Get the current root node
  147. const RootNode &root_node = GetCurrentRoot();
  148. // Collect all bodies
  149. Allocator::Batch free_batch;
  150. Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack;
  151. node_stack.reserve(cStackSize);
  152. node_stack.push_back(root_node.GetNodeID());
  153. JPH_ASSERT(node_stack.front().IsValid());
  154. if (node_stack.front().IsNode())
  155. {
  156. do
  157. {
  158. // Process node
  159. NodeID node_id = node_stack.back();
  160. node_stack.pop_back();
  161. JPH_ASSERT(!node_id.IsBody());
  162. uint32 node_idx = node_id.GetNodeIndex();
  163. const Node &node = mAllocator->Get(node_idx);
  164. // Recurse and get all child nodes
  165. for (NodeID child_node_id : node.mChildNodeID)
  166. if (child_node_id.IsValid() && child_node_id.IsNode())
  167. node_stack.push_back(child_node_id);
  168. // Mark node to be freed
  169. mAllocator->AddObjectToBatch(free_batch, node_idx);
  170. }
  171. while (!node_stack.empty());
  172. }
  173. // Now free all nodes
  174. mAllocator->DestructObjectBatch(free_batch);
  175. }
  176. uint32 QuadTree::AllocateNode(bool inIsChanged)
  177. {
  178. uint32 index = mAllocator->ConstructObject(inIsChanged);
  179. if (index == Allocator::cInvalidObjectIndex)
  180. {
  181. // If you're running out of nodes, you're most likely adding too many individual bodies to the tree.
  182. // Because of the lock free nature of this tree, any individual body is added to the root of the tree.
  183. // This means that if you add a lot of bodies individually, you will end up with a very deep tree and you'll be
  184. // using a lot more nodes than you would if you added them in batches.
  185. // Please look at BodyInterface::AddBodiesPrepare/AddBodiesFinalize.
  186. //
  187. // If you have created a wrapper around Jolt then a possible solution is to activate a mode during loading
  188. // that queues up any bodies that need to be added. When loading is done, insert all of them as a single batch.
  189. // This could be implemented as a 'start batching' / 'end batching' call to switch in and out of that mode.
  190. // The rest of the code can then just use the regular 'add single body' call on your wrapper and doesn't need to know
  191. // if this mode is active or not.
  192. //
  193. // Calling PhysicsSystem::Update or PhysicsSystem::OptimizeBroadPhase will perform maintenance
  194. // on the tree and will make it efficient again. If you're not calling these functions and are adding a lot of bodies
  195. // you could still be running out of nodes because the tree is not being maintained. If your application is paused,
  196. // consider still calling PhysicsSystem::Update with a delta time of 0 to keep the tree in good shape.
  197. //
  198. // The number of nodes that is allocated is related to the max number of bodies that is passed in PhysicsSystem::Init.
  199. // For normal situations there are plenty of nodes available. If all else fails, you can increase the number of nodes
  200. // by increasing the maximum number of bodies.
  201. Trace("QuadTree: Out of nodes!");
  202. std::abort();
  203. }
  204. return index;
  205. }
// One-time initialization: remembers the node allocator and creates the (empty) root
// node for the initial tree.
void QuadTree::Init(Allocator &inAllocator)
{
	// Store allocator
	mAllocator = &inAllocator;

	// Allocate root node, not marked as changed
	mRootNode[mRootNodeIndex].mIndex = AllocateNode(false);
}
  213. void QuadTree::DiscardOldTree()
  214. {
  215. // Check if there is an old tree
  216. RootNode &old_root_node = mRootNode[mRootNodeIndex ^ 1];
  217. if (old_root_node.mIndex != cInvalidNodeIndex)
  218. {
  219. // Clear the root
  220. old_root_node.mIndex = cInvalidNodeIndex;
  221. // Now free all old nodes
  222. mAllocator->DestructObjectBatch(mFreeNodeBatch);
  223. // Clear the batch
  224. mFreeNodeBatch = Allocator::Batch();
  225. }
  226. }
  227. AABox QuadTree::GetBounds() const
  228. {
  229. uint32 node_idx = GetCurrentRoot().mIndex;
  230. JPH_ASSERT(node_idx != cInvalidNodeIndex);
  231. const Node &node = mAllocator->Get(node_idx);
  232. AABox bounds;
  233. node.GetNodeBounds(bounds);
  234. return bounds;
  235. }
// First phase of a tree rebuild: depth-first collects every body (and, unless
// inFullRebuild, every unchanged subtree as a whole) that is currently in the tree,
// then builds a fresh tree from them. The new root is written to outUpdateState and
// only becomes visible to queries once UpdateFinalize is called; the nodes that were
// taken apart are queued in mFreeNodeBatch until DiscardOldTree releases them.
void QuadTree::UpdatePrepare(const BodyVector &inBodies, TrackingVector &ioTracking, UpdateState &outUpdateState, bool inFullRebuild)
{
#ifdef JPH_ENABLE_ASSERTS
	// We only read positions
	BodyAccess::Grant grant(BodyAccess::EAccess::None, BodyAccess::EAccess::Read);
#endif

	// Assert we have no nodes pending deletion, this means DiscardOldTree wasn't called yet
	JPH_ASSERT(mFreeNodeBatch.mNumObjects == 0);

	// Mark tree non-dirty
	mIsDirty = false;

	// Get the current root node
	const RootNode &root_node = GetCurrentRoot();

#ifdef JPH_DUMP_BROADPHASE_TREE
	DumpTree(root_node.GetNodeID(), StringFormat("%s_PRE", mName).c_str());
#endif

	// Assert sane data
#ifdef JPH_DEBUG
	ValidateTree(inBodies, ioTracking, root_node.mIndex, mNumBodies);
#endif

	// Create space for all body ID's (a collected entry can also be an unchanged subtree,
	// so num_node_ids below may be smaller than mNumBodies)
	NodeID *node_ids = mNumBodies > 0? new NodeID [mNumBodies] : nullptr;
	NodeID *cur_node_id = node_ids;

	// Collect all bodies
	Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack;
	node_stack.reserve(cStackSize);
	node_stack.push_back(root_node.GetNodeID());
	JPH_ASSERT(node_stack.front().IsValid());
	do
	{
		// Pop node from stack
		NodeID node_id = node_stack.back();
		node_stack.pop_back();

		// Check if node is a body
		if (node_id.IsBody())
		{
			// Validate that we're still in the right layer
		#ifdef JPH_ENABLE_ASSERTS
			uint32 body_index = node_id.GetBodyID().GetIndex();
			JPH_ASSERT(ioTracking[body_index].mObjectLayer == inBodies[body_index]->GetObjectLayer());
		#endif

			// Store body
			*cur_node_id = node_id;
			++cur_node_id;
		}
		else
		{
			// Process normal node
			uint32 node_idx = node_id.GetNodeIndex();
			const Node &node = mAllocator->Get(node_idx);
			if (!node.mIsChanged && !inFullRebuild)
			{
				// Node is unchanged, treat it as a whole
				*cur_node_id = node_id;
				++cur_node_id;
			}
			else
			{
				// Node is changed, recurse and get all children
				for (NodeID child_node_id : node.mChildNodeID)
					if (child_node_id.IsValid())
						node_stack.push_back(child_node_id);

				// Mark node to be freed (it will be released in DiscardOldTree, after the
				// new tree has been published, so in-flight queries stay valid)
				mAllocator->AddObjectToBatch(mFreeNodeBatch, node_idx);
			}
		}
	}
	while (!node_stack.empty());

	// Check that our book keeping matches
	uint32 num_node_ids = uint32(cur_node_id - node_ids);
	JPH_ASSERT(inFullRebuild? num_node_ids == mNumBodies : num_node_ids <= mNumBodies);

	// This will be the new root node id
	NodeID root_node_id;

	if (num_node_ids > 0)
	{
		// We mark the first 5 levels (max 1024 nodes) of the newly built tree as 'changed' so that
		// those nodes get recreated every time when we rebuild the tree. This balances the amount of
		// time we spend on rebuilding the tree ('unchanged' nodes will be put in the new tree as a whole)
		// vs the quality of the built tree.
		constexpr uint cMaxDepthMarkChanged = 5;

		// Build new tree
		AABox root_bounds;
		root_node_id = BuildTree(inBodies, ioTracking, node_ids, num_node_ids, cMaxDepthMarkChanged, root_bounds);

		if (root_node_id.IsBody())
		{
			// For a single body we need to allocate a new root node (the root must always be a node)
			uint32 root_idx = AllocateNode(false);
			Node &root = mAllocator->Get(root_idx);
			root.SetChildBounds(0, root_bounds);
			root.mChildNodeID[0] = root_node_id;
			SetBodyLocation(ioTracking, root_node_id.GetBodyID(), root_idx, 0);
			root_node_id = NodeID::sFromNodeIndex(root_idx);
		}
	}
	else
	{
		// Empty tree, create root node
		uint32 root_idx = AllocateNode(false);
		root_node_id = NodeID::sFromNodeIndex(root_idx);
	}

	// Delete temporary data
	delete [] node_ids;

	// Hand the new root to UpdateFinalize, which will publish it
	outUpdateState.mRootNodeID = root_node_id;
}
// Second phase of a tree rebuild: publishes the tree built by UpdatePrepare by storing
// its root in the unused root slot and flipping mRootNodeIndex. The old tree remains
// intact (and usable by in-flight queries) until DiscardOldTree is called.
void QuadTree::UpdateFinalize([[maybe_unused]] const BodyVector &inBodies, [[maybe_unused]] const TrackingVector &inTracking, const UpdateState &inUpdateState)
{
	// Tree building is complete, now we switch the old with the new tree
	uint32 new_root_idx = mRootNodeIndex ^ 1;
	RootNode &new_root_node = mRootNode[new_root_idx];
	{
		// Note: We don't need to lock here as the old tree stays available so any queries
		// that use it can continue using it until DiscardOldTree is called. This slot
		// should be empty and unused at this moment.
		JPH_ASSERT(new_root_node.mIndex == cInvalidNodeIndex);
		new_root_node.mIndex = inUpdateState.mRootNodeID.GetNodeIndex();
	}

	// All queries that start from now on will use this new tree
	mRootNodeIndex = new_root_idx;

#ifdef JPH_DUMP_BROADPHASE_TREE
	DumpTree(new_root_node.GetNodeID(), StringFormat("%s_POST", mName).c_str());
#endif

#ifdef JPH_DEBUG
	ValidateTree(inBodies, inTracking, new_root_node.mIndex, mNumBodies);
#endif
}
// Partition the parallel arrays ioNodeIDs / ioNodeCenters (both of length inNumber)
// into two groups around a split plane placed halfway along the axis over which the
// centers are spread the most. On return, elements [0, outMidPoint) lie on the low
// side of the plane and [outMidPoint, inNumber) on the high side. Falls back to an
// even split when the partition is degenerate (all elements on one side).
void QuadTree::sPartition(NodeID *ioNodeIDs, Vec3 *ioNodeCenters, int inNumber, int &outMidPoint)
{
	// Handle trivial case: 4 or fewer elements fit in a single node, no spatial split needed
	if (inNumber <= 4)
	{
		outMidPoint = inNumber / 2;
		return;
	}

	// Calculate bounding box of box centers
	Vec3 center_min = Vec3::sReplicate(cLargeFloat);
	Vec3 center_max = Vec3::sReplicate(-cLargeFloat);
	for (const Vec3 *c = ioNodeCenters, *c_end = ioNodeCenters + inNumber; c < c_end; ++c)
	{
		Vec3 center = *c;
		center_min = Vec3::sMin(center_min, center);
		center_max = Vec3::sMax(center_max, center);
	}

	// Calculate split plane: the midpoint along the axis with the largest extent
	int dimension = (center_max - center_min).GetHighestComponentIndex();
	float split = 0.5f * (center_min + center_max)[dimension];

	// Divide bodies: scan from both ends, swapping misplaced pairs, until the
	// cursors meet (both arrays are permuted in lockstep)
	int start = 0, end = inNumber;
	while (start < end)
	{
		// Search for first element that is on the right hand side of the split plane
		while (start < end && ioNodeCenters[start][dimension] < split)
			++start;

		// Search for the first element that is on the left hand side of the split plane
		while (start < end && ioNodeCenters[end - 1][dimension] >= split)
			--end;

		if (start < end)
		{
			// Swap the two elements
			std::swap(ioNodeIDs[start], ioNodeIDs[end - 1]);
			std::swap(ioNodeCenters[start], ioNodeCenters[end - 1]);
			++start;
			--end;
		}
	}
	JPH_ASSERT(start == end);

	if (start > 0 && start < inNumber)
	{
		// Success!
		outMidPoint = start;
	}
	else
	{
		// Failed to divide bodies (e.g. all centers coincide), fall back to an even split
		outMidPoint = inNumber / 2;
	}
}
// Partition the range [inBegin, inEnd) of ioNodeIDs / ioNodeCenters into 4 groups by
// splitting the whole range and then splitting each half again. On return, outSplit[0..4]
// holds the group boundaries relative to ioNodeIDs: group i occupies [outSplit[i], outSplit[i + 1]).
void QuadTree::sPartition4(NodeID *ioNodeIDs, Vec3 *ioNodeCenters, int inBegin, int inEnd, int *outSplit)
{
	NodeID *node_ids = ioNodeIDs + inBegin;
	Vec3 *node_centers = ioNodeCenters + inBegin;
	int number = inEnd - inBegin;

	// Partition entire range (midpoint stored in outSplit[2], relative to inBegin for now)
	sPartition(node_ids, node_centers, number, outSplit[2]);

	// Partition lower half
	sPartition(node_ids, node_centers, outSplit[2], outSplit[1]);

	// Partition upper half
	sPartition(node_ids + outSplit[2], node_centers + outSplit[2], number - outSplit[2], outSplit[3]);

	// Convert the local offsets to absolute indices into ioNodeIDs
	outSplit[0] = inBegin;
	outSplit[1] += inBegin;
	outSplit[2] += inBegin;
	outSplit[3] += outSplit[2];
	outSplit[4] = inEnd;
}
  429. AABox QuadTree::GetNodeOrBodyBounds(const BodyVector &inBodies, NodeID inNodeID) const
  430. {
  431. if (inNodeID.IsNode())
  432. {
  433. // It is a node
  434. uint32 node_idx = inNodeID.GetNodeIndex();
  435. const Node &node = mAllocator->Get(node_idx);
  436. AABox bounds;
  437. node.GetNodeBounds(bounds);
  438. return bounds;
  439. }
  440. else
  441. {
  442. // It is a body
  443. return inBodies[inNodeID.GetBodyID().GetIndex()]->GetWorldSpaceBounds();
  444. }
  445. }
// Build a tree from the inNumber entries in ioNodeIDs (bodies and/or existing subtrees)
// and return the ID of its root; outBounds receives the root's bounding box. Note that
// when inNumber == 1 the returned ID can be a body ID rather than a node (callers like
// UpdatePrepare handle this by wrapping it in a fresh root node). Nodes in the top
// inMaxDepthMarkChanged levels are allocated with their changed flag set so they are
// rebuilt by the next UpdatePrepare. Body locations in ioTracking are updated along the way.
QuadTree::NodeID QuadTree::BuildTree(const BodyVector &inBodies, TrackingVector &ioTracking, NodeID *ioNodeIDs, int inNumber, uint inMaxDepthMarkChanged, AABox &outBounds)
{
	// Trivial case: No bodies in tree
	if (inNumber == 0)
	{
		outBounds = cInvalidBounds;
		return NodeID::sInvalid();
	}

	// Trivial case: When we have 1 body or node, return it
	if (inNumber == 1)
	{
		if (ioNodeIDs->IsNode())
		{
			// When returning an existing node as root, ensure that no parent has been set
			Node &node = mAllocator->Get(ioNodeIDs->GetNodeIndex());
			node.mParentNodeIndex = cInvalidNodeIndex;
		}
		outBounds = GetNodeOrBodyBounds(inBodies, *ioNodeIDs);
		return *ioNodeIDs;
	}

	// Calculate centers of all bodies that are to be inserted (used by sPartition4 to split spatially)
	Vec3 *centers = new Vec3 [inNumber];
	JPH_ASSERT(IsAligned(centers, JPH_VECTOR_ALIGNMENT));
	Vec3 *c = centers;
	for (const NodeID *n = ioNodeIDs, *n_end = ioNodeIDs + inNumber; n < n_end; ++n, ++c)
		*c = GetNodeOrBodyBounds(inBodies, *n).GetCenter();

	// The algorithm is a recursive tree build, but to avoid the call overhead we keep track of a stack here
	struct StackEntry
	{
		uint32			mNodeIdx;					// Node index of node that is generated
		int				mChildIdx;					// Index of child that we're currently processing
		int				mSplit[5];					// Indices where the node ID's have been split to form 4 partitions
		uint32			mDepth;						// Depth of this node in the tree
		Vec3			mNodeBoundsMin;				// Bounding box of this node, accumulated while iterating over children
		Vec3			mNodeBoundsMax;
	};
	static_assert(sizeof(StackEntry) == 64);
	StackEntry stack[cStackSize / 4]; // We don't process 4 at a time in this loop but 1, so the stack can be 4x as small
	int top = 0;

	// Create root node
	stack[0].mNodeIdx = AllocateNode(inMaxDepthMarkChanged > 0);
	stack[0].mChildIdx = -1;
	stack[0].mDepth = 0;
	stack[0].mNodeBoundsMin = Vec3::sReplicate(cLargeFloat);
	stack[0].mNodeBoundsMax = Vec3::sReplicate(-cLargeFloat);
	sPartition4(ioNodeIDs, centers, 0, inNumber, stack[0].mSplit);

	for (;;)
	{
		StackEntry &cur_stack = stack[top];

		// Next child
		cur_stack.mChildIdx++;

		// Check if all children processed
		if (cur_stack.mChildIdx >= 4)
		{
			// Terminate if there's nothing left to pop
			if (top <= 0)
				break;

			// Add our bounds to our parents bounds
			StackEntry &prev_stack = stack[top - 1];
			prev_stack.mNodeBoundsMin = Vec3::sMin(prev_stack.mNodeBoundsMin, cur_stack.mNodeBoundsMin);
			prev_stack.mNodeBoundsMax = Vec3::sMax(prev_stack.mNodeBoundsMax, cur_stack.mNodeBoundsMax);

			// Store parent node
			Node &node = mAllocator->Get(cur_stack.mNodeIdx);
			node.mParentNodeIndex = prev_stack.mNodeIdx;

			// Store this node's properties in the parent node
			Node &parent_node = mAllocator->Get(prev_stack.mNodeIdx);
			parent_node.mChildNodeID[prev_stack.mChildIdx] = NodeID::sFromNodeIndex(cur_stack.mNodeIdx);
			parent_node.SetChildBounds(prev_stack.mChildIdx, AABox(cur_stack.mNodeBoundsMin, cur_stack.mNodeBoundsMax));

			// Pop entry from stack
			--top;
		}
		else
		{
			// Get low and high index to bodies to process
			int low = cur_stack.mSplit[cur_stack.mChildIdx];
			int high = cur_stack.mSplit[cur_stack.mChildIdx + 1];
			int num_bodies = high - low;

			if (num_bodies == 1)
			{
				// Single entry: link it directly as a child of the current node
				NodeID child_node_id = ioNodeIDs[low];
				AABox bounds = GetNodeOrBodyBounds(inBodies, child_node_id);

				// Update node
				Node &node = mAllocator->Get(cur_stack.mNodeIdx);
				node.mChildNodeID[cur_stack.mChildIdx] = child_node_id;
				node.SetChildBounds(cur_stack.mChildIdx, bounds);

				if (child_node_id.IsNode())
				{
					// Update parent for this node
					Node &child_node = mAllocator->Get(child_node_id.GetNodeIndex());
					child_node.mParentNodeIndex = cur_stack.mNodeIdx;
				}
				else
				{
					// Set location in tracking
					SetBodyLocation(ioTracking, child_node_id.GetBodyID(), cur_stack.mNodeIdx, cur_stack.mChildIdx);
				}

				// Encapsulate bounding box in parent
				cur_stack.mNodeBoundsMin = Vec3::sMin(cur_stack.mNodeBoundsMin, bounds.mMin);
				cur_stack.mNodeBoundsMax = Vec3::sMax(cur_stack.mNodeBoundsMax, bounds.mMax);
			}
			else if (num_bodies > 1)
			{
				// Multiple entries: allocate a new child node and recurse (push onto our explicit stack)
				StackEntry &new_stack = stack[++top];
				JPH_ASSERT(top < cStackSize / 4);
				uint32 next_depth = cur_stack.mDepth + 1;
				new_stack.mNodeIdx = AllocateNode(inMaxDepthMarkChanged > next_depth);
				new_stack.mChildIdx = -1;
				new_stack.mDepth = next_depth;
				new_stack.mNodeBoundsMin = Vec3::sReplicate(cLargeFloat);
				new_stack.mNodeBoundsMax = Vec3::sReplicate(-cLargeFloat);
				sPartition4(ioNodeIDs, centers, low, high, new_stack.mSplit);
			}
			// num_bodies == 0: leave this child slot empty (invalid ID / invalid bounds)
		}
	}

	// Delete temporary data
	delete [] centers;

	// Store bounding box of root
	outBounds.mMin = stack[0].mNodeBoundsMin;
	outBounds.mMax = stack[0].mNodeBoundsMax;

	// Return root
	return NodeID::sFromNodeIndex(stack[0].mNodeIdx);
}
  570. void QuadTree::MarkNodeAndParentsChanged(uint32 inNodeIndex)
  571. {
  572. uint32 node_idx = inNodeIndex;
  573. do
  574. {
  575. // If node has changed, parent will be too
  576. Node &node = mAllocator->Get(node_idx);
  577. if (node.mIsChanged)
  578. break;
  579. // Mark node as changed
  580. node.mIsChanged = true;
  581. // Get our parent
  582. node_idx = node.mParentNodeIndex;
  583. }
  584. while (node_idx != cInvalidNodeIndex);
  585. }
// Walk from inNodeIndex up to the root, enlarging each parent's stored bounds for the
// child we came from so that they include inNewBounds, and marking every visited node
// as changed. Stops early once a parent's bounds did not need to grow; from there on
// only the changed flag needs propagating, which MarkNodeAndParentsChanged handles.
void QuadTree::WidenAndMarkNodeAndParentsChanged(uint32 inNodeIndex, const AABox &inNewBounds)
{
	uint32 node_idx = inNodeIndex;

	for (;;)
	{
		// Mark node as changed
		Node &node = mAllocator->Get(node_idx);
		node.mIsChanged = true;

		// Get our parent
		uint32 parent_idx = node.mParentNodeIndex;
		if (parent_idx == cInvalidNodeIndex)
			break;

		// Find which child of the parent we're in (linear scan over the 4 slots)
		Node &parent_node = mAllocator->Get(parent_idx);
		NodeID node_id = NodeID::sFromNodeIndex(node_idx);
		int child_idx = -1;
		for (int i = 0; i < 4; ++i)
			if (parent_node.mChildNodeID[i] == node_id)
			{
				// Found one, set the node index and child index and update the bounding box too
				child_idx = i;
				break;
			}
		JPH_ASSERT(child_idx != -1, "Nodes don't get removed from the tree, we must have found it");

		// To avoid any race conditions with other threads we only enlarge bounding boxes
		if (!parent_node.EncapsulateChildBounds(child_idx, inNewBounds))
		{
			// No changes to bounding box, only marking as changed remains to be done
			if (!parent_node.mIsChanged)
				MarkNodeAndParentsChanged(parent_idx);
			break;
		}

		// Update node index
		node_idx = parent_idx;
	}
}
// Try to insert leaf inLeafID (either a body or a subtree root) as a direct child of node
// inNodeIndex. Returns true on success, false if the node has no free child slot (all 4
// slots taken), in which case the caller must try elsewhere (e.g. TryCreateNewRoot).
// Lock-free: a child slot is claimed with a compare-exchange against the invalid ID.
bool QuadTree::TryInsertLeaf(TrackingVector &ioTracking, int inNodeIndex, NodeID inLeafID, const AABox &inLeafBounds, int inLeafNumBodies)
{
	// Tentatively assign the node as parent (done before publication so the link is
	// visible once the leaf becomes reachable; harmless if the insert fails)
	bool leaf_is_node = inLeafID.IsNode();
	if (leaf_is_node)
	{
		uint32 leaf_idx = inLeafID.GetNodeIndex();
		mAllocator->Get(leaf_idx).mParentNodeIndex = inNodeIndex;
	}

	// Fetch node that we're adding to
	Node &node = mAllocator->Get(inNodeIndex);

	// Find an empty child
	for (uint32 child_idx = 0; child_idx < 4; ++child_idx)
		if (node.mChildNodeID[child_idx].CompareExchange(NodeID::sInvalid(), inLeafID)) // Check if we can claim it
		{
			// We managed to add it to the node

			// If leaf was a body, we need to update its bookkeeping
			if (!leaf_is_node)
				SetBodyLocation(ioTracking, inLeafID.GetBodyID(), inNodeIndex, child_idx);

			// Now set the bounding box making the child valid for queries
			node.SetChildBounds(child_idx, inLeafBounds);

			// Widen the bounds for our parents too
			WidenAndMarkNodeAndParentsChanged(inNodeIndex, inLeafBounds);

			// Update body counter
			mNumBodies += inLeafNumBodies;

			// And we're done
			return true;
		}

	// All 4 child slots were occupied
	return false;
}
// Try to replace the current root with a freshly allocated node whose first child is the
// old root and whose second child is inLeafID. Returns true on success; returns false and
// frees the speculative node if another thread swapped the root first (caller retries).
bool QuadTree::TryCreateNewRoot(TrackingVector &ioTracking, atomic<uint32> &ioRootNodeIndex, NodeID inLeafID, const AABox &inLeafBounds, int inLeafNumBodies)
{
	// Fetch old root
	uint32 root_idx = ioRootNodeIndex;
	Node &root = mAllocator->Get(root_idx);

	// Create new root, mark this new root as changed as we're not creating a very efficient tree at this point
	uint32 new_root_idx = AllocateNode(true);
	Node &new_root = mAllocator->Get(new_root_idx);

	// First child is current root, note that since the tree may be modified concurrently we cannot assume that the bounds of our child will be correct so we set a very large bounding box
	new_root.mChildNodeID[0] = NodeID::sFromNodeIndex(root_idx);
	new_root.SetChildBounds(0, AABox(Vec3::sReplicate(-cLargeFloat), Vec3::sReplicate(cLargeFloat)));

	// Second child is new leaf
	new_root.mChildNodeID[1] = inLeafID;
	new_root.SetChildBounds(1, inLeafBounds);

	// Tentatively assign new root as parent (only becomes meaningful if the swap below succeeds)
	bool leaf_is_node = inLeafID.IsNode();
	if (leaf_is_node)
	{
		uint32 leaf_idx = inLeafID.GetNodeIndex();
		mAllocator->Get(leaf_idx).mParentNodeIndex = new_root_idx;
	}

	// Try to swap it (root_idx is updated to the current value on failure, but we discard it)
	if (ioRootNodeIndex.compare_exchange_strong(root_idx, new_root_idx))
	{
		// We managed to set the new root

		// If leaf was a body, we need to update its bookkeeping
		if (!leaf_is_node)
			SetBodyLocation(ioTracking, inLeafID.GetBodyID(), new_root_idx, 1);

		// Store parent node for old root
		root.mParentNodeIndex = new_root_idx;

		// Update body counter
		mNumBodies += inLeafNumBodies;

		// And we're done
		return true;
	}

	// Failed to swap, someone else must have created a new root, free our speculative node and try again
	mAllocator->DestructObject(new_root_idx);
	return false;
}
  691. void QuadTree::AddBodiesPrepare(const BodyVector &inBodies, TrackingVector &ioTracking, BodyID *ioBodyIDs, int inNumber, AddState &outState)
  692. {
  693. // Assert sane input
  694. JPH_ASSERT(ioBodyIDs != nullptr);
  695. JPH_ASSERT(inNumber > 0);
  696. #ifdef JPH_ENABLE_ASSERTS
  697. // Below we just cast the body ID's to node ID's, check here that that is valid
  698. for (const BodyID *b = ioBodyIDs, *b_end = ioBodyIDs + inNumber; b < b_end; ++b)
  699. NodeID::sFromBodyID(*b);
  700. #endif
  701. // Build subtree for the new bodies, note that we mark all nodes as 'not changed'
  702. // so they will stay together as a batch and will make the tree rebuild cheaper
  703. outState.mLeafID = BuildTree(inBodies, ioTracking, (NodeID *)ioBodyIDs, inNumber, 0, outState.mLeafBounds);
  704. #ifdef JPH_DEBUG
  705. if (outState.mLeafID.IsNode())
  706. ValidateTree(inBodies, ioTracking, outState.mLeafID.GetNodeIndex(), inNumber);
  707. #endif
  708. }
  709. void QuadTree::AddBodiesFinalize(TrackingVector &ioTracking, int inNumberBodies, const AddState &inState)
  710. {
  711. // Assert sane input
  712. JPH_ASSERT(inNumberBodies > 0);
  713. // Mark tree dirty
  714. mIsDirty = true;
  715. // Get the current root node
  716. RootNode &root_node = GetCurrentRoot();
  717. for (;;)
  718. {
  719. // Check if we can insert the body in the root
  720. if (TryInsertLeaf(ioTracking, root_node.mIndex, inState.mLeafID, inState.mLeafBounds, inNumberBodies))
  721. return;
  722. // Check if we can create a new root
  723. if (TryCreateNewRoot(ioTracking, root_node.mIndex, inState.mLeafID, inState.mLeafBounds, inNumberBodies))
  724. return;
  725. }
  726. }
// Undo AddBodiesPrepare: walk the subtree rooted at inState.mLeafID depth-first,
// invalidate the tracked location of every body in it and free all its internal nodes in
// one batch. The subtree was never linked into the tree, so no synchronization is needed.
void QuadTree::AddBodiesAbort(TrackingVector &ioTracking, const AddState &inState)
{
	// Collect all bodies
	Allocator::Batch free_batch;

	// Explicit DFS stack; the prepared subtree is bounded so cStackSize suffices (asserted below)
	NodeID node_stack[cStackSize];
	node_stack[0] = inState.mLeafID;
	JPH_ASSERT(node_stack[0].IsValid());
	int top = 0;
	do
	{
		// Check if node is a body
		NodeID child_node_id = node_stack[top];
		if (child_node_id.IsBody())
		{
			// Reset location of body
			sInvalidateBodyLocation(ioTracking, child_node_id.GetBodyID());
		}
		else
		{
			// Process normal node
			uint32 node_idx = child_node_id.GetNodeIndex();
			const Node &node = mAllocator->Get(node_idx);
			// Push the valid children; note the first child overwrites the slot of the
			// current (already consumed) entry at node_stack[top]
			for (NodeID sub_child_node_id : node.mChildNodeID)
				if (sub_child_node_id.IsValid())
				{
					JPH_ASSERT(top < cStackSize);
					node_stack[top] = sub_child_node_id;
					top++;
				}

			// Mark it to be freed
			mAllocator->AddObjectToBatch(free_batch, node_idx);
		}
		--top;
	}
	while (top >= 0);

	// Now free all nodes as a single batch
	mAllocator->DestructObjectBatch(free_batch);
}
// Remove inNumber bodies from the tree. Child slots are freed but nodes are never removed;
// bounds are left as-is (queries that hit a stale, too-large bound simply find an invalid
// child). The three per-body steps below are order-critical for concurrent readers.
void QuadTree::RemoveBodies([[maybe_unused]] const BodyVector &inBodies, TrackingVector &ioTracking, const BodyID *ioBodyIDs, int inNumber)
{
	// Assert sane input
	JPH_ASSERT(ioBodyIDs != nullptr);
	JPH_ASSERT(inNumber > 0);

	// Mark tree dirty
	mIsDirty = true;

	for (const BodyID *cur = ioBodyIDs, *end = ioBodyIDs + inNumber; cur < end; ++cur)
	{
		// Check if BodyID is correct
		JPH_ASSERT(inBodies[cur->GetIndex()]->GetID() == *cur, "Provided BodyID doesn't match BodyID in body manager");

		// Get location of body
		uint32 node_idx, child_idx;
		GetBodyLocation(ioTracking, *cur, node_idx, child_idx);

		// First we reset our internal bookkeeping
		sInvalidateBodyLocation(ioTracking, *cur);

		// Then we make the bounding box invalid, no queries can find this node anymore
		Node &node = mAllocator->Get(node_idx);
		node.InvalidateChildBounds(child_idx);

		// Finally we reset the child id, this makes the node available for adds again
		node.mChildNodeID[child_idx] = NodeID::sInvalid();

		// We don't need to bubble up our bounding box changes to our parents since we never make volumes smaller, only bigger
		// But we do need to mark the nodes as changed so that the tree can be rebuilt
		MarkNodeAndParentsChanged(node_idx);
	}

	mNumBodies -= inNumber;
}
  792. void QuadTree::NotifyBodiesAABBChanged(const BodyVector &inBodies, const TrackingVector &inTracking, const BodyID *ioBodyIDs, int inNumber)
  793. {
  794. // Assert sane input
  795. JPH_ASSERT(ioBodyIDs != nullptr);
  796. JPH_ASSERT(inNumber > 0);
  797. for (const BodyID *cur = ioBodyIDs, *end = ioBodyIDs + inNumber; cur < end; ++cur)
  798. {
  799. // Check if BodyID is correct
  800. const Body *body = inBodies[cur->GetIndex()];
  801. JPH_ASSERT(body->GetID() == *cur, "Provided BodyID doesn't match BodyID in body manager");
  802. // Get the new bounding box
  803. const AABox &new_bounds = body->GetWorldSpaceBounds();
  804. // Get location of body
  805. uint32 node_idx, child_idx;
  806. GetBodyLocation(inTracking, *cur, node_idx, child_idx);
  807. // Widen bounds for node
  808. Node &node = mAllocator->Get(node_idx);
  809. if (node.EncapsulateChildBounds(child_idx, new_bounds))
  810. {
  811. // Mark tree dirty
  812. mIsDirty = true;
  813. // If bounds changed, widen the bounds for our parents too
  814. WidenAndMarkNodeAndParentsChanged(node_idx, new_bounds);
  815. }
  816. }
  817. }
// Generic stack-based traversal of the tree. The Visitor decides which children to descend
// into (VisitNodes), receives matching bodies (VisitBody) and can abort or skip stack
// entries (ShouldAbort / ShouldVisitNode). Bodies are filtered by inObjectLayerFilter using
// the layer stored in inTracking. The node stack starts in local storage and falls back to
// the heap when it grows (OnStackResized lets the visitor mirror the resize).
template <class Visitor>
JPH_INLINE void QuadTree::WalkTree(const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking, Visitor &ioVisitor JPH_IF_TRACK_BROADPHASE_STATS(, LayerToStats &ioStats)) const
{
	// Get the root
	const RootNode &root_node = GetCurrentRoot();

#ifdef JPH_TRACK_BROADPHASE_STATS
	// Start tracking stats
	int bodies_visited = 0;
	int hits_collected = 0;
	int nodes_visited = 0;
	uint64 collector_ticks = 0;

	uint64 start = GetProcessorTickCount();
#endif // JPH_TRACK_BROADPHASE_STATS

	// Depth-first traversal stack, seeded with the root
	Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack_array;
	node_stack_array.resize(cStackSize);
	NodeID *node_stack = node_stack_array.data();
	node_stack[0] = root_node.GetNodeID();
	int top = 0;
	do
	{
		// Check if node is a body
		NodeID child_node_id = node_stack[top];
		if (child_node_id.IsBody())
		{
			// Track amount of bodies visited
			JPH_IF_TRACK_BROADPHASE_STATS(++bodies_visited;)

			BodyID body_id = child_node_id.GetBodyID();
			ObjectLayer object_layer = inTracking[body_id.GetIndex()].mObjectLayer; // We're not taking a lock on the body, so it may be in the process of being removed so check if the object layer is invalid
			if (object_layer != cObjectLayerInvalid && inObjectLayerFilter.ShouldCollide(object_layer))
			{
				JPH_PROFILE("VisitBody");

				// Track amount of hits
				JPH_IF_TRACK_BROADPHASE_STATS(++hits_collected;)

				// Start track time the collector takes
				JPH_IF_TRACK_BROADPHASE_STATS(uint64 collector_start = GetProcessorTickCount();)

				// We found a body we collide with, call our visitor
				ioVisitor.VisitBody(body_id, top);

				// End track time the collector takes
				JPH_IF_TRACK_BROADPHASE_STATS(collector_ticks += GetProcessorTickCount() - collector_start;)

				// Check if we're done
				if (ioVisitor.ShouldAbort())
					break;
			}
		}
		else if (child_node_id.IsValid())
		{
			JPH_IF_TRACK_BROADPHASE_STATS(++nodes_visited;)

			// Ensure there is space on the stack (falls back to heap if there isn't)
			if (top + 4 >= (int)node_stack_array.size())
			{
				sQuadTreePerformanceWarning();
				node_stack_array.resize(node_stack_array.size() << 1);
				node_stack = node_stack_array.data();
				ioVisitor.OnStackResized(node_stack_array.size());
			}

			// Process normal node
			const Node &node = mAllocator->Get(child_node_id.GetNodeIndex());
			JPH_ASSERT(IsAligned(&node, JPH_CACHE_LINE_SIZE));

			// Load bounds of 4 children (SoA layout allows testing all 4 at once)
			Vec4 bounds_minx = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinX);
			Vec4 bounds_miny = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinY);
			Vec4 bounds_minz = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinZ);
			Vec4 bounds_maxx = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxX);
			Vec4 bounds_maxy = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxY);
			Vec4 bounds_maxz = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxZ);

			// Load ids for 4 children
			UVec4 child_ids = UVec4::sLoadInt4Aligned((const uint32 *)&node.mChildNodeID[0]);

			// Check which sub nodes to visit; the visitor sorts the ids it wants to the
			// front of child_ids and returns how many there are
			int num_results = ioVisitor.VisitNodes(bounds_minx, bounds_miny, bounds_minz, bounds_maxx, bounds_maxy, bounds_maxz, child_ids, top);
			child_ids.StoreInt4((uint32 *)&node_stack[top]);
			top += num_results;
		}

		// Fetch next node until we find one that the visitor wants to see
		do
			--top;
		while (top >= 0 && !ioVisitor.ShouldVisitNode(top));
	}
	while (top >= 0);

#ifdef JPH_TRACK_BROADPHASE_STATS
	// Calculate total time the broadphase walk took
	uint64 total_ticks = GetProcessorTickCount() - start;

	// Update stats under lock protection (slow!)
	{
		unique_lock lock(mStatsMutex);
		Stat &s = ioStats[inObjectLayerFilter.GetDescription()];
		s.mNumQueries++;
		s.mNodesVisited += nodes_visited;
		s.mBodiesVisited += bodies_visited;
		s.mHitsReported += hits_collected;
		s.mTotalTicks += total_ticks;
		s.mCollectorTicks += collector_ticks;
	}
#endif // JPH_TRACK_BROADPHASE_STATS
}
// Cast a ray through the tree and report every body whose bounds it may hit, together with
// the hit fraction, to ioCollector. Uses WalkTree with a visitor that keeps a per-stack-entry
// fraction so deeper entries can be skipped once the collector's early-out fraction shrinks.
void QuadTree::CastRay(const RayCast &inRay, RayCastBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
{
	class Visitor
	{
	public:
		/// Constructor
		JPH_INLINE Visitor(const RayCast &inRay, RayCastBodyCollector &ioCollector) :
			mOrigin(inRay.mOrigin),
			mInvDirection(inRay.mDirection),
			mCollector(ioCollector)
		{
			// Fraction stack mirrors WalkTree's node stack; root entry gets fraction -1 so it is always visited
			mFractionStack.resize(cStackSize);
			mFractionStack[0] = -1;
		}

		/// Returns true if further processing of the tree should be aborted
		JPH_INLINE bool ShouldAbort() const
		{
			return mCollector.ShouldEarlyOut();
		}

		/// Returns true if this node / body should be visited, false if no hit can be generated
		JPH_INLINE bool ShouldVisitNode(int inStackTop) const
		{
			// Skip entries whose entry fraction is already beyond the best hit so far
			return mFractionStack[inStackTop] < mCollector.GetEarlyOutFraction();
		}

		/// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
		JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop)
		{
			// Test the ray against 4 bounding boxes
			Vec4 fraction = RayAABox4(mOrigin, mInvDirection, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);

			// Sort so that highest values are first (we want to first process closer hits and we process stack top to bottom)
			return SortReverseAndStore(fraction, mCollector.GetEarlyOutFraction(), ioChildNodeIDs, &mFractionStack[inStackTop]);
		}

		/// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
		JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
		{
			// Store potential hit with body
			BroadPhaseCastResult result { inBodyID, mFractionStack[inStackTop] };
			mCollector.AddHit(result);
		}

		/// Called when the stack is resized, this allows us to resize the fraction stack to match the new stack size
		JPH_INLINE void OnStackResized(size_t inNewStackSize)
		{
			mFractionStack.resize(inNewStackSize);
		}

	private:
		Vec3 mOrigin;							// Ray origin
		RayInvDirection mInvDirection;			// Precomputed reciprocal ray direction for the slab test
		RayCastBodyCollector & mCollector;
		Array<float, STLLocalAllocator<float, cStackSize>> mFractionStack;	// Entry fraction per node-stack slot
	};

	Visitor visitor(inRay, ioCollector);
	WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCastRayStats));
}
  965. void QuadTree::CollideAABox(const AABox &inBox, CollideShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  966. {
  967. class Visitor
  968. {
  969. public:
  970. /// Constructor
  971. JPH_INLINE Visitor(const AABox &inBox, CollideShapeBodyCollector &ioCollector) :
  972. mBox(inBox),
  973. mCollector(ioCollector)
  974. {
  975. }
  976. /// Returns true if further processing of the tree should be aborted
  977. JPH_INLINE bool ShouldAbort() const
  978. {
  979. return mCollector.ShouldEarlyOut();
  980. }
  981. /// Returns true if this node / body should be visited, false if no hit can be generated
  982. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  983. {
  984. return true;
  985. }
  986. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  987. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop) const
  988. {
  989. // Test the box vs 4 boxes
  990. UVec4 hitting = AABox4VsBox(mBox, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);
  991. return CountAndSortTrues(hitting, ioChildNodeIDs);
  992. }
  993. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  994. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  995. {
  996. // Store potential hit with body
  997. mCollector.AddHit(inBodyID);
  998. }
  999. /// Called when the stack is resized
  1000. JPH_INLINE void OnStackResized([[maybe_unused]] size_t inNewStackSize) const
  1001. {
  1002. // Nothing to do
  1003. }
  1004. private:
  1005. const AABox & mBox;
  1006. CollideShapeBodyCollector & mCollector;
  1007. };
  1008. Visitor visitor(inBox, ioCollector);
  1009. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCollideAABoxStats));
  1010. }
  1011. void QuadTree::CollideSphere(Vec3Arg inCenter, float inRadius, CollideShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  1012. {
  1013. class Visitor
  1014. {
  1015. public:
  1016. /// Constructor
  1017. JPH_INLINE Visitor(Vec3Arg inCenter, float inRadius, CollideShapeBodyCollector &ioCollector) :
  1018. mCenterX(inCenter.SplatX()),
  1019. mCenterY(inCenter.SplatY()),
  1020. mCenterZ(inCenter.SplatZ()),
  1021. mRadiusSq(Vec4::sReplicate(Square(inRadius))),
  1022. mCollector(ioCollector)
  1023. {
  1024. }
  1025. /// Returns true if further processing of the tree should be aborted
  1026. JPH_INLINE bool ShouldAbort() const
  1027. {
  1028. return mCollector.ShouldEarlyOut();
  1029. }
  1030. /// Returns true if this node / body should be visited, false if no hit can be generated
  1031. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  1032. {
  1033. return true;
  1034. }
  1035. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  1036. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop) const
  1037. {
  1038. // Test 4 boxes vs sphere
  1039. UVec4 hitting = AABox4VsSphere(mCenterX, mCenterY, mCenterZ, mRadiusSq, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);
  1040. return CountAndSortTrues(hitting, ioChildNodeIDs);
  1041. }
  1042. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  1043. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  1044. {
  1045. // Store potential hit with body
  1046. mCollector.AddHit(inBodyID);
  1047. }
  1048. /// Called when the stack is resized
  1049. JPH_INLINE void OnStackResized([[maybe_unused]] size_t inNewStackSize) const
  1050. {
  1051. // Nothing to do
  1052. }
  1053. private:
  1054. Vec4 mCenterX;
  1055. Vec4 mCenterY;
  1056. Vec4 mCenterZ;
  1057. Vec4 mRadiusSq;
  1058. CollideShapeBodyCollector & mCollector;
  1059. };
  1060. Visitor visitor(inCenter, inRadius, ioCollector);
  1061. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCollideSphereStats));
  1062. }
  1063. void QuadTree::CollidePoint(Vec3Arg inPoint, CollideShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  1064. {
  1065. class Visitor
  1066. {
  1067. public:
  1068. /// Constructor
  1069. JPH_INLINE Visitor(Vec3Arg inPoint, CollideShapeBodyCollector &ioCollector) :
  1070. mPoint(inPoint),
  1071. mCollector(ioCollector)
  1072. {
  1073. }
  1074. /// Returns true if further processing of the tree should be aborted
  1075. JPH_INLINE bool ShouldAbort() const
  1076. {
  1077. return mCollector.ShouldEarlyOut();
  1078. }
  1079. /// Returns true if this node / body should be visited, false if no hit can be generated
  1080. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  1081. {
  1082. return true;
  1083. }
  1084. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  1085. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop) const
  1086. {
  1087. // Test if point overlaps with box
  1088. UVec4 hitting = AABox4VsPoint(mPoint, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);
  1089. return CountAndSortTrues(hitting, ioChildNodeIDs);
  1090. }
  1091. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  1092. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  1093. {
  1094. // Store potential hit with body
  1095. mCollector.AddHit(inBodyID);
  1096. }
  1097. /// Called when the stack is resized
  1098. JPH_INLINE void OnStackResized([[maybe_unused]] size_t inNewStackSize) const
  1099. {
  1100. // Nothing to do
  1101. }
  1102. private:
  1103. Vec3 mPoint;
  1104. CollideShapeBodyCollector & mCollector;
  1105. };
  1106. Visitor visitor(inPoint, ioCollector);
  1107. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCollidePointStats));
  1108. }
  1109. void QuadTree::CollideOrientedBox(const OrientedBox &inBox, CollideShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
  1110. {
  1111. class Visitor
  1112. {
  1113. public:
  1114. /// Constructor
  1115. JPH_INLINE Visitor(const OrientedBox &inBox, CollideShapeBodyCollector &ioCollector) :
  1116. mBox(inBox),
  1117. mCollector(ioCollector)
  1118. {
  1119. }
  1120. /// Returns true if further processing of the tree should be aborted
  1121. JPH_INLINE bool ShouldAbort() const
  1122. {
  1123. return mCollector.ShouldEarlyOut();
  1124. }
  1125. /// Returns true if this node / body should be visited, false if no hit can be generated
  1126. JPH_INLINE bool ShouldVisitNode(int inStackTop) const
  1127. {
  1128. return true;
  1129. }
  1130. /// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
  1131. JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop) const
  1132. {
  1133. // Test if point overlaps with box
  1134. UVec4 hitting = AABox4VsBox(mBox, inBoundsMinX, inBoundsMinY, inBoundsMinZ, inBoundsMaxX, inBoundsMaxY, inBoundsMaxZ);
  1135. return CountAndSortTrues(hitting, ioChildNodeIDs);
  1136. }
  1137. /// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
  1138. JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
  1139. {
  1140. // Store potential hit with body
  1141. mCollector.AddHit(inBodyID);
  1142. }
  1143. /// Called when the stack is resized
  1144. JPH_INLINE void OnStackResized([[maybe_unused]] size_t inNewStackSize) const
  1145. {
  1146. // Nothing to do
  1147. }
  1148. private:
  1149. OrientedBox mBox;
  1150. CollideShapeBodyCollector & mCollector;
  1151. };
  1152. Visitor visitor(inBox, ioCollector);
  1153. WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCollideOrientedBoxStats));
  1154. }
// Sweep an AABox along a direction through the tree and report every body it may hit,
// together with the hit fraction, to ioCollector. Implemented as a ray cast from the box
// center against node bounds enlarged by the box extent (Minkowski sum), so the same
// fraction-stack machinery as CastRay applies.
void QuadTree::CastAABox(const AABoxCast &inBox, CastShapeBodyCollector &ioCollector, const ObjectLayerFilter &inObjectLayerFilter, const TrackingVector &inTracking) const
{
	class Visitor
	{
	public:
		/// Constructor
		JPH_INLINE Visitor(const AABoxCast &inBox, CastShapeBodyCollector &ioCollector) :
			mOrigin(inBox.mBox.GetCenter()),
			mExtent(inBox.mBox.GetExtent()),
			mInvDirection(inBox.mDirection),
			mCollector(ioCollector)
		{
			// Fraction stack mirrors WalkTree's node stack; root entry gets fraction -1 so it is always visited
			mFractionStack.resize(cStackSize);
			mFractionStack[0] = -1;
		}

		/// Returns true if further processing of the tree should be aborted
		JPH_INLINE bool ShouldAbort() const
		{
			return mCollector.ShouldEarlyOut();
		}

		/// Returns true if this node / body should be visited, false if no hit can be generated
		JPH_INLINE bool ShouldVisitNode(int inStackTop) const
		{
			// Skip entries whose entry fraction is already beyond the best hit so far
			return mFractionStack[inStackTop] < mCollector.GetPositiveEarlyOutFraction();
		}

		/// Visit nodes, returns number of hits found and sorts ioChildNodeIDs so that they are at the beginning of the vector.
		JPH_INLINE int VisitNodes(Vec4Arg inBoundsMinX, Vec4Arg inBoundsMinY, Vec4Arg inBoundsMinZ, Vec4Arg inBoundsMaxX, Vec4Arg inBoundsMaxY, Vec4Arg inBoundsMaxZ, UVec4 &ioChildNodeIDs, int inStackTop)
		{
			// Enlarge them by the casted aabox extents (reduces the box cast to a ray cast)
			Vec4 bounds_min_x = inBoundsMinX, bounds_min_y = inBoundsMinY, bounds_min_z = inBoundsMinZ, bounds_max_x = inBoundsMaxX, bounds_max_y = inBoundsMaxY, bounds_max_z = inBoundsMaxZ;
			AABox4EnlargeWithExtent(mExtent, bounds_min_x, bounds_min_y, bounds_min_z, bounds_max_x, bounds_max_y, bounds_max_z);

			// Test 4 children
			Vec4 fraction = RayAABox4(mOrigin, mInvDirection, bounds_min_x, bounds_min_y, bounds_min_z, bounds_max_x, bounds_max_y, bounds_max_z);

			// Sort so that highest values are first (we want to first process closer hits and we process stack top to bottom)
			return SortReverseAndStore(fraction, mCollector.GetPositiveEarlyOutFraction(), ioChildNodeIDs, &mFractionStack[inStackTop]);
		}

		/// Visit a body, returns false if the algorithm should terminate because no hits can be generated anymore
		JPH_INLINE void VisitBody(const BodyID &inBodyID, int inStackTop)
		{
			// Store potential hit with body
			BroadPhaseCastResult result { inBodyID, mFractionStack[inStackTop] };
			mCollector.AddHit(result);
		}

		/// Called when the stack is resized, this allows us to resize the fraction stack to match the new stack size
		JPH_INLINE void OnStackResized(size_t inNewStackSize)
		{
			mFractionStack.resize(inNewStackSize);
		}

	private:
		Vec3 mOrigin;							// Center of the cast box (ray origin)
		Vec3 mExtent;							// Half extent of the cast box, used to enlarge node bounds
		RayInvDirection mInvDirection;			// Precomputed reciprocal cast direction for the slab test
		CastShapeBodyCollector & mCollector;
		Array<float, STLLocalAllocator<float, cStackSize>> mFractionStack;	// Entry fraction per node-stack slot
	};

	Visitor visitor(inBox, ioCollector);
	WalkTree(inObjectLayerFilter, inTracking, visitor JPH_IF_TRACK_BROADPHASE_STATS(, mCastAABoxStats));
}
// For every active body, traverse the tree and report pairs of bodies whose (speculatively
// expanded) bounds overlap and that pass the layer filter and Body::sFindCollidingPairsCanCollide
// (which ensures each dynamic-dynamic pair is reported only once). Runs without a tree lock;
// see the comment below and the asserts at the end.
void QuadTree::FindCollidingPairs(const BodyVector &inBodies, const BodyID *inActiveBodies, int inNumActiveBodies, float inSpeculativeContactDistance, BodyPairCollector &ioPairCollector, const ObjectLayerPairFilter &inObjectLayerPairFilter) const
{
	// Note that we don't lock the tree at this point. We know that the tree is not going to be swapped or deleted while finding collision pairs due to the way the jobs are scheduled in the PhysicsSystem::Update.
	// We double check this at the end of the function.
	const RootNode &root_node = GetCurrentRoot();
	JPH_ASSERT(root_node.mIndex != cInvalidNodeIndex);

	// Assert sane input
	JPH_ASSERT(inActiveBodies != nullptr);
	JPH_ASSERT(inNumActiveBodies > 0);

	// Traversal stack, reused for every active body; starts in local storage, grows on the heap if needed
	Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack_array;
	node_stack_array.resize(cStackSize);
	NodeID *node_stack = node_stack_array.data();

	// Loop over all active bodies
	for (int b1 = 0; b1 < inNumActiveBodies; ++b1)
	{
		BodyID b1_id = inActiveBodies[b1];
		const Body &body1 = *inBodies[b1_id.GetIndex()];
		JPH_ASSERT(!body1.IsStatic());

#ifdef JPH_TRACK_SIMULATION_STATS
		uint64 start_tick = GetProcessorTickCount();
#endif

		// Expand the bounding box by the speculative contact distance
		AABox bounds1 = body1.GetWorldSpaceBounds();
		bounds1.ExpandBy(Vec3::sReplicate(inSpeculativeContactDistance));

		// Test each body with the tree
		node_stack[0] = root_node.GetNodeID();
		int top = 0;
		do
		{
			// Check if node is a body
			NodeID child_node_id = node_stack[top];
			if (child_node_id.IsBody())
			{
				// Don't collide with self
				BodyID b2_id = child_node_id.GetBodyID();
				if (b1_id != b2_id)
				{
					// Collision between dynamic pairs need to be picked up only once
					const Body &body2 = *inBodies[b2_id.GetIndex()];
					if (inObjectLayerPairFilter.ShouldCollide(body1.GetObjectLayer(), body2.GetObjectLayer())
						&& Body::sFindCollidingPairsCanCollide(body1, body2)
						&& bounds1.Overlaps(body2.GetWorldSpaceBounds())) // In the broadphase we widen the bounding box when a body moves, do a final check to see if the bounding boxes actually overlap
					{
						// Store potential hit between bodies
						ioPairCollector.AddHit({ b1_id, b2_id });
					}
				}
			}
			else if (child_node_id.IsValid())
			{
				// Process normal node
				const Node &node = mAllocator->Get(child_node_id.GetNodeIndex());
				JPH_ASSERT(IsAligned(&node, JPH_CACHE_LINE_SIZE));

				// Get bounds of 4 children (SoA layout allows testing all 4 at once)
				Vec4 bounds_minx = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinX);
				Vec4 bounds_miny = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinY);
				Vec4 bounds_minz = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMinZ);
				Vec4 bounds_maxx = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxX);
				Vec4 bounds_maxy = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxY);
				Vec4 bounds_maxz = Vec4::sLoadFloat4Aligned((const Float4 *)&node.mBoundsMaxZ);

				// Test overlap
				UVec4 overlap = AABox4VsBox(bounds1, bounds_minx, bounds_miny, bounds_minz, bounds_maxx, bounds_maxy, bounds_maxz);
				int num_results = overlap.CountTrues();
				if (num_results > 0)
				{
					// Load ids for 4 children
					UVec4 child_ids = UVec4::sLoadInt4Aligned((const uint32 *)&node.mChildNodeID[0]);

					// Sort so that overlaps are first
					child_ids = UVec4::sSort4True(overlap, child_ids);

					// Ensure there is space on the stack (falls back to heap if there isn't)
					if (top + 4 >= (int)node_stack_array.size())
					{
						sQuadTreePerformanceWarning();
						node_stack_array.resize(node_stack_array.size() << 1);
						node_stack = node_stack_array.data();
					}

					// Push them onto the stack
					child_ids.StoreInt4((uint32 *)&node_stack[top]);
					top += num_results;
				}
			}
			--top;
		}
		while (top >= 0);

#ifdef JPH_TRACK_SIMULATION_STATS
		uint64 num_ticks = GetProcessorTickCount() - start_tick;
		const_cast<MotionProperties::SimulationStats &>(body1.GetMotionPropertiesUnchecked()->GetSimulationStats()).mBroadPhaseTicks += num_ticks;
#endif
	}

	// Test that the root node was not swapped while finding collision pairs.
	// This would mean that UpdateFinalize/DiscardOldTree ran during collision detection which should not be possible due to the way the jobs are scheduled.
	JPH_ASSERT(root_node.mIndex != cInvalidNodeIndex);
	JPH_ASSERT(&root_node == &GetCurrentRoot());
}
  1307. #ifdef JPH_DEBUG
  1308. void QuadTree::ValidateTree(const BodyVector &inBodies, const TrackingVector &inTracking, uint32 inNodeIndex, uint32 inNumExpectedBodies) const
  1309. {
  1310. JPH_PROFILE_FUNCTION();
  1311. // Root should be valid
  1312. JPH_ASSERT(inNodeIndex != cInvalidNodeIndex);
  1313. // To avoid call overhead, create a stack in place
  1314. JPH_SUPPRESS_WARNING_PUSH
  1315. JPH_CLANG_SUPPRESS_WARNING("-Wunused-member-function") // The default constructor of StackEntry is unused when using Jolt's Array class but not when using std::vector
  1316. struct StackEntry
  1317. {
  1318. StackEntry() = default;
  1319. inline StackEntry(uint32 inNodeIndex, uint32 inParentNodeIndex) : mNodeIndex(inNodeIndex), mParentNodeIndex(inParentNodeIndex) { }
  1320. uint32 mNodeIndex;
  1321. uint32 mParentNodeIndex;
  1322. };
  1323. JPH_SUPPRESS_WARNING_POP
  1324. Array<StackEntry, STLLocalAllocator<StackEntry, cStackSize>> stack;
  1325. stack.reserve(cStackSize);
  1326. stack.emplace_back(inNodeIndex, cInvalidNodeIndex);
  1327. uint32 num_bodies = 0;
  1328. do
  1329. {
  1330. // Copy entry from the stack
  1331. StackEntry cur_stack = stack.back();
  1332. stack.pop_back();
  1333. // Validate parent
  1334. const Node &node = mAllocator->Get(cur_stack.mNodeIndex);
  1335. JPH_ASSERT(node.mParentNodeIndex == cur_stack.mParentNodeIndex);
  1336. // Validate that when a parent is not-changed that all of its children are also
  1337. JPH_ASSERT(cur_stack.mParentNodeIndex == cInvalidNodeIndex || mAllocator->Get(cur_stack.mParentNodeIndex).mIsChanged || !node.mIsChanged);
  1338. // Loop children
  1339. for (uint32 i = 0; i < 4; ++i)
  1340. {
  1341. NodeID child_node_id = node.mChildNodeID[i];
  1342. if (child_node_id.IsValid())
  1343. {
  1344. if (child_node_id.IsNode())
  1345. {
  1346. // Child is a node, recurse
  1347. uint32 child_idx = child_node_id.GetNodeIndex();
  1348. stack.emplace_back(child_idx, cur_stack.mNodeIndex);
  1349. // Validate that the bounding box is bigger or equal to the bounds in the tree
  1350. // Bounding box could also be invalid if all children of our child were removed
  1351. AABox child_bounds;
  1352. node.GetChildBounds(i, child_bounds);
  1353. AABox real_child_bounds;
  1354. mAllocator->Get(child_idx).GetNodeBounds(real_child_bounds);
  1355. JPH_ASSERT(child_bounds.Contains(real_child_bounds) || !real_child_bounds.IsValid());
  1356. }
  1357. else
  1358. {
  1359. // Increment number of bodies found
  1360. ++num_bodies;
  1361. // Check if tracker matches position of body
  1362. uint32 node_idx, child_idx;
  1363. GetBodyLocation(inTracking, child_node_id.GetBodyID(), node_idx, child_idx);
  1364. JPH_ASSERT(node_idx == cur_stack.mNodeIndex);
  1365. JPH_ASSERT(child_idx == i);
  1366. // Validate that the body cached bounds still match the actual bounds
  1367. const Body *body = inBodies[child_node_id.GetBodyID().GetIndex()];
  1368. body->ValidateCachedBounds();
  1369. // Validate that the node bounds are bigger or equal to the body bounds
  1370. AABox body_bounds;
  1371. node.GetChildBounds(i, body_bounds);
  1372. JPH_ASSERT(body_bounds.Contains(body->GetWorldSpaceBounds()));
  1373. }
  1374. }
  1375. }
  1376. }
  1377. while (!stack.empty());
  1378. // Check that the amount of bodies in the tree matches our counter
  1379. JPH_ASSERT(num_bodies == inNumExpectedBodies);
  1380. }
  1381. #endif
  1382. #ifdef JPH_DUMP_BROADPHASE_TREE
  1383. void QuadTree::DumpTree(const NodeID &inRoot, const char *inFileNamePrefix) const
  1384. {
  1385. // Open DOT file
  1386. std::ofstream f;
  1387. f.open(StringFormat("%s.dot", inFileNamePrefix).c_str(), std::ofstream::out | std::ofstream::trunc);
  1388. if (!f.is_open())
  1389. return;
  1390. // Write header
  1391. f << "digraph {\n";
  1392. // Iterate the entire tree
  1393. Array<NodeID, STLLocalAllocator<NodeID, cStackSize>> node_stack;
  1394. node_stack.reserve(cStackSize);
  1395. node_stack.push_back(inRoot);
  1396. JPH_ASSERT(inRoot.IsValid());
  1397. do
  1398. {
  1399. // Check if node is a body
  1400. NodeID node_id = node_stack.back();
  1401. node_stack.pop_back();
  1402. if (node_id.IsBody())
  1403. {
  1404. // Output body
  1405. String body_id = ConvertToString(node_id.GetBodyID().GetIndex());
  1406. f << "body" << body_id << "[label = \"Body " << body_id << "\"]\n";
  1407. }
  1408. else
  1409. {
  1410. // Process normal node
  1411. uint32 node_idx = node_id.GetNodeIndex();
  1412. const Node &node = mAllocator->Get(node_idx);
  1413. // Get bounding box
  1414. AABox bounds;
  1415. node.GetNodeBounds(bounds);
  1416. // Output node
  1417. String node_str = ConvertToString(node_idx);
  1418. f << "node" << node_str << "[label = \"Node " << node_str << "\nVolume: " << ConvertToString(bounds.GetVolume()) << "\" color=" << (node.mIsChanged? "red" : "black") << "]\n";
  1419. // Recurse and get all children
  1420. for (NodeID child_node_id : node.mChildNodeID)
  1421. if (child_node_id.IsValid())
  1422. {
  1423. node_stack.push_back(child_node_id);
  1424. // Output link
  1425. f << "node" << node_str << " -> ";
  1426. if (child_node_id.IsBody())
  1427. f << "body" << ConvertToString(child_node_id.GetBodyID().GetIndex());
  1428. else
  1429. f << "node" << ConvertToString(child_node_id.GetNodeIndex());
  1430. f << "\n";
  1431. }
  1432. }
  1433. }
  1434. while (!node_stack.empty());
  1435. // Finish DOT file
  1436. f << "}\n";
  1437. f.close();
  1438. // Convert to svg file
  1439. String cmd = StringFormat("dot %s.dot -Tsvg -o %s.svg", inFileNamePrefix, inFileNamePrefix);
  1440. system(cmd.c_str());
  1441. }
  1442. #endif // JPH_DUMP_BROADPHASE_TREE
  1443. #ifdef JPH_TRACK_BROADPHASE_STATS
  1444. uint64 QuadTree::GetTicks100Pct(const LayerToStats &inLayer) const
  1445. {
  1446. uint64 total_ticks = 0;
  1447. for (const LayerToStats::value_type &kv : inLayer)
  1448. total_ticks += kv.second.mTotalTicks;
  1449. return total_ticks;
  1450. }
  1451. void QuadTree::ReportStats(const char *inName, const LayerToStats &inLayer, uint64 inTicks100Pct) const
  1452. {
  1453. for (const LayerToStats::value_type &kv : inLayer)
  1454. {
  1455. double total_pct = 100.0 * double(kv.second.mTotalTicks) / double(inTicks100Pct);
  1456. double total_pct_excl_collector = 100.0 * double(kv.second.mTotalTicks - kv.second.mCollectorTicks) / double(inTicks100Pct);
  1457. double hits_reported_vs_bodies_visited = kv.second.mBodiesVisited > 0? 100.0 * double(kv.second.mHitsReported) / double(kv.second.mBodiesVisited) : 100.0;
  1458. double hits_reported_vs_nodes_visited = kv.second.mNodesVisited > 0? double(kv.second.mHitsReported) / double(kv.second.mNodesVisited) : -1.0;
  1459. std::stringstream str;
  1460. str << inName << ", " << kv.first << ", " << mName << ", " << kv.second.mNumQueries << ", " << total_pct << ", " << total_pct_excl_collector << ", " << kv.second.mNodesVisited << ", " << kv.second.mBodiesVisited << ", " << kv.second.mHitsReported << ", " << hits_reported_vs_bodies_visited << ", " << hits_reported_vs_nodes_visited;
  1461. Trace(str.str().c_str());
  1462. }
  1463. }
  1464. uint64 QuadTree::GetTicks100Pct() const
  1465. {
  1466. uint64 total_ticks = 0;
  1467. total_ticks += GetTicks100Pct(mCastRayStats);
  1468. total_ticks += GetTicks100Pct(mCollideAABoxStats);
  1469. total_ticks += GetTicks100Pct(mCollideSphereStats);
  1470. total_ticks += GetTicks100Pct(mCollidePointStats);
  1471. total_ticks += GetTicks100Pct(mCollideOrientedBoxStats);
  1472. total_ticks += GetTicks100Pct(mCastAABoxStats);
  1473. return total_ticks;
  1474. }
  1475. void QuadTree::ReportStats(uint64 inTicks100Pct) const
  1476. {
  1477. unique_lock lock(mStatsMutex);
  1478. ReportStats("RayCast", mCastRayStats, inTicks100Pct);
  1479. ReportStats("CollideAABox", mCollideAABoxStats, inTicks100Pct);
  1480. ReportStats("CollideSphere", mCollideSphereStats, inTicks100Pct);
  1481. ReportStats("CollidePoint", mCollidePointStats, inTicks100Pct);
  1482. ReportStats("CollideOrientedBox", mCollideOrientedBoxStats, inTicks100Pct);
  1483. ReportStats("CastAABox", mCastAABoxStats, inTicks100Pct);
  1484. }
  1485. #endif // JPH_TRACK_BROADPHASE_STATS
  1486. uint QuadTree::GetMaxTreeDepth(const NodeID &inNodeID) const
  1487. {
  1488. // Reached a leaf?
  1489. if (!inNodeID.IsValid() || inNodeID.IsBody())
  1490. return 0;
  1491. // Recurse to children
  1492. uint max_depth = 0;
  1493. const Node &node = mAllocator->Get(inNodeID.GetNodeIndex());
  1494. for (NodeID child_node_id : node.mChildNodeID)
  1495. max_depth = max(max_depth, GetMaxTreeDepth(child_node_id));
  1496. return max_depth + 1;
  1497. }
  1498. JPH_NAMESPACE_END