// bvh_public.inc
public:
// Insert a new item into the BVH and return a handle for later operations
// (move / activate / deactivate / remove / set_pairable).
// p_active : if false, the item gets a reference but is NOT inserted into a tree.
// p_pairable : selects the destination tree (tree 1 for pairable, tree 0 otherwise).
// NOTE(review): p_invisible is not used anywhere in this function body —
// confirm it is consumed by a caller or wrapper.
BVHHandle item_add(T *p_userdata, bool p_active, const BOUNDS &p_aabb, int32_t p_subindex, bool p_pairable, uint32_t p_pairable_type, uint32_t p_pairable_mask, bool p_invisible = false) {
#ifdef BVH_VERBOSE_TREE
	VERBOSE_PRINT("\nitem_add BEFORE");
	_debug_recursive_print_tree(0);
	VERBOSE_PRINT("\n");
#endif

	BVHABB_CLASS abb;
	abb.from(p_aabb);

	// NOTE that we do not expand the AABB for the first create even if
	// leaf expansion is switched on. This is for two reasons:
	// (1) We don't know if this object will move in future, in which case a non-expanded
	// bound would be better...
	// (2) We don't yet know how many objects will be paired, which is used to modify
	// the expansion margin.

	// handle to be filled with the new item ref
	BVHHandle handle;

	// ref id easier to pass around than handle
	uint32_t ref_id;

	// this should never fail
	ItemRef *ref = _refs.request(ref_id);

	// the extra data should be parallel list to the references
	uint32_t extra_id;
	ItemExtra *extra = _extra.request(extra_id);
	BVH_ASSERT(extra_id == ref_id);

	// pairs info — kept as a third parallel list when pairing is in use
	// (the asserts verify all pooled lists hand out the same slot id)
	if (USE_PAIRS) {
		uint32_t pairs_id;
		ItemPairs *pairs = _pairs.request(pairs_id);
		pairs->clear();
		BVH_ASSERT(pairs_id == ref_id);
	}

	extra->subindex = p_subindex;
	extra->userdata = p_userdata;
	extra->last_updated_tick = 0;

	// add an active reference to the list for slow incremental optimize
	// this list must be kept in sync with the references as they are added or removed.
	extra->active_ref_id = _active_refs.size();
	_active_refs.push_back(ref_id);

	if (USE_PAIRS) {
		extra->pairable_mask = p_pairable_mask;
		extra->pairable_type = p_pairable_type;
		extra->pairable = p_pairable;
	} else {
		// just for safety, in case this gets queried etc
		extra->pairable = 0;
		p_pairable = false;
	}

	// assign to handle to return
	handle.set_id(ref_id);

	// pairable items live in tree 1, non-pairable in tree 0
	uint32_t tree_id = 0;
	if (p_pairable) {
		tree_id = 1;
	}

	// make sure the destination tree has a root before inserting
	create_root_node(tree_id);

	// we must choose where to add to tree
	if (p_active) {
		ref->tnode_id = _logic_choose_item_add_node(_root_node_id[tree_id], abb);
		bool refit = _node_add_item(ref->tnode_id, ref_id, abb);

		if (refit) {
			// only need to refit from the parent
			const TNode &add_node = _nodes[ref->tnode_id];
			if (add_node.parent_id != BVHCommon::INVALID) {
				refit_upward_and_balance(add_node.parent_id, tree_id);
			}
		}
	} else {
		// inactive items keep a reference but no tree node
		ref->set_inactive();
	}

#ifdef BVH_VERBOSE
	// memory use
	int mem = _refs.estimate_memory_use();
	mem += _nodes.estimate_memory_use();
	String sz = _debug_aabb_to_string(abb);
	VERBOSE_PRINT("\titem_add [" + itos(ref_id) + "] " + itos(_refs.size()) + " refs,\t" + itos(_nodes.size()) + " nodes " + sz);
	VERBOSE_PRINT("mem use : " + itos(mem) + ", num nodes : " + itos(_nodes.size()));
#endif

	return handle;
}
  80. void _debug_print_refs() {
  81. #ifdef BVH_VERBOSE_TREE
  82. print_line("refs.....");
  83. for (int n = 0; n < _refs.size(); n++) {
  84. const ItemRef &ref = _refs[n];
  85. print_line("tnode_id " + itos(ref.tnode_id) + ", item_id " + itos(ref.item_id));
  86. }
  87. #endif
  88. }
// Update an item's bound after it has moved.
// Returns false if this was a noop (item inactive, or the bound did not change
// enough to require any collision re-pairing work).
bool item_move(BVHHandle p_handle, const BOUNDS &p_aabb) {
	uint32_t ref_id = p_handle.id();

	// get the reference
	ItemRef &ref = _refs[ref_id];
	if (!ref.is_active()) {
		return false;
	}

	BVHABB_CLASS abb;
	abb.from(p_aabb);

#ifdef BVH_EXPAND_LEAF_AABBS
	if (USE_PAIRS) {
		// scale the pairing expansion by the number of pairs.
		abb.expand(_pairs[ref_id].scale_expansion_margin(_pairing_expansion));
	} else {
		abb.expand(_pairing_expansion);
	}
#endif

	BVH_ASSERT(ref.tnode_id != BVHCommon::INVALID);
	TNode &tnode = _nodes[ref.tnode_id];

	// does it fit within the current leaf aabb?
	if (tnode.aabb.is_other_within(abb)) {
		// do nothing .. fast path .. not moved enough to need refit
		// however we WILL update the exact aabb in the leaf, as this will be needed
		// for accurate collision detection
		TLeaf &leaf = _node_get_leaf(tnode);

		BVHABB_CLASS &leaf_abb = leaf.get_aabb(ref.item_id);

		// no change?
#ifdef BVH_EXPAND_LEAF_AABBS
		BOUNDS leaf_aabb;
		leaf_abb.to(leaf_aabb);

		// This test should pass in a lot of cases, and by returning false we can avoid
		// collision pairing checks later, which greatly reduces processing.
		if (expanded_aabb_encloses_not_shrink(leaf_aabb, p_aabb)) {
			return false;
		}
#else
		if (leaf_abb == abb) {
			return false;
		}
#endif

#ifdef BVH_VERBOSE_MOVES
		print_line("item_move " + itos(p_handle.id()) + "(within tnode aabb) : " + _debug_aabb_to_string(abb));
#endif

		// store the exact new bound in the leaf
		leaf_abb = abb;
		_integrity_check_all();

		return true;
	}

#ifdef BVH_VERBOSE_MOVES
	print_line("item_move " + itos(p_handle.id()) + "(outside tnode aabb) : " + _debug_aabb_to_string(abb));
#endif

	uint32_t tree_id = _handle_get_tree_id(p_handle);

	// remove and reinsert
	node_remove_item(ref_id, tree_id);

	// we must choose where to add to tree
	ref.tnode_id = _logic_choose_item_add_node(_root_node_id[tree_id], abb);

	// add to the tree
	bool needs_refit = _node_add_item(ref.tnode_id, ref_id, abb);

	// only need to refit from the PARENT
	if (needs_refit) {
		// only need to refit from the parent
		const TNode &add_node = _nodes[ref.tnode_id];
		if (add_node.parent_id != BVHCommon::INVALID) {
			// not sure we need to rebalance all the time, this can be done less often
			refit_upward(add_node.parent_id);
		}
		//refit_upward_and_balance(add_node.parent_id);
	}

	return true;
}
// Remove an item from the BVH entirely: detach it from its tree (if active)
// and free its reference, extra data and (when pairing is in use) pairs record.
void item_remove(BVHHandle p_handle) {
	uint32_t ref_id = p_handle.id();
	uint32_t tree_id = _handle_get_tree_id(p_handle);

	VERBOSE_PRINT("item_remove [" + itos(ref_id) + "] ");

	////////////////////////////////////////
	// remove the active reference from the list for slow incremental optimize
	// this list must be kept in sync with the references as they are added or removed.
	uint32_t active_ref_id = _extra[ref_id].active_ref_id;
	uint32_t ref_id_moved_back = _active_refs[_active_refs.size() - 1];

	// swap back and decrement for fast unordered remove
	_active_refs[active_ref_id] = ref_id_moved_back;
	_active_refs.resize(_active_refs.size() - 1);

	// keep the moved active reference up to date
	// (harmless self-assignment when the removed item was the last entry)
	_extra[ref_id_moved_back].active_ref_id = active_ref_id;
	////////////////////////////////////////

	// remove the item from the node (only if active)
	if (_refs[ref_id].is_active()) {
		node_remove_item(ref_id, tree_id);
	}

	// remove the item reference
	_refs.free(ref_id);
	_extra.free(ref_id);
	if (USE_PAIRS) {
		_pairs.free(ref_id);
	}

	// don't think refit_all is necessary?
	//refit_all(_tree_id);

#ifdef BVH_VERBOSE_TREE
	_debug_recursive_print_tree(tree_id);
#endif
}
  190. // returns success
  191. bool item_activate(BVHHandle p_handle, const BOUNDS &p_aabb) {
  192. uint32_t ref_id = p_handle.id();
  193. ItemRef &ref = _refs[ref_id];
  194. if (ref.is_active()) {
  195. // noop
  196. return false;
  197. }
  198. // add to tree
  199. BVHABB_CLASS abb;
  200. abb.from(p_aabb);
  201. uint32_t tree_id = _handle_get_tree_id(p_handle);
  202. // we must choose where to add to tree
  203. ref.tnode_id = _logic_choose_item_add_node(_root_node_id[tree_id], abb);
  204. _node_add_item(ref.tnode_id, ref_id, abb);
  205. refit_upward_and_balance(ref.tnode_id, tree_id);
  206. return true;
  207. }
  208. // returns success
  209. bool item_deactivate(BVHHandle p_handle) {
  210. uint32_t ref_id = p_handle.id();
  211. ItemRef &ref = _refs[ref_id];
  212. if (!ref.is_active()) {
  213. // noop
  214. return false;
  215. }
  216. uint32_t tree_id = _handle_get_tree_id(p_handle);
  217. // remove from tree
  218. BVHABB_CLASS abb;
  219. node_remove_item(ref_id, tree_id, &abb);
  220. // mark as inactive
  221. ref.set_inactive();
  222. return true;
  223. }
  224. bool item_get_active(BVHHandle p_handle) const {
  225. uint32_t ref_id = p_handle.id();
  226. const ItemRef &ref = _refs[ref_id];
  227. return ref.is_active();
  228. }
  229. // during collision testing, we want to set the mask and whether pairable for the item testing from
  230. void item_fill_cullparams(BVHHandle p_handle, CullParams &r_params) const {
  231. uint32_t ref_id = p_handle.id();
  232. const ItemExtra &extra = _extra[ref_id];
  233. // testing from a non pairable item, we only want to test pairable items
  234. r_params.test_pairable_only = extra.pairable == 0;
  235. // we take into account the mask of the item testing from
  236. r_params.mask = extra.pairable_mask;
  237. r_params.pairable_type = extra.pairable_type;
  238. }
  239. bool item_is_pairable(const BVHHandle &p_handle) {
  240. uint32_t ref_id = p_handle.id();
  241. const ItemExtra &extra = _extra[ref_id];
  242. return extra.pairable != 0;
  243. }
  244. void item_get_ABB(const BVHHandle &p_handle, BVHABB_CLASS &r_abb) {
  245. // change tree?
  246. uint32_t ref_id = p_handle.id();
  247. const ItemRef &ref = _refs[ref_id];
  248. TNode &tnode = _nodes[ref.tnode_id];
  249. TLeaf &leaf = _node_get_leaf(tnode);
  250. r_abb = leaf.get_aabb(ref.item_id);
  251. }
// Change an item's pairable status / type / mask. A change of pairable status
// for an active item moves it between trees (tree 0 non-pairable, tree 1 pairable).
// Returns true if any of the pairing state actually changed.
bool item_set_pairable(const BVHHandle &p_handle, bool p_pairable, uint32_t p_pairable_type, uint32_t p_pairable_mask) {
	// change tree?
	uint32_t ref_id = p_handle.id();

	ItemExtra &ex = _extra[ref_id];
	ItemRef &ref = _refs[ref_id];

	bool active = ref.is_active();
	bool pairable_changed = (ex.pairable != 0) != p_pairable;
	bool state_changed = pairable_changed || (ex.pairable_type != p_pairable_type) || (ex.pairable_mask != p_pairable_mask);

	// type and mask can be updated unconditionally — they do not affect
	// which tree the item lives in
	ex.pairable_type = p_pairable_type;
	ex.pairable_mask = p_pairable_mask;

	if (active && pairable_changed) {
		// record abb
		TNode &tnode = _nodes[ref.tnode_id];
		TLeaf &leaf = _node_get_leaf(tnode);
		BVHABB_CLASS abb = leaf.get_aabb(ref.item_id);

		// make sure current tree is correct prior to changing
		uint32_t tree_id = _handle_get_tree_id(p_handle);

		// remove from old tree
		node_remove_item(ref_id, tree_id);

		// we must set the pairable AFTER getting the current tree
		// because the pairable status determines which tree
		ex.pairable = p_pairable;

		// add to new tree (ensure the destination tree has a root first)
		tree_id = _handle_get_tree_id(p_handle);
		create_root_node(tree_id);

		// we must choose where to add to tree
		ref.tnode_id = _logic_choose_item_add_node(_root_node_id[tree_id], abb);
		bool needs_refit = _node_add_item(ref.tnode_id, ref_id, abb);

		// only need to refit from the PARENT
		if (needs_refit) {
			// only need to refit from the parent
			const TNode &add_node = _nodes[ref.tnode_id];
			if (add_node.parent_id != BVHCommon::INVALID) {
				refit_upward_and_balance(add_node.parent_id, tree_id);
			}
		}
	} else {
		// always keep this up to date
		ex.pairable = p_pairable;
	}

	return state_changed;
}
  294. void incremental_optimize() {
  295. // first update all aabbs as one off step..
  296. // this is cheaper than doing it on each move as each leaf may get touched multiple times
  297. // in a frame.
  298. for (int n = 0; n < NUM_TREES; n++) {
  299. if (_root_node_id[n] != BVHCommon::INVALID) {
  300. refit_branch(_root_node_id[n]);
  301. }
  302. }
  303. // now do small section reinserting to get things moving
  304. // gradually, and keep items in the right leaf
  305. if (_current_active_ref >= _active_refs.size()) {
  306. _current_active_ref = 0;
  307. }
  308. // special case
  309. if (!_active_refs.size()) {
  310. return;
  311. }
  312. uint32_t ref_id = _active_refs[_current_active_ref++];
  313. _logic_item_remove_and_reinsert(ref_id);
  314. #ifdef BVH_VERBOSE
  315. /*
  316. // memory use
  317. int mem_refs = _refs.estimate_memory_use();
  318. int mem_nodes = _nodes.estimate_memory_use();
  319. int mem_leaves = _leaves.estimate_memory_use();
  320. String sz;
  321. sz += "mem_refs : " + itos(mem_refs) + " ";
  322. sz += "mem_nodes : " + itos(mem_nodes) + " ";
  323. sz += "mem_leaves : " + itos(mem_leaves) + " ";
  324. sz += ", num nodes : " + itos(_nodes.size());
  325. print_line(sz);
  326. */
  327. #endif
  328. }
// Per-frame update: runs one incremental optimize step and, when auto
// expansion is compiled in, adapts the expansion margins to the world bound.
void update() {
	incremental_optimize();

	// keep the expansion values up to date with the world bound
//#define BVH_ALLOW_AUTO_EXPANSION
#ifdef BVH_ALLOW_AUTO_EXPANSION
	if (_auto_node_expansion || _auto_pairing_expansion) {
		BVHABB_CLASS world_bound;
		world_bound.set_to_max_opposite_extents();

		bool bound_valid = false;

		// merge the root aabbs of all trees into one world bound
		for (int n = 0; n < NUM_TREES; n++) {
			uint32_t node_id = _root_node_id[n];
			if (node_id != BVHCommon::INVALID) {
				world_bound.merge(_nodes[node_id].aabb);
				bound_valid = true;
			}
		}

		// if there are no nodes, do nothing, but if there are...
		if (bound_valid) {
			BOUNDS bb;
			world_bound.to(bb);
			real_t size = bb.get_longest_axis_size();

			// automatic AI decision for best parameters.
			// These can be overridden in project settings.

			// these magic numbers are determined by experiment
			if (_auto_node_expansion) {
				_node_expansion = size * 0.025;
			}

			if (_auto_pairing_expansion) {
				_pairing_expansion = size * 0.009;
			}
		}
	}
#endif
}
  363. void params_set_pairing_expansion(real_t p_value) {
  364. if (p_value < 0.0) {
  365. #ifdef BVH_ALLOW_AUTO_EXPANSION
  366. _auto_pairing_expansion = true;
  367. #endif
  368. return;
  369. }
  370. #ifdef BVH_ALLOW_AUTO_EXPANSION
  371. _auto_pairing_expansion = false;
  372. #endif
  373. _pairing_expansion = p_value;
  374. // calculate shrinking threshold
  375. const real_t fudge_factor = 1.1;
  376. _aabb_shrinkage_threshold = _pairing_expansion * POINT::AXIS_COUNT * 2.0 * fudge_factor;
  377. }
  378. // This routine is not just an enclose check, it also checks for special case of shrinkage
  379. bool expanded_aabb_encloses_not_shrink(const BOUNDS &p_expanded_aabb, const BOUNDS &p_aabb) const {
  380. if (!p_expanded_aabb.encloses(p_aabb)) {
  381. return false;
  382. }
  383. // Check for special case of shrinkage. If the aabb has shrunk
  384. // significantly we want to create a new expanded bound, because
  385. // the previous expanded bound will have diverged significantly.
  386. const POINT &exp_size = p_expanded_aabb.size;
  387. const POINT &new_size = p_aabb.size;
  388. real_t exp_l = 0.0;
  389. real_t new_l = 0.0;
  390. for (int i = 0; i < POINT::AXIS_COUNT; ++i) {
  391. exp_l += exp_size[i];
  392. new_l += new_size[i];
  393. }
  394. // is difference above some metric
  395. real_t diff = exp_l - new_l;
  396. if (diff < _aabb_shrinkage_threshold) {
  397. return true;
  398. }
  399. return false;
  400. }