bvh_public.inc
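// Public interface of the BVH tree template: adding, moving, removing,
// activating/deactivating items and changing their pairable state, plus the
// per-frame incremental optimize/update step. Items live in one of the trees
// selected by tree_id; as seen in item_add() below, non-pairable items go to
// tree 0 and pairable items to tree 1.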

public:
BVHHandle item_add(T *p_userdata, bool p_active, const Bounds &p_aabb, int32_t p_subindex, bool p_pairable, uint32_t p_pairable_type, uint32_t p_pairable_mask, bool p_invisible = false) {
#ifdef BVH_VERBOSE_TREE
	VERBOSE_PRINT("\nitem_add BEFORE");
	_debug_recursive_print_tree(0);
	VERBOSE_PRINT("\n");
#endif

	BVHABB_CLASS abb;
	abb.from(p_aabb);

	// handle to be filled with the new item ref
	BVHHandle handle;

	// ref id easier to pass around than handle
	uint32_t ref_id;

	// this should never fail
	ItemRef *ref = _refs.request(ref_id);

	// the extra data should be a parallel list to the references
	uint32_t extra_id;
	ItemExtra *extra = _extra.request(extra_id);
	BVH_ASSERT(extra_id == ref_id);

	// pairs info
	if (USE_PAIRS) {
		uint32_t pairs_id;
		ItemPairs *pairs = _pairs.request(pairs_id);
		pairs->clear();
		BVH_ASSERT(pairs_id == ref_id);
	}

	extra->subindex = p_subindex;
	extra->userdata = p_userdata;
	extra->last_updated_tick = 0;

	// add an active reference to the list for slow incremental optimize
	// this list must be kept in sync with the references as they are added or removed.
	extra->active_ref_id = _active_refs.size();
	_active_refs.push_back(ref_id);

	if (USE_PAIRS) {
		extra->pairable_mask = p_pairable_mask;
		extra->pairable_type = p_pairable_type;
		extra->pairable = p_pairable;
	} else {
		// just for safety, in case this gets queried etc
		extra->pairable = 0;
		p_pairable = false;
	}

	// assign to handle to return
	handle.set_id(ref_id);

	uint32_t tree_id = 0;
	if (p_pairable) {
		tree_id = 1;
	}

	create_root_node(tree_id);

	// we must choose where to add to tree
	if (p_active) {
		ref->tnode_id = _logic_choose_item_add_node(_root_node_id[tree_id], abb);

		bool refit = _node_add_item(ref->tnode_id, ref_id, abb);

		if (refit) {
			// only need to refit from the parent
			const TNode &add_node = _nodes[ref->tnode_id];
			if (add_node.parent_id != BVHCommon::INVALID) {
				refit_upward_and_balance(add_node.parent_id, tree_id);
			}
		}
	} else {
		ref->set_inactive();
	}

#ifdef BVH_VERBOSE
	// memory use
	int mem = _refs.estimate_memory_use();
	mem += _nodes.estimate_memory_use();

	String sz = _debug_aabb_to_string(abb);
	VERBOSE_PRINT("\titem_add [" + itos(ref_id) + "] " + itos(_refs.size()) + " refs,\t" + itos(_nodes.size()) + " nodes " + sz);
	VERBOSE_PRINT("mem use : " + itos(mem) + ", num nodes : " + itos(_nodes.size()));
#endif

	return handle;
}

void _debug_print_refs() {
#ifdef BVH_VERBOSE_TREE
	print_line("refs.....");
	for (int n = 0; n < _refs.size(); n++) {
		const ItemRef &ref = _refs[n];
		print_line("tnode_id " + itos(ref.tnode_id) + ", item_id " + itos(ref.item_id));
	}
#endif
}

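// Moving an item has two paths: if the new AABB still fits inside the expanded
// AABB stored in the item's current node, only the exact leaf AABB is updated
// (fast path). Otherwise the item is removed from its node and reinserted at
// the best-fitting node, refitting upward from the new parent.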
// returns false if noop
bool item_move(BVHHandle p_handle, const Bounds &p_aabb) {
	uint32_t ref_id = p_handle.id();

	// get the reference
	ItemRef &ref = _refs[ref_id];
	if (!ref.is_active()) {
		return false;
	}

	BVHABB_CLASS abb;
	abb.from(p_aabb);

	BVH_ASSERT(ref.tnode_id != BVHCommon::INVALID);
	TNode &tnode = _nodes[ref.tnode_id];

	// does it fit within the current aabb?
	if (tnode.aabb.is_other_within(abb)) {
		// do nothing .. fast path .. not moved enough to need refit
		// however we WILL update the exact aabb in the leaf, as this will be needed
		// for accurate collision detection
		TLeaf &leaf = _node_get_leaf(tnode);
		BVHABB_CLASS &leaf_abb = leaf.get_aabb(ref.item_id);

		// no change?
		if (leaf_abb == abb) {
			return false;
		}

		leaf_abb = abb;
		_integrity_check_all();

		return true;
	}

	uint32_t tree_id = _handle_get_tree_id(p_handle);

	// remove and reinsert
	node_remove_item(ref_id, tree_id);

	// we must choose where to add to tree
	ref.tnode_id = _logic_choose_item_add_node(_root_node_id[tree_id], abb);

	// add to the tree
	bool needs_refit = _node_add_item(ref.tnode_id, ref_id, abb);

	// only need to refit from the PARENT
	if (needs_refit) {
		// only need to refit from the parent
		const TNode &add_node = _nodes[ref.tnode_id];
		if (add_node.parent_id != BVHCommon::INVALID) {
			// not sure we need to rebalance all the time, this can be done less often
			refit_upward(add_node.parent_id);
		}
		//refit_upward_and_balance(add_node.parent_id);
	}

	return true;
}

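// Removal keeps _active_refs compact with a swap-with-last ("unordered remove"):
// the last entry is moved into the freed slot and its ItemExtra::active_ref_id is
// patched to the new position, so incremental_optimize() can keep walking the list
// without gaps.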
void item_remove(BVHHandle p_handle) {
	uint32_t ref_id = p_handle.id();
	uint32_t tree_id = _handle_get_tree_id(p_handle);

	VERBOSE_PRINT("item_remove [" + itos(ref_id) + "] ");

	////////////////////////////////////////
	// remove the active reference from the list for slow incremental optimize
	// this list must be kept in sync with the references as they are added or removed.
	uint32_t active_ref_id = _extra[ref_id].active_ref_id;
	uint32_t ref_id_moved_back = _active_refs[_active_refs.size() - 1];

	// swap back and decrement for fast unordered remove
	_active_refs[active_ref_id] = ref_id_moved_back;
	_active_refs.resize(_active_refs.size() - 1);

	// keep the moved active reference up to date
	_extra[ref_id_moved_back].active_ref_id = active_ref_id;
	////////////////////////////////////////

	// remove the item from the node (only if active)
	if (_refs[ref_id].is_active()) {
		node_remove_item(ref_id, tree_id);
	}

	// remove the item reference
	_refs.free(ref_id);
	_extra.free(ref_id);
	if (USE_PAIRS) {
		_pairs.free(ref_id);
	}

	// don't think refit_all is necessary?
	//refit_all(_tree_id);

#ifdef BVH_VERBOSE_TREE
	_debug_recursive_print_tree(tree_id);
#endif
}

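// item_activate() / item_deactivate() insert or remove the item from its tree while
// keeping the reference and extra data allocated, so the handle remains usable; an
// inactive item simply does not live in any node until it is activated again.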
// returns success
bool item_activate(BVHHandle p_handle, const Bounds &p_aabb) {
	uint32_t ref_id = p_handle.id();
	ItemRef &ref = _refs[ref_id];
	if (ref.is_active()) {
		// noop
		return false;
	}

	// add to tree
	BVHABB_CLASS abb;
	abb.from(p_aabb);

	uint32_t tree_id = _handle_get_tree_id(p_handle);

	// we must choose where to add to tree
	ref.tnode_id = _logic_choose_item_add_node(_root_node_id[tree_id], abb);
	_node_add_item(ref.tnode_id, ref_id, abb);

	refit_upward_and_balance(ref.tnode_id, tree_id);

	return true;
}

// returns success
bool item_deactivate(BVHHandle p_handle) {
	uint32_t ref_id = p_handle.id();
	ItemRef &ref = _refs[ref_id];
	if (!ref.is_active()) {
		// noop
		return false;
	}

	uint32_t tree_id = _handle_get_tree_id(p_handle);

	// remove from tree
	BVHABB_CLASS abb;
	node_remove_item(ref_id, tree_id, &abb);

	// mark as inactive
	ref.set_inactive();
	return true;
}

bool item_get_active(BVHHandle p_handle) const {
	uint32_t ref_id = p_handle.id();
	const ItemRef &ref = _refs[ref_id];
	return ref.is_active();
}

// during collision testing, set the mask and pairable status of the item we are testing from
void item_fill_cullparams(BVHHandle p_handle, CullParams &r_params) const {
	uint32_t ref_id = p_handle.id();
	const ItemExtra &extra = _extra[ref_id];

	// when testing from a non-pairable item, we only want to test against pairable items
	r_params.test_pairable_only = extra.pairable == 0;

	// we take into account the mask of the item testing from
	r_params.mask = extra.pairable_mask;
	r_params.pairable_type = extra.pairable_type;
}

bool item_is_pairable(const BVHHandle &p_handle) {
	uint32_t ref_id = p_handle.id();
	const ItemExtra &extra = _extra[ref_id];
	return extra.pairable != 0;
}

void item_get_ABB(const BVHHandle &p_handle, BVHABB_CLASS &r_abb) {
	// change tree?
	uint32_t ref_id = p_handle.id();
	const ItemRef &ref = _refs[ref_id];

	TNode &tnode = _nodes[ref.tnode_id];
	TLeaf &leaf = _node_get_leaf(tnode);

	r_abb = leaf.get_aabb(ref.item_id);
}

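// Changing pairability can move the item between trees. The item is removed from
// its current tree BEFORE ex.pairable is updated (the pairable flag determines
// which tree a handle maps to), then reinserted into the tree for the new state.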
bool item_set_pairable(const BVHHandle &p_handle, bool p_pairable, uint32_t p_pairable_type, uint32_t p_pairable_mask) {
	// change tree?
	uint32_t ref_id = p_handle.id();

	ItemExtra &ex = _extra[ref_id];
	ItemRef &ref = _refs[ref_id];

	bool active = ref.is_active();
	bool pairable_changed = (ex.pairable != 0) != p_pairable;
	bool state_changed = pairable_changed || (ex.pairable_type != p_pairable_type) || (ex.pairable_mask != p_pairable_mask);

	ex.pairable_type = p_pairable_type;
	ex.pairable_mask = p_pairable_mask;

	if (active && pairable_changed) {
		// record abb
		TNode &tnode = _nodes[ref.tnode_id];
		TLeaf &leaf = _node_get_leaf(tnode);
		BVHABB_CLASS abb = leaf.get_aabb(ref.item_id);

		// make sure current tree is correct prior to changing
		uint32_t tree_id = _handle_get_tree_id(p_handle);

		// remove from old tree
		node_remove_item(ref_id, tree_id);

		// we must set the pairable AFTER getting the current tree
		// because the pairable status determines which tree
		ex.pairable = p_pairable;

		// add to new tree
		tree_id = _handle_get_tree_id(p_handle);
		create_root_node(tree_id);

		// we must choose where to add to tree
		ref.tnode_id = _logic_choose_item_add_node(_root_node_id[tree_id], abb);
		bool needs_refit = _node_add_item(ref.tnode_id, ref_id, abb);

		// only need to refit from the PARENT
		if (needs_refit) {
			// only need to refit from the parent
			const TNode &add_node = _nodes[ref.tnode_id];
			if (add_node.parent_id != BVHCommon::INVALID) {
				refit_upward_and_balance(add_node.parent_id, tree_id);
			}
		}
	} else {
		// always keep this up to date
		ex.pairable = p_pairable;
	}

	return state_changed;
}

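// Called once per update(): refits every tree branch in one pass, then removes and
// reinserts a single active reference per call, cycling through _active_refs so the
// whole set is gradually re-optimized over many frames rather than all at once.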
void incremental_optimize() {
	// first update all aabbs as a one-off step..
	// this is cheaper than doing it on each move, as each leaf may get touched multiple times
	// in a frame.
	for (int n = 0; n < NUM_TREES; n++) {
		if (_root_node_id[n] != BVHCommon::INVALID) {
			refit_branch(_root_node_id[n]);
		}
	}

	// now do small section reinserting to get things moving
	// gradually, and keep items in the right leaf
	if (_current_active_ref >= _active_refs.size()) {
		_current_active_ref = 0;
	}

	// special case
	if (!_active_refs.size()) {
		return;
	}

	uint32_t ref_id = _active_refs[_current_active_ref++];
	_logic_item_remove_and_reinsert(ref_id);

#ifdef BVH_VERBOSE
	/*
	// memory use
	int mem_refs = _refs.estimate_memory_use();
	int mem_nodes = _nodes.estimate_memory_use();
	int mem_leaves = _leaves.estimate_memory_use();

	String sz;
	sz += "mem_refs : " + itos(mem_refs) + " ";
	sz += "mem_nodes : " + itos(mem_nodes) + " ";
	sz += "mem_leaves : " + itos(mem_leaves) + " ";
	sz += ", num nodes : " + itos(_nodes.size());
	print_line(sz);
	*/
#endif
}

void update() {
	incremental_optimize();

	// keep the expansion values up to date with the world bound
//#define BVH_ALLOW_AUTO_EXPANSION
#ifdef BVH_ALLOW_AUTO_EXPANSION
	if (_auto_node_expansion || _auto_pairing_expansion) {
		BVHABB_CLASS world_bound;
		world_bound.set_to_max_opposite_extents();

		bool bound_valid = false;

		for (int n = 0; n < NUM_TREES; n++) {
			uint32_t node_id = _root_node_id[n];
			if (node_id != BVHCommon::INVALID) {
				world_bound.merge(_nodes[node_id].aabb);
				bound_valid = true;
			}
		}

		// if there are no nodes, do nothing, but if there are...
		if (bound_valid) {
			Bounds bb;
			world_bound.to(bb);
			real_t size = bb.get_longest_axis_size();

			// automatic AI decision for best parameters.
			// These can be overridden in project settings.

			// these magic numbers are determined by experiment
			if (_auto_node_expansion) {
				_node_expansion = size * 0.025;
			}
			if (_auto_pairing_expansion) {
				_pairing_expansion = size * 0.009;
			}
		}
	}
#endif
}

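// Illustrative usage sketch of the public API above (not part of the engine source;
// "tree", MyObject, aabb and new_aabb are hypothetical placeholders for whatever
// owns and instantiates this template):
//
//	MyObject *obj = ...;
//	Bounds aabb = ...; // current world-space bounds of obj
//
//	// register the object; keep the handle for later calls
//	BVHHandle h = tree.item_add(obj, /* p_active */ true, aabb, /* p_subindex */ 0,
//			/* p_pairable */ false, /* p_pairable_type */ 0, /* p_pairable_mask */ 0);
//
//	// whenever the object moves
//	tree.item_move(h, new_aabb);
//
//	// once per frame / tick, amortized maintenance
//	tree.update();
//
//	// when the object goes away
//	tree.item_remove(h);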