
Fix slow garbage collect after using a lot of states

rdb, 8 years ago
commit 6b882ac767

panda/src/pgraph/renderAttrib.cxx  +4 -0

@@ -255,6 +255,10 @@ garbage_collect() {
   nassertr(_attribs->validate(), 0);
 #endif
 
+  // If we just cleaned up a lot of attribs, see if we can shrink the table.
+  // This will help reduce iteration overhead in future collection passes.
+  _attribs->consider_shrink_table();
+
   size_t new_size = _attribs->get_num_entries();
   return orig_size - new_size;
 }

panda/src/pgraph/renderState.cxx  +4 -0

@@ -1001,6 +1001,10 @@ garbage_collect() {
   nassertr(_states->validate(), 0);
 #endif
 
+  // If we just cleaned up a lot of states, see if we can shrink the table.
+  // This will help reduce iteration overhead in future collection passes.
+  _states->consider_shrink_table();
+
   int new_size = _states->get_num_entries();
   return orig_size - new_size + num_attribs;
 }

panda/src/pgraph/transformState.cxx  +4 -0

@@ -1253,6 +1253,10 @@ garbage_collect() {
   nassertr(_states->validate(), 0);
 #endif
 
+  // If we just cleaned up a lot of states, see if we can shrink the table.
+  // This will help reduce iteration overhead in future collection passes.
+  _states->consider_shrink_table();
+
   int new_size = _states->get_num_entries();
   return orig_size - new_size;
 }
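The three garbage_collect() passes above walk their entire global table, so the cost of a pass tracks the allocated table size rather than the number of live entries; after a burst of short-lived states has been released, an oversized table keeps every later pass slow until the storage is shrunk. The sketch below restates that collect-then-shrink pattern. The table interface and accessor names are hypothetical stand-ins, not the actual RenderState or SimpleHashMap API, and the real code also handles locking, reference counting, and the hazards of removing entries while iterating.

#include <cstddef>

// Hypothetical sketch of the collect-then-shrink pattern used by the
// garbage_collect() methods above.  "Table" is assumed to expose simple
// slot accessors plus the new consider_shrink_table().
template<class Table, class IsDead>
size_t garbage_collect_sketch(Table &table, IsDead is_dead) {
  size_t orig_size = table.get_num_entries();

  // This loop costs time proportional to the allocated table size, not to
  // the number of live entries, which is why a sparse table hurts.
  for (size_t i = 0; i < table.get_table_size(); ++i) {
    if (table.has_element(i) && is_dead(table.get_data(i))) {
      table.remove_element(i);
    }
  }

  // If the pass released a large fraction of the entries, shrink the
  // storage so that the next pass stays cheap.
  table.consider_shrink_table();

  return orig_size - table.get_num_entries();
}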

panda/src/putil/simpleHashMap.I  +28 -7

@@ -522,27 +522,48 @@ new_table() {
 template<class Key, class Value, class Compare>
 INLINE bool SimpleHashMap<Key, Value, Compare>::
 consider_expand_table() {
-  if (_num_entries >= (_table_size >> 1)) {
-    expand_table();
+  if (_num_entries < (_table_size >> 1)) {
+    return false;
+  } else {
+    resize_table(_table_size << 1);
+    return true;
+  }
+}
+
+/**
+ * Shrinks the table if the allocated storage is significantly larger than the
+ * number of elements in it.  Returns true if shrunk, false otherwise.
+ */
+template<class Key, class Value, class Compare>
+INLINE bool SimpleHashMap<Key, Value, Compare>::
+consider_shrink_table() {
+  // If the number of elements has dropped below an eighth of the table size,
+  // it is probably time to shrink it down, lest it hurt iteration time.
+  if (_table_size <= 8 || (_table_size >> 3) < _num_entries) {
+    return false;
+  } else {
+    size_t new_size = _table_size;
+    do {
+      new_size >>= 1;
+    } while (new_size > 8 && (new_size >> 3) >= _num_entries);
+    resize_table(new_size);
     return true;
   }
-  return false;
 }
 
 /**
- * Doubles the size of the existing table.
+ * Resizes the existing table to the given size.
  */
 template<class Key, class Value, class Compare>
 void SimpleHashMap<Key, Value, Compare>::
-expand_table() {
+resize_table(size_t new_size) {
   nassertv(_table_size != 0);
 
   SimpleHashMap<Key, Value, Compare> old_map(_comp);
   swap(old_map);
 
-  // Double the table size.
   size_t old_table_size = old_map._table_size;
-  _table_size = (old_table_size << 1);
+  _table_size = new_size;
   nassertv(_table == NULL);
 
   // We allocate enough bytes for _table_size elements of TableEntry, plus
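Together with consider_expand_table() above, the map now has hysteresis: it doubles once the entry count reaches half the table size, but only shrinks once the count drops below an eighth, halving until the load climbs back above that threshold. The 4x gap between the two thresholds keeps a table hovering near one boundary from thrashing between sizes. Below is a standalone restatement of that sizing rule as a plain function, not the SimpleHashMap members themselves, using the same power-of-two thresholds as the code above.

#include <cstddef>
#include <cstdio>

// Standalone restatement of the sizing heuristic above (not the actual
// SimpleHashMap code): grow at a load factor of 1/2, shrink below 1/8,
// never below 8 slots.
size_t next_table_size(size_t table_size, size_t num_entries) {
  if (num_entries >= (table_size >> 1)) {
    return table_size << 1;                      // expand: load >= 1/2
  }
  if (table_size > 8 && (table_size >> 3) >= num_entries) {
    size_t new_size = table_size;
    do {
      new_size >>= 1;                            // halve until load > 1/8
    } while (new_size > 8 && (new_size >> 3) >= num_entries);
    return new_size;                             // shrink
  }
  return table_size;                             // leave alone
}

int main() {
  // A 4096-slot table left with only 10 live entries after garbage
  // collection shrinks to 64, the largest power of two for which 10
  // entries are more than an eighth of the table.
  std::printf("%zu\n", next_table_size(4096, 10));   // prints 64
  // 100 entries in 256 slots sits between the thresholds: unchanged.
  std::printf("%zu\n", next_table_size(256, 100));   // prints 256
  // 128 entries reaches half of 256, so the table doubles.
  std::printf("%zu\n", next_table_size(256, 128));   // prints 512
  return 0;
}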

panda/src/putil/simpleHashMap.h  +3 -1

@@ -56,6 +56,8 @@ public:
   void write(ostream &out) const;
   bool validate() const;
 
+  INLINE bool consider_shrink_table();
+
 private:
   class TableEntry;
 
@@ -68,7 +70,7 @@ private:
 
   void new_table();
   INLINE bool consider_expand_table();
-  void expand_table();
+  void resize_table(size_t new_size);
 
   class TableEntry {
   public: