SharedPtr.hpp

/*
 * Copyright (c) 2013-2020 ZeroTier, Inc.
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file in the project's root directory.
 *
 * Change Date: 2025-01-01
 *
 * On the date above, in accordance with the Business Source License, use
 * of this software will be governed by version 2.0 of the Apache License.
 */
/****/

#ifndef ZT_SHAREDPTR_HPP
#define ZT_SHAREDPTR_HPP

#include "Constants.hpp"
#include "TriviallyCopyable.hpp"
#include "Utils.hpp" // Utils::hash64(), used by hashCode()

#include <atomic> // std::atomic< int > reference counts

namespace ZeroTier {

/**
 * Simple zero-overhead intrusive reference counted pointer
 *
 * This is an intrusive shared pointer: classes that need to be reference
 * counted must declare SharedPtr a 'friend' and must have a private
 * std::atomic< int > member named __refCount.
 */

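// Illustrative sketch (not part of the original header): a class managed by
// SharedPtr must friend it and carry the counter itself. The counter must
// start at zero because the SharedPtr(T *) constructor performs the first
// increment. 'Thing' is a hypothetical example class.
//
//   class Thing
//   {
//       friend class SharedPtr< Thing >;
//
//   public:
//       Thing() : __refCount(0) {}
//
//   private:
//       std::atomic< int > __refCount;
//   };
//
//   SharedPtr< Thing > p(new Thing()); // reference count: 1
//   SharedPtr< Thing > q(p);           // reference count: 2
//   p.zero();                          // reference count: 1
//   q.zero();                          // count hits 0, object deleted
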
template< typename T >
class SharedPtr : public TriviallyCopyable
{
public:
    ZT_INLINE SharedPtr() noexcept: m_ptr(nullptr)
    {}

    explicit ZT_INLINE SharedPtr(T *obj) noexcept: m_ptr(obj)
    { if (likely(obj != nullptr)) ++*const_cast<std::atomic< int > *>(&(obj->__refCount)); }

    ZT_INLINE SharedPtr(const SharedPtr &sp) noexcept: m_ptr(sp._getAndInc())
    {}

    ZT_INLINE ~SharedPtr()
    {
        if (likely(m_ptr != nullptr)) {
            if (unlikely(--*const_cast<std::atomic< int > *>(&(m_ptr->__refCount)) <= 0))
                delete m_ptr;
        }
    }

    ZT_INLINE SharedPtr &operator=(const SharedPtr &sp)
    {
        if (likely(m_ptr != sp.m_ptr)) {
            T *p = sp._getAndInc();
            if (likely(m_ptr != nullptr)) {
                if (unlikely(--*const_cast<std::atomic< int > *>(&(m_ptr->__refCount)) <= 0))
                    delete m_ptr;
            }
            m_ptr = p;
        }
        return *this;
    }

    /**
     * Set to a naked pointer and increment its reference count
     *
     * Any current value is released first via zero(). The supplied pointer
     * must not be NULL and must not be a 'zombie' whose reference count has
     * already reached zero; no checks are performed.
     *
     * @param ptr Naked pointer to assign
     */
    ZT_INLINE void set(T *ptr) noexcept
    {
        zero();
        ++*const_cast<std::atomic< int > *>(&(ptr->__refCount));
        m_ptr = ptr;
    }

    /**
     * Stupidly set this SharedPtr to 'ptr', ignoring the current value and not incrementing the reference counter
     *
     * This must only be used in code that knows what it's doing. :)
     *
     * @param ptr Pointer to set
     */
    ZT_INLINE void unsafeSet(T *ptr) noexcept
    { m_ptr = ptr; }

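    // Illustrative sketch (assumption, not from the original): set() takes a
    // counted reference while unsafeSet() merely aliases. 'Thing' is the
    // hypothetical class from the earlier example.
    //
    //   SharedPtr< Thing > a(new Thing()); // reference count: 1
    //   SharedPtr< Thing > b;
    //   b.set(a.ptr());                    // reference count: 2
    //
    //   SharedPtr< Thing > c;
    //   c.unsafeSet(a.ptr());              // count unchanged; c does not own a
    //                                      // reference and must be cleared with
    //                                      // unsafeSet(nullptr) before it is
    //                                      // destroyed, or the count will be
    //                                      // decremented one time too many
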
    /**
     * Swap with another pointer 'for free' without ref count overhead
     *
     * @param with Pointer to swap with
     */
    ZT_INLINE void swap(SharedPtr &with) noexcept
    {
        T *tmp = m_ptr;
        m_ptr = with.m_ptr;
        with.m_ptr = tmp;
    }

    /**
     * Take the value of another pointer and set that pointer to NULL (take ownership from it)
     *
     * This is faster than assigning and then zeroing the source pointer since
     * it avoids a synchronized reference count change.
     *
     * @param from Origin pointer; will be zeroed
     */
    ZT_INLINE void move(SharedPtr &from)
    {
        if (likely(m_ptr != nullptr)) {
            if (unlikely(--*const_cast<std::atomic< int > *>(&(m_ptr->__refCount)) <= 0))
                delete m_ptr;
        }
        m_ptr = from.m_ptr;
        from.m_ptr = nullptr;
    }

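    // Illustrative sketch (assumption, not from the original): move() transfers
    // ownership without an atomic reference count update, unlike copy
    // assignment followed by zero(). 'Thing' is the hypothetical class from
    // the earlier example.
    //
    //   SharedPtr< Thing > src(new Thing()); // reference count: 1
    //   SharedPtr< Thing > dst;
    //   dst.move(src);                       // count still 1; src is now NULL
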
    ZT_INLINE operator bool() const noexcept
    { return (m_ptr != nullptr); }

    ZT_INLINE T &operator*() const noexcept
    { return *m_ptr; }

    ZT_INLINE T *operator->() const noexcept
    { return m_ptr; }

    /**
     * @return Raw pointer to held object
     */
    ZT_INLINE T *ptr() const noexcept
    { return m_ptr; }

    /**
     * Set this pointer to NULL, decrementing the reference count and deleting
     * the object if this was the last reference
     */
    ZT_INLINE void zero()
    {
        if (likely(m_ptr != nullptr)) {
            if (unlikely(--*const_cast<std::atomic< int > *>(&(m_ptr->__refCount)) <= 0))
                delete m_ptr;
            m_ptr = nullptr;
        }
    }

    /**
     * Set pointer to NULL and delete the object if its reference count is exactly 1
     *
     * This can be called periodically to implement something like a weak
     * reference as it exists in more managed languages like Java, with the
     * caveat that it only works if there is exactly one remaining SharedPtr
     * to be treated as weak.
     *
     * @return True if the object was in fact deleted OR this pointer was already NULL
     */
    ZT_INLINE bool weakGC()
    {
        if (likely(m_ptr != nullptr)) {
            int one = 1;
            if (const_cast<std::atomic< int > *>(&(m_ptr->__refCount))->compare_exchange_strong(one, (int)0)) {
                delete m_ptr;
                m_ptr = nullptr;
                return true;
            }
            return false;
        } else {
            return true;
        }
    }

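    // Illustrative sketch (assumption, not from the original): weakGC() used
    // as a periodic sweep over a cache that should drop entries it alone still
    // references. 'Thing' is the hypothetical class from the earlier example.
    //
    //   std::vector< SharedPtr< Thing > > cache;
    //   ...
    //   for (SharedPtr< Thing > &entry : cache)
    //       entry.weakGC(); // deleted and set to NULL if the cache held the
    //                       // only remaining reference; a real sweep would
    //                       // then erase the now-NULL entries
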
    /**
     * Get the current reference count for this object, which can change at any time
     *
     * @return Number of references according to this object's ref count or 0 if NULL
     */
    ZT_INLINE int references() const noexcept
    {
        if (likely(m_ptr != nullptr))
            return m_ptr->__refCount;
        return 0;
    }

    ZT_INLINE unsigned long hashCode() const noexcept
    { return (unsigned long)Utils::hash64((uint64_t)((uintptr_t)m_ptr)); }

    ZT_INLINE bool operator==(const SharedPtr &sp) const noexcept
    { return (m_ptr == sp.m_ptr); }

    ZT_INLINE bool operator!=(const SharedPtr &sp) const noexcept
    { return (m_ptr != sp.m_ptr); }

    ZT_INLINE bool operator>(const SharedPtr &sp) const noexcept
    { return (m_ptr > sp.m_ptr); }

    ZT_INLINE bool operator<(const SharedPtr &sp) const noexcept
    { return (m_ptr < sp.m_ptr); }

    ZT_INLINE bool operator>=(const SharedPtr &sp) const noexcept
    { return (m_ptr >= sp.m_ptr); }

    ZT_INLINE bool operator<=(const SharedPtr &sp) const noexcept
    { return (m_ptr <= sp.m_ptr); }

private:
    ZT_INLINE T *_getAndInc() const noexcept
    {
        if (m_ptr)
            ++*const_cast<std::atomic< int > *>(&(m_ptr->__refCount));
        return m_ptr;
    }

    T *m_ptr;
};

} // namespace ZeroTier

// Augment std::swap to speed up some operations with SharedPtr.
namespace std {

template< typename T >
ZT_INLINE void swap(ZeroTier::SharedPtr< T > &a, ZeroTier::SharedPtr< T > &b) noexcept
{ a.swap(b); }

} // namespace std

#endif