// basisu_containers.h
#pragma once

#include <stdlib.h>
#include <stdio.h>
#include <stdint.h>
#include <assert.h>
#include <string.h>     // memcpy(), memset(), memmove(), memcmp()
#include <limits.h>     // UINT_MAX
#include <algorithm>
#include <utility>      // std::pair
#include <type_traits>  // std::is_trivially_copyable

#if defined(__linux__) && !defined(ANDROID)
// Only for malloc_usable_size() in basisu_containers_impl.h
#include <malloc.h>
#define HAS_MALLOC_USABLE_SIZE 1
#endif

// Set to 1 to always check vector operator[], front(), and back() even in release.
#define BASISU_VECTOR_FORCE_CHECKING 0

// If 1, the vector container will not query the CRT to get the size of resized memory blocks.
#define BASISU_VECTOR_DETERMINISTIC 1

#ifdef _MSC_VER
#define BASISU_FORCE_INLINE __forceinline
#else
#define BASISU_FORCE_INLINE inline
#endif
namespace basisu
{
    enum { cInvalidIndex = -1 };

    namespace helpers
    {
        inline bool is_power_of_2(uint32_t x) { return x && ((x & (x - 1U)) == 0U); }
        inline bool is_power_of_2(uint64_t x) { return x && ((x & (x - 1U)) == 0U); }

        template<class T> const T& minimum(const T& a, const T& b) { return (b < a) ? b : a; }
        template<class T> const T& maximum(const T& a, const T& b) { return (a < b) ? b : a; }

        inline uint32_t floor_log2i(uint32_t v)
        {
            uint32_t l = 0;
            while (v > 1U)
            {
                v >>= 1;
                l++;
            }
            return l;
        }
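
        // The next_pow2() overloads below round up to the nearest power of 2 by
        // smearing the highest set bit of (val - 1) into every lower bit position,
        // then adding 1. For example, next_pow2(17): 17 - 1 = 16 (0x10), smeared to
        // 0x1F, +1 = 32. Boundary behavior: next_pow2(0) returns 0, next_pow2(1)
        // returns 1, and inputs above the largest representable power of 2 wrap to 0.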
        inline uint32_t next_pow2(uint32_t val)
        {
            val--;
            val |= val >> 16;
            val |= val >> 8;
            val |= val >> 4;
            val |= val >> 2;
            val |= val >> 1;
            return val + 1;
        }

        inline uint64_t next_pow2(uint64_t val)
        {
            val--;
            val |= val >> 32;
            val |= val >> 16;
            val |= val >> 8;
            val |= val >> 4;
            val |= val >> 2;
            val |= val >> 1;
            return val + 1;
        }

    } // namespace helpers
    template <typename T>
    inline T* construct(T* p)
    {
        return new (static_cast<void*>(p)) T;
    }

    template <typename T, typename U>
    inline T* construct(T* p, const U& init)
    {
        return new (static_cast<void*>(p)) T(init);
    }

    template <typename T>
    inline void construct_array(T* p, size_t n)
    {
        T* q = p + n;
        for (; p != q; ++p)
            new (static_cast<void*>(p)) T;
    }

    template <typename T, typename U>
    inline void construct_array(T* p, size_t n, const U& init)
    {
        T* q = p + n;
        for (; p != q; ++p)
            new (static_cast<void*>(p)) T(init);
    }

    template <typename T>
    inline void destruct(T* p)
    {
        (void)p;
        p->~T();
    }

    template <typename T> inline void destruct_array(T* p, size_t n)
    {
        T* q = p + n;
        for (; p != q; ++p)
            p->~T();
    }
    template<typename T> struct int_traits { enum { cMin = INT32_MIN, cMax = INT32_MAX, cSigned = true }; };

    template<> struct int_traits<int8_t> { enum { cMin = INT8_MIN, cMax = INT8_MAX, cSigned = true }; };
    template<> struct int_traits<int16_t> { enum { cMin = INT16_MIN, cMax = INT16_MAX, cSigned = true }; };
    template<> struct int_traits<int32_t> { enum { cMin = INT32_MIN, cMax = INT32_MAX, cSigned = true }; };

    template<> struct int_traits<uint8_t> { enum { cMin = 0, cMax = UINT8_MAX, cSigned = false }; };
    template<> struct int_traits<uint16_t> { enum { cMin = 0, cMax = UINT16_MAX, cSigned = false }; };
    template<> struct int_traits<uint32_t> { enum { cMin = 0, cMax = UINT32_MAX, cSigned = false }; };

    template<typename T>
    struct scalar_type
    {
        enum { cFlag = false };
        static inline void construct(T* p) { basisu::construct(p); }
        static inline void construct(T* p, const T& init) { basisu::construct(p, init); }
        static inline void construct_array(T* p, size_t n) { basisu::construct_array(p, n); }
        static inline void destruct(T* p) { basisu::destruct(p); }
        static inline void destruct_array(T* p, size_t n) { basisu::destruct_array(p, n); }
    };
    template<typename T> struct scalar_type<T*>
    {
        enum { cFlag = true };
        static inline void construct(T** p) { memset(p, 0, sizeof(T*)); }
        static inline void construct(T** p, T* init) { *p = init; }
        static inline void construct_array(T** p, size_t n) { memset(p, 0, sizeof(T*) * n); }
        static inline void destruct(T** p) { (void)p; }
        static inline void destruct_array(T** p, size_t n) { (void)p; (void)n; }
    };
#define BASISU_DEFINE_BUILT_IN_TYPE(X) \
    template<> struct scalar_type<X> { \
        enum { cFlag = true }; \
        static inline void construct(X* p) { memset(p, 0, sizeof(X)); } \
        static inline void construct(X* p, const X& init) { memcpy(p, &init, sizeof(X)); } \
        static inline void construct_array(X* p, size_t n) { memset(p, 0, sizeof(X) * n); } \
        static inline void destruct(X* p) { (void)p; } \
        static inline void destruct_array(X* p, size_t n) { (void)p; (void)n; } };

    BASISU_DEFINE_BUILT_IN_TYPE(bool)
    BASISU_DEFINE_BUILT_IN_TYPE(char)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned char)
    BASISU_DEFINE_BUILT_IN_TYPE(short)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned short)
    BASISU_DEFINE_BUILT_IN_TYPE(int)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned int)
    BASISU_DEFINE_BUILT_IN_TYPE(long)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned long)
#ifdef __GNUC__
    BASISU_DEFINE_BUILT_IN_TYPE(long long)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned long long)
#else
    BASISU_DEFINE_BUILT_IN_TYPE(__int64)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned __int64)
#endif
    BASISU_DEFINE_BUILT_IN_TYPE(float)
    BASISU_DEFINE_BUILT_IN_TYPE(double)
    BASISU_DEFINE_BUILT_IN_TYPE(long double)

#undef BASISU_DEFINE_BUILT_IN_TYPE
    template<typename T>
    struct bitwise_movable { enum { cFlag = false }; };

#define BASISU_DEFINE_BITWISE_MOVABLE(Q) template<> struct bitwise_movable<Q> { enum { cFlag = true }; };

    template<typename T>
    struct bitwise_copyable { enum { cFlag = false }; };

#define BASISU_DEFINE_BITWISE_COPYABLE(Q) template<> struct bitwise_copyable<Q> { enum { cFlag = true }; };

#define BASISU_IS_POD(T) __is_pod(T)

#define BASISU_IS_SCALAR_TYPE(T) (scalar_type<T>::cFlag)

#if !defined(BASISU_HAVE_STD_TRIVIALLY_COPYABLE) && defined(__GNUC__) && __GNUC__<5
    //#define BASISU_IS_TRIVIALLY_COPYABLE(...) __has_trivial_copy(__VA_ARGS__)
#define BASISU_IS_TRIVIALLY_COPYABLE(...) __is_trivially_copyable(__VA_ARGS__)
#else
#define BASISU_IS_TRIVIALLY_COPYABLE(...) std::is_trivially_copyable<__VA_ARGS__>::value
#endif

    // TODO: clean this up
#define BASISU_IS_BITWISE_COPYABLE(T) (BASISU_IS_SCALAR_TYPE(T) || BASISU_IS_POD(T) || BASISU_IS_TRIVIALLY_COPYABLE(T) || (bitwise_copyable<T>::cFlag))

#define BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) (BASISU_IS_BITWISE_COPYABLE(T) || (bitwise_movable<T>::cFlag))

#define BASISU_HAS_DESTRUCTOR(T) ((!scalar_type<T>::cFlag) && (!__is_pod(T)))

    typedef char(&yes_t)[1];
    typedef char(&no_t)[2];

    template <class U> yes_t class_test(int U::*);
    template <class U> no_t class_test(...);

    template <class T> struct is_class
    {
        enum { value = (sizeof(class_test<T>(0)) == sizeof(yes_t)) };
    };

    template <typename T> struct is_pointer
    {
        enum { value = false };
    };

    template <typename T> struct is_pointer<T*>
    {
        enum { value = true };
    };

    struct empty_type { };

    BASISU_DEFINE_BITWISE_COPYABLE(empty_type);
    BASISU_DEFINE_BITWISE_MOVABLE(empty_type);

    template<typename T> struct rel_ops
    {
        friend bool operator!=(const T& x, const T& y) { return (!(x == y)); }
        friend bool operator> (const T& x, const T& y) { return (y < x); }
        friend bool operator<=(const T& x, const T& y) { return (!(y < x)); }
        friend bool operator>=(const T& x, const T& y) { return (!(x < y)); }
    };
    struct elemental_vector
    {
        void* m_p;
        uint32_t m_size;
        uint32_t m_capacity;

        typedef void (*object_mover)(void* pDst, void* pSrc, uint32_t num);

        bool increase_capacity(uint32_t min_new_capacity, bool grow_hint, uint32_t element_size, object_mover pRelocate, bool nofail);
    };
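
    // elemental_vector is a type-erased view of vector<T>'s data members (the two
    // layouts must match). increase_capacity() is implemented once, out of line (in
    // basisu_containers_impl.h), and vector<T>::increase_capacity() forwards to it by
    // reinterpret_cast'ing "this", passing sizeof(T) plus an optional relocation
    // callback for element types that can't be moved with a plain realloc()/memcpy().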
    template<typename T>
    class vector : public rel_ops< vector<T> >
    {
    public:
        typedef T* iterator;
        typedef const T* const_iterator;
        typedef T value_type;
        typedef T& reference;
        typedef const T& const_reference;
        typedef T* pointer;
        typedef const T* const_pointer;

        inline vector() :
            m_p(NULL),
            m_size(0),
            m_capacity(0)
        {
        }

        inline vector(uint32_t n, const T& init) :
            m_p(NULL),
            m_size(0),
            m_capacity(0)
        {
            increase_capacity(n, false);
            construct_array(m_p, n, init);
            m_size = n;
        }

        inline vector(const vector& other) :
            m_p(NULL),
            m_size(0),
            m_capacity(0)
        {
            increase_capacity(other.m_size, false);

            m_size = other.m_size;

            if (BASISU_IS_BITWISE_COPYABLE(T))
            {
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wclass-memaccess"
#endif
#endif
                if ((m_p) && (other.m_p))
                    memcpy(m_p, other.m_p, m_size * sizeof(T));
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#endif
            }
            else
            {
                T* pDst = m_p;
                const T* pSrc = other.m_p;
                for (uint32_t i = m_size; i > 0; i--)
                    construct(pDst++, *pSrc++);
            }
        }

        inline explicit vector(size_t size) :
            m_p(NULL),
            m_size(0),
            m_capacity(0)
        {
            resize(size);
        }

        inline ~vector()
        {
            if (m_p)
            {
                scalar_type<T>::destruct_array(m_p, m_size);
                free(m_p);
            }
        }

        inline vector& operator= (const vector& other)
        {
            if (this == &other)
                return *this;

            if (m_capacity >= other.m_size)
                resize(0);
            else
            {
                clear();
                increase_capacity(other.m_size, false);
            }

            if (BASISU_IS_BITWISE_COPYABLE(T))
            {
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wclass-memaccess"
#endif
#endif
                if ((m_p) && (other.m_p))
                    memcpy(m_p, other.m_p, other.m_size * sizeof(T));
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#endif
            }
            else
            {
                T* pDst = m_p;
                const T* pSrc = other.m_p;
                for (uint32_t i = other.m_size; i > 0; i--)
                    construct(pDst++, *pSrc++);
            }

            m_size = other.m_size;

            return *this;
        }

        BASISU_FORCE_INLINE const T* begin() const { return m_p; }
        BASISU_FORCE_INLINE T* begin() { return m_p; }

        BASISU_FORCE_INLINE const T* end() const { return m_p + m_size; }
        BASISU_FORCE_INLINE T* end() { return m_p + m_size; }

        BASISU_FORCE_INLINE bool empty() const { return !m_size; }
        BASISU_FORCE_INLINE uint32_t size() const { return m_size; }
        BASISU_FORCE_INLINE uint32_t size_in_bytes() const { return m_size * sizeof(T); }
        BASISU_FORCE_INLINE uint32_t capacity() const { return m_capacity; }
        // operator[] asserts on out-of-range indices, but in final builds there is no
        // range checking on this method unless BASISU_VECTOR_FORCE_CHECKING is set to 1.
        //BASISU_FORCE_INLINE const T& operator[] (uint32_t i) const { assert(i < m_size); return m_p[i]; }
        //BASISU_FORCE_INLINE T& operator[] (uint32_t i) { assert(i < m_size); return m_p[i]; }

#if !BASISU_VECTOR_FORCE_CHECKING
        BASISU_FORCE_INLINE const T& operator[] (size_t i) const { assert(i < m_size); return m_p[i]; }
        BASISU_FORCE_INLINE T& operator[] (size_t i) { assert(i < m_size); return m_p[i]; }
#else
        BASISU_FORCE_INLINE const T& operator[] (size_t i) const
        {
            if (i >= m_size)
            {
                fprintf(stderr, "operator[] invalid index: %u, max entries %u, type size %u\n", (uint32_t)i, m_size, (uint32_t)sizeof(T));
                abort();
            }
            return m_p[i];
        }
        BASISU_FORCE_INLINE T& operator[] (size_t i)
        {
            if (i >= m_size)
            {
                fprintf(stderr, "operator[] invalid index: %u, max entries %u, type size %u\n", (uint32_t)i, m_size, (uint32_t)sizeof(T));
                abort();
            }
            return m_p[i];
        }
#endif

        // at() always includes range checking, even in final builds, unlike operator [].
        // The first element is returned if the index is out of range.
        BASISU_FORCE_INLINE const T& at(size_t i) const { assert(i < m_size); return (i >= m_size) ? m_p[0] : m_p[i]; }
        BASISU_FORCE_INLINE T& at(size_t i) { assert(i < m_size); return (i >= m_size) ? m_p[0] : m_p[i]; }
#if !BASISU_VECTOR_FORCE_CHECKING
        BASISU_FORCE_INLINE const T& front() const { assert(m_size); return m_p[0]; }
        BASISU_FORCE_INLINE T& front() { assert(m_size); return m_p[0]; }

        BASISU_FORCE_INLINE const T& back() const { assert(m_size); return m_p[m_size - 1]; }
        BASISU_FORCE_INLINE T& back() { assert(m_size); return m_p[m_size - 1]; }
#else
        BASISU_FORCE_INLINE const T& front() const
        {
            if (!m_size)
            {
                fprintf(stderr, "front: vector is empty, type size %u\n", (uint32_t)sizeof(T));
                abort();
            }
            return m_p[0];
        }
        BASISU_FORCE_INLINE T& front()
        {
            if (!m_size)
            {
                fprintf(stderr, "front: vector is empty, type size %u\n", (uint32_t)sizeof(T));
                abort();
            }
            return m_p[0];
        }

        BASISU_FORCE_INLINE const T& back() const
        {
            if (!m_size)
            {
                fprintf(stderr, "back: vector is empty, type size %u\n", (uint32_t)sizeof(T));
                abort();
            }
            return m_p[m_size - 1];
        }
        BASISU_FORCE_INLINE T& back()
        {
            if (!m_size)
            {
                fprintf(stderr, "back: vector is empty, type size %u\n", (uint32_t)sizeof(T));
                abort();
            }
            return m_p[m_size - 1];
        }
#endif
        BASISU_FORCE_INLINE const T* get_ptr() const { return m_p; }
        BASISU_FORCE_INLINE T* get_ptr() { return m_p; }

        BASISU_FORCE_INLINE const T* data() const { return m_p; }
        BASISU_FORCE_INLINE T* data() { return m_p; }

        // clear() sets the container to empty, then frees the allocated block.
        inline void clear()
        {
            if (m_p)
            {
                scalar_type<T>::destruct_array(m_p, m_size);
                free(m_p);
                m_p = NULL;
                m_size = 0;
                m_capacity = 0;
            }
        }

        inline void clear_no_destruction()
        {
            if (m_p)
            {
                free(m_p);
                m_p = NULL;
                m_size = 0;
                m_capacity = 0;
            }
        }

        inline void reserve(size_t new_capacity_size_t)
        {
            if (new_capacity_size_t > UINT32_MAX)
            {
                assert(0);
                return;
            }

            uint32_t new_capacity = (uint32_t)new_capacity_size_t;

            if (new_capacity > m_capacity)
                increase_capacity(new_capacity, false);
            else if (new_capacity < m_capacity)
            {
                // Must work around the lack of a "decrease_capacity()" method.
                // This case is rare enough in practice that it's probably not worth implementing an optimized in-place resize.
                vector tmp;
                tmp.increase_capacity(helpers::maximum(m_size, new_capacity), false);
                tmp = *this;
                swap(tmp);
            }
        }
        inline bool try_reserve(size_t new_capacity_size_t)
        {
            if (new_capacity_size_t > UINT32_MAX)
            {
                assert(0);
                return false;
            }

            uint32_t new_capacity = (uint32_t)new_capacity_size_t;

            if (new_capacity > m_capacity)
            {
                if (!increase_capacity(new_capacity, false, true))
                    return false;
            }
            else if (new_capacity < m_capacity)
            {
                // Must work around the lack of a "decrease_capacity()" method.
                // This case is rare enough in practice that it's probably not worth implementing an optimized in-place resize.
                vector tmp;
                if (!tmp.increase_capacity(helpers::maximum(m_size, new_capacity), false, true))
                    return false;
                tmp = *this;
                swap(tmp);
            }

            return true;
        }
        // resize(0) sets the container to empty, but does not free the allocated block.
        inline void resize(size_t new_size_size_t, bool grow_hint = false)
        {
            if (new_size_size_t > UINT32_MAX)
            {
                assert(0);
                return;
            }

            uint32_t new_size = (uint32_t)new_size_size_t;

            if (m_size != new_size)
            {
                if (new_size < m_size)
                    scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);
                else
                {
                    if (new_size > m_capacity)
                        increase_capacity(new_size, (new_size == (m_size + 1)) || grow_hint);

                    scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);
                }

                m_size = new_size;
            }
        }

        inline bool try_resize(size_t new_size_size_t, bool grow_hint = false)
        {
            if (new_size_size_t > UINT32_MAX)
            {
                assert(0);
                return false;
            }

            uint32_t new_size = (uint32_t)new_size_size_t;

            if (m_size != new_size)
            {
                if (new_size < m_size)
                    scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);
                else
                {
                    if (new_size > m_capacity)
                    {
                        if (!increase_capacity(new_size, (new_size == (m_size + 1)) || grow_hint, true))
                            return false;
                    }

                    scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);
                }

                m_size = new_size;
            }

            return true;
        }
        // If size >= capacity/2, reset() sets the container's size to 0 but doesn't free the allocated block (because the container may be similarly loaded in the future).
        // Otherwise it blows away the allocated block. See http://www.codercorner.com/blog/?p=494
        inline void reset()
        {
            if (m_size >= (m_capacity >> 1))
                resize(0);
            else
                clear();
        }

        inline T* enlarge(uint32_t i)
        {
            uint32_t cur_size = m_size;
            resize(cur_size + i, true);
            return get_ptr() + cur_size;
        }

        inline T* try_enlarge(uint32_t i)
        {
            uint32_t cur_size = m_size;
            if (!try_resize(cur_size + i, true))
                return NULL;
            return get_ptr() + cur_size;
        }
        BASISU_FORCE_INLINE void push_back(const T& obj)
        {
            assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));

            if (m_size >= m_capacity)
                increase_capacity(m_size + 1, true);

            scalar_type<T>::construct(m_p + m_size, obj);
            m_size++;
        }

        inline bool try_push_back(const T& obj)
        {
            assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));

            if (m_size >= m_capacity)
            {
                if (!increase_capacity(m_size + 1, true, true))
                    return false;
            }

            scalar_type<T>::construct(m_p + m_size, obj);
            m_size++;

            return true;
        }

        inline void push_back_value(T obj)
        {
            if (m_size >= m_capacity)
                increase_capacity(m_size + 1, true);

            scalar_type<T>::construct(m_p + m_size, obj);
            m_size++;
        }

        inline void pop_back()
        {
            assert(m_size);

            if (m_size)
            {
                m_size--;
                scalar_type<T>::destruct(&m_p[m_size]);
            }
        }
        inline void insert(uint32_t index, const T* p, uint32_t n)
        {
            assert(index <= m_size);
            if (!n)
                return;

            const uint32_t orig_size = m_size;
            resize(m_size + n, true);

            const uint32_t num_to_move = orig_size - index;

            if (BASISU_IS_BITWISE_COPYABLE(T))
            {
                // This overwrites the destination object bits, but bitwise copyable means we don't need to worry about destruction.
                memmove(m_p + index + n, m_p + index, sizeof(T) * num_to_move);
            }
            else
            {
                const T* pSrc = m_p + orig_size - 1;
                T* pDst = const_cast<T*>(pSrc) + n;

                for (uint32_t i = 0; i < num_to_move; i++)
                {
                    assert((pDst - m_p) < (int)m_size);
                    *pDst-- = *pSrc--;
                }
            }

            T* pDst = m_p + index;

            if (BASISU_IS_BITWISE_COPYABLE(T))
            {
                // This copies in the new bits, overwriting the existing objects, which is OK for copyable types that don't need destruction.
                memcpy(pDst, p, sizeof(T) * n);
            }
            else
            {
                for (uint32_t i = 0; i < n; i++)
                {
                    assert((pDst - m_p) < (int)m_size);
                    *pDst++ = *p++;
                }
            }
        }

        inline void insert(T* p, const T& obj)
        {
            int64_t ofs = p - begin();
            if ((ofs < 0) || (ofs > UINT32_MAX))
            {
                assert(0);
                return;
            }

            insert((uint32_t)ofs, &obj, 1);
        }

        // push_front() isn't going to be very fast - it's only here for usability.
        inline void push_front(const T& obj)
        {
            insert(0, &obj, 1);
        }
        vector& append(const vector& other)
        {
            if (other.m_size)
                insert(m_size, &other[0], other.m_size);
            return *this;
        }

        vector& append(const T* p, uint32_t n)
        {
            if (n)
                insert(m_size, p, n);
            return *this;
        }
        inline void erase(uint32_t start, uint32_t n)
        {
            assert((start + n) <= m_size);
            if ((start + n) > m_size)
                return;

            if (!n)
                return;

            const uint32_t num_to_move = m_size - (start + n);

            T* pDst = m_p + start;
            const T* pSrc = m_p + start + n;

            if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T))
            {
                // This test is overly cautious.
                if ((!BASISU_IS_BITWISE_COPYABLE(T)) || (BASISU_HAS_DESTRUCTOR(T)))
                {
                    // Type has been marked explicitly as bitwise movable, which means we can move them around but they may need to be destructed.
                    // First destroy the erased objects.
                    scalar_type<T>::destruct_array(pDst, n);
                }

                // Copy "down" the objects to preserve, filling in the empty slots.
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wclass-memaccess"
#endif
#endif
                memmove(pDst, pSrc, num_to_move * sizeof(T));
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#endif
            }
            else
            {
                // Type is not bitwise copyable or movable.
                // Move them down one at a time by using the equals operator, and destroying anything that's left over at the end.
                T* pDst_end = pDst + num_to_move;

                while (pDst != pDst_end)
                    *pDst++ = *pSrc++;

                scalar_type<T>::destruct_array(pDst_end, n);
            }

            m_size -= n;
        }
        inline void erase(uint32_t index)
        {
            erase(index, 1);
        }

        inline void erase(T* p)
        {
            assert((p >= m_p) && (p < (m_p + m_size)));
            erase(static_cast<uint32_t>(p - m_p));
        }

        inline void erase(T* pFirst, T* pEnd)
        {
            assert(pFirst <= pEnd);
            assert(pFirst >= begin() && pFirst <= end());
            assert(pEnd >= begin() && pEnd <= end());

            int64_t ofs = pFirst - begin();
            if ((ofs < 0) || (ofs > UINT32_MAX))
            {
                assert(0);
                return;
            }

            int64_t n = pEnd - pFirst;
            if ((n < 0) || (n > UINT32_MAX))
            {
                assert(0);
                return;
            }

            erase((uint32_t)ofs, (uint32_t)n);
        }

        void erase_unordered(uint32_t index)
        {
            assert(index < m_size);

            if ((index + 1) < m_size)
                (*this)[index] = back();

            pop_back();
        }
        inline bool operator== (const vector& rhs) const
        {
            if (m_size != rhs.m_size)
                return false;
            else if (m_size)
            {
                if (scalar_type<T>::cFlag)
                    return memcmp(m_p, rhs.m_p, sizeof(T) * m_size) == 0;
                else
                {
                    const T* pSrc = m_p;
                    const T* pDst = rhs.m_p;
                    for (uint32_t i = m_size; i; i--)
                        if (!(*pSrc++ == *pDst++))
                            return false;
                }
            }

            return true;
        }

        inline bool operator< (const vector& rhs) const
        {
            const uint32_t min_size = helpers::minimum(m_size, rhs.m_size);

            const T* pSrc = m_p;
            const T* pSrc_end = m_p + min_size;
            const T* pDst = rhs.m_p;

            while ((pSrc < pSrc_end) && (*pSrc == *pDst))
            {
                pSrc++;
                pDst++;
            }

            if (pSrc < pSrc_end)
                return *pSrc < *pDst;

            return m_size < rhs.m_size;
        }

        inline void swap(vector& other)
        {
            std::swap(m_p, other.m_p);
            std::swap(m_size, other.m_size);
            std::swap(m_capacity, other.m_capacity);
        }
        inline void sort()
        {
            std::sort(begin(), end());
        }

        inline void unique()
        {
            if (!empty())
            {
                sort();
                resize(std::unique(begin(), end()) - begin());
            }
        }

        inline void reverse()
        {
            uint32_t j = m_size >> 1;
            for (uint32_t i = 0; i < j; i++)
                std::swap(m_p[i], m_p[m_size - 1 - i]);
        }

        inline int find(const T& key) const
        {
            const T* p = m_p;
            const T* p_end = m_p + m_size;

            uint32_t index = 0;

            while (p != p_end)
            {
                if (key == *p)
                    return index;

                p++;
                index++;
            }

            return cInvalidIndex;
        }
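
        // The two find_sorted() overloads below implement Knuth's uniform binary
        // search (TAOCP 6.2.1, Algorithm U). m holds the number of remaining
        // candidates and is halved each probe. The branchless index update
        //   cmp = -cmp; i += (((m + 1) >> 1) ^ cmp) - cmp;
        // adds +ceil(m/2) when key >= element (cmp was 0) and -ceil(m/2) when
        // key < element (cmp was 1), stepping the probe position without a branch.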
        inline int find_sorted(const T& key) const
        {
            if (m_size)
            {
                // Uniform binary search - Knuth Algorithm 6.2.1 U, unrolled twice.
                int i = ((m_size + 1) >> 1) - 1;
                int m = m_size;

                for (; ; )
                {
                    assert(i >= 0 && i < (int)m_size);
                    const T* pKey_i = m_p + i;
                    int cmp = key < *pKey_i;
#if defined(_DEBUG) || defined(DEBUG)
                    int cmp2 = *pKey_i < key;
                    assert((cmp != cmp2) || (key == *pKey_i));
#endif
                    if ((!cmp) && (key == *pKey_i)) return i;
                    m >>= 1;
                    if (!m) break;
                    cmp = -cmp;
                    i += (((m + 1) >> 1) ^ cmp) - cmp;
                    if (i < 0)
                        break;

                    assert(i >= 0 && i < (int)m_size);
                    pKey_i = m_p + i;
                    cmp = key < *pKey_i;
#if defined(_DEBUG) || defined(DEBUG)
                    cmp2 = *pKey_i < key;
                    assert((cmp != cmp2) || (key == *pKey_i));
#endif
                    if ((!cmp) && (key == *pKey_i)) return i;
                    m >>= 1;
                    if (!m) break;
                    cmp = -cmp;
                    i += (((m + 1) >> 1) ^ cmp) - cmp;
                    if (i < 0)
                        break;
                }
            }

            return cInvalidIndex;
        }
        template<typename Q>
        inline int find_sorted(const T& key, Q less_than) const
        {
            if (m_size)
            {
                // Uniform binary search - Knuth Algorithm 6.2.1 U, unrolled twice.
                int i = ((m_size + 1) >> 1) - 1;
                int m = m_size;

                for (; ; )
                {
                    assert(i >= 0 && i < (int)m_size);
                    const T* pKey_i = m_p + i;
                    int cmp = less_than(key, *pKey_i);
                    if ((!cmp) && (!less_than(*pKey_i, key))) return i;
                    m >>= 1;
                    if (!m) break;
                    cmp = -cmp;
                    i += (((m + 1) >> 1) ^ cmp) - cmp;
                    if (i < 0)
                        break;

                    assert(i >= 0 && i < (int)m_size);
                    pKey_i = m_p + i;
                    cmp = less_than(key, *pKey_i);
                    if ((!cmp) && (!less_than(*pKey_i, key))) return i;
                    m >>= 1;
                    if (!m) break;
                    cmp = -cmp;
                    i += (((m + 1) >> 1) ^ cmp) - cmp;
                    if (i < 0)
                        break;
                }
            }

            return cInvalidIndex;
        }
        inline uint32_t count_occurences(const T& key) const
        {
            uint32_t c = 0;

            const T* p = m_p;
            const T* p_end = m_p + m_size;

            while (p != p_end)
            {
                if (key == *p)
                    c++;

                p++;
            }

            return c;
        }

        inline void set_all(const T& o)
        {
            if ((sizeof(T) == 1) && (scalar_type<T>::cFlag))
            {
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wclass-memaccess"
#endif
#endif
                memset(m_p, *reinterpret_cast<const uint8_t*>(&o), m_size);
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#endif
            }
            else
            {
                T* pDst = m_p;
                T* pDst_end = pDst + m_size;
                while (pDst != pDst_end)
                    *pDst++ = o;
            }
        }
        // Caller assumes ownership of the heap block associated with the container. Container is cleared.
        inline void* assume_ownership()
        {
            T* p = m_p;

            m_p = NULL;
            m_size = 0;
            m_capacity = 0;

            return p;
        }

        // Caller is granting ownership of the indicated heap block.
        // Block must have size constructed elements, and have enough room for capacity elements.
        // The block must have been allocated using malloc().
        // Important: This method is used in Basis Universal. If you change how this container allocates memory, you'll need to change any users of this method.
        inline bool grant_ownership(T* p, uint32_t size, uint32_t capacity)
        {
            // To prevent the caller from obviously shooting themselves in the foot.
            if (((p + capacity) > m_p) && (p < (m_p + m_capacity)))
            {
                // Can't grant ownership of a block inside the container itself!
                assert(0);
                return false;
            }

            if (size > capacity)
            {
                assert(0);
                return false;
            }

            if (!p)
            {
                if (capacity)
                {
                    assert(0);
                    return false;
                }
            }
            else if (!capacity)
            {
                assert(0);
                return false;
            }

            clear();
            m_p = p;
            m_size = size;
            m_capacity = capacity;
            return true;
        }
    private:
        T* m_p;
        uint32_t m_size;
        uint32_t m_capacity;

        template<typename Q> struct is_vector { enum { cFlag = false }; };
        template<typename Q> struct is_vector< vector<Q> > { enum { cFlag = true }; };

        static void object_mover(void* pDst_void, void* pSrc_void, uint32_t num)
        {
            T* pSrc = static_cast<T*>(pSrc_void);
            T* const pSrc_end = pSrc + num;
            T* pDst = static_cast<T*>(pDst_void);

            while (pSrc != pSrc_end)
            {
                // placement new
                new (static_cast<void*>(pDst)) T(*pSrc);
                pSrc->~T();
                ++pSrc;
                ++pDst;
            }
        }

        inline bool increase_capacity(uint32_t min_new_capacity, bool grow_hint, bool nofail = false)
        {
            return reinterpret_cast<elemental_vector*>(this)->increase_capacity(
                min_new_capacity, grow_hint, sizeof(T),
                (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) || (is_vector<T>::cFlag)) ? NULL : object_mover, nofail);
        }
    };

    template<typename T> struct bitwise_movable< vector<T> > { enum { cFlag = true }; };
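
    // Illustrative usage of basisu::vector (a sketch, not part of the original header):
    //
    //   basisu::vector<uint32_t> v;
    //   v.reserve(8);
    //   for (uint32_t i = 0; i < 8; i++)
    //       v.push_back(i * i);
    //   v.erase_unordered(2);          // O(1) removal; does not preserve element order
    //   int idx = v.find(49U);         // linear search; cInvalidIndex if absent
    //   v.sort();                      // std::sort over the raw pointer range
    //
    // Unlike std::vector, size()/capacity() are 32-bit, at() clamps out-of-range
    // indices to element 0 instead of throwing, and storage comes from malloc()/free().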
    // Hash map

    template <typename T>
    struct hasher
    {
        inline size_t operator() (const T& key) const { return static_cast<size_t>(key); }
    };

    template <typename T>
    struct equal_to
    {
        inline bool operator()(const T& a, const T& b) const { return a == b; }
    };
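
    // Notes on the hash_map below (derived from the implementation): it's an
    // open-addressing table with linear probing (the probe walks downward and
    // wraps around), Fibonacci hashing to spread keys, and a power-of-2 table
    // size. m_grow_threshold is set to half the table size, so the load factor
    // stays at or below ~50% before grow() rehashes into a larger table.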
    // Important: The Hasher and Equals objects must be bitwise movable!
    template<typename Key, typename Value = empty_type, typename Hasher = hasher<Key>, typename Equals = equal_to<Key> >
    class hash_map
    {
    public:
        class iterator;
        class const_iterator;

    private:
        friend class iterator;
        friend class const_iterator;

        enum state
        {
            cStateInvalid = 0,
            cStateValid = 1
        };

        enum
        {
            cMinHashSize = 4U
        };

    public:
        typedef hash_map<Key, Value, Hasher, Equals> hash_map_type;
        typedef std::pair<Key, Value> value_type;
        typedef Key key_type;
        typedef Value referent_type;
        typedef Hasher hasher_type;
        typedef Equals equals_type;

        hash_map() :
            m_hash_shift(32), m_num_valid(0), m_grow_threshold(0)
        {
        }

        hash_map(const hash_map& other) :
            m_values(other.m_values),
            m_hash_shift(other.m_hash_shift),
            m_hasher(other.m_hasher),
            m_equals(other.m_equals),
            m_num_valid(other.m_num_valid),
            m_grow_threshold(other.m_grow_threshold)
        {
        }

        hash_map& operator= (const hash_map& other)
        {
            if (this == &other)
                return *this;

            clear();

            m_values = other.m_values;
            m_hash_shift = other.m_hash_shift;
            m_num_valid = other.m_num_valid;
            m_grow_threshold = other.m_grow_threshold;
            m_hasher = other.m_hasher;
            m_equals = other.m_equals;

            return *this;
        }

        inline ~hash_map()
        {
            clear();
        }
        const Equals& get_equals() const { return m_equals; }
        Equals& get_equals() { return m_equals; }

        void set_equals(const Equals& equals) { m_equals = equals; }

        const Hasher& get_hasher() const { return m_hasher; }
        Hasher& get_hasher() { return m_hasher; }

        void set_hasher(const Hasher& hasher) { m_hasher = hasher; }

        inline void clear()
        {
            if (!m_values.empty())
            {
                if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))
                {
                    node* p = &get_node(0);
                    node* p_end = p + m_values.size();

                    uint32_t num_remaining = m_num_valid;
                    while (p != p_end)
                    {
                        if (p->state)
                        {
                            destruct_value_type(p);
                            num_remaining--;
                            if (!num_remaining)
                                break;
                        }

                        p++;
                    }
                }

                m_values.clear_no_destruction();

                m_hash_shift = 32;
                m_num_valid = 0;
                m_grow_threshold = 0;
            }
        }
        inline void reset()
        {
            if (!m_num_valid)
                return;

            if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))
            {
                node* p = &get_node(0);
                node* p_end = p + m_values.size();

                uint32_t num_remaining = m_num_valid;
                while (p != p_end)
                {
                    if (p->state)
                    {
                        destruct_value_type(p);
                        p->state = cStateInvalid;

                        num_remaining--;
                        if (!num_remaining)
                            break;
                    }

                    p++;
                }
            }
            else if (sizeof(node) <= 32)
            {
                memset(&m_values[0], 0, m_values.size_in_bytes());
            }
            else
            {
                node* p = &get_node(0);
                node* p_end = p + m_values.size();

                uint32_t num_remaining = m_num_valid;
                while (p != p_end)
                {
                    if (p->state)
                    {
                        p->state = cStateInvalid;

                        num_remaining--;
                        if (!num_remaining)
                            break;
                    }

                    p++;
                }
            }

            m_num_valid = 0;
        }

        inline uint32_t size()
        {
            return m_num_valid;
        }

        inline uint32_t get_table_size()
        {
            return m_values.size();
        }

        inline bool empty()
        {
            return !m_num_valid;
        }

        inline void reserve(uint32_t new_capacity)
        {
            uint64_t new_hash_size = helpers::maximum(1U, new_capacity);

            new_hash_size = new_hash_size * 2ULL;

            if (!helpers::is_power_of_2(new_hash_size))
                new_hash_size = helpers::next_pow2(new_hash_size);

            new_hash_size = helpers::maximum<uint64_t>(cMinHashSize, new_hash_size);
            new_hash_size = helpers::minimum<uint64_t>(0x80000000UL, new_hash_size);

            if (new_hash_size > m_values.size())
                rehash((uint32_t)new_hash_size);
        }
        class iterator
        {
            friend class hash_map<Key, Value, Hasher, Equals>;
            friend class hash_map<Key, Value, Hasher, Equals>::const_iterator;

        public:
            inline iterator() : m_pTable(NULL), m_index(0) { }
            inline iterator(hash_map_type& table, uint32_t index) : m_pTable(&table), m_index(index) { }
            inline iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }

            inline iterator& operator= (const iterator& other)
            {
                m_pTable = other.m_pTable;
                m_index = other.m_index;
                return *this;
            }

            // post-increment
            inline iterator operator++(int)
            {
                iterator result(*this);
                ++*this;
                return result;
            }

            // pre-increment
            inline iterator& operator++()
            {
                probe();
                return *this;
            }

            inline value_type& operator*() const { return *get_cur(); }
            inline value_type* operator->() const { return get_cur(); }

            inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
            inline bool operator != (const iterator& b) const { return !(*this == b); }
            inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
            inline bool operator != (const const_iterator& b) const { return !(*this == b); }

        private:
            hash_map_type* m_pTable;
            uint32_t m_index;

            inline value_type* get_cur() const
            {
                assert(m_pTable && (m_index < m_pTable->m_values.size()));
                assert(m_pTable->get_node_state(m_index) == cStateValid);

                return &m_pTable->get_node(m_index);
            }

            inline void probe()
            {
                assert(m_pTable);
                m_index = m_pTable->find_next(m_index);
            }
        };
        class const_iterator
        {
            friend class hash_map<Key, Value, Hasher, Equals>;
            friend class hash_map<Key, Value, Hasher, Equals>::iterator;

        public:
            inline const_iterator() : m_pTable(NULL), m_index(0) { }
            inline const_iterator(const hash_map_type& table, uint32_t index) : m_pTable(&table), m_index(index) { }
            inline const_iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }
            inline const_iterator(const const_iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }

            inline const_iterator& operator= (const const_iterator& other)
            {
                m_pTable = other.m_pTable;
                m_index = other.m_index;
                return *this;
            }

            inline const_iterator& operator= (const iterator& other)
            {
                m_pTable = other.m_pTable;
                m_index = other.m_index;
                return *this;
            }

            // post-increment
            inline const_iterator operator++(int)
            {
                const_iterator result(*this);
                ++*this;
                return result;
            }

            // pre-increment
            inline const_iterator& operator++()
            {
                probe();
                return *this;
            }

            inline const value_type& operator*() const { return *get_cur(); }
            inline const value_type* operator->() const { return get_cur(); }

            inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
            inline bool operator != (const const_iterator& b) const { return !(*this == b); }
            inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
            inline bool operator != (const iterator& b) const { return !(*this == b); }

        private:
            const hash_map_type* m_pTable;
            uint32_t m_index;

            inline const value_type* get_cur() const
            {
                assert(m_pTable && (m_index < m_pTable->m_values.size()));
                assert(m_pTable->get_node_state(m_index) == cStateValid);

                return &m_pTable->get_node(m_index);
            }

            inline void probe()
            {
                assert(m_pTable);
                m_index = m_pTable->find_next(m_index);
            }
        };
        inline const_iterator begin() const
        {
            if (!m_num_valid)
                return end();

            return const_iterator(*this, find_next(UINT32_MAX));
        }

        inline const_iterator end() const
        {
            return const_iterator(*this, m_values.size());
        }

        inline iterator begin()
        {
            if (!m_num_valid)
                return end();

            return iterator(*this, find_next(UINT32_MAX));
        }

        inline iterator end()
        {
            return iterator(*this, m_values.size());
        }
        // insert_result.first will always point to the inserted key/value (or the already existing key/value).
        // insert_result.second will be true if a new key/value was inserted, or false if the key already existed (in which case first will point to the already existing value).
        typedef std::pair<iterator, bool> insert_result;

        inline insert_result insert(const Key& k, const Value& v = Value())
        {
            insert_result result;
            if (!insert_no_grow(result, k, v))
            {
                grow();

                // This must succeed.
                if (!insert_no_grow(result, k, v))
                {
                    fprintf(stderr, "insert() failed\n");
                    abort();
                }
            }

            return result;
        }

        inline insert_result insert(const value_type& v)
        {
            return insert(v.first, v.second);
        }
        inline const_iterator find(const Key& k) const
        {
            return const_iterator(*this, find_index(k));
        }

        inline iterator find(const Key& k)
        {
            return iterator(*this, find_index(k));
        }
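
        // erase() below uses backward-shift deletion instead of tombstones: after
        // destructing the erased entry, it continues along the probe sequence,
        // relocating any displaced entry whose home slot ("r") falls circularly
        // within the vacated gap so every remaining key stays reachable from its
        // hash index, and stops when it reaches an empty slot.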
        inline bool erase(const Key& k)
        {
            uint32_t i = find_index(k);

            if (i >= m_values.size())
                return false;

            node* pDst = &get_node(i);
            destruct_value_type(pDst);
            pDst->state = cStateInvalid;

            m_num_valid--;

            for (; ; )
            {
                uint32_t r, j = i;

                node* pSrc = pDst;

                do
                {
                    if (!i)
                    {
                        i = m_values.size() - 1;
                        pSrc = &get_node(i);
                    }
                    else
                    {
                        i--;
                        pSrc--;
                    }

                    if (!pSrc->state)
                        return true;

                    r = hash_key(pSrc->first);

                } while ((i <= r && r < j) || (r < j && j < i) || (j < i && i <= r));

                move_node(pDst, pSrc);

                pDst = pSrc;
            }
        }
        inline void swap(hash_map_type& other)
        {
            m_values.swap(other.m_values);
            std::swap(m_hash_shift, other.m_hash_shift);
            std::swap(m_num_valid, other.m_num_valid);
            std::swap(m_grow_threshold, other.m_grow_threshold);
            std::swap(m_hasher, other.m_hasher);
            std::swap(m_equals, other.m_equals);
        }

    private:
        struct node : public value_type
        {
            uint8_t state;
        };
        static inline void construct_value_type(value_type* pDst, const Key& k, const Value& v)
        {
            if (BASISU_IS_BITWISE_COPYABLE(Key))
                memcpy(&pDst->first, &k, sizeof(Key));
            else
                scalar_type<Key>::construct(&pDst->first, k);

            if (BASISU_IS_BITWISE_COPYABLE(Value))
                memcpy(&pDst->second, &v, sizeof(Value));
            else
                scalar_type<Value>::construct(&pDst->second, v);
        }

        static inline void construct_value_type(value_type* pDst, const value_type* pSrc)
        {
            if ((BASISU_IS_BITWISE_COPYABLE(Key)) && (BASISU_IS_BITWISE_COPYABLE(Value)))
            {
                memcpy(pDst, pSrc, sizeof(value_type));
            }
            else
            {
                if (BASISU_IS_BITWISE_COPYABLE(Key))
                    memcpy(&pDst->first, &pSrc->first, sizeof(Key));
                else
                    scalar_type<Key>::construct(&pDst->first, pSrc->first);

                if (BASISU_IS_BITWISE_COPYABLE(Value))
                    memcpy(&pDst->second, &pSrc->second, sizeof(Value));
                else
                    scalar_type<Value>::construct(&pDst->second, pSrc->second);
            }
        }

        static inline void destruct_value_type(value_type* p)
        {
            scalar_type<Key>::destruct(&p->first);
            scalar_type<Value>::destruct(&p->second);
        }

        // Moves *pSrc to *pDst efficiently.
        // pDst should NOT be constructed on entry.
        static inline void move_node(node* pDst, node* pSrc, bool update_src_state = true)
        {
            assert(!pDst->state);

            if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key) && BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))
            {
                memcpy(pDst, pSrc, sizeof(node));
            }
            else
            {
                if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key))
                    memcpy(&pDst->first, &pSrc->first, sizeof(Key));
                else
                {
                    scalar_type<Key>::construct(&pDst->first, pSrc->first);
                    scalar_type<Key>::destruct(&pSrc->first);
                }

                if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))
                    memcpy(&pDst->second, &pSrc->second, sizeof(Value));
                else
                {
                    scalar_type<Value>::construct(&pDst->second, pSrc->second);
                    scalar_type<Value>::destruct(&pSrc->second);
                }

                pDst->state = cStateValid;
            }

            if (update_src_state)
                pSrc->state = cStateInvalid;
        }
        struct raw_node
        {
            inline raw_node()
            {
                node* p = reinterpret_cast<node*>(this);
                p->state = cStateInvalid;
            }

            inline ~raw_node()
            {
                node* p = reinterpret_cast<node*>(this);
                if (p->state)
                    hash_map_type::destruct_value_type(p);
            }

            inline raw_node(const raw_node& other)
            {
                node* pDst = reinterpret_cast<node*>(this);
                const node* pSrc = reinterpret_cast<const node*>(&other);

                if (pSrc->state)
                {
                    hash_map_type::construct_value_type(pDst, pSrc);
                    pDst->state = cStateValid;
                }
                else
                    pDst->state = cStateInvalid;
            }

            inline raw_node& operator= (const raw_node& rhs)
            {
                if (this == &rhs)
                    return *this;

                node* pDst = reinterpret_cast<node*>(this);
                const node* pSrc = reinterpret_cast<const node*>(&rhs);

                if (pSrc->state)
                {
                    if (pDst->state)
                    {
                        pDst->first = pSrc->first;
                        pDst->second = pSrc->second;
                    }
                    else
                    {
                        hash_map_type::construct_value_type(pDst, pSrc);
                        pDst->state = cStateValid;
                    }
                }
                else if (pDst->state)
                {
                    hash_map_type::destruct_value_type(pDst);
                    pDst->state = cStateInvalid;
                }

                return *this;
            }

            uint8_t m_bits[sizeof(node)];
        };

        typedef basisu::vector<raw_node> node_vector;

        node_vector m_values;
        uint32_t m_hash_shift;

        Hasher m_hasher;
        Equals m_equals;

        uint32_t m_num_valid;
        uint32_t m_grow_threshold;
        inline uint32_t hash_key(const Key& k) const
        {
            assert((1U << (32U - m_hash_shift)) == m_values.size());

            uint32_t hash = static_cast<uint32_t>(m_hasher(k));

            // Fibonacci hashing
            hash = (2654435769U * hash) >> m_hash_shift;

            assert(hash < m_values.size());
            return hash;
        }
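
        // 2654435769 above is round(2^32 / phi), Knuth's multiplicative hashing
        // constant (phi = the golden ratio). Multiplying by it scrambles the low
        // bits of the hash into the high bits, and the right shift by m_hash_shift
        // keeps just enough high bits to index the power-of-2 table.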
        inline const node& get_node(uint32_t index) const
        {
            return *reinterpret_cast<const node*>(&m_values[index]);
        }

        inline node& get_node(uint32_t index)
        {
            return *reinterpret_cast<node*>(&m_values[index]);
        }

        inline state get_node_state(uint32_t index) const
        {
            return static_cast<state>(get_node(index).state);
        }

        inline void set_node_state(uint32_t index, bool valid)
        {
            get_node(index).state = valid;
        }

        inline void grow()
        {
            uint64_t n = m_values.size() * 3ULL; // was * 2

            if (!helpers::is_power_of_2(n))
                n = helpers::next_pow2(n);

            if (n > 0x80000000UL)
                n = 0x80000000UL;

            rehash(helpers::maximum<uint32_t>(cMinHashSize, (uint32_t)n));
        }

        inline void rehash(uint32_t new_hash_size)
        {
            assert(new_hash_size >= m_num_valid);
            assert(helpers::is_power_of_2(new_hash_size));

            if ((new_hash_size < m_num_valid) || (new_hash_size == m_values.size()))
                return;

            hash_map new_map;
            new_map.m_values.resize(new_hash_size);
            new_map.m_hash_shift = 32U - helpers::floor_log2i(new_hash_size);
            assert(new_hash_size == (1U << (32U - new_map.m_hash_shift)));
            new_map.m_grow_threshold = UINT_MAX;

            node* pNode = reinterpret_cast<node*>(m_values.begin());
            node* pNode_end = pNode + m_values.size();

            while (pNode != pNode_end)
            {
                if (pNode->state)
                {
                    new_map.move_into(pNode);

                    if (new_map.m_num_valid == m_num_valid)
                        break;
                }

                pNode++;
            }

            new_map.m_grow_threshold = (new_hash_size + 1U) >> 1U;

            m_values.clear_no_destruction();
            m_hash_shift = 32;

            swap(new_map);
        }
        inline uint32_t find_next(uint32_t index) const
        {
            index++;

            if (index >= m_values.size())
                return index;

            const node* pNode = &get_node(index);

            for (; ; )
            {
                if (pNode->state)
                    break;

                if (++index >= m_values.size())
                    break;

                pNode++;
            }

            return index;
        }

        inline uint32_t find_index(const Key& k) const
        {
            if (m_num_valid)
            {
                uint32_t index = hash_key(k);
                const node* pNode = &get_node(index);

                if (pNode->state)
                {
                    if (m_equals(pNode->first, k))
                        return index;

                    const uint32_t orig_index = index;

                    for (; ; )
                    {
                        if (!index)
                        {
                            index = m_values.size() - 1;
                            pNode = &get_node(index);
                        }
                        else
                        {
                            index--;
                            pNode--;
                        }

                        if (index == orig_index)
                            break;

                        if (!pNode->state)
                            break;

                        if (m_equals(pNode->first, k))
                            return index;
                    }
                }
            }

            return m_values.size();
        }
        inline bool insert_no_grow(insert_result& result, const Key& k, const Value& v = Value())
        {
            if (!m_values.size())
                return false;

            uint32_t index = hash_key(k);
            node* pNode = &get_node(index);

            if (pNode->state)
            {
                if (m_equals(pNode->first, k))
                {
                    result.first = iterator(*this, index);
                    result.second = false;
                    return true;
                }

                const uint32_t orig_index = index;

                for (; ; )
                {
                    if (!index)
                    {
                        index = m_values.size() - 1;
                        pNode = &get_node(index);
                    }
                    else
                    {
                        index--;
                        pNode--;
                    }

                    if (orig_index == index)
                        return false;

                    if (!pNode->state)
                        break;

                    if (m_equals(pNode->first, k))
                    {
                        result.first = iterator(*this, index);
                        result.second = false;
                        return true;
                    }
                }
            }

            if (m_num_valid >= m_grow_threshold)
                return false;

            construct_value_type(pNode, k, v);

            pNode->state = cStateValid;

            m_num_valid++;
            assert(m_num_valid <= m_values.size());

            result.first = iterator(*this, index);
            result.second = true;

            return true;
        }
        inline void move_into(node* pNode)
        {
            uint32_t index = hash_key(pNode->first);
            node* pDst_node = &get_node(index);

            if (pDst_node->state)
            {
                const uint32_t orig_index = index;

                for (; ; )
                {
                    if (!index)
                    {
                        index = m_values.size() - 1;
                        pDst_node = &get_node(index);
                    }
                    else
                    {
                        index--;
                        pDst_node--;
                    }

                    if (index == orig_index)
                    {
                        assert(false);
                        return;
                    }

                    if (!pDst_node->state)
                        break;
                }
            }

            move_node(pDst_node, pNode, false);

            m_num_valid++;
        }
    };
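
    // Illustrative usage of basisu::hash_map (a sketch, not part of the original header):
    //
    //   basisu::hash_map<uint32_t, uint32_t> counts;
    //   counts.reserve(64);
    //   auto res = counts.insert(17, 1);   // insert_result: (iterator, was_inserted)
    //   if (!res.second)
    //       res.first->second++;           // key already existed; bump its value
    //   auto it = counts.find(17);
    //   if (it != counts.end())
    //       printf("%u\n", it->second);
    //
    // Keys that aren't convertible to size_t also need a matching hasher<>
    // specialization, since the default hasher just casts the key to size_t.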
    template<typename Key, typename Value, typename Hasher, typename Equals>
    struct bitwise_movable< hash_map<Key, Value, Hasher, Equals> > { enum { cFlag = true }; };

#if BASISU_HASHMAP_TEST
    extern void hash_map_test();
#endif

} // namespace basisu

namespace std
{
    template<typename T>
    inline void swap(basisu::vector<T>& a, basisu::vector<T>& b)
    {
        a.swap(b);
    }

    template<typename Key, typename Value, typename Hasher, typename Equals>
    inline void swap(basisu::hash_map<Key, Value, Hasher, Equals>& a, basisu::hash_map<Key, Value, Hasher, Equals>& b)
    {
        a.swap(b);
    }

} // namespace std