/*
 * bitmap.h — internal bitmap with optional multi-level summary tree
 * (jemalloc-style; storage is inverted: a stored 1 bit marks a logically
 * unset bit, so "find first unset" reduces to ffs on a word).
 */
#ifndef JEMALLOC_INTERNAL_BITMAP_H
#define JEMALLOC_INTERNAL_BITMAP_H

#include "jemalloc/internal/bit_util.h"
#include "jemalloc/internal/sc.h"

/*
 * A bitmap group is one machine word.  Storage is inverted relative to the
 * logical value: a stored 1 bit means the logical bit is unset (see
 * bitmap_get()), which lets "find first unset" use ffs_lu() directly.
 */
typedef unsigned long bitmap_t;
#define LG_SIZEOF_BITMAP LG_SIZEOF_LONG

/* Maximum bitmap bit count is 2^LG_BITMAP_MAXBITS. */
#if SC_LG_SLAB_MAXREGS > LG_CEIL(SC_NSIZES)
/* Maximum bitmap bit count is determined by maximum regions per slab. */
#  define LG_BITMAP_MAXBITS SC_LG_SLAB_MAXREGS
#else
/* Maximum bitmap bit count is determined by number of extent size classes. */
#  define LG_BITMAP_MAXBITS LG_CEIL(SC_NSIZES)
#endif
#define BITMAP_MAXBITS (ZU(1) << LG_BITMAP_MAXBITS)

/* Number of bits per group. */
#define LG_BITMAP_GROUP_NBITS (LG_SIZEOF_BITMAP + 3)
#define BITMAP_GROUP_NBITS (1U << LG_BITMAP_GROUP_NBITS)
#define BITMAP_GROUP_NBITS_MASK (BITMAP_GROUP_NBITS-1)

/*
 * Do some analysis on how big the bitmap is before we use a tree.  For a brute
 * force linear search, if we would have to call ffs_lu() more than 2^3 times,
 * use a tree instead.
 */
#if LG_BITMAP_MAXBITS - LG_BITMAP_GROUP_NBITS > 3
#  define BITMAP_USE_TREE
#endif

/* Number of groups required to store a given number of bits (ceil divide). */
#define BITMAP_BITS2GROUPS(nbits)					\
    (((nbits) + BITMAP_GROUP_NBITS_MASK) >> LG_BITMAP_GROUP_NBITS)

/*
 * Number of groups required at a particular level for a given number of bits.
 * Each level up summarizes one bit per group of the level below, hence the
 * repeated application of BITMAP_BITS2GROUPS().
 */
#define BITMAP_GROUPS_L0(nbits)						\
    BITMAP_BITS2GROUPS(nbits)
#define BITMAP_GROUPS_L1(nbits)						\
    BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(nbits))
#define BITMAP_GROUPS_L2(nbits)						\
    BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS((nbits))))
#define BITMAP_GROUPS_L3(nbits)						\
    BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(		\
	BITMAP_BITS2GROUPS((nbits)))))
#define BITMAP_GROUPS_L4(nbits)						\
    BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(		\
	BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS((nbits))))))

/*
 * Assuming the number of levels, number of groups required for a given number
 * of bits.
 */
#define BITMAP_GROUPS_1_LEVEL(nbits)					\
    BITMAP_GROUPS_L0(nbits)
#define BITMAP_GROUPS_2_LEVEL(nbits)					\
    (BITMAP_GROUPS_1_LEVEL(nbits) + BITMAP_GROUPS_L1(nbits))
#define BITMAP_GROUPS_3_LEVEL(nbits)					\
    (BITMAP_GROUPS_2_LEVEL(nbits) + BITMAP_GROUPS_L2(nbits))
#define BITMAP_GROUPS_4_LEVEL(nbits)					\
    (BITMAP_GROUPS_3_LEVEL(nbits) + BITMAP_GROUPS_L3(nbits))
#define BITMAP_GROUPS_5_LEVEL(nbits)					\
    (BITMAP_GROUPS_4_LEVEL(nbits) + BITMAP_GROUPS_L4(nbits))

/*
 * Maximum number of groups required to support LG_BITMAP_MAXBITS.
 */
#ifdef BITMAP_USE_TREE

#if LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS
#  define BITMAP_GROUPS(nbits) BITMAP_GROUPS_1_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX BITMAP_GROUPS_1_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 2
#  define BITMAP_GROUPS(nbits) BITMAP_GROUPS_2_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX BITMAP_GROUPS_2_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 3
#  define BITMAP_GROUPS(nbits) BITMAP_GROUPS_3_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX BITMAP_GROUPS_3_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 4
#  define BITMAP_GROUPS(nbits) BITMAP_GROUPS_4_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX BITMAP_GROUPS_4_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 5
#  define BITMAP_GROUPS(nbits) BITMAP_GROUPS_5_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX BITMAP_GROUPS_5_LEVEL(BITMAP_MAXBITS)
#else
#  error "Unsupported bitmap size"
#endif

/*
 * Maximum number of levels possible.  This could be statically computed based
 * on LG_BITMAP_MAXBITS:
 *
 *   #define BITMAP_MAX_LEVELS \
 *       (LG_BITMAP_MAXBITS / LG_SIZEOF_BITMAP) \
 *       + !!(LG_BITMAP_MAXBITS % LG_SIZEOF_BITMAP)
 *
 * However, that would not allow the generic BITMAP_INFO_INITIALIZER() macro, so
 * instead hardcode BITMAP_MAX_LEVELS to the largest number supported by the
 * various cascading macros.  The only additional cost this incurs is some
 * unused trailing entries in bitmap_info_t structures; the bitmaps themselves
 * are not impacted.
 */
#define BITMAP_MAX_LEVELS 5

/*
 * Static initializer for a bitmap_info_t describing an nbits-bit bitmap.
 * nlevels counts only the levels that actually shrink the group count (each
 * `>` comparison contributes 1 while the cascade is still shrinking); the
 * levels array holds cumulative group offsets, bottom level first.
 */
#define BITMAP_INFO_INITIALIZER(nbits) {				\
	/* nbits. */							\
	nbits,								\
	/* nlevels. */							\
	(BITMAP_GROUPS_L0(nbits) > BITMAP_GROUPS_L1(nbits)) +		\
	    (BITMAP_GROUPS_L1(nbits) > BITMAP_GROUPS_L2(nbits)) +	\
	    (BITMAP_GROUPS_L2(nbits) > BITMAP_GROUPS_L3(nbits)) +	\
	    (BITMAP_GROUPS_L3(nbits) > BITMAP_GROUPS_L4(nbits)) + 1,	\
	/* levels. */							\
	{								\
		{0},							\
		{BITMAP_GROUPS_L0(nbits)},				\
		{BITMAP_GROUPS_L1(nbits) + BITMAP_GROUPS_L0(nbits)},	\
		{BITMAP_GROUPS_L2(nbits) + BITMAP_GROUPS_L1(nbits) +	\
		    BITMAP_GROUPS_L0(nbits)},				\
		{BITMAP_GROUPS_L3(nbits) + BITMAP_GROUPS_L2(nbits) +	\
		    BITMAP_GROUPS_L1(nbits) + BITMAP_GROUPS_L0(nbits)},	\
		{BITMAP_GROUPS_L4(nbits) + BITMAP_GROUPS_L3(nbits) +	\
		    BITMAP_GROUPS_L2(nbits) + BITMAP_GROUPS_L1(nbits)	\
		    + BITMAP_GROUPS_L0(nbits)}				\
	}								\
}

#else /* BITMAP_USE_TREE */

#define BITMAP_GROUPS(nbits) BITMAP_BITS2GROUPS(nbits)
#define BITMAP_GROUPS_MAX BITMAP_BITS2GROUPS(BITMAP_MAXBITS)

/* Flat (non-tree) variant: just nbits plus the flat group count. */
#define BITMAP_INFO_INITIALIZER(nbits) {				\
	/* nbits. */							\
	nbits,								\
	/* ngroups. */							\
	BITMAP_BITS2GROUPS(nbits)					\
}

#endif /* BITMAP_USE_TREE */
/* Per-level bookkeeping for the tree representation. */
typedef struct bitmap_level_s {
	/* Offset of this level's groups within the array of groups. */
	size_t group_offset;
} bitmap_level_t;
/* Immutable description of one bitmap's geometry (see BITMAP_INFO_INITIALIZER). */
typedef struct bitmap_info_s {
	/* Logical number of bits in bitmap (stored at bottom level). */
	size_t nbits;

#ifdef BITMAP_USE_TREE
	/* Number of levels necessary for nbits. */
	unsigned nlevels;

	/*
	 * Only the first (nlevels+1) elements are used, and levels are ordered
	 * bottom to top (e.g. the bottom level is stored in levels[0]).
	 */
	bitmap_level_t levels[BITMAP_MAX_LEVELS+1];
#else /* BITMAP_USE_TREE */
	/* Number of groups necessary for nbits. */
	size_t ngroups;
#endif /* BITMAP_USE_TREE */
} bitmap_info_t;
/* Initialize binfo to describe an nbits-bit bitmap (implemented in bitmap.c). */
void bitmap_info_init(bitmap_info_t *binfo, size_t nbits);
/*
 * Initialize bitmap storage for the geometry in binfo.  NOTE(review): `fill`
 * presumably selects whether all bits start logically set — confirm against
 * the definition in bitmap.c.
 */
void bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo, bool fill);
/* Number of bytes of bitmap_t storage required for binfo. */
size_t bitmap_size(const bitmap_info_t *binfo);
  152. static inline bool
  153. bitmap_full(bitmap_t *bitmap, const bitmap_info_t *binfo) {
  154. #ifdef BITMAP_USE_TREE
  155. size_t rgoff = binfo->levels[binfo->nlevels].group_offset - 1;
  156. bitmap_t rg = bitmap[rgoff];
  157. /* The bitmap is full iff the root group is 0. */
  158. return (rg == 0);
  159. #else
  160. size_t i;
  161. for (i = 0; i < binfo->ngroups; i++) {
  162. if (bitmap[i] != 0) {
  163. return false;
  164. }
  165. }
  166. return true;
  167. #endif
  168. }
  169. static inline bool
  170. bitmap_get(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
  171. size_t goff;
  172. bitmap_t g;
  173. assert(bit < binfo->nbits);
  174. goff = bit >> LG_BITMAP_GROUP_NBITS;
  175. g = bitmap[goff];
  176. return !(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
  177. }
/*
 * Set logical bit `bit` (i.e. clear its inverted storage bit).  The bit must
 * currently be unset.  When BITMAP_USE_TREE is defined, a group that becomes
 * completely set (storage word == 0) clears its summary bit one level up,
 * repeating until a level's group is still nonzero.
 */
static inline void
bitmap_set(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
	size_t goff;
	bitmap_t *gp;
	bitmap_t g;

	assert(bit < binfo->nbits);
	assert(!bitmap_get(bitmap, binfo, bit));
	goff = bit >> LG_BITMAP_GROUP_NBITS;
	gp = &bitmap[goff];
	g = *gp;
	/* Storage bit must be 1 (logically unset) before we flip it to 0. */
	assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
	g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
	*gp = g;
	assert(bitmap_get(bitmap, binfo, bit));
#ifdef BITMAP_USE_TREE
	/* Propagate group state transitions up the tree. */
	if (g == 0) {
		unsigned i;
		for (i = 1; i < binfo->nlevels; i++) {
			/* The group index below becomes the bit index above. */
			bit = goff;
			goff = bit >> LG_BITMAP_GROUP_NBITS;
			gp = &bitmap[binfo->levels[i].group_offset + goff];
			g = *gp;
			assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
			g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
			*gp = g;
			/* Stop once this level's group is not yet full. */
			if (g != 0) {
				break;
			}
		}
	}
#endif
}
  211. /* ffu: find first unset >= bit. */
  212. static inline size_t
  213. bitmap_ffu(const bitmap_t *bitmap, const bitmap_info_t *binfo, size_t min_bit) {
  214. assert(min_bit < binfo->nbits);
  215. #ifdef BITMAP_USE_TREE
  216. size_t bit = 0;
  217. for (unsigned level = binfo->nlevels; level--;) {
  218. size_t lg_bits_per_group = (LG_BITMAP_GROUP_NBITS * (level +
  219. 1));
  220. bitmap_t group = bitmap[binfo->levels[level].group_offset + (bit
  221. >> lg_bits_per_group)];
  222. unsigned group_nmask = (unsigned)(((min_bit > bit) ? (min_bit -
  223. bit) : 0) >> (lg_bits_per_group - LG_BITMAP_GROUP_NBITS));
  224. assert(group_nmask <= BITMAP_GROUP_NBITS);
  225. bitmap_t group_mask = ~((1LU << group_nmask) - 1);
  226. bitmap_t group_masked = group & group_mask;
  227. if (group_masked == 0LU) {
  228. if (group == 0LU) {
  229. return binfo->nbits;
  230. }
  231. /*
  232. * min_bit was preceded by one or more unset bits in
  233. * this group, but there are no other unset bits in this
  234. * group. Try again starting at the first bit of the
  235. * next sibling. This will recurse at most once per
  236. * non-root level.
  237. */
  238. size_t sib_base = bit + (ZU(1) << lg_bits_per_group);
  239. assert(sib_base > min_bit);
  240. assert(sib_base > bit);
  241. if (sib_base >= binfo->nbits) {
  242. return binfo->nbits;
  243. }
  244. return bitmap_ffu(bitmap, binfo, sib_base);
  245. }
  246. bit += ((size_t)ffs_lu(group_masked)) <<
  247. (lg_bits_per_group - LG_BITMAP_GROUP_NBITS);
  248. }
  249. assert(bit >= min_bit);
  250. assert(bit < binfo->nbits);
  251. return bit;
  252. #else
  253. size_t i = min_bit >> LG_BITMAP_GROUP_NBITS;
  254. bitmap_t g = bitmap[i] & ~((1LU << (min_bit & BITMAP_GROUP_NBITS_MASK))
  255. - 1);
  256. size_t bit;
  257. do {
  258. if (g != 0) {
  259. bit = ffs_lu(g);
  260. return (i << LG_BITMAP_GROUP_NBITS) + bit;
  261. }
  262. i++;
  263. g = bitmap[i];
  264. } while (i < binfo->ngroups);
  265. return binfo->nbits;
  266. #endif
  267. }
/* sfu: set first unset. */
/*
 * Find the lowest logically-unset bit, set it, and return its index.  The
 * caller must guarantee at least one unset bit remains (asserted below).
 */
static inline size_t
bitmap_sfu(bitmap_t *bitmap, const bitmap_info_t *binfo) {
	size_t bit;
	bitmap_t g;
	unsigned i;

	assert(!bitmap_full(bitmap, binfo));

#ifdef BITMAP_USE_TREE
	/*
	 * Descend from the root, following the lowest stored 1 bit at each
	 * level; summary bits guarantee a nonzero group on the way down.
	 */
	i = binfo->nlevels - 1;
	g = bitmap[binfo->levels[i].group_offset];
	bit = ffs_lu(g);
	while (i > 0) {
		i--;
		g = bitmap[binfo->levels[i].group_offset + bit];
		bit = (bit << LG_BITMAP_GROUP_NBITS) + ffs_lu(g);
	}
#else
	/*
	 * Linear scan; terminates because the bitmap_full() assertion above
	 * implies some group is nonzero.
	 */
	i = 0;
	g = bitmap[0];
	while (g == 0) {
		i++;
		g = bitmap[i];
	}
	bit = (i << LG_BITMAP_GROUP_NBITS) + ffs_lu(g);
#endif
	bitmap_set(bitmap, binfo, bit);
	return bit;
}
/*
 * Unset logical bit `bit` (i.e. set its inverted storage bit).  The bit must
 * currently be set.  When BITMAP_USE_TREE is defined, a group that was full
 * (storage word == 0) regains a summary bit one level up, repeating for every
 * ancestor that was also full.
 */
static inline void
bitmap_unset(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
	size_t goff;
	bitmap_t *gp;
	bitmap_t g;
	UNUSED bool propagate;

	assert(bit < binfo->nbits);
	assert(bitmap_get(bitmap, binfo, bit));
	goff = bit >> LG_BITMAP_GROUP_NBITS;
	gp = &bitmap[goff];
	g = *gp;
	/* Group was full iff its storage word was all zeros before the flip. */
	propagate = (g == 0);
	assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK))) == 0);
	g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
	*gp = g;
	assert(!bitmap_get(bitmap, binfo, bit));
#ifdef BITMAP_USE_TREE
	/* Propagate group state transitions up the tree. */
	if (propagate) {
		unsigned i;
		for (i = 1; i < binfo->nlevels; i++) {
			/* The group index below becomes the bit index above. */
			bit = goff;
			goff = bit >> LG_BITMAP_GROUP_NBITS;
			gp = &bitmap[binfo->levels[i].group_offset + goff];
			g = *gp;
			propagate = (g == 0);
			assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)))
			    == 0);
			g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
			*gp = g;
			/* Ancestor was not full; no further propagation. */
			if (!propagate) {
				break;
			}
		}
	}
#endif /* BITMAP_USE_TREE */
}
  333. #endif /* JEMALLOC_INTERNAL_BITMAP_H */