#ifndef JEMALLOC_INTERNAL_BITMAP_H
#define JEMALLOC_INTERNAL_BITMAP_H

#include "jemalloc/internal/arena_types.h"
#include "jemalloc/internal/bit_util.h"
#include "jemalloc/internal/sc.h"

typedef unsigned long bitmap_t;
#define LG_SIZEOF_BITMAP	LG_SIZEOF_LONG

/* Maximum bitmap bit count is 2^LG_BITMAP_MAXBITS. */
#if LG_SLAB_MAXREGS > LG_CEIL(SC_NSIZES)
/* Maximum bitmap bit count is determined by maximum regions per slab. */
#  define LG_BITMAP_MAXBITS	LG_SLAB_MAXREGS
#else
/* Maximum bitmap bit count is determined by number of extent size classes. */
#  define LG_BITMAP_MAXBITS	LG_CEIL(SC_NSIZES)
#endif
#define BITMAP_MAXBITS		(ZU(1) << LG_BITMAP_MAXBITS)

/* Number of bits per group. */
#define LG_BITMAP_GROUP_NBITS		(LG_SIZEOF_BITMAP + 3)
#define BITMAP_GROUP_NBITS		(1U << LG_BITMAP_GROUP_NBITS)
#define BITMAP_GROUP_NBITS_MASK		(BITMAP_GROUP_NBITS-1)

/*
 * Do some analysis on how big the bitmap is before we use a tree.  For a brute
 * force linear search, if we would have to call ffs_lu() more than 2^3 times,
 * use a tree instead.
 */
#if LG_BITMAP_MAXBITS - LG_BITMAP_GROUP_NBITS > 3
#  define BITMAP_USE_TREE
#endif
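
/*
 * Worked example (illustrative only, assuming an LP64 build where
 * LG_SIZEOF_BITMAP == 3): each group then holds 64 bits, so
 * LG_BITMAP_GROUP_NBITS == 6 and a brute force scan needs at most
 * 2^(LG_BITMAP_MAXBITS - 6) ffs_lu() calls.  The tree is therefore enabled
 * once LG_BITMAP_MAXBITS exceeds 9, i.e. once a bitmap can span more than
 * 512 bits (8 groups).
 */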

/* Number of groups required to store a given number of bits. */
#define BITMAP_BITS2GROUPS(nbits)					\
    (((nbits) + BITMAP_GROUP_NBITS_MASK) >> LG_BITMAP_GROUP_NBITS)

/*
 * Number of groups required at a particular level for a given number of bits.
 */
#define BITMAP_GROUPS_L0(nbits)						\
    BITMAP_BITS2GROUPS(nbits)
#define BITMAP_GROUPS_L1(nbits)						\
    BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(nbits))
#define BITMAP_GROUPS_L2(nbits)						\
    BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS((nbits))))
#define BITMAP_GROUPS_L3(nbits)						\
    BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(		\
	BITMAP_BITS2GROUPS((nbits)))))
#define BITMAP_GROUPS_L4(nbits)						\
    BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS(		\
	BITMAP_BITS2GROUPS(BITMAP_BITS2GROUPS((nbits))))))

/*
 * Number of groups required for a given number of bits, assuming a particular
 * number of levels.
 */
#define BITMAP_GROUPS_1_LEVEL(nbits)					\
    BITMAP_GROUPS_L0(nbits)
#define BITMAP_GROUPS_2_LEVEL(nbits)					\
    (BITMAP_GROUPS_1_LEVEL(nbits) + BITMAP_GROUPS_L1(nbits))
#define BITMAP_GROUPS_3_LEVEL(nbits)					\
    (BITMAP_GROUPS_2_LEVEL(nbits) + BITMAP_GROUPS_L2(nbits))
#define BITMAP_GROUPS_4_LEVEL(nbits)					\
    (BITMAP_GROUPS_3_LEVEL(nbits) + BITMAP_GROUPS_L3(nbits))
#define BITMAP_GROUPS_5_LEVEL(nbits)					\
    (BITMAP_GROUPS_4_LEVEL(nbits) + BITMAP_GROUPS_L4(nbits))
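
/*
 * Worked example (illustrative only, assuming 64-bit groups): for
 * nbits == 2048, BITMAP_GROUPS_L0 is 32 (2048/64), BITMAP_GROUPS_L1 is 1
 * (32 bits round up to one group), and every higher level also collapses to
 * a single group, so BITMAP_GROUPS_2_LEVEL(2048) is 33 groups in total.
 */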

/*
 * Maximum number of groups required to support LG_BITMAP_MAXBITS.
 */
#ifdef BITMAP_USE_TREE

#if LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS
#  define BITMAP_GROUPS(nbits)	BITMAP_GROUPS_1_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX	BITMAP_GROUPS_1_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 2
#  define BITMAP_GROUPS(nbits)	BITMAP_GROUPS_2_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX	BITMAP_GROUPS_2_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 3
#  define BITMAP_GROUPS(nbits)	BITMAP_GROUPS_3_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX	BITMAP_GROUPS_3_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 4
#  define BITMAP_GROUPS(nbits)	BITMAP_GROUPS_4_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX	BITMAP_GROUPS_4_LEVEL(BITMAP_MAXBITS)
#elif LG_BITMAP_MAXBITS <= LG_BITMAP_GROUP_NBITS * 5
#  define BITMAP_GROUPS(nbits)	BITMAP_GROUPS_5_LEVEL(nbits)
#  define BITMAP_GROUPS_MAX	BITMAP_GROUPS_5_LEVEL(BITMAP_MAXBITS)
#else
#  error "Unsupported bitmap size"
#endif

/*
 * Maximum number of levels possible.  This could be statically computed based
 * on LG_BITMAP_MAXBITS:
 *
 * #define BITMAP_MAX_LEVELS \
 *     (LG_BITMAP_MAXBITS / LG_SIZEOF_BITMAP) \
 *     + !!(LG_BITMAP_MAXBITS % LG_SIZEOF_BITMAP)
 *
 * However, that would not allow the generic BITMAP_INFO_INITIALIZER() macro, so
 * instead hardcode BITMAP_MAX_LEVELS to the largest number supported by the
 * various cascading macros.  The only additional cost this incurs is some
 * unused trailing entries in bitmap_info_t structures; the bitmaps themselves
 * are not impacted.
 */
#define BITMAP_MAX_LEVELS	5

#define BITMAP_INFO_INITIALIZER(nbits) {				\
	/* nbits. */							\
	nbits,								\
	/* nlevels. */							\
	(BITMAP_GROUPS_L0(nbits) > BITMAP_GROUPS_L1(nbits)) +		\
	    (BITMAP_GROUPS_L1(nbits) > BITMAP_GROUPS_L2(nbits)) +	\
	    (BITMAP_GROUPS_L2(nbits) > BITMAP_GROUPS_L3(nbits)) +	\
	    (BITMAP_GROUPS_L3(nbits) > BITMAP_GROUPS_L4(nbits)) + 1,	\
	/* levels. */							\
	{								\
		{0},							\
		{BITMAP_GROUPS_L0(nbits)},				\
		{BITMAP_GROUPS_L1(nbits) + BITMAP_GROUPS_L0(nbits)},	\
		{BITMAP_GROUPS_L2(nbits) + BITMAP_GROUPS_L1(nbits) +	\
		    BITMAP_GROUPS_L0(nbits)},				\
		{BITMAP_GROUPS_L3(nbits) + BITMAP_GROUPS_L2(nbits) +	\
		    BITMAP_GROUPS_L1(nbits) + BITMAP_GROUPS_L0(nbits)},	\
		{BITMAP_GROUPS_L4(nbits) + BITMAP_GROUPS_L3(nbits) +	\
		     BITMAP_GROUPS_L2(nbits) + BITMAP_GROUPS_L1(nbits)	\
		     + BITMAP_GROUPS_L0(nbits)}				\
	}								\
}

#else /* BITMAP_USE_TREE */

#define BITMAP_GROUPS(nbits)	BITMAP_BITS2GROUPS(nbits)
#define BITMAP_GROUPS_MAX	BITMAP_BITS2GROUPS(BITMAP_MAXBITS)

#define BITMAP_INFO_INITIALIZER(nbits) {				\
	/* nbits. */							\
	nbits,								\
	/* ngroups. */							\
	BITMAP_BITS2GROUPS(nbits)					\
}

#endif /* BITMAP_USE_TREE */
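
/*
 * Example (hypothetical; "example_binfo" is not part of this header): both
 * variants of BITMAP_INFO_INITIALIZER expand to a valid static initializer,
 * e.g.
 *
 *   static const bitmap_info_t example_binfo =
 *       BITMAP_INFO_INITIALIZER(BITMAP_MAXBITS);
 *
 * The equivalent runtime path is bitmap_info_init().
 */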

typedef struct bitmap_level_s {
	/* Offset of this level's groups within the array of groups. */
	size_t group_offset;
} bitmap_level_t;

typedef struct bitmap_info_s {
	/* Logical number of bits in bitmap (stored at bottom level). */
	size_t nbits;

#ifdef BITMAP_USE_TREE
	/* Number of levels necessary for nbits. */
	unsigned nlevels;

	/*
	 * Only the first (nlevels+1) elements are used, and levels are ordered
	 * bottom to top (e.g. the bottom level is stored in levels[0]).
	 */
	bitmap_level_t levels[BITMAP_MAX_LEVELS+1];
#else /* BITMAP_USE_TREE */
	/* Number of groups necessary for nbits. */
	size_t ngroups;
#endif /* BITMAP_USE_TREE */
} bitmap_info_t;

void bitmap_info_init(bitmap_info_t *binfo, size_t nbits);
void bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo, bool fill);
size_t bitmap_size(const bitmap_info_t *binfo);
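
/*
 * Typical lifecycle (a minimal sketch, not lifted from any particular caller;
 * "binfo", "bitmap", and "nbits" are illustrative names):
 *
 *   bitmap_info_t binfo;
 *   bitmap_info_init(&binfo, nbits);
 *   bitmap_t *bitmap = ...storage of at least bitmap_size(&binfo) bytes...;
 *   bitmap_init(bitmap, &binfo, false);       (all bits start unset)
 *   size_t bit = bitmap_sfu(bitmap, &binfo);  (claim the lowest unset bit)
 *   bitmap_unset(bitmap, &binfo, bit);        (release it again)
 */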

static inline bool
bitmap_full(bitmap_t *bitmap, const bitmap_info_t *binfo) {
#ifdef BITMAP_USE_TREE
	size_t rgoff = binfo->levels[binfo->nlevels].group_offset - 1;
	bitmap_t rg = bitmap[rgoff];
	/* The bitmap is full iff the root group is 0. */
	return (rg == 0);
#else
	size_t i;

	for (i = 0; i < binfo->ngroups; i++) {
		if (bitmap[i] != 0) {
			return false;
		}
	}
	return true;
#endif
}

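/*
 * Storage convention (noted here for clarity; it follows from the accessors
 * below): a logical bit that has been set via bitmap_set() is stored as 0,
 * and an unset logical bit is stored as 1.  That inversion is what lets
 * bitmap_full() above test groups against 0, and lets the searches below
 * locate unset bits with ffs_lu().
 */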
static inline bool
bitmap_get(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
	size_t goff;
	bitmap_t g;

	assert(bit < binfo->nbits);
	goff = bit >> LG_BITMAP_GROUP_NBITS;
	g = bitmap[goff];
	return !(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
}

static inline void
bitmap_set(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
	size_t goff;
	bitmap_t *gp;
	bitmap_t g;

	assert(bit < binfo->nbits);
	assert(!bitmap_get(bitmap, binfo, bit));
	goff = bit >> LG_BITMAP_GROUP_NBITS;
	gp = &bitmap[goff];
	g = *gp;
	assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
	g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
	*gp = g;
	assert(bitmap_get(bitmap, binfo, bit));
#ifdef BITMAP_USE_TREE
	/* Propagate group state transitions up the tree. */
	if (g == 0) {
		unsigned i;
		for (i = 1; i < binfo->nlevels; i++) {
			bit = goff;
			goff = bit >> LG_BITMAP_GROUP_NBITS;
			gp = &bitmap[binfo->levels[i].group_offset + goff];
			g = *gp;
			assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
			g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
			*gp = g;
			if (g != 0) {
				break;
			}
		}
	}
#endif
}
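
/*
 * Propagation example (illustrative, assuming 64-bit groups): with
 * nbits == 128 there are two level-0 groups summarized by one level-1 group.
 * Setting bit 63 as the last free bit of group 0 drives that group to 0, so
 * the loop above also clears bit 0 of the level-1 summary group, marking the
 * whole first subtree as full.
 */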

/* ffu: find first unset bit at an index >= min_bit. */
static inline size_t
bitmap_ffu(const bitmap_t *bitmap, const bitmap_info_t *binfo, size_t min_bit) {
	assert(min_bit < binfo->nbits);

#ifdef BITMAP_USE_TREE
	size_t bit = 0;
	for (unsigned level = binfo->nlevels; level--;) {
		size_t lg_bits_per_group = (LG_BITMAP_GROUP_NBITS * (level +
		    1));
		bitmap_t group = bitmap[binfo->levels[level].group_offset + (bit
		    >> lg_bits_per_group)];
		unsigned group_nmask = (unsigned)(((min_bit > bit) ? (min_bit -
		    bit) : 0) >> (lg_bits_per_group - LG_BITMAP_GROUP_NBITS));
		assert(group_nmask <= BITMAP_GROUP_NBITS);
		bitmap_t group_mask = ~((1LU << group_nmask) - 1);
		bitmap_t group_masked = group & group_mask;
		if (group_masked == 0LU) {
			if (group == 0LU) {
				return binfo->nbits;
			}
			/*
			 * min_bit was preceded by one or more unset bits in
			 * this group, but there are no other unset bits in this
			 * group.  Try again starting at the first bit of the
			 * next sibling.  This will recurse at most once per
			 * non-root level.
			 */
			size_t sib_base = bit + (ZU(1) << lg_bits_per_group);
			assert(sib_base > min_bit);
			assert(sib_base > bit);
			if (sib_base >= binfo->nbits) {
				return binfo->nbits;
			}
			return bitmap_ffu(bitmap, binfo, sib_base);
		}
		bit += ((size_t)(ffs_lu(group_masked) - 1)) <<
		    (lg_bits_per_group - LG_BITMAP_GROUP_NBITS);
	}
	assert(bit >= min_bit);
	assert(bit < binfo->nbits);
	return bit;
#else
	size_t i = min_bit >> LG_BITMAP_GROUP_NBITS;
	bitmap_t g = bitmap[i] & ~((1LU << (min_bit & BITMAP_GROUP_NBITS_MASK))
	    - 1);
	size_t bit;
	do {
		bit = ffs_lu(g);
		if (bit != 0) {
			return (i << LG_BITMAP_GROUP_NBITS) + (bit - 1);
		}
		i++;
		g = bitmap[i];
	} while (i < binfo->ngroups);
	return binfo->nbits;
#endif
}
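
/*
 * Semantics example (illustrative): if logical bits 0..4 are currently set,
 * bitmap_ffu(bitmap, binfo, 2) returns 5, the lowest unset bit at an index
 * >= min_bit; if no such bit exists, binfo->nbits is returned.
 */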

/* sfu: set first unset. */
static inline size_t
bitmap_sfu(bitmap_t *bitmap, const bitmap_info_t *binfo) {
	size_t bit;
	bitmap_t g;
	unsigned i;

	assert(!bitmap_full(bitmap, binfo));

#ifdef BITMAP_USE_TREE
	i = binfo->nlevels - 1;
	g = bitmap[binfo->levels[i].group_offset];
	bit = ffs_lu(g) - 1;
	while (i > 0) {
		i--;
		g = bitmap[binfo->levels[i].group_offset + bit];
		bit = (bit << LG_BITMAP_GROUP_NBITS) + (ffs_lu(g) - 1);
	}
#else
	i = 0;
	g = bitmap[0];
	while ((bit = ffs_lu(g)) == 0) {
		i++;
		g = bitmap[i];
	}
	bit = (i << LG_BITMAP_GROUP_NBITS) + (bit - 1);
#endif
	bitmap_set(bitmap, binfo, bit);
	return bit;
}
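
/*
 * bitmap_sfu() behaves like bitmap_ffu(bitmap, binfo, 0) followed by
 * bitmap_set() on the result, but performs a single top-down descent; callers
 * must first ensure the bitmap is not full (see the assertion above).
 */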

static inline void
bitmap_unset(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
	size_t goff;
	bitmap_t *gp;
	bitmap_t g;
	UNUSED bool propagate;

	assert(bit < binfo->nbits);
	assert(bitmap_get(bitmap, binfo, bit));
	goff = bit >> LG_BITMAP_GROUP_NBITS;
	gp = &bitmap[goff];
	g = *gp;
	propagate = (g == 0);
	assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK))) == 0);
	g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
	*gp = g;
	assert(!bitmap_get(bitmap, binfo, bit));
#ifdef BITMAP_USE_TREE
	/* Propagate group state transitions up the tree. */
	if (propagate) {
		unsigned i;
		for (i = 1; i < binfo->nlevels; i++) {
			bit = goff;
			goff = bit >> LG_BITMAP_GROUP_NBITS;
			gp = &bitmap[binfo->levels[i].group_offset + goff];
			g = *gp;
			propagate = (g == 0);
			assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)))
			    == 0);
			g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
			*gp = g;
			if (!propagate) {
				break;
			}
		}
	}
#endif /* BITMAP_USE_TREE */
}

#endif /* JEMALLOC_INTERNAL_BITMAP_H */