/* bitops.h revision 282513 */
1/*- 2 * Copyright (c) 2010 Isilon Systems, Inc. 3 * Copyright (c) 2010 iX Systems, Inc. 4 * Copyright (c) 2010 Panasas, Inc. 5 * Copyright (c) 2013, 2014 Mellanox Technologies, Ltd. 6 * All rights reserved. 7 * 8 * Redistribution and use in source and binary forms, with or without 9 * modification, are permitted provided that the following conditions 10 * are met: 11 * 1. Redistributions of source code must retain the above copyright 12 * notice unmodified, this list of conditions, and the following 13 * disclaimer. 14 * 2. Redistributions in binary form must reproduce the above copyright 15 * notice, this list of conditions and the following disclaimer in the 16 * documentation and/or other materials provided with the distribution. 17 * 18 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 19 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 20 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 21 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 22 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 23 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 24 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 25 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
28 */ 29#ifndef _LINUX_BITOPS_H_ 30#define _LINUX_BITOPS_H_ 31 32#ifdef __LP64__ 33#define BITS_PER_LONG 64 34#else 35#define BITS_PER_LONG 32 36#endif 37#define BIT_MASK(n) (~0UL >> (BITS_PER_LONG - (n))) 38#define BITS_TO_LONGS(n) howmany((n), BITS_PER_LONG) 39#define BIT_WORD(nr) ((nr) / BITS_PER_LONG) 40 41#define BITS_PER_BYTE 8 42 43static inline int 44__ffs(int mask) 45{ 46 return (ffs(mask) - 1); 47} 48 49static inline int 50__fls(int mask) 51{ 52 return (fls(mask) - 1); 53} 54 55static inline int 56__ffsl(long mask) 57{ 58 return (ffsl(mask) - 1); 59} 60 61static inline int 62__flsl(long mask) 63{ 64 return (flsl(mask) - 1); 65} 66 67 68#define ffz(mask) __ffs(~(mask)) 69 70static inline int get_count_order(unsigned int count) 71{ 72 int order; 73 74 order = fls(count) - 1; 75 if (count & (count - 1)) 76 order++; 77 return order; 78} 79 80static inline unsigned long 81find_first_bit(unsigned long *addr, unsigned long size) 82{ 83 long mask; 84 int bit; 85 86 for (bit = 0; size >= BITS_PER_LONG; 87 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 88 if (*addr == 0) 89 continue; 90 return (bit + __ffsl(*addr)); 91 } 92 if (size) { 93 mask = (*addr) & BIT_MASK(size); 94 if (mask) 95 bit += __ffsl(mask); 96 else 97 bit += size; 98 } 99 return (bit); 100} 101 102static inline unsigned long 103find_first_zero_bit(unsigned long *addr, unsigned long size) 104{ 105 long mask; 106 int bit; 107 108 for (bit = 0; size >= BITS_PER_LONG; 109 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 110 if (~(*addr) == 0) 111 continue; 112 return (bit + __ffsl(~(*addr))); 113 } 114 if (size) { 115 mask = ~(*addr) & BIT_MASK(size); 116 if (mask) 117 bit += __ffsl(mask); 118 else 119 bit += size; 120 } 121 return (bit); 122} 123 124static inline unsigned long 125find_last_bit(unsigned long *addr, unsigned long size) 126{ 127 long mask; 128 int offs; 129 int bit; 130 int pos; 131 132 pos = size / BITS_PER_LONG; 133 offs = size % BITS_PER_LONG; 134 bit = BITS_PER_LONG * 
pos; 135 addr += pos; 136 if (offs) { 137 mask = (*addr) & BIT_MASK(offs); 138 if (mask) 139 return (bit + __flsl(mask)); 140 } 141 while (--pos) { 142 addr--; 143 bit -= BITS_PER_LONG; 144 if (*addr) 145 return (bit + __flsl(mask)); 146 } 147 return (size); 148} 149 150static inline unsigned long 151find_next_bit(unsigned long *addr, unsigned long size, unsigned long offset) 152{ 153 long mask; 154 int offs; 155 int bit; 156 int pos; 157 158 if (offset >= size) 159 return (size); 160 pos = offset / BITS_PER_LONG; 161 offs = offset % BITS_PER_LONG; 162 bit = BITS_PER_LONG * pos; 163 addr += pos; 164 if (offs) { 165 mask = (*addr) & ~BIT_MASK(offs); 166 if (mask) 167 return (bit + __ffsl(mask)); 168 bit += BITS_PER_LONG; 169 addr++; 170 } 171 for (size -= bit; size >= BITS_PER_LONG; 172 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 173 if (*addr == 0) 174 continue; 175 return (bit + __ffsl(*addr)); 176 } 177 if (size) { 178 mask = (*addr) & BIT_MASK(size); 179 if (mask) 180 bit += __ffsl(mask); 181 else 182 bit += size; 183 } 184 return (bit); 185} 186 187static inline unsigned long 188find_next_zero_bit(unsigned long *addr, unsigned long size, 189 unsigned long offset) 190{ 191 long mask; 192 int offs; 193 int bit; 194 int pos; 195 196 if (offset >= size) 197 return (size); 198 pos = offset / BITS_PER_LONG; 199 offs = offset % BITS_PER_LONG; 200 bit = BITS_PER_LONG * pos; 201 addr += pos; 202 if (offs) { 203 mask = ~(*addr) & ~BIT_MASK(offs); 204 if (mask) 205 return (bit + __ffsl(mask)); 206 bit += BITS_PER_LONG; 207 addr++; 208 } 209 for (size -= bit; size >= BITS_PER_LONG; 210 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 211 if (~(*addr) == 0) 212 continue; 213 return (bit + __ffsl(~(*addr))); 214 } 215 if (size) { 216 mask = ~(*addr) & BIT_MASK(size); 217 if (mask) 218 bit += __ffsl(mask); 219 else 220 bit += size; 221 } 222 return (bit); 223} 224 225static inline void 226bitmap_zero(unsigned long *addr, int size) 227{ 228 int len; 229 230 
len = BITS_TO_LONGS(size) * sizeof(long); 231 memset(addr, 0, len); 232} 233 234static inline void 235bitmap_fill(unsigned long *addr, int size) 236{ 237 int tail; 238 int len; 239 240 len = (size / BITS_PER_LONG) * sizeof(long); 241 memset(addr, 0xff, len); 242 tail = size & (BITS_PER_LONG - 1); 243 if (tail) 244 addr[size / BITS_PER_LONG] = BIT_MASK(tail); 245} 246 247static inline int 248bitmap_full(unsigned long *addr, int size) 249{ 250 long mask; 251 int tail; 252 int len; 253 int i; 254 255 len = size / BITS_PER_LONG; 256 for (i = 0; i < len; i++) 257 if (addr[i] != ~0UL) 258 return (0); 259 tail = size & (BITS_PER_LONG - 1); 260 if (tail) { 261 mask = BIT_MASK(tail); 262 if ((addr[i] & mask) != mask) 263 return (0); 264 } 265 return (1); 266} 267 268static inline int 269bitmap_empty(unsigned long *addr, int size) 270{ 271 long mask; 272 int tail; 273 int len; 274 int i; 275 276 len = size / BITS_PER_LONG; 277 for (i = 0; i < len; i++) 278 if (addr[i] != 0) 279 return (0); 280 tail = size & (BITS_PER_LONG - 1); 281 if (tail) { 282 mask = BIT_MASK(tail); 283 if ((addr[i] & mask) != 0) 284 return (0); 285 } 286 return (1); 287} 288 289#define NBLONG (NBBY * sizeof(long)) 290 291#define __set_bit(i, a) \ 292 atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1UL << ((i) % NBLONG)) 293 294#define set_bit(i, a) \ 295 atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1UL << ((i) % NBLONG)) 296 297#define __clear_bit(i, a) \ 298 atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1UL << ((i) % NBLONG)) 299 300#define clear_bit(i, a) \ 301 atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1UL << ((i) % NBLONG)) 302 303#define test_bit(i, a) \ 304 !!(atomic_load_acq_long(&((volatile long *)(a))[(i)/NBLONG]) & \ 305 (1UL << ((i) % NBLONG))) 306 307static inline long 308test_and_clear_bit(long bit, long *var) 309{ 310 long val; 311 312 var += bit / (sizeof(long) * NBBY); 313 bit %= sizeof(long) * NBBY; 314 bit = (1UL << bit); 315 do { 316 val = *(volatile 
long *)var; 317 } while (atomic_cmpset_long(var, val, val & ~bit) == 0); 318 319 return !!(val & bit); 320} 321 322static inline long 323test_and_set_bit(long bit, long *var) 324{ 325 long val; 326 327 var += bit / (sizeof(long) * NBBY); 328 bit %= sizeof(long) * NBBY; 329 bit = (1UL << bit); 330 do { 331 val = *(volatile long *)var; 332 } while (atomic_cmpset_long(var, val, val | bit) == 0); 333 334 return !!(val & bit); 335} 336 337 338#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG)) 339#define BITMAP_LAST_WORD_MASK(nbits) \ 340( \ 341 ((nbits) % BITS_PER_LONG) ? \ 342 (1UL<<((nbits) % BITS_PER_LONG))-1 : ~0UL \ 343) 344 345 346static inline void 347bitmap_set(unsigned long *map, int start, int nr) 348{ 349 unsigned long *p = map + BIT_WORD(start); 350 const int size = start + nr; 351 int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG); 352 unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start); 353 354 while (nr - bits_to_set >= 0) { 355 *p |= mask_to_set; 356 nr -= bits_to_set; 357 bits_to_set = BITS_PER_LONG; 358 mask_to_set = ~0UL; 359 p++; 360 } 361 if (nr) { 362 mask_to_set &= BITMAP_LAST_WORD_MASK(size); 363 *p |= mask_to_set; 364 } 365} 366 367static inline void 368bitmap_clear(unsigned long *map, int start, int nr) 369{ 370 unsigned long *p = map + BIT_WORD(start); 371 const int size = start + nr; 372 int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG); 373 unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start); 374 375 while (nr - bits_to_clear >= 0) { 376 *p &= ~mask_to_clear; 377 nr -= bits_to_clear; 378 bits_to_clear = BITS_PER_LONG; 379 mask_to_clear = ~0UL; 380 p++; 381 } 382 if (nr) { 383 mask_to_clear &= BITMAP_LAST_WORD_MASK(size); 384 *p &= ~mask_to_clear; 385 } 386} 387 388enum { 389 REG_OP_ISFREE, /* true if region is all zero bits */ 390 REG_OP_ALLOC, /* set all bits in region */ 391 REG_OP_RELEASE, /* clear all bits in region */ 392}; 393 394static int __reg_op(unsigned long *bitmap, int pos, 
int order, int reg_op) 395{ 396 int nbits_reg; /* number of bits in region */ 397 int index; /* index first long of region in bitmap */ 398 int offset; /* bit offset region in bitmap[index] */ 399 int nlongs_reg; /* num longs spanned by region in bitmap */ 400 int nbitsinlong; /* num bits of region in each spanned long */ 401 unsigned long mask; /* bitmask for one long of region */ 402 int i; /* scans bitmap by longs */ 403 int ret = 0; /* return value */ 404 405 /* 406 * Either nlongs_reg == 1 (for small orders that fit in one long) 407 * or (offset == 0 && mask == ~0UL) (for larger multiword orders.) 408 */ 409 nbits_reg = 1 << order; 410 index = pos / BITS_PER_LONG; 411 offset = pos - (index * BITS_PER_LONG); 412 nlongs_reg = BITS_TO_LONGS(nbits_reg); 413 nbitsinlong = min(nbits_reg, BITS_PER_LONG); 414 415 /* 416 * Can't do "mask = (1UL << nbitsinlong) - 1", as that 417 * overflows if nbitsinlong == BITS_PER_LONG. 418 */ 419 mask = (1UL << (nbitsinlong - 1)); 420 mask += mask - 1; 421 mask <<= offset; 422 423 switch (reg_op) { 424 case REG_OP_ISFREE: 425 for (i = 0; i < nlongs_reg; i++) { 426 if (bitmap[index + i] & mask) 427 goto done; 428 } 429 ret = 1; /* all bits in region free (zero) */ 430 break; 431 432 case REG_OP_ALLOC: 433 for (i = 0; i < nlongs_reg; i++) 434 bitmap[index + i] |= mask; 435 break; 436 437 case REG_OP_RELEASE: 438 for (i = 0; i < nlongs_reg; i++) 439 bitmap[index + i] &= ~mask; 440 break; 441 } 442done: 443 return ret; 444} 445 446/** 447 * bitmap_find_free_region - find a contiguous aligned mem region 448 * @bitmap: array of unsigned longs corresponding to the bitmap 449 * @bits: number of bits in the bitmap 450 * @order: region size (log base 2 of number of bits) to find 451 * 452 * Find a region of free (zero) bits in a @bitmap of @bits bits and 453 * allocate them (set them to one). Only consider regions of length 454 * a power (@order) of two, aligned to that power of two, which 455 * makes the search algorithm much faster. 
456 * 457 * Return the bit offset in bitmap of the allocated region, 458 * or -errno on failure. 459 */ 460static inline int 461bitmap_find_free_region(unsigned long *bitmap, int bits, int order) 462{ 463 int pos, end; /* scans bitmap by regions of size order */ 464 465 for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) { 466 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE)) 467 continue; 468 __reg_op(bitmap, pos, order, REG_OP_ALLOC); 469 return pos; 470 } 471 return -ENOMEM; 472} 473 474/** 475 * bitmap_allocate_region - allocate bitmap region 476 * @bitmap: array of unsigned longs corresponding to the bitmap 477 * @pos: beginning of bit region to allocate 478 * @order: region size (log base 2 of number of bits) to allocate 479 * 480 * Allocate (set bits in) a specified region of a bitmap. 481 * 482 * Return 0 on success, or %-EBUSY if specified region wasn't 483 * free (not all bits were zero). 484 */ 485 486static inline int 487bitmap_allocate_region(unsigned long *bitmap, int pos, int order) 488{ 489 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE)) 490 return -EBUSY; 491 __reg_op(bitmap, pos, order, REG_OP_ALLOC); 492 return 0; 493} 494 495/** 496 * bitmap_release_region - release allocated bitmap region 497 * @bitmap: array of unsigned longs corresponding to the bitmap 498 * @pos: beginning of bit region to release 499 * @order: region size (log base 2 of number of bits) to release 500 * 501 * This is the complement to __bitmap_find_free_region() and releases 502 * the found region (by clearing it in the bitmap). 503 * 504 * No return value. 505 */ 506static inline void 507bitmap_release_region(unsigned long *bitmap, int pos, int order) 508{ 509 __reg_op(bitmap, pos, order, REG_OP_RELEASE); 510} 511 512 513#define for_each_set_bit(bit, addr, size) \ 514 for ((bit) = find_first_bit((addr), (size)); \ 515 (bit) < (size); \ 516 (bit) = find_next_bit((addr), (size), (bit) + 1)) 517 518#endif /* _LINUX_BITOPS_H_ */ 519