Lines matching defs:bin — definitions and uses of bin in jemalloc's arena.c; the leading number on each line is its source line number.

63 bin_t *bin);
65 bin_t *bin);
1013 arena_bin_slabs_nonfull_insert(bin_t *bin, extent_t *slab) {
1015 extent_heap_insert(&bin->slabs_nonfull, slab);
1017 bin->stats.nonfull_slabs++;
1022 arena_bin_slabs_nonfull_remove(bin_t *bin, extent_t *slab) {
1023 extent_heap_remove(&bin->slabs_nonfull, slab);
1025 bin->stats.nonfull_slabs--;
1030 arena_bin_slabs_nonfull_tryget(bin_t *bin) {
1031 extent_t *slab = extent_heap_remove_first(&bin->slabs_nonfull);
1036 bin->stats.reslabs++;
1037 bin->stats.nonfull_slabs--;
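
The three helpers above (lines 1013-1037) maintain bin->slabs_nonfull, a heap of partially-full slabs from which the "lowest" slab is preferred for reuse, together with the nonfull_slabs gauge; tryget additionally counts each reuse in stats.reslabs. A minimal sketch of the same bookkeeping, using a sorted array as a stand-in for jemalloc's extent heap (all toy_* names are illustrative, not jemalloc's; capacity checks omitted):

#include <stddef.h>
#include <stdint.h>

/* Toy stand-ins for jemalloc's extent_t / bin_t. */
typedef struct { void *addr; } slab_t;
typedef struct {
    slab_t *nonfull[64];    /* kept sorted by address, lowest first */
    size_t n;
    struct { uint64_t reslabs; size_t nonfull_slabs; } stats;
} toy_bin_t;

static void
toy_nonfull_insert(toy_bin_t *bin, slab_t *slab) {
    size_t i = bin->n++;
    while (i > 0 && bin->nonfull[i - 1]->addr > slab->addr) {
        bin->nonfull[i] = bin->nonfull[i - 1];  /* shift larger slabs up */
        i--;
    }
    bin->nonfull[i] = slab;
    bin->stats.nonfull_slabs++;
}

static slab_t *
toy_nonfull_tryget(toy_bin_t *bin) {
    if (bin->n == 0) {
        return NULL;                    /* nothing partially full to reuse */
    }
    slab_t *slab = bin->nonfull[0];     /* lowest-addressed slab wins */
    for (size_t i = 1; i < bin->n; i++) {
        bin->nonfull[i - 1] = bin->nonfull[i];
    }
    bin->n--;
    bin->stats.reslabs++;               /* an existing slab got reused */
    bin->stats.nonfull_slabs--;
    return slab;
}

The remove helper (line 1023) is the symmetric operation: unlink a specific slab and decrement nonfull_slabs.
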
1043 arena_bin_slabs_full_insert(arena_t *arena, bin_t *bin, extent_t *slab) {
1053 extent_list_append(&bin->slabs_full, slab);
1057 arena_bin_slabs_full_remove(arena_t *arena, bin_t *bin, extent_t *slab) {
1061 extent_list_remove(&bin->slabs_full, slab);
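
Full slabs, by contrast, need no ordering, only O(1) insert and remove, so they go on a linked list rather than a heap; in jemalloc this tracking is bypassed for automatic arenas, since only manual arenas can be reset and the reset path below is the main consumer. A sketch of a sentinel-based intrusive list with that shape (jemalloc's extent_list is macro-generated, but the behavior is the same):

#include <stddef.h>

typedef struct slab_s slab_t;
struct slab_s {
    slab_t *prev, *next;        /* intrusive link, like extent_t's */
};
typedef struct { slab_t head; } slab_list_t;   /* head is a sentinel */

static void
slab_list_init(slab_list_t *list) {
    list->head.prev = list->head.next = &list->head;
}

static void
slab_list_append(slab_list_t *list, slab_t *slab) {
    slab->prev = list->head.prev;
    slab->next = &list->head;
    slab->prev->next = slab;
    list->head.prev = slab;
}

static void
slab_list_remove(slab_t *slab) {
    slab->prev->next = slab->next;
    slab->next->prev = slab->prev;
    slab->prev = slab->next = NULL;
}
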
1065 arena_bin_reset(tsd_t *tsd, arena_t *arena, bin_t *bin) {
1068 malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);
1069 if (bin->slabcur != NULL) {
1070 slab = bin->slabcur;
1071 bin->slabcur = NULL;
1072 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock);
1074 malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);
1076 while ((slab = extent_heap_remove_first(&bin->slabs_nonfull)) != NULL) {
1077 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock);
1079 malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);
1081 for (slab = extent_list_first(&bin->slabs_full); slab != NULL;
1082 slab = extent_list_first(&bin->slabs_full)) {
1083 arena_bin_slabs_full_remove(arena, bin, slab);
1084 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock);
1086 malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);
1089 bin->stats.curregs = 0;
1090 bin->stats.curslabs = 0;
1092 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock);
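
arena_bin_reset (lines 1065-1092) shows a locking discipline that recurs throughout this file: each slab is detached from the bin while bin->lock is held, but the expensive deallocation runs with the lock dropped, and the lock is retaken before the next iteration. A compressed sketch of that unlock-work-relock loop, assuming a toy singly-linked bin:

#include <pthread.h>
#include <stddef.h>

typedef struct node_s node_t;
struct node_s { node_t *next; };

typedef struct {
    pthread_mutex_t lock;
    node_t *slabs;      /* stand-in for slabcur + slabs_nonfull + slabs_full */
} toy_bin_t;

static void
toy_bin_reset(toy_bin_t *bin, void (*release)(node_t *)) {
    pthread_mutex_lock(&bin->lock);
    node_t *slab;
    while ((slab = bin->slabs) != NULL) {
        bin->slabs = slab->next;            /* detach under the lock */
        pthread_mutex_unlock(&bin->lock);
        release(slab);                      /* slow path: no bin lock held */
        pthread_mutex_lock(&bin->lock);
    }
    /* gauges (curregs, curslabs) are zeroed while still locked */
    pthread_mutex_unlock(&bin->lock);
}

Bounding how long bin->lock is held this way keeps concurrent allocations from stalling behind a bulk reset.
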
1266 arena_bin_nonfull_slab_get(tsdn_t *tsdn, arena_t *arena, bin_t *bin,
1272 slab = arena_bin_slabs_nonfull_tryget(bin);
1281 malloc_mutex_unlock(tsdn, &bin->lock);
1285 malloc_mutex_lock(tsdn, &bin->lock);
1288 bin->stats.nslabs++;
1289 bin->stats.curslabs++;
1296 * sufficient memory available while this one dropped bin->lock above,
1299 slab = arena_bin_slabs_nonfull_tryget(bin);
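
arena_bin_nonfull_slab_get (lines 1266-1299) drops bin->lock around the fresh-slab allocation, which is why it retries the heap on failure: another thread may have freed regions into the bin during the window where the lock was not held (the comment at line 1296). The control flow, reduced to a sketch with toy stand-ins:

#include <pthread.h>
#include <stddlib.h>
#include <stddef.h>
#include <stdint.h>

typedef struct { int dummy; } slab_t;
typedef struct {
    pthread_mutex_t lock;
    slab_t *nonfull;            /* toy one-slot "heap" */
    struct { uint64_t nslabs, curslabs; } stats;
} toy_bin_t;

static slab_t *
toy_tryget(toy_bin_t *bin) {            /* pop a nonfull slab, or NULL */
    slab_t *slab = bin->nonfull;
    bin->nonfull = NULL;
    return slab;
}

static slab_t *
toy_slab_alloc(void) {                  /* stands in for extent allocation */
    return malloc(sizeof(slab_t));
}

/* Called with bin->lock held; returns with it held. */
static slab_t *
toy_nonfull_slab_get(toy_bin_t *bin) {
    slab_t *slab = toy_tryget(bin);
    if (slab != NULL) {
        return slab;                    /* common case: reuse, no lock juggling */
    }
    pthread_mutex_unlock(&bin->lock);
    slab = toy_slab_alloc();            /* expensive; lock deliberately dropped */
    pthread_mutex_lock(&bin->lock);
    if (slab != NULL) {
        bin->stats.nslabs++;
        bin->stats.curslabs++;
        return slab;
    }
    /* OOM, but a racing thread may have freed into the bin while the
     * lock was dropped: check the heap once more before giving up. */
    return toy_tryget(bin);
}
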
1307 /* Re-fill bin->slabcur, then call arena_slab_reg_alloc(). */
1309 arena_bin_malloc_hard(tsdn_t *tsdn, arena_t *arena, bin_t *bin,
1315 if (!arena_is_auto(arena) && bin->slabcur != NULL) {
1316 arena_bin_slabs_full_insert(arena, bin, bin->slabcur);
1317 bin->slabcur = NULL;
1319 slab = arena_bin_nonfull_slab_get(tsdn, arena, bin, binind, binshard);
1320 if (bin->slabcur != NULL) {
1323 * bin lock in arena_bin_nonfull_slab_get().
1325 if (extent_nfree_get(bin->slabcur) > 0) {
1326 void *ret = arena_slab_reg_alloc(bin->slabcur,
1340 bin);
1343 bin);
1349 arena_bin_slabs_full_insert(arena, bin, bin->slabcur);
1350 bin->slabcur = NULL;
1356 bin->slabcur = slab;
1358 assert(extent_nfree_get(bin->slabcur) > 0);
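
arena_bin_malloc_hard (lines 1307-1358) copes with the same dropped-lock window from the other side: by the time nonfull_slab_get returns, a racing thread may already have installed a usable bin->slabcur. If that slabcur still has free regions it wins, and the slab this call obtained is filed back (jemalloc frees it when completely empty, otherwise re-files it via the lower_slab path at lines 1340/1343); otherwise the new slab becomes slabcur. A sketch of the decision tree with toy stand-ins (the initial retire of slabcur for manual arenas at line 1315 is omitted):

#include <stddef.h>

typedef struct { unsigned nfree; char regs[4][16]; } slab_t;
typedef struct { slab_t *slabcur; slab_t *filed; } toy_bin_t;

static void *
toy_reg_alloc(slab_t *slab) {           /* carve one region (toy) */
    return slab->regs[--slab->nfree];
}

static void
toy_file_slab(toy_bin_t *bin, slab_t *slab) {
    bin->filed = slab;                  /* one-slot shelf stands in for
                                         * free-or-lower handling */
}

static slab_t *
toy_nonfull_slab_get(toy_bin_t *bin) {  /* may drop/retake the bin lock */
    slab_t *slab = bin->filed;
    bin->filed = NULL;
    return slab;
}

static void *
toy_malloc_hard(toy_bin_t *bin) {
    slab_t *slab = toy_nonfull_slab_get(bin);
    if (bin->slabcur != NULL) {
        /* A racing thread refilled slabcur while the lock was dropped;
         * prefer it if it still has room. */
        if (bin->slabcur->nfree > 0) {
            void *ret = toy_reg_alloc(bin->slabcur);
            if (slab != NULL) {
                toy_file_slab(bin, slab);   /* keep the spare slab */
            }
            return ret;
        }
        toy_file_slab(bin, bin->slabcur);   /* exhausted: retire it */
        bin->slabcur = NULL;
    }
    if (slab == NULL) {
        return NULL;                        /* allocation truly failed */
    }
    bin->slabcur = slab;
    return toy_reg_alloc(bin->slabcur);     /* nfree > 0 holds here */
}
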
1363 /* Choose a bin shard and return the locked bin. */
1367 bin_t *bin;
1374 bin = &arena->bins[binind].bin_shards[*binshard];
1375 malloc_mutex_lock(tsdn, &bin->lock);
1377 return bin;
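
arena_bin_choose_lock (lines 1363-1377) is how callers enter a bin: each size class's bin is split into shards, a shard index is derived from the calling thread's state, and only that shard's mutex is taken, so threads hammering the same size class mostly contend on different locks. A toy version of choose-then-lock (jemalloc derives the shard from tsd; hashing a thread-local's address is just a portable stand-in):

#include <pthread.h>
#include <stdint.h>

#define TOY_N_SHARDS 4

typedef struct {
    pthread_mutex_t lock;
    /* per-shard slabcur, heaps, stats ... */
} toy_bin_t;

/* Assume pthread_mutex_init() ran for each shard's lock at startup. */
static toy_bin_t toy_shards[TOY_N_SHARDS];

static toy_bin_t *
toy_bin_choose_lock(unsigned *binshard) {
    static _Thread_local char anchor;   /* distinct address per thread */
    *binshard = (unsigned)((uintptr_t)&anchor % TOY_N_SHARDS);
    toy_bin_t *bin = &toy_shards[*binshard];
    pthread_mutex_lock(&bin->lock);
    return bin;
}
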
1392 bin_t *bin = arena_bin_choose_lock(tsdn, arena, binind, &binshard);
1397 if ((slab = bin->slabcur) != NULL && extent_nfree_get(slab) >
1407 void *ptr = arena_bin_malloc_hard(tsdn, arena, bin,
1434 bin->stats.nmalloc += i;
1435 bin->stats.nrequests += tbin->tstats.nrequests;
1436 bin->stats.curregs += i;
1437 bin->stats.nfills++;
1440 malloc_mutex_unlock(tsdn, &bin->lock);
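
The tcache fill path (lines 1392-1440) amortizes that lock: it carves up to a cache's worth of regions inside a single choose-lock/unlock pair and folds the whole batch into the stats with one set of updates (nmalloc += i, nfills++). A sketch of batch-then-account, with a stubbed region source:

#include <pthread.h>
#include <stddef.h>
#include <stdint.h>

typedef struct {
    pthread_mutex_t lock;
    struct { uint64_t nmalloc, nrequests, nfills, curregs; } stats;
} toy_bin_t;

static void *
toy_reg_alloc(toy_bin_t *bin) {         /* stand-in for slabcur carving */
    (void)bin;
    static unsigned char pool[4096];
    static size_t used;
    if (used + 64 > sizeof(pool)) {
        return NULL;                    /* pretend the arena ran dry */
    }
    void *ret = pool + used;
    used += 64;
    return ret;
}

static size_t
toy_fill(toy_bin_t *bin, void **cache, size_t nfill, uint64_t nrequests) {
    size_t i;
    pthread_mutex_lock(&bin->lock);
    for (i = 0; i < nfill; i++) {
        void *ptr = toy_reg_alloc(bin);
        if (ptr == NULL) {
            break;                      /* partial fill on OOM */
        }
        cache[i] = ptr;
    }
    /* One batched stats update for the whole fill. */
    bin->stats.nmalloc += i;
    bin->stats.nrequests += nrequests;
    bin->stats.curregs += i;
    bin->stats.nfills++;
    pthread_mutex_unlock(&bin->lock);
    return i;
}
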
1462 bin_t *bin;
1469 bin = arena_bin_choose_lock(tsdn, arena, binind, &binshard);
1471 if ((slab = bin->slabcur) != NULL && extent_nfree_get(slab) > 0) {
1474 ret = arena_bin_malloc_hard(tsdn, arena, bin, binind, binshard);
1478 malloc_mutex_unlock(tsdn, &bin->lock);
1483 bin->stats.nmalloc++;
1484 bin->stats.nrequests++;
1485 bin->stats.curregs++;
1487 malloc_mutex_unlock(tsdn, &bin->lock);
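
Both allocation paths gate on the same fast case first: if bin->slabcur exists and extent_nfree_get() reports free regions (lines 1397 and 1471), a region is carved straight out of it, and only otherwise does arena_bin_malloc_hard run. The carving itself amounts to popping a bit from a free bitmap; a single-word toy version of what arena_slab_reg_alloc does over jemalloc's multi-word bitmap:

#include <stddef.h>
#include <stdint.h>

#define NREGS    64
#define REG_SIZE 64

/* Toy slab: one 64-bit bitmap over a contiguous run of regions. */
typedef struct {
    uint64_t free_mask;                 /* bit i set => region i free */
    unsigned nfree;
    unsigned char base[NREGS * REG_SIZE];
} slab_t;

/* Precondition: slab->nfree > 0 (exactly what the fast path checks). */
static void *
toy_slab_reg_alloc(slab_t *slab) {
    unsigned i = 0;
    while (((slab->free_mask >> i) & 1u) == 0) {
        i++;                            /* find the first free bit */
    }
    slab->free_mask &= ~(1ULL << i);
    slab->nfree--;
    return slab->base + (size_t)i * REG_SIZE;
}
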
1622 arena_dissociate_bin_slab(arena_t *arena, extent_t *slab, bin_t *bin) {
1623 /* Dissociate slab from bin. */
1624 if (slab == bin->slabcur) {
1625 bin->slabcur = NULL;
1636 arena_bin_slabs_full_remove(arena, bin, slab);
1638 arena_bin_slabs_nonfull_remove(bin, slab);
1645 bin_t *bin) {
1646 assert(slab != bin->slabcur);
1648 malloc_mutex_unlock(tsdn, &bin->lock);
1652 malloc_mutex_lock(tsdn, &bin->lock);
1654 bin->stats.curslabs--;
1660 bin_t *bin) {
1664 * Make sure that if bin->slabcur is non-NULL, it refers to the
1669 if (bin->slabcur != NULL && extent_snad_comp(bin->slabcur, slab) > 0) {
1671 if (extent_nfree_get(bin->slabcur) > 0) {
1672 arena_bin_slabs_nonfull_insert(bin, bin->slabcur);
1674 arena_bin_slabs_full_insert(arena, bin, bin->slabcur);
1676 bin->slabcur = slab;
1678 bin->stats.reslabs++;
1681 arena_bin_slabs_nonfull_insert(bin, slab);
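
arena_bin_lower_slab (lines 1660-1681) is the policy piece behind slabcur: it should always be the lowest slab (by serial number, then address, via extent_snad_comp) that has room. A freed region in a lower slab therefore displaces the current slabcur, the displaced slab is re-filed by fullness, and the swap is counted in stats.reslabs; packing allocations into low slabs this way lets high slabs drain and eventually be returned. The swap, sketched with an address-only comparison and stubbed containers:

#include <stddef.h>
#include <stdint.h>

typedef struct { void *addr; unsigned nfree; } slab_t;
typedef struct {
    slab_t *slabcur;
    struct { uint64_t reslabs; } stats;
} toy_bin_t;

/* Container moves elided; see the earlier heap/list sketches. */
static void toy_nonfull_insert(toy_bin_t *bin, slab_t *slab) { (void)bin; (void)slab; }
static void toy_full_insert(toy_bin_t *bin, slab_t *slab) { (void)bin; (void)slab; }

static void
toy_lower_slab(toy_bin_t *bin, slab_t *slab) {
    /* jemalloc compares serial number then address (extent_snad_comp);
     * plain address order stands in for that here. */
    if (bin->slabcur != NULL && bin->slabcur->addr > slab->addr) {
        /* Displace the higher slabcur; re-file it by fullness. */
        if (bin->slabcur->nfree > 0) {
            toy_nonfull_insert(bin, bin->slabcur);
        } else {
            toy_full_insert(bin, bin->slabcur);
        }
        bin->slabcur = slab;
        bin->stats.reslabs++;
    } else {
        toy_nonfull_insert(bin, slab);
    }
}
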
1686 arena_dalloc_bin_locked_impl(tsdn_t *tsdn, arena_t *arena, bin_t *bin,
1698 arena_dissociate_bin_slab(arena, slab, bin);
1699 arena_dalloc_bin_slab(tsdn, arena, slab, bin);
1700 } else if (nfree == 1 && slab != bin->slabcur) {
1701 arena_bin_slabs_full_remove(arena, bin, slab);
1702 arena_bin_lower_slab(tsdn, arena, slab, bin);
1706 bin->stats.ndalloc++;
1707 bin->stats.curregs--;
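
arena_dalloc_bin_locked_impl (lines 1686-1707) is a small state machine keyed on the slab's free count after the region is returned: a now-empty slab is dissociated (lines 1622-1638 above, clearing slabcur or unlinking from whichever container holds it) and handed back via arena_dalloc_bin_slab (lines 1645-1654), which again drops the bin lock around the actual extent deallocation and decrements curslabs; a slab whose nfree just became 1 has left the full list and is offered to lower_slab; and the gauges move by one either way. The branch structure, with toy stand-ins:

#include <stddef.h>
#include <stdint.h>

typedef struct { unsigned nfree, nregs; } slab_t;
typedef struct {
    slab_t *slabcur;
    struct { uint64_t ndalloc, curregs; } stats;
} toy_bin_t;

/* Stubs for the moves sketched earlier; bodies elided. */
static void toy_dissociate(toy_bin_t *bin, slab_t *slab) {
    if (slab == bin->slabcur) {
        bin->slabcur = NULL;
    }
    /* ... else unlink from the full list or nonfull heap ... */
}
static void toy_slab_dalloc(slab_t *slab) { (void)slab; }
static void toy_full_remove(toy_bin_t *bin, slab_t *slab) { (void)bin; (void)slab; }
static void toy_lower_slab(toy_bin_t *bin, slab_t *slab) { (void)bin; (void)slab; }

/* Called with the bin lock held; ptr has already been validated. */
static void
toy_dalloc_bin_locked(toy_bin_t *bin, slab_t *slab, void *ptr) {
    (void)ptr;                          /* bitmap region return elided */
    slab->nfree++;
    if (slab->nfree == slab->nregs) {
        toy_dissociate(bin, slab);      /* empty: detach, then free the slab */
        toy_slab_dalloc(slab);
    } else if (slab->nfree == 1 && slab != bin->slabcur) {
        toy_full_remove(bin, slab);     /* just left the full list */
        toy_lower_slab(bin, slab);
    }
    bin->stats.ndalloc++;
    bin->stats.curregs--;
}
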
1712 arena_dalloc_bin_junked_locked(tsdn_t *tsdn, arena_t *arena, bin_t *bin,
1714 arena_dalloc_bin_locked_impl(tsdn, arena, bin, binind, extent, ptr,
1722 bin_t *bin = &arena->bins[binind].bin_shards[binshard];
1724 malloc_mutex_lock(tsdn, &bin->lock);
1725 arena_dalloc_bin_locked_impl(tsdn, arena, bin, binind, extent, ptr,
1727 malloc_mutex_unlock(tsdn, &bin->lock);
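
The last two call sites show the naming convention that governs all of the above: a _locked suffix (arena_dalloc_bin_locked_impl, arena_dalloc_bin_junked_locked) means the caller already holds bin->lock, while the plain entry point takes the shard's lock itself around the impl. In miniature:

#include <pthread.h>
#include <stddef.h>

typedef struct { pthread_mutex_t lock; } toy_bin_t;

/* Caller must hold bin->lock (the _locked convention). */
static void
toy_dalloc_locked(toy_bin_t *bin, void *ptr) {
    (void)bin; (void)ptr;               /* real work elided */
}

/* Lock-acquiring wrapper around the _locked impl. */
static void
toy_dalloc(toy_bin_t *bin, void *ptr) {
    pthread_mutex_lock(&bin->lock);
    toy_dalloc_locked(bin, ptr);
    pthread_mutex_unlock(&bin->lock);
}
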