Lines Matching refs:slab

34 extent_t *slab, arena_bin_t *bin);
36 extent_t *slab, arena_bin_t *bin);
49 size_t alignment, bool *zero, bool slab)
56 pad, alignment, zero, &commit, slab));
119 arena_slab_reg_alloc(tsdn_t *tsdn, extent_t *slab,
123 arena_slab_data_t *slab_data = extent_slab_data_get(slab);
130 ret = (void *)((uintptr_t)extent_addr_get(slab) +
140 arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr)
144 /* Freeing a pointer outside the slab can cause assertion failure. */
145 assert((uintptr_t)ptr >= (uintptr_t)extent_addr_get(slab));
146 assert((uintptr_t)ptr < (uintptr_t)extent_past_get(slab));
148 assert(((uintptr_t)ptr - (uintptr_t)extent_addr_get(slab)) %
152 diff = (size_t)((uintptr_t)ptr - (uintptr_t)extent_addr_get(slab));
176 arena_slab_reg_dalloc(tsdn_t *tsdn, extent_t *slab,
181 size_t regind = arena_slab_regind(slab, binind, ptr);
758 arena_slab_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *slab)
762 arena_nactive_sub(arena, extent_size_get(slab) >> LG_PAGE);
763 arena_extent_cache_dalloc_locked(tsdn, arena, &extent_hooks, slab);
767 arena_bin_slabs_nonfull_insert(arena_bin_t *bin, extent_t *slab)
769 assert(extent_slab_data_get(slab)->nfree > 0);
770 extent_heap_insert(&bin->slabs_nonfull, slab);
774 arena_bin_slabs_nonfull_remove(arena_bin_t *bin, extent_t *slab)
776 extent_heap_remove(&bin->slabs_nonfull, slab);
782 extent_t *slab = extent_heap_remove_first(&bin->slabs_nonfull);
783 if (slab == NULL)
787 return (slab);
791 arena_bin_slabs_full_insert(arena_bin_t *bin, extent_t *slab)
793 assert(extent_slab_data_get(slab)->nfree == 0);
794 extent_ring_insert(&bin->slabs_full, slab);
798 arena_bin_slabs_full_remove(extent_t *slab)
800 extent_ring_remove(slab);
848 extent_t *slab;
852 slab = bin->slabcur;
855 arena_slab_dalloc(tsd_tsdn(tsd), arena, slab);
858 while ((slab = extent_heap_remove_first(&bin->slabs_nonfull)) !=
861 arena_slab_dalloc(tsd_tsdn(tsd), arena, slab);
864 for (slab = qr_next(&bin->slabs_full, qr_link); slab !=
865 &bin->slabs_full; slab = qr_next(&bin->slabs_full,
867 arena_bin_slabs_full_remove(slab);
869 arena_slab_dalloc(tsd_tsdn(tsd), arena, slab);
954 extent_t *slab;
960 slab = extent_alloc_wrapper(tsdn, arena, r_extent_hooks, NULL,
964 return (slab);
971 extent_t *slab;
976 slab = arena_extent_cache_alloc_locked(tsdn, arena, &extent_hooks, NULL,
978 if (slab == NULL) {
979 slab = arena_slab_alloc_hard(tsdn, arena, &extent_hooks,
981 if (slab == NULL)
984 assert(extent_slab_get(slab));
986 arena_nactive_add(arena, extent_size_get(slab) >> LG_PAGE);
988 /* Initialize slab internals. */
989 slab_data = extent_slab_data_get(slab);
995 arena->stats.mapped += extent_size_get(slab);
997 return (slab);
1004 extent_t *slab;
1007 /* Look for a usable slab. */
1008 slab = arena_bin_slabs_nonfull_tryget(bin);
1009 if (slab != NULL)
1010 return (slab);
1015 /* Allocate a new slab. */
1019 slab = arena_slab_alloc(tsdn, arena, binind, bin_info);
1023 if (slab != NULL) {
1028 return (slab);
1036 slab = arena_bin_slabs_nonfull_tryget(bin);
1037 if (slab != NULL)
1038 return (slab);
1049 extent_t *slab;
1056 slab = arena_bin_nonfull_slab_get(tsdn, arena, bin, binind);
1065 if (slab != NULL) {
1067 * arena_slab_alloc() may have allocated slab,
1070 * make any assumptions about how slab has
1073 * a region were just deallocated from the slab.
1075 if (extent_slab_data_get(slab)->nfree ==
1077 arena_dalloc_bin_slab(tsdn, arena, slab,
1080 arena_bin_lower_slab(tsdn, arena, slab,
1091 if (slab == NULL)
1093 bin->slabcur = slab;
1097 return (arena_slab_reg_alloc(tsdn, slab, bin_info));
1115 extent_t *slab;
1117 if ((slab = bin->slabcur) != NULL &&
1118 extent_slab_data_get(slab)->nfree > 0) {
1119 ptr = arena_slab_reg_alloc(tsdn, slab,
1183 extent_t *slab;
1190 if ((slab = bin->slabcur) != NULL && extent_slab_data_get(slab)->nfree >
1192 ret = arena_slab_reg_alloc(tsdn, slab, &arena_bin_info[binind]);
1254 /* Small; alignment doesn't require special slab placement. */
1326 arena_dissociate_bin_slab(extent_t *slab, arena_bin_t *bin)
1328 /* Dissociate slab from bin. */
1329 if (slab == bin->slabcur)
1332 szind_t binind = extent_slab_data_get(slab)->binind;
1337 * slab only contains one region, then it never gets inserted
1341 arena_bin_slabs_full_remove(slab);
1343 arena_bin_slabs_nonfull_remove(bin, slab);
1348 arena_dalloc_bin_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
1351 assert(slab != bin->slabcur);
1356 arena_slab_dalloc(tsdn, arena, slab);
1365 arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
1368 assert(extent_slab_data_get(slab)->nfree > 0);
1372 * oldest/lowest non-full slab. It is okay to NULL slabcur out rather
1374 * slab.
1376 if (bin->slabcur != NULL && extent_snad_comp(bin->slabcur, slab) > 0) {
1382 bin->slabcur = slab;
1386 arena_bin_slabs_nonfull_insert(bin, slab);
1390 arena_dalloc_bin_locked_impl(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
1393 arena_slab_data_t *slab_data = extent_slab_data_get(slab);
1401 arena_slab_reg_dalloc(tsdn, slab, slab_data, ptr);
1403 arena_dissociate_bin_slab(slab, bin);
1404 arena_dalloc_bin_slab(tsdn, arena, slab, bin);
1405 } else if (slab_data->nfree == 1 && slab != bin->slabcur) {
1406 arena_bin_slabs_full_remove(slab);
1407 arena_bin_lower_slab(tsdn, arena, slab, bin);