Searched refs:binind (Results 1 - 12 of 12) sorted by relevance

/netbsd-current/external/bsd/jemalloc/dist/test/unit/
slab.c
4 szind_t binind; local
6 for (binind = 0; binind < NBINS; binind++) {
9 const bin_info_t *bin_info = &bin_infos[binind];
12 binind, 0, extent_state_active, false, true, true);
18 assert_zu_eq(arena_slab_regind(&slab, binind, reg),
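For context on what the slab.c test above exercises: for every region of a slab bound to bin binind, arena_slab_regind() must recover that region's index. A minimal reference model of that invariant is sketched below (assuming ptr points at a region start; this is not jemalloc's optimized code, which appears under arena.c further down):

    /*
     * Reference model of the invariant the slab.c test asserts: the
     * region index is the pointer's byte offset within the slab
     * divided by the bin's region size.  jemalloc's real
     * arena_slab_regind() replaces the division with a precomputed
     * per-bin divisor (see the arena.c hits below).
     */
    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    static size_t
    slab_regind_ref(const void *slab_addr, size_t reg_size, const void *ptr) {
        size_t diff = (size_t)((uintptr_t)ptr - (uintptr_t)slab_addr);
        assert(diff % reg_size == 0);   /* ptr must be a region start */
        return diff / reg_size;
    }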
pack.c
53 unsigned binind = binind_compute(); local
59 mib[2] = (size_t)binind;
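The pack.c hit stores a computed bin index into slot 2 of a mallctl MIB. A hedged sketch of that pattern using jemalloc's public mallctlnametomib()/mallctlbymib() interface follows; the "arenas.bin.0.size" name and the error handling are illustrative, and the real test computes binind via its own binind_compute():

    /*
     * Sketch of the MIB-patching pattern visible in pack.c: translate
     * the name once, then overwrite the bin-index component (mib[2])
     * before each query.  Purely illustrative.
     */
    #include <jemalloc/jemalloc.h>
    #include <stddef.h>

    static size_t
    bin_reg_size(unsigned binind) {
        size_t mib[4];
        size_t miblen = sizeof(mib) / sizeof(mib[0]);
        size_t sz, len = sizeof(sz);

        if (mallctlnametomib("arenas.bin.0.size", mib, &miblen) != 0)
            return 0;
        mib[2] = (size_t)binind;        /* select the bin of interest */
        if (mallctlbymib(mib, miblen, &sz, &len, NULL, 0) != 0)
            return 0;
        return sz;
    }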
/netbsd-current/external/bsd/jemalloc/dist/include/jemalloc/internal/
tcache_inlines.h
43 UNUSED size_t size, szind_t binind, bool zero, bool slow_path) {
49 assert(binind < NBINS);
50 bin = tcache_small_bin_get(tcache, binind);
61 bin, binind, &tcache_hard_success);
73 usize = sz_index2size(binind);
80 arena_alloc_junk_small(ret, &bin_infos[binind],
88 arena_alloc_junk_small(ret, &bin_infos[binind], true);
105 szind_t binind, bool zero, bool slow_path) {
110 assert(binind >= NBINS && binind < nhbin
42 tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, UNUSED size_t size, szind_t binind, bool zero, bool slow_path) argument
[all...]
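The tcache_inlines.h matches above are the small-allocation fast path: assert the index is a small bin, pop a cached region from the thread cache, and only fall back to tcache_alloc_small_hard() on a miss. A simplified, self-contained sketch of that shape; the cache-bin layout here is a hypothetical stand-in, not jemalloc's real cache_bin API:

    /*
     * Simplified shape of tcache_alloc_small(): try the thread-local
     * bin first, fall back to a slow refill path on a miss.  Types
     * and the NULL fallback are illustrative only.
     */
    #include <stdbool.h>
    #include <stddef.h>

    typedef struct {
        void **avail;   /* stack of cached regions */
        int  ncached;   /* number of cached regions */
    } cache_bin_model_t;

    static void *
    cache_bin_alloc_fast(cache_bin_model_t *bin, bool *success) {
        if (bin->ncached > 0) {
            *success = true;
            return bin->avail[--bin->ncached];  /* no locking on the fast path */
        }
        /* Miss: jemalloc would call tcache_alloc_small_hard() here. */
        *success = false;
        return NULL;
    }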
tcache_externs.h
33 cache_bin_t *tbin, szind_t binind, bool *tcache_success);
35 szind_t binind, unsigned rem);
36 void tcache_bin_flush_large(tsd_t *tsd, cache_bin_t *tbin, szind_t binind,
jemalloc_internal_inlines_a.h
110 tcache_small_bin_get(tcache_t *tcache, szind_t binind) {
111 assert(binind < NBINS);
112 return &tcache->bins_small[binind];
116 tcache_large_bin_get(tcache_t *tcache, szind_t binind) {
117 assert(binind >= NBINS && binind < nhbins);
118 return &tcache->bins_large[binind - NBINS];
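The jemalloc_internal_inlines_a.h matches show the bin-index partition: indices below NBINS address bins_small directly, while indices in [NBINS, nhbins) address bins_large after subtracting NBINS. A tiny self-check of that arithmetic; the NBINS/NHBINS values below are made up for illustration, not jemalloc's real configuration:

    #include <assert.h>
    #include <stddef.h>

    enum { NBINS = 36, NHBINS = 44 };   /* illustrative sizes only */

    /* Map a large-bin index onto the bins_large[] slot it selects. */
    static size_t
    large_slot(size_t binind) {
        assert(binind >= NBINS && binind < NHBINS);
        return binind - NBINS;
    }

    int
    main(void) {
        for (size_t binind = NBINS; binind < NHBINS; binind++)
            assert(large_slot(binind) < NHBINS - NBINS);
        return 0;
    }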
arena_externs.h
29 size_t arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr);
48 cache_bin_t *tbin, szind_t binind, uint64_t prof_accumbytes);
/netbsd-current/external/bsd/jemalloc/include/jemalloc/internal/
tcache_inlines.h
43 UNUSED size_t size, szind_t binind, bool zero, bool slow_path) {
49 assert(binind < NBINS);
50 bin = tcache_small_bin_get(tcache, binind);
61 bin, binind, &tcache_hard_success);
73 usize = sz_index2size(binind);
80 arena_alloc_junk_small(ret, &bin_infos[binind],
88 arena_alloc_junk_small(ret, &bin_infos[binind], true);
105 szind_t binind, bool zero, bool slow_path) {
110 assert(binind >= NBINS && binind < nhbin
42 tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, UNUSED size_t size, szind_t binind, bool zero, bool slow_path) argument
[all...]
tcache_externs.h
33 cache_bin_t *tbin, szind_t binind, bool *tcache_success);
35 szind_t binind, unsigned rem);
36 void tcache_bin_flush_large(tsd_t *tsd, cache_bin_t *tbin, szind_t binind,
jemalloc_internal_inlines_a.h
110 tcache_small_bin_get(tcache_t *tcache, szind_t binind) {
111 assert(binind < NBINS);
112 return &tcache->bins_small[binind];
116 tcache_large_bin_get(tcache_t *tcache, szind_t binind) {
117 assert(binind >= NBINS && binind < nhbins);
118 return &tcache->bins_large[binind - NBINS];
arena_externs.h
29 size_t arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr);
48 cache_bin_t *tbin, szind_t binind, uint64_t prof_accumbytes);
/netbsd-current/external/bsd/jemalloc/dist/src/
tcache.c
41 szind_t binind = tcache->next_gc_bin; local
44 if (binind < NBINS) {
45 tbin = tcache_small_bin_get(tcache, binind);
47 tbin = tcache_large_bin_get(tcache, binind);
53 if (binind < NBINS) {
54 tcache_bin_flush_small(tsd, tcache, tbin, binind,
61 cache_bin_info_t *tbin_info = &tcache_bin_info[binind];
63 (tcache->lg_fill_div[binind] + 1)) >= 1) {
64 tcache->lg_fill_div[binind]++;
67 tcache_bin_flush_large(tsd, tbin, binind, tbi
88 tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache, cache_bin_t *tbin, szind_t binind, bool *tcache_success) argument
104 tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin, szind_t binind, unsigned rem) argument
190 tcache_bin_flush_large(tsd_t *tsd, cache_bin_t *tbin, szind_t binind, unsigned rem, tcache_t *tcache) argument
236 &arena->stats, binind, local
276 binind, tbin->tstats.nrequests); local
[all...]
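The tcache.c matches include the incremental GC pass: each event flushes the next bin and, for lightly used small bins, increments lg_fill_div[binind], so the next refill (ncached_max >> lg_fill_div) brings in half as many objects. A small sketch of that halving rule with made-up numbers:

    #include <stdio.h>

    int
    main(void) {
        unsigned ncached_max = 200;     /* illustrative bin capacity */
        unsigned lg_fill_div = 1;

        /*
         * Same guard as the snippet above: only shrink while the
         * resulting fill count would still be at least 1.
         */
        while ((ncached_max >> (lg_fill_div + 1)) >= 1) {
            lg_fill_div++;
            printf("refill count now %u\n", ncached_max >> lg_fill_div);
        }
        return 0;
    }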
arena.c
243 arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr) { argument
251 (uintptr_t)bin_infos[binind].reg_size == 0);
256 regind = div_compute(&arena_binind_div_info[binind], diff);
258 assert(regind < bin_infos[binind].nregs);
265 szind_t binind = extent_szind_get(slab); local
266 const bin_info_t *bin_info = &bin_infos[binind];
267 size_t regind = arena_slab_regind(slab, binind, ptr);
1118 arena_slab_alloc(tsdn_t *tsdn, arena_t *arena, szind_t binind, argument
1129 binind, &zero, &commit);
1133 true, binind,
1155 arena_bin_nonfull_slab_get(tsdn_t *tsdn, arena_t *arena, bin_t *bin, szind_t binind) argument
1198 arena_bin_malloc_hard(tsdn_t *tsdn, arena_t *arena, bin_t *bin, szind_t binind) argument
1253 arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache, cache_bin_t *tbin, szind_t binind, uint64_t prof_accumbytes) argument
1320 arena_malloc_small(tsdn_t *tsdn, arena_t *arena, szind_t binind, bool zero) argument
1473 szind_t binind = extent_szind_get(slab); local
1535 szind_t binind = extent_szind_get(slab); local
1567 szind_t binind = extent_szind_get(extent); local
[all...]
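arena_slab_regind() (the arena.c hit at line 243) turns a pointer's byte offset within its slab into a region index without a hardware divide, using a per-bin precomputed divisor (arena_binind_div_info[binind] and div_compute()). Below is a self-contained sketch of the multiply-by-reciprocal trick it relies on, assuming offsets are exact multiples of the region size; jemalloc's actual div_info_t differs in detail:

    #include <assert.h>
    #include <stdint.h>

    typedef struct {
        uint64_t magic;         /* ~ 2^32 / d, rounded up */
        uint32_t d;             /* region size being divided by */
    } div_model_t;

    static void
    div_model_init(div_model_t *info, uint32_t d) {
        info->d = d;
        info->magic = (((uint64_t)1 << 32) / d) + 1;
    }

    /* Exact for n that are multiples of d (i.e. region start offsets). */
    static uint32_t
    div_model_compute(const div_model_t *info, uint32_t n) {
        return (uint32_t)((info->magic * n) >> 32);
    }

    int
    main(void) {
        div_model_t info;
        div_model_init(&info, 48);      /* e.g. a 48-byte size class */
        for (uint32_t regind = 0; regind < 1000; regind++) {
            uint32_t diff = regind * 48;        /* offset of region start */
            assert(div_model_compute(&info, diff) == regind);
        }
        return 0;
    }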

Completed in 275 milliseconds