Searched refs:tcache (Results 1 - 19 of 19) sorted by relevance

/freebsd-13-stable/contrib/jemalloc/src/
tcache.c
17 static unsigned stack_nelms; /* Total stack elms per tcache. */
41 tcache_event_hard(tsd_t *tsd, tcache_t *tcache) { argument
42 szind_t binind = tcache->next_gc_bin;
46 tbin = tcache_small_bin_get(tcache, binind);
48 tbin = tcache_large_bin_get(tcache, binind);
55 tcache_bin_flush_small(tsd, tcache, tbin, binind,
64 (tcache->lg_fill_div[binind] + 1)) >= 1) {
65 tcache->lg_fill_div[binind]++;
69 - tbin->low_water + (tbin->low_water >> 2), tcache);
76 if (binind < SC_NBINS && tcache
89 tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache, cache_bin_t *tbin, szind_t binind, bool *tcache_success) argument
134 tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin, szind_t binind, unsigned rem) argument
230 tcache_bin_flush_large(tsd_t *tsd, cache_bin_t *tbin, szind_t binind, unsigned rem, tcache_t *tcache) argument
341 tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) argument
362 tcache_arena_dissociate(tsdn_t *tsdn, tcache_t *tcache) argument
389 tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) argument
410 tcache_init(tsd_t *tsd, tcache_t *tcache, void *avail_stack) argument
445 tcache_t *tcache = tsd_tcachep_get_unsafe(tsd); local
471 tcache_arena_associate(tsd_tsdn(tsd), tcache, arena); local
476 tcache_arena_associate(tsd_tsdn(tsd), tcache, arena); local
487 tcache_t *tcache; local
506 tcache_arena_associate(tsd_tsdn(tsd), tcache, arena_ichoose(tsd, NULL)); local
512 tcache_flush_cache(tsd_t *tsd, tcache_t *tcache) argument
546 tcache_destroy(tsd_t *tsd, tcache_t *tcache, bool tsd_tcache) argument
549 tcache_arena_dissociate(tsd_tsdn(tsd), tcache); local
559 idalloctm(tsd_tsdn(tsd), tcache, NULL, NULL, true, true); local
583 tcache_t *tcache = tsd_tcachep_get(tsd); local
601 tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) argument
661 tcache_t *tcache = tcache_create_explicit(tsd); local
695 tcache_t *tcache = elm->tcache; local
711 tcache_t *tcache = tcaches_elm_remove(tsd, &tcaches[ind], true); local
723 tcache_t *tcache = tcaches_elm_remove(tsd, elm, false); local
[all...]
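
The flush call above passes tbin->ncached - tbin->low_water + (tbin->low_water >> 2) as the number of objects to keep, i.e. roughly 3/4 of the objects that sat below the bin's low-water mark get flushed, and lg_fill_div is bumped so the next refill batch is halved. A minimal sketch of that sizing math, using hypothetical stand-ins for the cache_bin_t fields rather than jemalloc's types:

    /* Sketch of the GC sizing visible in tcache_event_hard; all names
     * are hypothetical stand-ins for cache_bin_t fields. */
    static unsigned
    remain_after_flush(unsigned ncached, unsigned low_water) {
            /* Keep this many objects; flush roughly 3/4 of those that
             * sat below the low-water mark since the last GC pass. */
            return (ncached - low_water + (low_water >> 2));
    }

    static unsigned
    next_fill_count(unsigned ncached_max, unsigned *lg_fill_div) {
            /* Halve the refill batch, but never let it drop below 1. */
            if ((ncached_max >> (*lg_fill_div + 1)) >= 1) {
                    (*lg_fill_div)++;
            }
            return (ncached_max >> *lg_fill_div);
    }
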
hook.c
116 * reentrancy is a fairly heavyweight mode (disabling tcache,
133 tcache_t *tcache = tsdn_tcachep_get(tsdn); local
134 if (tcache != NULL) {
135 return &tcache->in_hook;
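
The in_hook flag returned above lets hook invocation detect reentrancy with a single per-thread bool instead of entering the heavyweight reentrancy mode the comment mentions. A generic sketch of that guard pattern (C11 thread-local storage; names hypothetical, not jemalloc's):

    #include <stdbool.h>

    static _Thread_local bool in_hook = false;

    static void
    call_hook(void (*hook)(void)) {
            if (in_hook) {
                    return;         /* already inside a hook: skip */
            }
            in_hook = true;
            hook();
            in_hook = false;
    }
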
jemalloc.c
660 * individual threads do not lock when recording tcache stats
668 tcache_t *tcache; local
671 ql_foreach(tcache, &arena->tcache_ql, link) {
672 tcache_stats_merge(tsdn, tcache, arena);
1294 CONF_HANDLE_BOOL(opt_tcache, "tcache")
1918 tcache_t *tcache; local
1921 /* Fill in the tcache. */
1924 /* Getting tcache ptr unconditionally. */
1925 tcache = tsd_tcachep_get(tsd);
1926 assert(tcache
2339 tcache_t *tcache = tsd_tcachep_get(tsd); local
2560 ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) argument
2590 idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false, local
2593 idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false, local
2599 isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) argument
2648 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, false); local
2650 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, true); local
2770 tcache_t *tcache; local
2797 tcache_t *tcache = tsd_tcachep_get(tsd); local
3134 irallocx_prof_sample(tsdn_t *tsdn, void *old_ptr, size_t old_usize, size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena, prof_tctx_t *tctx, hook_ralloc_args_t *hook_args) argument
3159 irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size, size_t alignment, size_t *usize, bool zero, tcache_t *tcache, arena_t *arena, alloc_ctx_t *alloc_ctx, hook_ralloc_args_t *hook_args) argument
3209 tcache_t *tcache; local
3482 tcache_t *tcache; local
3543 tcache_t *tcache; local
[all...]
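
The CONF_HANDLE_BOOL(opt_tcache, "tcache") line means the cache can be turned off at startup with MALLOC_CONF=tcache:false, and the current value surfaces as the "opt.tcache" mallctl (see the ctl.c table below). A minimal reader, assuming FreeBSD's <malloc_np.h> declaration of mallctl():

    #include <malloc_np.h>
    #include <stdbool.h>
    #include <stdio.h>

    int
    main(void) {
            bool tc;
            size_t sz = sizeof(tc);

            if (mallctl("opt.tcache", &tc, &sz, NULL, 0) == 0) {
                    printf("tcache: %s\n", tc ? "enabled" : "disabled");
            }
            return (0);
    }
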
tsd.c
77 ql_foreach(tsd_list, &tsd_nominal_tsds, TSD_MANGLE(tcache).tsd_link) {
91 ql_elm_new(tsd, TSD_MANGLE(tcache).tsd_link);
93 ql_tail_insert(&tsd_nominal_tsds, tsd, TSD_MANGLE(tcache).tsd_link);
102 ql_remove(&tsd_nominal_tsds, tsd, TSD_MANGLE(tcache).tsd_link);
115 ql_foreach(remote_tsd, &tsd_nominal_tsds, TSD_MANGLE(tcache).tsd_link) {
221 * We initialize the rtree context first (before the tcache), since the
222 * tcache initialization depends on it.
arena.c
1381 arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache, argument
1395 tcache->lg_fill_div[binind]); i < nfill; i += cnt) {
1533 bool zero, tcache_t *tcache) {
1541 zero, tcache, true);
1597 arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache, argument
1613 if (bumped_usize <= tcache_maxclass && tcache != NULL) {
1614 tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr, local
1787 size_t alignment, bool zero, tcache_t *tcache) {
1790 zero, tcache, true);
1796 return ipalloct(tsdn, usize, alignment, zero, tcache, aren
1532 arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, bool zero, tcache_t *tcache) argument
1786 arena_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, bool zero, tcache_t *tcache) argument
1800 arena_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize, size_t size, size_t alignment, bool zero, tcache_t *tcache, hook_ralloc_args_t *hook_args) argument
2269 tcache_t *tcache = tcache_get(tsdn_tsd(tsdn)); local
[all...]
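
arena_tcache_fill_small above tops a bin up with ncached_max >> lg_fill_div objects, pulled from the arena a batch (cnt) at a time. A self-contained sketch of that loop shape, with grab() standing in for taking a batch off an arena slab (all names hypothetical):

    #include <stddef.h>

    static size_t
    refill(void **avail, size_t ncached_max, unsigned lg_fill_div,
        size_t (*grab)(void **dst, size_t want)) {
            size_t nfill = ncached_max >> lg_fill_div;
            size_t i, cnt;

            for (i = 0; i < nfill; i += cnt) {
                    cnt = grab(&avail[i], nfill - i);
                    if (cnt == 0) {
                            break;  /* arena exhausted; partial fill is OK */
                    }
            }
            return (i);             /* number of objects now cached */
    }
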
large.c
275 size_t alignment, bool zero, tcache_t *tcache,
313 isdalloct(tsdn, extent_addr_get(extent), oldusize, tcache, NULL, true);
274 large_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t usize, size_t alignment, bool zero, tcache_t *tcache, hook_ralloc_args_t *hook_args) argument
ctl.c
289 {NAME("tcache"), CHILD(named, thread_tcache)},
328 {NAME("tcache"), CTL(opt_tcache)},
630 {NAME("tcache"), CHILD(named, tcache)},
1845 /* Set new arena/tcache associations. */
2947 /* tcache bin mutex */
3299 * (1) flush tcache: mallctl("thread.tcache.flush", ...)
3304 * (6) disable tcache: mallctl("thread.tcache
[all...]
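
Steps (1) and (6) in the comment above correspond to real mallctl calls; on FreeBSD the non-portable jemalloc API is declared in <malloc_np.h>. A minimal example, assuming the documented "thread.tcache.flush" and "thread.tcache.enabled" controls:

    #include <malloc_np.h>
    #include <stdbool.h>

    static void
    quiesce_thread_cache(void) {
            bool enabled = false;

            /* (1) flush the calling thread's tcache */
            mallctl("thread.tcache.flush", NULL, NULL, NULL, 0);
            /* (6) disable the calling thread's tcache */
            mallctl("thread.tcache.enabled", NULL, NULL, &enabled,
                sizeof(enabled));
    }
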
/freebsd-13-stable/contrib/jemalloc/include/jemalloc/internal/
tcache_externs.h
10 * Number of tcache bins. There are SC_NBINS small-object bins, plus 0 or more
19 * Explicit tcaches, managed via the tcache.{create,flush,destroy} mallctls and
24 * explicit tcache is created without a disproportionate impact on memory usage.
29 void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
30 void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
32 void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
35 unsigned rem, tcache_t *tcache);
36 void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
40 void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
45 void tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_
[all...]
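
The explicit tcaches described above are driven entirely through the tcache.{create,flush,destroy} mallctls plus the MALLOCX_TCACHE() allocation flag. A minimal lifecycle, assuming the documented jemalloc API from <malloc_np.h>:

    #include <malloc_np.h>
    #include <stddef.h>

    static void
    explicit_tcache_demo(void) {
            unsigned tci;
            size_t sz = sizeof(tci);
            void *p;

            if (mallctl("tcache.create", &tci, &sz, NULL, 0) != 0) {
                    return;
            }
            p = mallocx(4096, MALLOCX_TCACHE(tci));  /* alloc through it */
            if (p != NULL) {
                    dallocx(p, MALLOCX_TCACHE(tci)); /* free through it */
            }
            mallctl("tcache.flush", NULL, NULL, &tci, sizeof(tci));
            mallctl("tcache.destroy", NULL, NULL, &tci, sizeof(tci));
    }
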
tcache_inlines.h
31 tcache_event(tsd_t *tsd, tcache_t *tcache) { argument
36 if (unlikely(ticker_tick(&tcache->gc_ticker))) {
37 tcache_event_hard(tsd, tcache);
42 tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, argument
50 bin = tcache_small_bin_get(tcache, binind);
60 ret = tcache_alloc_small_hard(tsd_tsdn(tsd), arena, tcache,
97 tcache->prof_accumbytes += usize;
99 tcache_event(tsd, tcache);
104 tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
111 bin = tcache_large_bin_get(tcache, binin
[all...]
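
The inline path above is: look up the bin, pop a cached object if one is available, otherwise fall back to tcache_alloc_small_hard, then let ticker_tick decide whether the GC event fires. A generic sketch of that shape (names and the period are hypothetical, not jemalloc's):

    #include <stddef.h>

    #define GC_PERIOD 100           /* hypothetical ticker period */

    typedef struct {
            void    **avail;        /* stack of cached objects */
            int     ncached;        /* objects currently on the stack */
            int     ticks;          /* countdown to the next GC event */
    } bin_sketch_t;

    static void *
    bin_alloc(bin_sketch_t *bin, void *(*hard_path)(bin_sketch_t *)) {
            void *ret;

            if (bin->ncached > 0) {
                    ret = bin->avail[--bin->ncached];  /* hit: pop */
            } else {
                    ret = hard_path(bin);   /* miss: refill from arena */
            }
            if (--bin->ticks <= 0) {
                    bin->ticks = GC_PERIOD; /* event_hard would run here */
            }
            return (ret);
    }
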
jemalloc_internal_inlines_c.h
42 iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache, argument
46 assert(!is_internal || tcache == NULL);
53 ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
68 tcache_t *tcache, bool is_internal, arena_t *arena) {
73 assert(!is_internal || tcache == NULL);
78 ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
88 tcache_t *tcache, arena_t *arena) {
89 return ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena);
104 idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx, argument
107 assert(!is_internal || tcache
67 ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, tcache_t *tcache, bool is_internal, arena_t *arena) argument
87 ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) argument
127 isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache, alloc_ctx_t *alloc_ctx, bool slow_path) argument
135 iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena, hook_ralloc_args_t *hook_args) argument
174 iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena, hook_ralloc_args_t *hook_args) argument
[all...]
jemalloc_internal_inlines_b.h
25 tcache_t *tcache = tcache_get(tsd); local
26 if (tcache->arena != NULL) {
28 assert(tcache->arena ==
30 if (tcache->arena != ret) {
32 tcache, ret); local
35 tcache_arena_associate(tsd_tsdn(tsd), tcache, local
tcache_structs.h
25 * During tcache initialization, the avail pointer in each element of
43 * tcache definition. This enables arenas to aggregate stats across
44 * tcaches without having a tcache dependency.
48 /* The arena this tcache is associated with. */
62 /* Linkage for list of available (previously used) explicit tcache IDs. */
65 tcache_t *tcache; member in union:tcaches_s::__anon914
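
The union member above is what lets explicit tcache IDs be recycled: a tcaches element either holds a live tcache pointer or links into the free list of available IDs mentioned a few lines earlier. A sketch of that slot layout (surrounding types hypothetical):

    typedef struct tcaches_sketch_s tcaches_sketch_t;
    struct tcaches_sketch_s {
            union {
                    void                    *tcache; /* slot in use */
                    tcaches_sketch_t        *next;   /* slot free: next ID */
            };
    };
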
arena_inlines_a.h
47 /* Set new arena/tcache associations. */
49 tcache_t *tcache = tcache_get(tsd); local
50 if (tcache != NULL) {
51 tcache_arena_reassociate(tsd_tsdn(tsd), tcache, local
arena_inlines_b.h
160 tcache_t *tcache, bool slow_path) {
161 assert(!tsdn_null(tsdn) || tcache == NULL);
163 if (likely(tcache != NULL)) {
166 tcache, size, ind, zero, slow_path);
170 tcache, size, ind, zero, slow_path);
270 arena_dalloc_large(tsdn_t *tsdn, void *ptr, tcache_t *tcache, szind_t szind,
274 arena_dalloc_promoted(tsdn, ptr, tcache, slow_path);
276 tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr, szind,
286 arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
288 assert(!tsdn_null(tsdn) || tcache
159 arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero, tcache_t *tcache, bool slow_path) argument
[all...]
jemalloc_internal_inlines_a.h
112 tcache_small_bin_get(tcache_t *tcache, szind_t binind) {
114 return &tcache->bins_small[binind];
118 tcache_large_bin_get(tcache_t *tcache, szind_t binind) {
120 return &tcache->bins_large[binind - SC_NBINS];
126 * The thread-specific auto tcache might be unavailable: 1) during tcache
127 * initialization, or 2) if disabled through the thread.tcache.enabled mallctl
131 /* Associated arena == NULL implies tcache init in progress. */
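
The two getters above split one logical bin index across two arrays: indices below SC_NBINS land in bins_small, everything else in bins_large offset by SC_NBINS. A compact sketch with hypothetical sizes and types:

    #define NBINS_SMALL 36          /* hypothetical stand-in for SC_NBINS */
    #define NBINS_LARGE 8           /* hypothetical nhbins - SC_NBINS */

    typedef struct { void **avail; } bin_sketch_t;

    typedef struct {
            bin_sketch_t bins_small[NBINS_SMALL];
            bin_sketch_t bins_large[NBINS_LARGE];
    } tcache_sketch_t;

    static bin_sketch_t *
    bin_get(tcache_sketch_t *t, unsigned binind) {
            return (binind < NBINS_SMALL ? &t->bins_small[binind] :
                &t->bins_large[binind - NBINS_SMALL]);
    }
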
arena_externs.h
51 void arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
62 size_t alignment, bool zero, tcache_t *tcache);
64 void arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
72 size_t size, size_t alignment, bool zero, tcache_t *tcache,
large_externs.h
12 size_t alignment, bool zero, tcache_t *tcache,
tsd.h
18 * --- data accessed on tcache fast path: state, rtree_ctx, stats, prof ---
25 * t: tcache
26 * --- data not accessed on tcache fast path: arena-related fields ---
34 * In particular, tcache and rtree_ctx rely on hot CPU cache to be effective.
44 * Note: the entire tcache is embedded into TSD and spans multiple cachelines.
46 * The last 3 members (i, a and o) before tcache aren't really needed on tcache
47 * fast path. However, we have a number of unused tcache bins and witnesses
48 * (never touched unless config_debug) at the end of tcache, so we place them
79 O(tcache, tcache_
[all...]
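
The layout notes above order TSD so that everything touched on the malloc/free fast path shares the leading cachelines, with arena-management fields pushed behind it. The same idea in a generic struct (all fields hypothetical):

    typedef struct {
            /* hot: read on every fast-path allocation */
            int     state;
            void    *rtree_ctx;
            /* cold: arena bookkeeping, touched rarely */
            void    *arena;
            void    *iarena;
    } tsd_sketch_t;
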
/freebsd-13-stable/lib/libc/stdlib/jemalloc/
Makefile.inc
8 prng.c prof.c rtree.c safety_check.c sc.c stats.c sz.c tcache.c \

Completed in 142 milliseconds