Searched refs:tcache (Results 1 - 25 of 32) sorted by relevance


/netbsd-current/external/bsd/jemalloc/dist/src/
H A D tcache.c
16 static unsigned stack_nelms; /* Total stack elms per tcache. */
40 tcache_event_hard(tsd_t *tsd, tcache_t *tcache) { argument
41 szind_t binind = tcache->next_gc_bin;
45 tbin = tcache_small_bin_get(tcache, binind);
47 tbin = tcache_large_bin_get(tcache, binind);
54 tcache_bin_flush_small(tsd, tcache, tbin, binind,
63 (tcache->lg_fill_div[binind] + 1)) >= 1) {
64 tcache->lg_fill_div[binind]++;
68 - tbin->low_water + (tbin->low_water >> 2), tcache);
75 if (binind < NBINS && tcache
88 tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache, cache_bin_t *tbin, szind_t binind, bool *tcache_success) argument
104 tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin, szind_t binind, unsigned rem) argument
190 tcache_bin_flush_large(tsd_t *tsd, cache_bin_t *tbin, szind_t binind, unsigned rem, tcache_t *tcache) argument
289 tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) argument
310 tcache_arena_dissociate(tsdn_t *tsdn, tcache_t *tcache) argument
337 tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) argument
358 tcache_init(tsd_t *tsd, tcache_t *tcache, void *avail_stack) argument
393 tcache_t *tcache = tsd_tcachep_get_unsafe(tsd); local
419 tcache_arena_associate(tsd_tsdn(tsd), tcache, arena); local
424 tcache_arena_associate(tsd_tsdn(tsd), tcache, arena); local
435 tcache_t *tcache; local
454 tcache_arena_associate(tsd_tsdn(tsd), tcache, arena_ichoose(tsd, NULL)); local
460 tcache_flush_cache(tsd_t *tsd, tcache_t *tcache) argument
494 tcache_destroy(tsd_t *tsd, tcache_t *tcache, bool tsd_tcache) argument
496 tcache_arena_dissociate(tsd_tsdn(tsd), tcache); local
506 idalloctm(tsd_tsdn(tsd), tcache, NULL, NULL, true, true); local
513 tcache_t *tcache = tsd_tcachep_get(tsd); local
531 tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) argument
591 tcache_t *tcache = tcache_create_explicit(tsd); local
625 tcache_t *tcache = elm->tcache; local
633 tcache_t *tcache = tcaches_elm_remove(tsd, &tcaches[ind]); local
644 tcache_t *tcache = tcaches_elm_remove(tsd, elm); local
[all...]
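
The tcache.c hits above (tcache_event_hard(), source lines 40-68) implement an incremental GC: each event visits one bin in round-robin order, flushes cached objects above the bin's low-water mark, and tunes how aggressively that bin is refilled. A schematic restatement under toy types -- the struct layout and names below are illustrative, not jemalloc's, and assume low_water tracks the minimum fill since the last visit:

    #define TOY_NBINS 4

    struct toy_bin {
        unsigned ncached;    /* objects currently cached in this bin */
        unsigned low_water;  /* minimum ncached since the last GC visit */
    };

    struct toy_tcache {
        struct toy_bin bins[TOY_NBINS];
        unsigned lg_fill_div[TOY_NBINS]; /* refill count = max >> lg_fill_div */
        unsigned next_gc_bin;            /* round-robin cursor */
    };

    static void
    toy_gc_event(struct toy_tcache *tc)
    {
        struct toy_bin *bin = &tc->bins[tc->next_gc_bin];

        if (bin->low_water > 0) {
            /*
             * The bin held unused objects all cycle: flush 3/4 of the
             * low-water mark back to the arena...
             */
            bin->ncached -= bin->low_water - (bin->low_water >> 2);
            /* ...and halve future refills by bumping the divisor. */
            tc->lg_fill_div[tc->next_gc_bin]++;
        } else if (tc->lg_fill_div[tc->next_gc_bin] > 1) {
            /* The bin ran dry: refill more eagerly next time. */
            tc->lg_fill_div[tc->next_gc_bin]--;
        }
        bin->low_water = bin->ncached;
        tc->next_gc_bin = (tc->next_gc_bin + 1) % TOY_NBINS;
    }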
H A D jemalloc.c
674 * individual threads do not lock when recording tcache stats
682 tcache_t *tcache; local
685 ql_foreach(tcache, &arena->tcache_ql, link) {
686 tcache_stats_merge(tsdn, tcache, arena);
1175 CONF_HANDLE_BOOL(opt_tcache, "tcache")
1706 tcache_t *tcache; local
1709 /* Fill in the tcache. */
1712 /* Getting tcache ptr unconditionally. */
1713 tcache = tsd_tcachep_get(tsd);
1714 assert(tcache
2224 ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) argument
2254 idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false, local
2257 idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false, local
2263 isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) argument
2312 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, false); local
2314 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, true); local
2423 tcache_t *tcache; local
2630 irallocx_prof_sample(tsdn_t *tsdn, void *old_ptr, size_t old_usize, size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena, prof_tctx_t *tctx) argument
2654 irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size, size_t alignment, size_t *usize, bool zero, tcache_t *tcache, arena_t *arena, alloc_ctx_t *alloc_ctx) argument
2704 tcache_t *tcache; local
2965 tcache_t *tcache; local
3027 tcache_t *tcache; local
[all...]
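
The CONF_HANDLE_BOOL(opt_tcache, "tcache") hit at jemalloc.c:1175 is where the "tcache" option is parsed. A minimal sketch of toggling and inspecting that option from a consumer, assuming a jemalloc build whose public symbols are unprefixed (otherwise malloc_conf and the functions below carry a je_ prefix):

    #include <stdbool.h>
    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    /* Compile-time default: ask jemalloc to disable thread caching. */
    const char *malloc_conf = "tcache:false";

    int
    main(void)
    {
        bool tc;
        size_t sz = sizeof(tc);

        /* Read the resolved option back through the mallctl namespace. */
        if (mallctl("opt.tcache", &tc, &sz, NULL, 0) == 0)
            printf("tcache enabled: %s\n", tc ? "true" : "false");
        return 0;
    }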
H A D arena.c
1253 arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache, argument
1266 tcache->lg_fill_div[binind]); i < nfill; i++) {
1393 bool zero, tcache_t *tcache) {
1400 zero, tcache, true);
1452 arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache, argument
1460 tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr, local
1622 size_t alignment, bool zero, tcache_t *tcache) {
1625 zero, tcache, true);
1631 return ipalloct(tsdn, usize, alignment, zero, tcache, arena);
1636 size_t size, size_t alignment, bool zero, tcache_t *tcache) {
1392 arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, bool zero, tcache_t *tcache) argument
1621 arena_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, bool zero, tcache_t *tcache) argument
1635 arena_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize, size_t size, size_t alignment, bool zero, tcache_t *tcache) argument
2019 tcache_t *tcache = tcache_get(tsdn_tsd(tsdn)); local
[all...]
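
arena_tcache_fill_small() (arena.c:1253) is the refill half of that loop: the number of objects it pushes into an empty bin is scaled down by the same lg_fill_div the GC adjusts, so refill aggressiveness is tuned per bin without any extra state. Schematically (toy names):

    /* e.g. ncached_max = 200, lg_fill_div = 2 -> refill with 50 objects */
    static unsigned
    toy_fill_count(unsigned ncached_max, unsigned lg_fill_div)
    {
        return ncached_max >> lg_fill_div;
    }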
/netbsd-current/external/bsd/jemalloc/dist/include/jemalloc/internal/
H A D tcache_externs.h
12 * Number of tcache bins. There are NBINS small-object bins, plus 0 or more
21 * Explicit tcaches, managed via the tcache.{create,flush,destroy} mallctls and
26 * explicit tcache is created without a disproportionate impact on memory usage.
31 void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
32 void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
34 void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
37 unsigned rem, tcache_t *tcache);
38 void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
42 void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
47 void tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_
[all...]
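
The header comment above describes explicit tcaches managed through the tcache.{create,flush,destroy} mallctls. A minimal caller, assuming the library is linked in and exposes the standard <jemalloc/jemalloc.h> API:

    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    int
    main(void)
    {
        unsigned tci;
        size_t sz = sizeof(tci);

        /* Create an explicit tcache; its ID is returned via the mallctl. */
        if (mallctl("tcache.create", &tci, &sz, NULL, 0) != 0) {
            fprintf(stderr, "tcache.create failed\n");
            return 1;
        }

        /* Route allocation and deallocation through that tcache. */
        void *p = mallocx(64, MALLOCX_TCACHE(tci));
        dallocx(p, MALLOCX_TCACHE(tci));

        /* Flush its cached objects, then retire the ID. */
        mallctl("tcache.flush", NULL, NULL, &tci, sizeof(tci));
        mallctl("tcache.destroy", NULL, NULL, &tci, sizeof(tci));
        return 0;
    }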
H A D tcache_inlines.h
31 tcache_event(tsd_t *tsd, tcache_t *tcache) { argument
36 if (unlikely(ticker_tick(&tcache->gc_ticker))) {
37 tcache_event_hard(tsd, tcache);
42 tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, argument
50 bin = tcache_small_bin_get(tcache, binind);
60 ret = tcache_alloc_small_hard(tsd_tsdn(tsd), arena, tcache,
97 tcache->prof_accumbytes += usize;
99 tcache_event(tsd, tcache);
104 tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
111 bin = tcache_large_bin_get(tcache, binin
[all...]
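
tcache_inlines.h shows the allocation fast path: tcache_alloc_small() pops from a per-thread bin and only calls tcache_alloc_small_hard() on a miss. The shape of that path, restated with toy types (illustrative; jemalloc's actual cache_bin layout differs):

    #include <stddef.h>

    struct toy_cache_bin {
        void **avail;     /* LIFO stack of cached object pointers */
        unsigned ncached; /* number of valid entries in avail */
    };

    /* Miss path: stub standing in for the arena refill ("hard") path. */
    static void *
    toy_alloc_hard(struct toy_cache_bin *bin, unsigned binind)
    {
        (void)bin; (void)binind;
        return NULL;
    }

    static void *
    toy_cache_alloc(struct toy_cache_bin *bin, unsigned binind)
    {
        if (bin->ncached > 0) {
            /* Fast path: pop without locks or atomics. */
            return bin->avail[--bin->ncached];
        }
        return toy_alloc_hard(bin, binind);
    }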
H A D jemalloc_internal_inlines_c.h
41 iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache, argument
46 assert(!is_internal || tcache == NULL);
53 ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
68 tcache_t *tcache, bool is_internal, arena_t *arena) {
73 assert(!is_internal || tcache == NULL);
78 ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
88 tcache_t *tcache, arena_t *arena) {
89 return ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena);
104 idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx, argument
107 assert(!is_internal || tcache
67 ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, tcache_t *tcache, bool is_internal, arena_t *arena) argument
87 ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) argument
127 isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache, alloc_ctx_t *alloc_ctx, bool slow_path) argument
135 iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t extra, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) argument
173 iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) argument
[all...]
H A D jemalloc_internal_inlines_b.h
25 tcache_t *tcache = tcache_get(tsd); local
26 if (tcache->arena != NULL) {
28 assert(tcache->arena ==
30 if (tcache->arena != ret) {
32 tcache, ret); local
35 tcache_arena_associate(tsd_tsdn(tsd), tcache, local
H A D tcache_structs.h
21 * During tcache initialization, the avail pointer in each element of
34 * tcache definition. This enables arenas to aggregate stats across
35 * tcaches without having a tcache dependency.
39 /* The arena this tcache is associated with. */
53 /* Linkage for list of available (previously used) explicit tcache IDs. */
56 tcache_t *tcache; member in union:tcaches_s::__anon29
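The tcaches_s union above is how explicit tcache IDs are recycled: a slot either holds a live tcache pointer or, when free, links to the next available slot. A minimal sketch of that scheme under hypothetical names:

    #include <stddef.h>

    typedef struct toy_slot_s toy_slot_t;
    struct toy_slot_s {
        union {
            void *tcache;     /* slot in use: the explicit tcache */
            toy_slot_t *next; /* slot free: next available slot */
        };
    };

    static toy_slot_t slots[16];
    static toy_slot_t *avail_head; /* LIFO of previously used IDs */

    static void
    toy_slots_init(void)
    {
        /* Chain every slot onto the free list. */
        for (size_t i = 0; i + 1 < 16; i++)
            slots[i].next = &slots[i + 1];
        slots[15].next = NULL;
        avail_head = &slots[0];
    }

    static unsigned
    toy_slot_alloc(void *tcache)
    {
        toy_slot_t *elm = avail_head; /* assumes the list is non-empty */
        avail_head = elm->next;
        elm->tcache = tcache;
        return (unsigned)(elm - slots);
    }

    static void
    toy_slot_free(unsigned id)
    {
        slots[id].next = avail_head;
        avail_head = &slots[id];
    }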
H A D arena_inlines_a.h
48 /* Set new arena/tcache associations. */
50 tcache_t *tcache = tcache_get(tsd); local
51 if (tcache != NULL) {
52 tcache_arena_reassociate(tsd_tsdn(tsd), tcache, local
H A D arena_inlines_b.h
88 tcache_t *tcache, bool slow_path) {
89 assert(!tsdn_null(tsdn) || tcache == NULL);
92 if (likely(tcache != NULL)) {
95 tcache, size, ind, zero, slow_path);
99 tcache, size, ind, zero, slow_path);
190 arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
192 assert(!tsdn_null(tsdn) || tcache == NULL);
195 if (unlikely(tcache == NULL)) {
224 tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
229 arena_dalloc_promoted(tsdn, ptr, tcache,
87 arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero, tcache_t *tcache, bool slow_path) argument
[all...]
H A D jemalloc_internal_inlines_a.h
110 tcache_small_bin_get(tcache_t *tcache, szind_t binind) {
112 return &tcache->bins_small[binind];
116 tcache_large_bin_get(tcache_t *tcache, szind_t binind) {
118 return &tcache->bins_large[binind - NBINS];
124 * Thread specific auto tcache might be unavailable if: 1) during tcache
125 * initialization, or 2) disabled through thread.tcache.enabled mallctl
129 /* Associated arena == NULL implies tcache init in progress. */
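tcache_small_bin_get()/tcache_large_bin_get() above split one szind_t index space across two arrays: indices below NBINS address bins_small directly, larger indices address bins_large after subtracting NBINS. The same split in miniature (TOY_SMALL_NBINS stands in for the size-class-derived NBINS):

    #define TOY_SMALL_NBINS 36 /* stand-in; the real NBINS is derived from size classes */

    static void *
    toy_bin_get(void **bins_small, void **bins_large, unsigned binind)
    {
        if (binind < TOY_SMALL_NBINS)
            return bins_small[binind];
        return bins_large[binind - TOY_SMALL_NBINS];
    }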
H A D large_externs.h
10 size_t alignment, bool zero, tcache_t *tcache);
H A D arena_externs.h
47 void arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
58 size_t alignment, bool zero, tcache_t *tcache);
60 JEMALLOC_NORETURN void arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
68 size_t size, size_t alignment, bool zero, tcache_t *tcache);
/netbsd-current/external/bsd/jemalloc/include/jemalloc/internal/
H A D tcache_externs.h
12 * Number of tcache bins. There are NBINS small-object bins, plus 0 or more
21 * Explicit tcaches, managed via the tcache.{create,flush,destroy} mallctls and
26 * explicit tcache is created without a disproportionate impact on memory usage.
31 void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
32 void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
34 void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
37 unsigned rem, tcache_t *tcache);
38 void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
42 void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
47 void tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_
[all...]
H A D tcache_inlines.h
31 tcache_event(tsd_t *tsd, tcache_t *tcache) { argument
36 if (unlikely(ticker_tick(&tcache->gc_ticker))) {
37 tcache_event_hard(tsd, tcache);
42 tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, argument
50 bin = tcache_small_bin_get(tcache, binind);
60 ret = tcache_alloc_small_hard(tsd_tsdn(tsd), arena, tcache,
97 tcache->prof_accumbytes += usize;
99 tcache_event(tsd, tcache);
104 tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
111 bin = tcache_large_bin_get(tcache, binin
[all...]
H A D jemalloc_internal_inlines_c.h
41 iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache, argument
46 assert(!is_internal || tcache == NULL);
53 ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
68 tcache_t *tcache, bool is_internal, arena_t *arena) {
73 assert(!is_internal || tcache == NULL);
78 ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
88 tcache_t *tcache, arena_t *arena) {
89 return ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena);
104 idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx, argument
107 assert(!is_internal || tcache
67 ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, tcache_t *tcache, bool is_internal, arena_t *arena) argument
87 ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) argument
127 isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache, alloc_ctx_t *alloc_ctx, bool slow_path) argument
135 iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t extra, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) argument
173 iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) argument
[all...]
H A D jemalloc_internal_inlines_b.h
25 tcache_t *tcache = tcache_get(tsd); local
26 if (tcache->arena != NULL) {
28 assert(tcache->arena ==
30 if (tcache->arena != ret) {
32 tcache, ret); local
35 tcache_arena_associate(tsd_tsdn(tsd), tcache, local
H A D tcache_structs.h
21 * During tcache initialization, the avail pointer in each element of
34 * tcache definition. This enables arenas to aggregate stats across
35 * tcaches without having a tcache dependency.
39 /* The arena this tcache is associated with. */
53 /* Linkage for list of available (previously used) explicit tcache IDs. */
56 tcache_t *tcache; member in union:tcaches_s::__anon82
H A D arena_inlines_a.h
48 /* Set new arena/tcache associations. */
50 tcache_t *tcache = tcache_get(tsd); local
51 if (tcache != NULL) {
52 tcache_arena_reassociate(tsd_tsdn(tsd), tcache, local
H A D arena_inlines_b.h
88 tcache_t *tcache, bool slow_path) {
89 assert(!tsdn_null(tsdn) || tcache == NULL);
92 if (likely(tcache != NULL)) {
95 tcache, size, ind, zero, slow_path);
99 tcache, size, ind, zero, slow_path);
190 arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
192 assert(!tsdn_null(tsdn) || tcache == NULL);
195 if (unlikely(tcache == NULL)) {
224 tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
229 arena_dalloc_promoted(tsdn, ptr, tcache,
87 arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero, tcache_t *tcache, bool slow_path) argument
[all...]
H A D jemalloc_internal_inlines_a.h
110 tcache_small_bin_get(tcache_t *tcache, szind_t binind) {
112 return &tcache->bins_small[binind];
116 tcache_large_bin_get(tcache_t *tcache, szind_t binind) {
118 return &tcache->bins_large[binind - NBINS];
124 * Thread specific auto tcache might be unavailable if: 1) during tcache
125 * initialization, or 2) disabled through thread.tcache.enabled mallctl
129 /* Associated arena == NULL implies tcache init in progress. */
H A D large_externs.h
10 size_t alignment, bool zero, tcache_t *tcache);
H A D arena_externs.h
47 void arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
58 size_t alignment, bool zero, tcache_t *tcache);
60 JEMALLOC_NORETURN void arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
68 size_t size, size_t alignment, bool zero, tcache_t *tcache);
/netbsd-current/external/bsd/jemalloc/lib/
H A D Makefile.inc
32 tcache.c \
54 COPTS.tcache.c+=-Wno-error=stack-protector
/netbsd-current/external/gpl3/autoconf/dist/bin/
H A D autom4te.in
64 my $tcache;
386 $tcache = "$cache/traces.";
459 unlink ($tcache . $req->id . "t");
469 . " @M4_DEBUGFILE@=" . shell_quote ("$tcache" . $req->id . "t")
475 foreach my $file (map { $_ . $req->id } ($tcache, $ocache))
821 my $traces = new Autom4te::XFile ("< " . open_quote ($tcache . $req->id));
871 my $tfile = $tcache . $req->id;

Completed in 232 milliseconds
