Searched refs:usize (Results 1 - 18 of 18) sorted by relevance

/fuchsia/zircon/third_party/ulib/jemalloc/include/jemalloc/internal/
prof_inlines.h
11 size_t usize, prof_tctx_t *tctx);
14 bool prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
16 prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
19 size_t usize, prof_tctx_t *tctx);
21 size_t usize, prof_tctx_t *tctx, bool prof_active, bool updated,
25 size_t usize);
86 prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize, argument
92 arena_prof_tctx_set(tsdn, extent, ptr, usize, tctx);
106 prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update, argument
123 if (likely(tdata->bytes_until_sample >= usize)) {
136 prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) argument
157 prof_malloc(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize, prof_tctx_t *tctx) argument
173 prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize, prof_tctx_t *tctx, bool prof_active, bool updated, extent_t *old_extent, const void *old_ptr, size_t old_usize, prof_tctx_t *old_tctx) argument
202 prof_malloc_sample_object(tsd_tsdn(tsd), extent, ptr, usize, local
205 prof_tctx_set(tsd_tsdn(tsd), extent, ptr, usize, local
233 prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr, size_t usize) argument
[all...]
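
The prof_sample_accum_update() hits above show how jemalloc decides which allocations to profile: each thread keeps a byte countdown (tdata->bytes_until_sample), and an allocation is sampled only when the countdown runs out. Below is a minimal sketch of that countdown idiom, with an assumed fixed re-arm interval; real jemalloc re-arms with a randomized interval, and the names here are invented for illustration.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Hypothetical stand-in for tdata->bytes_until_sample: a per-thread
     * countdown of bytes remaining before the next sampled allocation. */
    static size_t bytes_until_sample = (size_t)1 << 19;

    /* True when an allocation of usize bytes should be sampled, mirroring
     * the fast-path check `bytes_until_sample >= usize` in the hits above. */
    static bool sample_accum_update(size_t usize) {
        if (bytes_until_sample >= usize) {
            bytes_until_sample -= usize;      /* likely path: not yet due */
            return false;
        }
        bytes_until_sample = (size_t)1 << 19; /* re-arm the countdown */
        return true;                          /* unlikely path: sample it */
    }

    int main(void) {
        size_t sampled = 0;
        for (int i = 0; i < 100000; i++) {
            if (sample_accum_update(64))
                sampled++;
        }
        printf("sampled %zu of 100000 allocations\n", sampled);
        return 0;
    }

With a ~512 KiB interval and 64-byte requests, this samples roughly one allocation in 8192, which is why the fast path above is wrapped in likely().
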
large_externs.h
6 void *large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
7 void *large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
12 size_t usize, size_t alignment, bool zero, tcache_t *tcache);
19 void large_dalloc_junk(void *ptr, size_t usize);
20 void large_dalloc_maybe_junk(void *ptr, size_t usize);
tcache_inlines.h
132 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
152 * Only compute usize if required. The checks in the following if
156 usize = index2size(binind);
157 assert(tcache_salloc(tsd_tsdn(tsd), ret) == usize);
166 memset(ret, 0, usize);
173 memset(ret, 0, usize);
179 tcache->prof_accumbytes += usize;
209 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
211 /* Only compute usize on demand */
214 usize
[all...]
jemalloc_internal.h
625 size_t usize = (psz + delta_mask) & ~delta_mask; local
626 return (usize);
701 size_t usize = grp_size + mod_size; local
702 return (usize);
740 size_t usize = (size + delta_mask) & ~delta_mask; local
741 return (usize);
774 size_t usize; local
794 usize = s2u(ALIGNMENT_CEILING(size, alignment));
795 if (usize < LARGE_MINCLASS)
796 return (usize);
1012 ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, tcache_t *tcache, bool is_internal, arena_t *arena) argument
1033 ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) argument
1040 ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) argument
1105 size_t usize, copysize; local
[all...]
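
The s2u()/psz2u() hits above all use the same rounding idiom: add the size class's delta_mask, then clear the low bits. For a power-of-two delta this is a plain ceiling. A self-contained sketch (ceil_to_delta is a name invented here; jemalloc derives delta_mask from the size class's group):

    #include <assert.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Round `size` up to a multiple of `delta`, which must be a power of
     * two. This is the (size + delta_mask) & ~delta_mask idiom above. */
    static size_t ceil_to_delta(size_t size, size_t delta) {
        size_t delta_mask = delta - 1;
        assert((delta & delta_mask) == 0); /* power-of-two check */
        return (size + delta_mask) & ~delta_mask;
    }

    int main(void) {
        printf("%zu\n", ceil_to_delta(17, 16));     /* 32 */
        printf("%zu\n", ceil_to_delta(4097, 4096)); /* 8192 */
        printf("%zu\n", ceil_to_delta(64, 16));     /* 64: already aligned */
        return 0;
    }
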
extent_externs.h
27 extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
30 extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
33 extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
arena_inlines_b.h
9 size_t usize, prof_tctx_t *tctx);
46 size_t usize, prof_tctx_t *tctx)
153 size_t usize = extent_usize_get(extent); local
155 if (likely(tcache != NULL) && usize <= tcache_maxclass) {
156 if (config_prof && unlikely(usize <= SMALL_MAXCLASS)) {
161 ptr, usize, slow_path); local
extent_inlines.h
27 void extent_usize_set(extent_t *extent, size_t usize);
35 size_t size, size_t usize, size_t sn, bool active, bool zeroed,
206 extent_usize_set(extent_t *extent, size_t usize) argument
208 extent->e_usize = usize;
249 size_t usize, size_t sn, bool active, bool zeroed, bool committed,
257 extent_usize_set(extent, usize);
248 extent_init(extent_t *extent, arena_t *arena, void *addr, size_t size, size_t usize, size_t sn, bool active, bool zeroed, bool committed, bool slab) argument
prof_externs.h
45 const void *ptr, size_t usize, prof_tctx_t *tctx);
46 void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
arena_externs.h
31 size_t usize, size_t alignment, bool *zero);
56 void *arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
59 size_t usize);
/fuchsia/zircon/third_party/ulib/jemalloc/src/
large.c
7 large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero) argument
9 assert(usize == s2u(usize));
11 return (large_palloc(tsdn, arena, usize, CACHELINE, zero));
15 large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, argument
25 ausize = sa2u(usize, alignment);
37 arena, usize, alignment, &is_zeroed)) == NULL)
45 if (config_prof && arena_prof_accum(tsdn, arena, usize))
67 large_dalloc_junk(void *ptr, size_t usize) argument
69 memset(ptr, JEMALLOC_FREE_JUNK, usize);
82 large_dalloc_maybe_junk(void *ptr, size_t usize) argument
101 large_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, size_t usize) argument
135 large_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, size_t usize, bool zero) argument
243 large_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, bool zero) argument
252 large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize, size_t alignment, bool zero, tcache_t *tcache) argument
[all...]
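
large_dalloc_junk() above simply scribbles over the usable bytes of a freed large allocation so that use-after-free reads surface as an obvious pattern. A sketch, assuming jemalloc's customary 0x5a free-junk byte:

    #include <string.h>

    #define FREE_JUNK 0x5a /* assumed value of JEMALLOC_FREE_JUNK */

    /* Mirrors large_dalloc_junk(): fill the usable size before the extent
     * is recycled, so stale pointers read back 0x5a5a5a5a... */
    static void dalloc_junk(void *ptr, size_t usize) {
        memset(ptr, FREE_JUNK, usize);
    }

The large_dalloc_maybe_junk() variant seen in the hits gates this behind runtime configuration, which is what the junk.c test below intercepts and verifies byte by byte.
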
arena.c
48 extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
55 return (extent_alloc_cache(tsdn, arena, r_extent_hooks, new_addr, usize,
205 arena_large_malloc_stats_update(arena_t *arena, size_t usize) argument
211 if (usize < LARGE_MINCLASS)
212 usize = LARGE_MINCLASS;
213 index = size2index(usize);
217 arena->stats.allocated_large += usize;
224 arena_large_malloc_stats_update_undo(arena_t *arena, size_t usize) argument
230 if (usize < LARGE_MINCLASS)
231 usize
47 arena_extent_cache_alloc_locked(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero, bool slab) argument
243 arena_large_dalloc_stats_update(arena_t *arena, size_t usize) argument
261 arena_large_reset_stats_cancel(arena_t *arena, size_t usize) argument
273 arena_large_ralloc_stats_update(arena_t *arena, size_t oldusize, size_t usize) argument
280 arena_extent_alloc_large_hard(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, size_t usize, size_t alignment, bool *zero) argument
303 arena_extent_alloc_large(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, bool *zero) argument
355 size_t usize = extent_usize_get(extent); local
371 size_t usize = extent_usize_get(extent); local
828 size_t usize; local
1182 size_t usize; local
1247 arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, bool zero, tcache_t *tcache) argument
1267 arena_prof_promote(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize) argument
1312 size_t usize; local
1319 tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr, usize, local
1477 arena_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, bool zero, tcache_t *tcache) argument
1494 size_t usize, copysize; local
[all...]
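
The arena.c stats hits clamp usize before indexing: arena_large_malloc_stats_update() raises any usize below LARGE_MINCLASS to LARGE_MINCLASS, because a profiling-promoted small allocation occupies a large extent and its bytes must be charged to the smallest large bucket. A sketch of the clamp, with an illustrative (configuration-dependent) class boundary:

    #include <stddef.h>

    #define LARGE_MINCLASS ((size_t)16384) /* illustrative; config-dependent */

    /* A sampled small allocation was promoted to LARGE_MINCLASS, so its
     * stats land in the smallest large bucket even though its logical
     * usize is smaller. The caller maps the result through size2index(). */
    static size_t stats_bucket_usize(size_t usize) {
        if (usize < LARGE_MINCLASS)
            usize = LARGE_MINCLASS;
        return usize;
    }
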
jemalloc.c
1445 size_t size, size_t usize, szind_t ind) {
1471 return ipalloct(tsd_tsdn(tsd), usize, dopts->alignment,
1481 size_t usize, szind_t ind) {
1485 * For small allocations, sampling bumps the usize. If so, we allocate
1489 size_t bumped_usize = usize;
1491 if (usize <= SMALL_MAXCLASS) {
1502 ret, usize); local
1504 ret = imalloc_no_sample(sopts, dopts, tsd, usize, usize, ind);
1555 * allocations, or in case of stats or profiling we need usize
1444 imalloc_no_sample(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd, size_t size, size_t usize, szind_t ind) argument
1480 imalloc_sample(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd, size_t usize, szind_t ind) argument
1563 size_t usize = 0; local
1648 allocation, usize, tctx); local
1867 irealloc_prof_sample(tsd_t *tsd, extent_t *extent, void *old_ptr, size_t old_usize, size_t usize, prof_tctx_t *tctx) argument
1888 irealloc_prof(tsd_t *tsd, extent_t *old_extent, void *old_ptr, size_t old_usize, size_t usize) argument
1921 size_t usize; local
1944 isfree(tsd_t *tsd, extent_t *extent, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) argument
1958 isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, false); local
1960 isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, true); local
2226 irallocx_prof_sample(tsdn_t *tsdn, extent_t *extent, void *old_ptr, size_t old_usize, size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena, prof_tctx_t *tctx) argument
2249 irallocx_prof(tsd_t *tsd, extent_t *old_extent, void *old_ptr, size_t old_usize, size_t size, size_t alignment, size_t *usize, bool zero, tcache_t *tcache, arena_t *arena) argument
2300 size_t usize; local
2373 size_t usize; local
2387 size_t usize; local
2401 size_t usize_max, usize; local
2452 size_t usize, old_usize; local
2506 size_t usize; local
2553 size_t usize; local
2570 size_t usize; local
2601 size_t usize; local
[all...]
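
The imalloc_sample() comment in the hits ("For small allocations, sampling bumps the usize") refers to profiling promotion: a sampled small request is allocated at LARGE_MINCLASS so it occupies its own extent and can carry a prof_tctx, after which arena_prof_promote() records the true usize. A sketch of the bump, with illustrative class-boundary values:

    #include <stddef.h>

    #define SMALL_MAXCLASS ((size_t)14336) /* illustrative; config-dependent */
    #define LARGE_MINCLASS ((size_t)16384) /* illustrative; config-dependent */

    /* Shape of the bump in imalloc_sample(): allocate sampled small
     * requests at LARGE_MINCLASS; the caller later re-labels the extent
     * with the true usize (see arena_prof_promote in the arena.c hits). */
    static size_t sample_bump(size_t usize) {
        size_t bumped_usize = usize;
        if (usize <= SMALL_MAXCLASS)
            bumped_usize = LARGE_MINCLASS;
        return bumped_usize;
    }
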
ckh.c
266 size_t usize; local
269 usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
270 if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
274 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE,
308 size_t usize; local
317 usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
318 if (unlikely(usize == 0 || usize > LARGE_MAXCLASS))
320 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELIN
359 size_t mincells, usize; local
[all...]
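
Both ckh.c call sites follow the same guard: compute usize with sa2u() and bail out if it is 0 (sa2u's overflow sentinel) or above LARGE_MAXCLASS. A sketch of the guard shape, with a heavily simplified stand-in for sa2u() and an illustrative cap:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    #define CACHELINE ((size_t)64)
    #define LARGE_MAXCLASS (SIZE_MAX >> 1) /* illustrative, not jemalloc's */

    /* Simplified stand-in for sa2u(): round size up to alignment,
     * returning 0 on wraparound, which callers treat as an error. */
    static size_t sa2u_sketch(size_t size, size_t alignment) {
        size_t usize = (size + (alignment - 1)) & ~(alignment - 1);
        return (usize < size) ? 0 : usize;
    }

    /* The ckh.c guard: reject overflow and oversize before allocating. */
    static bool table_size_ok(size_t bytes) {
        size_t usize = sa2u_sketch(bytes, CACHELINE);
        return !(usize == 0 || usize > LARGE_MAXCLASS);
    }
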
base.c
154 size_t usize, header_size, gap_size, block_size; local
157 usize = ALIGNMENT_CEILING(size, alignment);
160 block_size = HUGEPAGE_CEILING(header_size + gap_size + usize);
293 size_t usize, asize; local
298 usize = ALIGNMENT_CEILING(size, alignment);
299 asize = usize + alignment - QUANTUM;
312 extent = base_extent_alloc(tsdn, base, usize, alignment);
319 ret = base_extent_bump_alloc(tsdn, base, extent, usize, alignment);
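
The base.c hits reserve `usize + alignment - QUANTUM` bytes: a candidate block starts at least QUANTUM-aligned, so the gap to the next alignment-aligned address is a multiple of QUANTUM and never more than alignment - QUANTUM. A sketch of that worst-case computation (the QUANTUM value is assumed):

    #include <stddef.h>

    #define QUANTUM ((size_t)16) /* assumed minimum alignment */
    #define ALIGNMENT_CEILING(s, a) (((s) + ((a) - 1)) & ~((a) - 1))

    /* Worst-case reservation: usize bytes at the requested alignment,
     * plus at most (alignment - QUANTUM) bytes of leading pad. This
     * matches the asize computation in the hits above. */
    static size_t base_reserve(size_t size, size_t alignment) {
        size_t usize = ALIGNMENT_CEILING(size, alignment);
        return usize + alignment - QUANTUM;
    }
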
extent.c
417 void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero,
452 size = usize + pad;
455 if (alloc_size < usize)
505 lead, leadsize, leadsize, size + trailsize, usize +
521 r_extent_hooks, extent, size, usize, trailsize, trailsize);
534 * Splitting causes usize to be set as a side effect, but no
537 extent_usize_set(extent, usize);
572 for (i = 0; i < usize / sizeof(size_t); i++)
615 extent_hooks_t **r_extent_hooks, bool locked, void *new_addr, size_t usize,
620 assert(usize
415 extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, extent_heap_t extent_heaps[NPSIZES+1], bool locked, bool cache, void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero, bool *commit, bool slab) argument
614 extent_alloc_cache_impl(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, bool locked, void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero, bool *commit, bool slab) argument
630 extent_alloc_cache_locked(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero, bool *commit, bool slab) argument
641 extent_alloc_cache(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero, bool *commit, bool slab) argument
697 extent_grow_retained(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero, bool *commit, bool slab) argument
811 extent_alloc_retained(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero, bool *commit, bool slab) argument
840 extent_alloc_wrapper_hard(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero, bool *commit, bool slab) argument
877 extent_alloc_wrapper(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero, bool *commit, bool slab) argument
[all...]
prof.c
227 size_t usize, prof_tctx_t *tctx)
229 prof_tctx_set(tsdn, extent, ptr, usize, tctx);
233 tctx->cnts.curbytes += usize;
236 tctx->cnts.accumbytes += usize;
243 prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx) argument
247 assert(tctx->cnts.curbytes >= usize);
249 tctx->cnts.curbytes -= usize;
226 prof_malloc_sample_object(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize, prof_tctx_t *tctx) argument
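
prof_malloc_sample_object() and prof_free_sampled_object() keep a context's counters symmetric: the malloc side adds usize to both the live and cumulative counts, and the free side asserts there is enough live weight before subtracting. A sketch of that pairing, with a hypothetical cnt_t mirroring the shape of prof_cnt_t:

    #include <assert.h>
    #include <stddef.h>

    typedef struct {
        size_t curbytes;   /* bytes currently live for this context */
        size_t accumbytes; /* bytes ever allocated for this context */
    } cnt_t;

    static void on_sampled_malloc(cnt_t *c, size_t usize) {
        c->curbytes += usize;
        c->accumbytes += usize;
    }

    static void on_sampled_free(cnt_t *c, size_t usize) {
        /* Mirrors the assert in prof_free_sampled_object() above. */
        assert(c->curbytes >= usize);
        c->curbytes -= usize;
    }
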
/fuchsia/zircon/third_party/ulib/jemalloc/test/unit/
junk.c
40 large_dalloc_junk_intercept(void *ptr, size_t usize) argument
44 large_dalloc_junk_orig(ptr, usize);
45 for (i = 0; i < usize; i++) {
48 i, usize);
55 large_dalloc_maybe_junk_intercept(void *ptr, size_t usize) argument
57 large_dalloc_maybe_junk_orig(ptr, usize);
/fuchsia/zircon/third_party/ulib/jemalloc/test/integration/
allocated.c
18 size_t sz, usize; local
70 usize = malloc_usable_size(p);
71 assert_u64_le(a0 + usize, a1,
87 assert_u64_le(d0 + usize, d1,

Completed in 175 milliseconds