arena.h: r296221 (deleted) vs. r299587 (added)
1/******************************************************************************/
2#ifdef JEMALLOC_H_TYPES
3
4#define LARGE_MINCLASS (ZU(1) << LG_LARGE_MINCLASS)
5
6/* Maximum number of regions in one run. */
7#define LG_RUN_MAXREGS (LG_PAGE - LG_TINY_MIN)
8#define RUN_MAXREGS (1U << LG_RUN_MAXREGS)
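
Note: a worked instance of the two macros above (not part of the diff; both inputs are configuration-dependent, and the values assumed here are the common 4 KiB-page build):

	/* Assumed: LG_PAGE == 12 (4 KiB pages), LG_TINY_MIN == 3 (8-byte tiny). */
	/* LG_RUN_MAXREGS == 12 - 3 == 9 */
	/* RUN_MAXREGS == 1U << 9 == 512: one run holds at most 512 regions. */
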

--- 22 unchanged lines hidden ---

31} purge_mode_t;
32#define PURGE_DEFAULT purge_mode_ratio
33/* Default decay time in seconds. */
34#define DECAY_TIME_DEFAULT 10
35/* Number of event ticks between time checks. */
36#define DECAY_NTICKS_PER_UPDATE 1000
37
38typedef struct arena_runs_dirty_link_s arena_runs_dirty_link_t;
1/******************************************************************************/
2#ifdef JEMALLOC_H_TYPES
3
4#define LARGE_MINCLASS (ZU(1) << LG_LARGE_MINCLASS)
5
6/* Maximum number of regions in one run. */
7#define LG_RUN_MAXREGS (LG_PAGE - LG_TINY_MIN)
8#define RUN_MAXREGS (1U << LG_RUN_MAXREGS)

--- 22 unchanged lines hidden ---

31} purge_mode_t;
32#define PURGE_DEFAULT purge_mode_ratio
33/* Default decay time in seconds. */
34#define DECAY_TIME_DEFAULT 10
35/* Number of event ticks between time checks. */
36#define DECAY_NTICKS_PER_UPDATE 1000
37
38typedef struct arena_runs_dirty_link_s arena_runs_dirty_link_t;
39typedef struct arena_avail_links_s arena_avail_links_t;
39typedef struct arena_run_s arena_run_t;
40typedef struct arena_chunk_map_bits_s arena_chunk_map_bits_t;
41typedef struct arena_chunk_map_misc_s arena_chunk_map_misc_t;
42typedef struct arena_chunk_s arena_chunk_t;
43typedef struct arena_bin_info_s arena_bin_info_t;
44typedef struct arena_bin_s arena_bin_t;
45typedef struct arena_s arena_t;
46typedef struct arena_tdata_s arena_tdata_t;

--- 101 unchanged lines hidden ---

148
149/*
150 * Each arena_chunk_map_misc_t corresponds to one page within the chunk, just
151 * like arena_chunk_map_bits_t. Two separate arrays are stored within each
152 * chunk header in order to improve cache locality.
153 */
154struct arena_chunk_map_misc_s {
155 /*
40typedef struct arena_run_s arena_run_t;
41typedef struct arena_chunk_map_bits_s arena_chunk_map_bits_t;
42typedef struct arena_chunk_map_misc_s arena_chunk_map_misc_t;
43typedef struct arena_chunk_s arena_chunk_t;
44typedef struct arena_bin_info_s arena_bin_info_t;
45typedef struct arena_bin_s arena_bin_t;
46typedef struct arena_s arena_t;
47typedef struct arena_tdata_s arena_tdata_t;

--- 101 unchanged lines hidden ---

149
150/*
151 * Each arena_chunk_map_misc_t corresponds to one page within the chunk, just
152 * like arena_chunk_map_bits_t. Two separate arrays are stored within each
153 * chunk header in order to improve cache locality.
154 */
155struct arena_chunk_map_misc_s {
156 /*
156 * Linkage for run trees. There are two disjoint uses:
157 * Linkage for run heaps. There are two disjoint uses:
157 *
158 *
158 * 1) arena_t's runs_avail tree.
159 * 1) arena_t's runs_avail heaps.
159 * 2) arena_run_t conceptually uses this linkage for in-use non-full
160 * runs, rather than directly embedding linkage.
161 */
160 * 2) arena_run_t conceptually uses this linkage for in-use non-full
161 * runs, rather than directly embedding linkage.
162 */
162 rb_node(arena_chunk_map_misc_t) rb_link;
163 phn(arena_chunk_map_misc_t) ph_link;
163
164 union {
165 /* Linkage for list of dirty runs. */
166 arena_runs_dirty_link_t rd;
167
168 /* Profile counters, used for large object runs. */
169 union {
170 void *prof_tctx_pun;
171 prof_tctx_t *prof_tctx;
172 };
173
174 /* Small region run metadata. */
175 arena_run_t run;
176 };
177};
164
165 union {
166 /* Linkage for list of dirty runs. */
167 arena_runs_dirty_link_t rd;
168
169 /* Profile counters, used for large object runs. */
170 union {
171 void *prof_tctx_pun;
172 prof_tctx_t *prof_tctx;
173 };
174
175 /* Small region run metadata. */
176 arena_run_t run;
177 };
178};
178typedef rb_tree(arena_chunk_map_misc_t) arena_run_tree_t;
179typedef ph(arena_chunk_map_misc_t) arena_run_heap_t;
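Note: the hunk above swaps the intrusive red-black linkage (rb_link, rb_tree()) for pairing-heap linkage (ph_link, ph()), presumably from jemalloc's new ph.h. A minimal declaration sketch of the same pattern; node_t and its fields are hypothetical, not from the patch:

	typedef struct node_s node_t;
	struct node_s {
		phn(node_t)	ph_link;	/* Intrusive heap linkage (was rb_node()). */
		uintptr_t	addr;		/* Key for address-ordered comparison. */
	};
	typedef ph(node_t) node_heap_t;		/* Heap of node_t, linked via ph_link. */
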
179#endif /* JEMALLOC_ARENA_STRUCTS_A */
180
181#ifdef JEMALLOC_ARENA_STRUCTS_B
182/* Arena chunk header. */
183struct arena_chunk_s {
184 /*
185 * A pointer to the arena that owns the chunk is stored within the node.
186 * This field as a whole is used by chunks_rtree to support both

--- 80 unchanged lines hidden ---

267
268 /*
269 * Current run being used to service allocations of this bin's size
270 * class.
271 */
272 arena_run_t *runcur;
273
274 /*
180#endif /* JEMALLOC_ARENA_STRUCTS_A */
181
182#ifdef JEMALLOC_ARENA_STRUCTS_B
183/* Arena chunk header. */
184struct arena_chunk_s {
185 /*
186 * A pointer to the arena that owns the chunk is stored within the node.
187 * This field as a whole is used by chunks_rtree to support both

--- 80 unchanged lines hidden ---

268
269 /*
270 * Current run being used to service allocations of this bin's size
271 * class.
272 */
273 arena_run_t *runcur;
274
275 /*
275 * Tree of non-full runs. This tree is used when looking for an
276 * Heap of non-full runs. This heap is used when looking for an
276 * existing run when runcur is no longer usable. We choose the
277 * non-full run that is lowest in memory; this policy tends to keep
278 * objects packed well, and it can also help reduce the number of
279 * almost-empty chunks.
280 */
277 * existing run when runcur is no longer usable. We choose the
278 * non-full run that is lowest in memory; this policy tends to keep
279 * objects packed well, and it can also help reduce the number of
280 * almost-empty chunks.
281 */
281 arena_run_tree_t runs;
282 arena_run_heap_t runs;
282
283 /* Bin statistics. */
284 malloc_bin_stats_t stats;
285};
286
287struct arena_s {
288 /* This arena's index within the arenas array. */
289 unsigned ind;
290
291 /*
283
284 /* Bin statistics. */
285 malloc_bin_stats_t stats;
286};
287
288struct arena_s {
289 /* This arena's index within the arenas array. */
290 unsigned ind;
291
292 /*
292 * Number of threads currently assigned to this arena. This field is
293 * synchronized via atomic operations.
293 * Number of threads currently assigned to this arena, synchronized via
294 * atomic operations. Each thread has two distinct assignments, one for
295 * application-serving allocation, and the other for internal metadata
296 * allocation. Internal metadata must not be allocated from arenas
297 * created via the arenas.extend mallctl, because the arena.<i>.reset
298 * mallctl indiscriminately discards all allocations for the affected
299 * arena.
300 *
301 * 0: Application allocation.
302 * 1: Internal metadata allocation.
294 */
303 */
295 unsigned nthreads;
304 unsigned nthreads[2];
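Note: given the new two-element array, the reworked arena_nthreads_get/inc/dec prototypes later in this header presumably index it by the internal flag. A sketch under that assumption (the atomic_read_u() call is also an assumption about jemalloc's atomic.h):

	JEMALLOC_INLINE unsigned
	arena_nthreads_get(arena_t *arena, bool internal)
	{
		/* false -> slot 0 (application), true -> slot 1 (metadata). */
		return (atomic_read_u(&arena->nthreads[internal]));
	}
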
296
297 /*
298 * There are three classes of arena operations from a locking
299 * perspective:
300 * 1) Thread assignment (modifies nthreads) is synchronized via atomics.
301 * 2) Bin-related operations are protected by bin locks.
302 * 3) Chunk- and run-related operations are protected by this mutex.
303 */

--- 12 unchanged lines hidden ---

316 /*
317 * PRNG state for cache index randomization of large allocation base
318 * pointers.
319 */
320 uint64_t offset_state;
321
322 dss_prec_t dss_prec;
323
305
306 /*
307 * There are three classes of arena operations from a locking
308 * perspective:
309 * 1) Thread assignment (modifies nthreads) is synchronized via atomics.
310 * 2) Bin-related operations are protected by bin locks.
311 * 3) Chunk- and run-related operations are protected by this mutex.
312 */

--- 12 unchanged lines hidden ---

325 /*
326 * PRNG state for cache index randomization of large allocation base
327 * pointers.
328 */
329 uint64_t offset_state;
330
331 dss_prec_t dss_prec;
332
333
334 /* Extant arena chunks. */
335 ql_head(extent_node_t) achunks;
336
324 /*
325 * In order to avoid rapid chunk allocation/deallocation when an arena
326 * oscillates right on the cusp of needing a new chunk, cache the most
327 * recently freed chunk. The spare is left in the arena's chunk trees
328 * until it is deleted.
329 *
330 * There is one spare chunk per arena, rather than one spare total, in
331 * order to avoid interactions between multiple threads that could make

--- 120 unchanged lines hidden ---

452
453 /* User-configurable chunk hook functions. */
454 chunk_hooks_t chunk_hooks;
455
456 /* bins is used to store trees of free regions. */
457 arena_bin_t bins[NBINS];
458
459 /*
337 /*
338 * In order to avoid rapid chunk allocation/deallocation when an arena
339 * oscillates right on the cusp of needing a new chunk, cache the most
340 * recently freed chunk. The spare is left in the arena's chunk trees
341 * until it is deleted.
342 *
343 * There is one spare chunk per arena, rather than one spare total, in
344 * order to avoid interactions between multiple threads that could make

--- 120 unchanged lines hidden ---

465
466 /* User-configurable chunk hook functions. */
467 chunk_hooks_t chunk_hooks;
468
469 /* bins is used to store trees of free regions. */
470 arena_bin_t bins[NBINS];
471
472 /*
460 * Quantized address-ordered trees of this arena's available runs. The
461 * trees are used for first-best-fit run allocation.
473 * Quantized address-ordered heaps of this arena's available runs. The
474 * heaps are used for first-best-fit run allocation.
462 */
475 */
463 arena_run_tree_t runs_avail[1]; /* Dynamically sized. */
476 arena_run_heap_t runs_avail[1]; /* Dynamically sized. */
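Note: "first-best-fit" over the quantized heaps means: starting from the smallest quantized size class that can satisfy the request, take the first non-empty heap's minimum (lowest-address) run. A sketch of that loop; run_quantize_ceil() is declared below, but size2ind_quantized() and heap_first() are hypothetical stand-ins for the actual index lookup and ph operation:

	static arena_chunk_map_misc_t *
	first_best_fit(arena_t *arena, size_t size, unsigned nheaps)
	{
		unsigned i;

		/* Start at the smallest quantized class that fits size. */
		for (i = size2ind_quantized(run_quantize_ceil(size));
		    i < nheaps; i++) {
			arena_chunk_map_misc_t *miscelm =
			    heap_first(&arena->runs_avail[i]);
			if (miscelm != NULL)
				return (miscelm); /* Lowest run in this heap. */
		}
		return (NULL); /* Nothing available; a new chunk is needed. */
	}
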
464};
465
466/* Used in conjunction with tsd for fast arena-related context lookup. */
467struct arena_tdata_s {
468 ticker_t decay_ticker;
469};
470#endif /* JEMALLOC_ARENA_STRUCTS_B */
471

--- 28 unchanged lines hidden ---

500typedef size_t (run_quantize_t)(size_t);
501extern run_quantize_t *run_quantize_floor;
502extern run_quantize_t *run_quantize_ceil;
503#endif
504void arena_chunk_cache_maybe_insert(arena_t *arena, extent_node_t *node,
505 bool cache);
506void arena_chunk_cache_maybe_remove(arena_t *arena, extent_node_t *node,
507 bool cache);
477};
478
479/* Used in conjunction with tsd for fast arena-related context lookup. */
480struct arena_tdata_s {
481 ticker_t decay_ticker;
482};
483#endif /* JEMALLOC_ARENA_STRUCTS_B */
484

--- 28 unchanged lines hidden ---

513typedef size_t (run_quantize_t)(size_t);
514extern run_quantize_t *run_quantize_floor;
515extern run_quantize_t *run_quantize_ceil;
516#endif
517void arena_chunk_cache_maybe_insert(arena_t *arena, extent_node_t *node,
518 bool cache);
519void arena_chunk_cache_maybe_remove(arena_t *arena, extent_node_t *node,
520 bool cache);
508extent_node_t *arena_node_alloc(arena_t *arena);
509void arena_node_dalloc(arena_t *arena, extent_node_t *node);
510void *arena_chunk_alloc_huge(arena_t *arena, size_t usize, size_t alignment,
511 bool *zero);
512void arena_chunk_dalloc_huge(arena_t *arena, void *chunk, size_t usize);
513void arena_chunk_ralloc_huge_similar(arena_t *arena, void *chunk,
514 size_t oldsize, size_t usize);
515void arena_chunk_ralloc_huge_shrink(arena_t *arena, void *chunk,
516 size_t oldsize, size_t usize);
517bool arena_chunk_ralloc_huge_expand(arena_t *arena, void *chunk,
518 size_t oldsize, size_t usize, bool *zero);
519ssize_t arena_lg_dirty_mult_get(arena_t *arena);
520bool arena_lg_dirty_mult_set(arena_t *arena, ssize_t lg_dirty_mult);
521ssize_t arena_decay_time_get(arena_t *arena);
522bool arena_decay_time_set(arena_t *arena, ssize_t decay_time);
523void arena_maybe_purge(arena_t *arena);
524void arena_purge(arena_t *arena, bool all);
525void arena_tcache_fill_small(tsd_t *tsd, arena_t *arena, tcache_bin_t *tbin,
526 szind_t binind, uint64_t prof_accumbytes);
521extent_node_t *arena_node_alloc(tsdn_t *tsdn, arena_t *arena);
522void arena_node_dalloc(tsdn_t *tsdn, arena_t *arena, extent_node_t *node);
523void *arena_chunk_alloc_huge(tsdn_t *tsdn, arena_t *arena, size_t usize,
524 size_t alignment, bool *zero);
525void arena_chunk_dalloc_huge(tsdn_t *tsdn, arena_t *arena, void *chunk,
526 size_t usize);
527void arena_chunk_ralloc_huge_similar(tsdn_t *tsdn, arena_t *arena,
528 void *chunk, size_t oldsize, size_t usize);
529void arena_chunk_ralloc_huge_shrink(tsdn_t *tsdn, arena_t *arena,
530 void *chunk, size_t oldsize, size_t usize);
531bool arena_chunk_ralloc_huge_expand(tsdn_t *tsdn, arena_t *arena,
532 void *chunk, size_t oldsize, size_t usize, bool *zero);
533ssize_t arena_lg_dirty_mult_get(tsdn_t *tsdn, arena_t *arena);
534bool arena_lg_dirty_mult_set(tsdn_t *tsdn, arena_t *arena,
535 ssize_t lg_dirty_mult);
536ssize_t arena_decay_time_get(tsdn_t *tsdn, arena_t *arena);
537bool arena_decay_time_set(tsdn_t *tsdn, arena_t *arena, ssize_t decay_time);
538void arena_purge(tsdn_t *tsdn, arena_t *arena, bool all);
539void arena_maybe_purge(tsdn_t *tsdn, arena_t *arena);
540void arena_reset(tsd_t *tsd, arena_t *arena);
541void arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena,
542 tcache_bin_t *tbin, szind_t binind, uint64_t prof_accumbytes);
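
Note: nearly every prototype in this hunk gains a leading tsdn_t *. Judging from the tsdn_null()/tsdn_tsd() calls in the inlines further down, a tsdn_t * behaves as a nullable tsd_t *, so these paths can run before thread-specific data is initialized. A sketch of the idiom (example_op() is hypothetical):

	void
	example_op(tsdn_t *tsdn)
	{
		if (!tsdn_null(tsdn)) {
			tsd_t *tsd = tsdn_tsd(tsdn);

			/* TSD available: per-thread tickers, caches, etc. */
			(void)tsd;
		}
		/* Otherwise take a path that never touches TSD. */
	}
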
527void arena_alloc_junk_small(void *ptr, arena_bin_info_t *bin_info,
528 bool zero);
529#ifdef JEMALLOC_JET
530typedef void (arena_redzone_corruption_t)(void *, size_t, bool, size_t,
531 uint8_t);
532extern arena_redzone_corruption_t *arena_redzone_corruption;
533typedef void (arena_dalloc_junk_small_t)(void *, arena_bin_info_t *);
534extern arena_dalloc_junk_small_t *arena_dalloc_junk_small;
535#else
536void arena_dalloc_junk_small(void *ptr, arena_bin_info_t *bin_info);
537#endif
538void arena_quarantine_junk_small(void *ptr, size_t usize);
543void arena_alloc_junk_small(void *ptr, arena_bin_info_t *bin_info,
544 bool zero);
545#ifdef JEMALLOC_JET
546typedef void (arena_redzone_corruption_t)(void *, size_t, bool, size_t,
547 uint8_t);
548extern arena_redzone_corruption_t *arena_redzone_corruption;
549typedef void (arena_dalloc_junk_small_t)(void *, arena_bin_info_t *);
550extern arena_dalloc_junk_small_t *arena_dalloc_junk_small;
551#else
552void arena_dalloc_junk_small(void *ptr, arena_bin_info_t *bin_info);
553#endif
554void arena_quarantine_junk_small(void *ptr, size_t usize);
539void *arena_malloc_large(tsd_t *tsd, arena_t *arena, szind_t ind, bool zero);
540void *arena_malloc_hard(tsd_t *tsd, arena_t *arena, size_t size, szind_t ind,
541 bool zero, tcache_t *tcache);
542void *arena_palloc(tsd_t *tsd, arena_t *arena, size_t usize,
555void *arena_malloc_large(tsdn_t *tsdn, arena_t *arena, szind_t ind,
556 bool zero);
557void *arena_malloc_hard(tsdn_t *tsdn, arena_t *arena, size_t size,
558 szind_t ind, bool zero);
559void *arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
543 size_t alignment, bool zero, tcache_t *tcache);
560 size_t alignment, bool zero, tcache_t *tcache);
544void arena_prof_promoted(const void *ptr, size_t size);
545void arena_dalloc_bin_junked_locked(arena_t *arena, arena_chunk_t *chunk,
546 void *ptr, arena_chunk_map_bits_t *bitselm);
547void arena_dalloc_bin(arena_t *arena, arena_chunk_t *chunk, void *ptr,
548 size_t pageind, arena_chunk_map_bits_t *bitselm);
549void arena_dalloc_small(tsd_t *tsd, arena_t *arena, arena_chunk_t *chunk,
561void arena_prof_promoted(tsdn_t *tsdn, const void *ptr, size_t size);
562void arena_dalloc_bin_junked_locked(tsdn_t *tsdn, arena_t *arena,
563 arena_chunk_t *chunk, void *ptr, arena_chunk_map_bits_t *bitselm);
564void arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
565 void *ptr, size_t pageind, arena_chunk_map_bits_t *bitselm);
566void arena_dalloc_small(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
550 void *ptr, size_t pageind);
551#ifdef JEMALLOC_JET
552typedef void (arena_dalloc_junk_large_t)(void *, size_t);
553extern arena_dalloc_junk_large_t *arena_dalloc_junk_large;
554#else
555void arena_dalloc_junk_large(void *ptr, size_t usize);
556#endif
567 void *ptr, size_t pageind);
568#ifdef JEMALLOC_JET
569typedef void (arena_dalloc_junk_large_t)(void *, size_t);
570extern arena_dalloc_junk_large_t *arena_dalloc_junk_large;
571#else
572void arena_dalloc_junk_large(void *ptr, size_t usize);
573#endif
557void arena_dalloc_large_junked_locked(arena_t *arena, arena_chunk_t *chunk,
574void arena_dalloc_large_junked_locked(tsdn_t *tsdn, arena_t *arena,
575 arena_chunk_t *chunk, void *ptr);
576void arena_dalloc_large(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
558 void *ptr);
577 void *ptr);
559void arena_dalloc_large(tsd_t *tsd, arena_t *arena, arena_chunk_t *chunk,
560 void *ptr);
561#ifdef JEMALLOC_JET
562typedef void (arena_ralloc_junk_large_t)(void *, size_t, size_t);
563extern arena_ralloc_junk_large_t *arena_ralloc_junk_large;
564#endif
578#ifdef JEMALLOC_JET
579typedef void (arena_ralloc_junk_large_t)(void *, size_t, size_t);
580extern arena_ralloc_junk_large_t *arena_ralloc_junk_large;
581#endif
565bool arena_ralloc_no_move(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
566 size_t extra, bool zero);
582bool arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize,
583 size_t size, size_t extra, bool zero);
567void *arena_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize,
568 size_t size, size_t alignment, bool zero, tcache_t *tcache);
584void *arena_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize,
585 size_t size, size_t alignment, bool zero, tcache_t *tcache);
569dss_prec_t arena_dss_prec_get(arena_t *arena);
570bool arena_dss_prec_set(arena_t *arena, dss_prec_t dss_prec);
586dss_prec_t arena_dss_prec_get(tsdn_t *tsdn, arena_t *arena);
587bool arena_dss_prec_set(tsdn_t *tsdn, arena_t *arena, dss_prec_t dss_prec);
571ssize_t arena_lg_dirty_mult_default_get(void);
572bool arena_lg_dirty_mult_default_set(ssize_t lg_dirty_mult);
573ssize_t arena_decay_time_default_get(void);
574bool arena_decay_time_default_set(ssize_t decay_time);
588ssize_t arena_lg_dirty_mult_default_get(void);
589bool arena_lg_dirty_mult_default_set(ssize_t lg_dirty_mult);
590ssize_t arena_decay_time_default_get(void);
591bool arena_decay_time_default_set(ssize_t decay_time);
575void arena_basic_stats_merge(arena_t *arena, unsigned *nthreads,
592void arena_basic_stats_merge(tsdn_t *tsdn, arena_t *arena,
593 unsigned *nthreads, const char **dss, ssize_t *lg_dirty_mult,
594 ssize_t *decay_time, size_t *nactive, size_t *ndirty);
595void arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
576 const char **dss, ssize_t *lg_dirty_mult, ssize_t *decay_time,
596 const char **dss, ssize_t *lg_dirty_mult, ssize_t *decay_time,
577 size_t *nactive, size_t *ndirty);
578void arena_stats_merge(arena_t *arena, unsigned *nthreads, const char **dss,
579 ssize_t *lg_dirty_mult, ssize_t *decay_time, size_t *nactive,
580 size_t *ndirty, arena_stats_t *astats, malloc_bin_stats_t *bstats,
581 malloc_large_stats_t *lstats, malloc_huge_stats_t *hstats);
582unsigned arena_nthreads_get(arena_t *arena);
583void arena_nthreads_inc(arena_t *arena);
584void arena_nthreads_dec(arena_t *arena);
585arena_t *arena_new(unsigned ind);
597 size_t *nactive, size_t *ndirty, arena_stats_t *astats,
598 malloc_bin_stats_t *bstats, malloc_large_stats_t *lstats,
599 malloc_huge_stats_t *hstats);
600unsigned arena_nthreads_get(arena_t *arena, bool internal);
601void arena_nthreads_inc(arena_t *arena, bool internal);
602void arena_nthreads_dec(arena_t *arena, bool internal);
603arena_t *arena_new(tsdn_t *tsdn, unsigned ind);
586bool arena_boot(void);
604bool arena_boot(void);
587void arena_prefork(arena_t *arena);
588void arena_postfork_parent(arena_t *arena);
589void arena_postfork_child(arena_t *arena);
605void arena_prefork0(tsdn_t *tsdn, arena_t *arena);
606void arena_prefork1(tsdn_t *tsdn, arena_t *arena);
607void arena_prefork2(tsdn_t *tsdn, arena_t *arena);
608void arena_prefork3(tsdn_t *tsdn, arena_t *arena);
609void arena_postfork_parent(tsdn_t *tsdn, arena_t *arena);
610void arena_postfork_child(tsdn_t *tsdn, arena_t *arena);
590
591#endif /* JEMALLOC_H_EXTERNS */
592/******************************************************************************/
593#ifdef JEMALLOC_H_INLINES
594
595#ifndef JEMALLOC_ENABLE_INLINE
611
612#endif /* JEMALLOC_H_EXTERNS */
613/******************************************************************************/
614#ifdef JEMALLOC_H_INLINES
615
616#ifndef JEMALLOC_ENABLE_INLINE
596arena_chunk_map_bits_t *arena_bitselm_get(arena_chunk_t *chunk,
617arena_chunk_map_bits_t *arena_bitselm_get_mutable(arena_chunk_t *chunk,
597 size_t pageind);
618 size_t pageind);
598arena_chunk_map_misc_t *arena_miscelm_get(arena_chunk_t *chunk,
619const arena_chunk_map_bits_t *arena_bitselm_get_const(
620 const arena_chunk_t *chunk, size_t pageind);
621arena_chunk_map_misc_t *arena_miscelm_get_mutable(arena_chunk_t *chunk,
599 size_t pageind);
622 size_t pageind);
623const arena_chunk_map_misc_t *arena_miscelm_get_const(
624 const arena_chunk_t *chunk, size_t pageind);
600size_t arena_miscelm_to_pageind(const arena_chunk_map_misc_t *miscelm);
625size_t arena_miscelm_to_pageind(const arena_chunk_map_misc_t *miscelm);
601void *arena_miscelm_to_rpages(arena_chunk_map_misc_t *miscelm);
626void *arena_miscelm_to_rpages(const arena_chunk_map_misc_t *miscelm);
602arena_chunk_map_misc_t *arena_rd_to_miscelm(arena_runs_dirty_link_t *rd);
603arena_chunk_map_misc_t *arena_run_to_miscelm(arena_run_t *run);
627arena_chunk_map_misc_t *arena_rd_to_miscelm(arena_runs_dirty_link_t *rd);
628arena_chunk_map_misc_t *arena_run_to_miscelm(arena_run_t *run);
604size_t *arena_mapbitsp_get(arena_chunk_t *chunk, size_t pageind);
605size_t arena_mapbitsp_read(size_t *mapbitsp);
606size_t arena_mapbits_get(arena_chunk_t *chunk, size_t pageind);
629size_t *arena_mapbitsp_get_mutable(arena_chunk_t *chunk, size_t pageind);
630const size_t *arena_mapbitsp_get_const(const arena_chunk_t *chunk,
631 size_t pageind);
632size_t arena_mapbitsp_read(const size_t *mapbitsp);
633size_t arena_mapbits_get(const arena_chunk_t *chunk, size_t pageind);
607size_t arena_mapbits_size_decode(size_t mapbits);
634size_t arena_mapbits_size_decode(size_t mapbits);
608size_t arena_mapbits_unallocated_size_get(arena_chunk_t *chunk,
635size_t arena_mapbits_unallocated_size_get(const arena_chunk_t *chunk,
609 size_t pageind);
636 size_t pageind);
610size_t arena_mapbits_large_size_get(arena_chunk_t *chunk, size_t pageind);
611size_t arena_mapbits_small_runind_get(arena_chunk_t *chunk, size_t pageind);
612szind_t arena_mapbits_binind_get(arena_chunk_t *chunk, size_t pageind);
613size_t arena_mapbits_dirty_get(arena_chunk_t *chunk, size_t pageind);
614size_t arena_mapbits_unzeroed_get(arena_chunk_t *chunk, size_t pageind);
615size_t arena_mapbits_decommitted_get(arena_chunk_t *chunk, size_t pageind);
616size_t arena_mapbits_large_get(arena_chunk_t *chunk, size_t pageind);
617size_t arena_mapbits_allocated_get(arena_chunk_t *chunk, size_t pageind);
637size_t arena_mapbits_large_size_get(const arena_chunk_t *chunk,
638 size_t pageind);
639size_t arena_mapbits_small_runind_get(const arena_chunk_t *chunk,
640 size_t pageind);
641szind_t arena_mapbits_binind_get(const arena_chunk_t *chunk, size_t pageind);
642size_t arena_mapbits_dirty_get(const arena_chunk_t *chunk, size_t pageind);
643size_t arena_mapbits_unzeroed_get(const arena_chunk_t *chunk, size_t pageind);
644size_t arena_mapbits_decommitted_get(const arena_chunk_t *chunk,
645 size_t pageind);
646size_t arena_mapbits_large_get(const arena_chunk_t *chunk, size_t pageind);
647size_t arena_mapbits_allocated_get(const arena_chunk_t *chunk, size_t pageind);
618void arena_mapbitsp_write(size_t *mapbitsp, size_t mapbits);
619size_t arena_mapbits_size_encode(size_t size);
620void arena_mapbits_unallocated_set(arena_chunk_t *chunk, size_t pageind,
621 size_t size, size_t flags);
622void arena_mapbits_unallocated_size_set(arena_chunk_t *chunk, size_t pageind,
623 size_t size);
624void arena_mapbits_internal_set(arena_chunk_t *chunk, size_t pageind,
625 size_t flags);
626void arena_mapbits_large_set(arena_chunk_t *chunk, size_t pageind,
627 size_t size, size_t flags);
628void arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
629 szind_t binind);
630void arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind,
631 size_t runind, szind_t binind, size_t flags);
632void arena_metadata_allocated_add(arena_t *arena, size_t size);
633void arena_metadata_allocated_sub(arena_t *arena, size_t size);
634size_t arena_metadata_allocated_get(arena_t *arena);
635bool arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes);
636bool arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes);
648void arena_mapbitsp_write(size_t *mapbitsp, size_t mapbits);
649size_t arena_mapbits_size_encode(size_t size);
650void arena_mapbits_unallocated_set(arena_chunk_t *chunk, size_t pageind,
651 size_t size, size_t flags);
652void arena_mapbits_unallocated_size_set(arena_chunk_t *chunk, size_t pageind,
653 size_t size);
654void arena_mapbits_internal_set(arena_chunk_t *chunk, size_t pageind,
655 size_t flags);
656void arena_mapbits_large_set(arena_chunk_t *chunk, size_t pageind,
657 size_t size, size_t flags);
658void arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
659 szind_t binind);
660void arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind,
661 size_t runind, szind_t binind, size_t flags);
662void arena_metadata_allocated_add(arena_t *arena, size_t size);
663void arena_metadata_allocated_sub(arena_t *arena, size_t size);
664size_t arena_metadata_allocated_get(arena_t *arena);
665bool arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes);
666bool arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes);
637bool arena_prof_accum(arena_t *arena, uint64_t accumbytes);
667bool arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes);
638szind_t arena_ptr_small_binind_get(const void *ptr, size_t mapbits);
639szind_t arena_bin_index(arena_t *arena, arena_bin_t *bin);
640size_t arena_run_regind(arena_run_t *run, arena_bin_info_t *bin_info,
641 const void *ptr);
668szind_t arena_ptr_small_binind_get(const void *ptr, size_t mapbits);
669szind_t arena_bin_index(arena_t *arena, arena_bin_t *bin);
670size_t arena_run_regind(arena_run_t *run, arena_bin_info_t *bin_info,
671 const void *ptr);
642prof_tctx_t *arena_prof_tctx_get(const void *ptr);
643void arena_prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx);
644void arena_prof_tctx_reset(const void *ptr, size_t usize,
672prof_tctx_t *arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr);
673void arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
674 prof_tctx_t *tctx);
675void arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,
645 const void *old_ptr, prof_tctx_t *old_tctx);
676 const void *old_ptr, prof_tctx_t *old_tctx);
646void arena_decay_ticks(tsd_t *tsd, arena_t *arena, unsigned nticks);
647void arena_decay_tick(tsd_t *tsd, arena_t *arena);
648void *arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, szind_t ind,
677void arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks);
678void arena_decay_tick(tsdn_t *tsdn, arena_t *arena);
679void *arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind,
649 bool zero, tcache_t *tcache, bool slow_path);
650arena_t *arena_aalloc(const void *ptr);
680 bool zero, tcache_t *tcache, bool slow_path);
681arena_t *arena_aalloc(const void *ptr);
651size_t arena_salloc(const void *ptr, bool demote);
652void arena_dalloc(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path);
653void arena_sdalloc(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache);
682size_t arena_salloc(tsdn_t *tsdn, const void *ptr, bool demote);
683void arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path);
684void arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
685 bool slow_path);
654#endif
655
656#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
657# ifdef JEMALLOC_ARENA_INLINE_A
658JEMALLOC_ALWAYS_INLINE arena_chunk_map_bits_t *
686#endif
687
688#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
689# ifdef JEMALLOC_ARENA_INLINE_A
690JEMALLOC_ALWAYS_INLINE arena_chunk_map_bits_t *
659arena_bitselm_get(arena_chunk_t *chunk, size_t pageind)
691arena_bitselm_get_mutable(arena_chunk_t *chunk, size_t pageind)
660{
661
662 assert(pageind >= map_bias);
663 assert(pageind < chunk_npages);
664
665 return (&chunk->map_bits[pageind-map_bias]);
666}
667
692{
693
694 assert(pageind >= map_bias);
695 assert(pageind < chunk_npages);
696
697 return (&chunk->map_bits[pageind-map_bias]);
698}
699
700JEMALLOC_ALWAYS_INLINE const arena_chunk_map_bits_t *
701arena_bitselm_get_const(const arena_chunk_t *chunk, size_t pageind)
702{
703
704 return (arena_bitselm_get_mutable((arena_chunk_t *)chunk, pageind));
705}
706
668JEMALLOC_ALWAYS_INLINE arena_chunk_map_misc_t *
707JEMALLOC_ALWAYS_INLINE arena_chunk_map_misc_t *
669arena_miscelm_get(arena_chunk_t *chunk, size_t pageind)
708arena_miscelm_get_mutable(arena_chunk_t *chunk, size_t pageind)
670{
671
672 assert(pageind >= map_bias);
673 assert(pageind < chunk_npages);
674
675 return ((arena_chunk_map_misc_t *)((uintptr_t)chunk +
676 (uintptr_t)map_misc_offset) + pageind-map_bias);
677}
678
709{
710
711 assert(pageind >= map_bias);
712 assert(pageind < chunk_npages);
713
714 return ((arena_chunk_map_misc_t *)((uintptr_t)chunk +
715 (uintptr_t)map_misc_offset) + pageind-map_bias);
716}
717
718JEMALLOC_ALWAYS_INLINE const arena_chunk_map_misc_t *
719arena_miscelm_get_const(const arena_chunk_t *chunk, size_t pageind)
720{
721
722 return (arena_miscelm_get_mutable((arena_chunk_t *)chunk, pageind));
723}
724
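Note: the pattern throughout this hunk: each accessor splits into a _mutable worker that keeps the assertions and a _const wrapper that confines the cast away from const to one audited line. Read-only callers then compose without casts; a hypothetical example (page_mapbits() is essentially what the new arena_mapbits_get() below does):

	static size_t
	page_mapbits(const arena_chunk_t *chunk, size_t pageind)
	{
		/* No cast at the call site; constness is preserved. */
		return (arena_mapbitsp_read(arena_mapbitsp_get_const(chunk,
		    pageind)));
	}
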
679JEMALLOC_ALWAYS_INLINE size_t
680arena_miscelm_to_pageind(const arena_chunk_map_misc_t *miscelm)
681{
682 arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(miscelm);
683 size_t pageind = ((uintptr_t)miscelm - ((uintptr_t)chunk +
684 map_misc_offset)) / sizeof(arena_chunk_map_misc_t) + map_bias;
685
686 assert(pageind >= map_bias);
687 assert(pageind < chunk_npages);
688
689 return (pageind);
690}
691
692JEMALLOC_ALWAYS_INLINE void *
725JEMALLOC_ALWAYS_INLINE size_t
726arena_miscelm_to_pageind(const arena_chunk_map_misc_t *miscelm)
727{
728 arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(miscelm);
729 size_t pageind = ((uintptr_t)miscelm - ((uintptr_t)chunk +
730 map_misc_offset)) / sizeof(arena_chunk_map_misc_t) + map_bias;
731
732 assert(pageind >= map_bias);
733 assert(pageind < chunk_npages);
734
735 return (pageind);
736}
737
738JEMALLOC_ALWAYS_INLINE void *
693arena_miscelm_to_rpages(arena_chunk_map_misc_t *miscelm)
739arena_miscelm_to_rpages(const arena_chunk_map_misc_t *miscelm)
694{
695 arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(miscelm);
696 size_t pageind = arena_miscelm_to_pageind(miscelm);
697
698 return ((void *)((uintptr_t)chunk + (pageind << LG_PAGE)));
699}
700
701JEMALLOC_ALWAYS_INLINE arena_chunk_map_misc_t *

--- 16 unchanged lines hidden ---

718
719 assert(arena_miscelm_to_pageind(miscelm) >= map_bias);
720 assert(arena_miscelm_to_pageind(miscelm) < chunk_npages);
721
722 return (miscelm);
723}
724
725JEMALLOC_ALWAYS_INLINE size_t *
740{
741 arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(miscelm);
742 size_t pageind = arena_miscelm_to_pageind(miscelm);
743
744 return ((void *)((uintptr_t)chunk + (pageind << LG_PAGE)));
745}
746
747JEMALLOC_ALWAYS_INLINE arena_chunk_map_misc_t *

--- 16 unchanged lines hidden ---

764
765 assert(arena_miscelm_to_pageind(miscelm) >= map_bias);
766 assert(arena_miscelm_to_pageind(miscelm) < chunk_npages);
767
768 return (miscelm);
769}
770
771JEMALLOC_ALWAYS_INLINE size_t *
726arena_mapbitsp_get(arena_chunk_t *chunk, size_t pageind)
772arena_mapbitsp_get_mutable(arena_chunk_t *chunk, size_t pageind)
727{
728
773{
774
729 return (&arena_bitselm_get(chunk, pageind)->bits);
775 return (&arena_bitselm_get_mutable(chunk, pageind)->bits);
730}
731
776}
777
778JEMALLOC_ALWAYS_INLINE const size_t *
779arena_mapbitsp_get_const(const arena_chunk_t *chunk, size_t pageind)
780{
781
782 return (arena_mapbitsp_get_mutable((arena_chunk_t *)chunk, pageind));
783}
784
732JEMALLOC_ALWAYS_INLINE size_t
785JEMALLOC_ALWAYS_INLINE size_t
733arena_mapbitsp_read(size_t *mapbitsp)
786arena_mapbitsp_read(const size_t *mapbitsp)
734{
735
736 return (*mapbitsp);
737}
738
739JEMALLOC_ALWAYS_INLINE size_t
787{
788
789 return (*mapbitsp);
790}
791
792JEMALLOC_ALWAYS_INLINE size_t
740arena_mapbits_get(arena_chunk_t *chunk, size_t pageind)
793arena_mapbits_get(const arena_chunk_t *chunk, size_t pageind)
741{
742
794{
795
743 return (arena_mapbitsp_read(arena_mapbitsp_get(chunk, pageind)));
796 return (arena_mapbitsp_read(arena_mapbitsp_get_const(chunk, pageind)));
744}
745
746JEMALLOC_ALWAYS_INLINE size_t
747arena_mapbits_size_decode(size_t mapbits)
748{
749 size_t size;
750
751#if CHUNK_MAP_SIZE_SHIFT > 0
752 size = (mapbits & CHUNK_MAP_SIZE_MASK) >> CHUNK_MAP_SIZE_SHIFT;
753#elif CHUNK_MAP_SIZE_SHIFT == 0
754 size = mapbits & CHUNK_MAP_SIZE_MASK;
755#else
756 size = (mapbits & CHUNK_MAP_SIZE_MASK) << -CHUNK_MAP_SIZE_SHIFT;
757#endif
758
759 return (size);
760}
761
762JEMALLOC_ALWAYS_INLINE size_t
797}
798
799JEMALLOC_ALWAYS_INLINE size_t
800arena_mapbits_size_decode(size_t mapbits)
801{
802 size_t size;
803
804#if CHUNK_MAP_SIZE_SHIFT > 0
805 size = (mapbits & CHUNK_MAP_SIZE_MASK) >> CHUNK_MAP_SIZE_SHIFT;
806#elif CHUNK_MAP_SIZE_SHIFT == 0
807 size = mapbits & CHUNK_MAP_SIZE_MASK;
808#else
809 size = (mapbits & CHUNK_MAP_SIZE_MASK) << -CHUNK_MAP_SIZE_SHIFT;
810#endif
811
812 return (size);
813}
814
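Note: arena_mapbits_size_encode() (unchanged, further down) applies the inverse shift, so the pair round-trips any page-aligned size regardless of which CHUNK_MAP_SIZE_SHIFT branch is compiled in. A sketch of the invariant as a check (size_roundtrip_check() is hypothetical):

	static void
	size_roundtrip_check(size_t size)
	{
		assert((size & PAGE_MASK) == 0);
		/* Decoding an encoded size must reproduce it exactly. */
		assert(arena_mapbits_size_decode(arena_mapbits_size_encode(
		    size)) == size);
	}
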
815JEMALLOC_ALWAYS_INLINE size_t
763arena_mapbits_unallocated_size_get(arena_chunk_t *chunk, size_t pageind)
816arena_mapbits_unallocated_size_get(const arena_chunk_t *chunk, size_t pageind)
764{
765 size_t mapbits;
766
767 mapbits = arena_mapbits_get(chunk, pageind);
768 assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) == 0);
769 return (arena_mapbits_size_decode(mapbits));
770}
771
772JEMALLOC_ALWAYS_INLINE size_t
817{
818 size_t mapbits;
819
820 mapbits = arena_mapbits_get(chunk, pageind);
821 assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) == 0);
822 return (arena_mapbits_size_decode(mapbits));
823}
824
825JEMALLOC_ALWAYS_INLINE size_t
773arena_mapbits_large_size_get(arena_chunk_t *chunk, size_t pageind)
826arena_mapbits_large_size_get(const arena_chunk_t *chunk, size_t pageind)
774{
775 size_t mapbits;
776
777 mapbits = arena_mapbits_get(chunk, pageind);
778 assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) ==
779 (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED));
780 return (arena_mapbits_size_decode(mapbits));
781}
782
783JEMALLOC_ALWAYS_INLINE size_t
827{
828 size_t mapbits;
829
830 mapbits = arena_mapbits_get(chunk, pageind);
831 assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) ==
832 (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED));
833 return (arena_mapbits_size_decode(mapbits));
834}
835
836JEMALLOC_ALWAYS_INLINE size_t
784arena_mapbits_small_runind_get(arena_chunk_t *chunk, size_t pageind)
837arena_mapbits_small_runind_get(const arena_chunk_t *chunk, size_t pageind)
785{
786 size_t mapbits;
787
788 mapbits = arena_mapbits_get(chunk, pageind);
789 assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) ==
790 CHUNK_MAP_ALLOCATED);
791 return (mapbits >> CHUNK_MAP_RUNIND_SHIFT);
792}
793
794JEMALLOC_ALWAYS_INLINE szind_t
838{
839 size_t mapbits;
840
841 mapbits = arena_mapbits_get(chunk, pageind);
842 assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) ==
843 CHUNK_MAP_ALLOCATED);
844 return (mapbits >> CHUNK_MAP_RUNIND_SHIFT);
845}
846
847JEMALLOC_ALWAYS_INLINE szind_t
795arena_mapbits_binind_get(arena_chunk_t *chunk, size_t pageind)
848arena_mapbits_binind_get(const arena_chunk_t *chunk, size_t pageind)
796{
797 size_t mapbits;
798 szind_t binind;
799
800 mapbits = arena_mapbits_get(chunk, pageind);
801 binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;
802 assert(binind < NBINS || binind == BININD_INVALID);
803 return (binind);
804}
805
806JEMALLOC_ALWAYS_INLINE size_t
849{
850 size_t mapbits;
851 szind_t binind;
852
853 mapbits = arena_mapbits_get(chunk, pageind);
854 binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;
855 assert(binind < NBINS || binind == BININD_INVALID);
856 return (binind);
857}
858
859JEMALLOC_ALWAYS_INLINE size_t
807arena_mapbits_dirty_get(arena_chunk_t *chunk, size_t pageind)
860arena_mapbits_dirty_get(const arena_chunk_t *chunk, size_t pageind)
808{
809 size_t mapbits;
810
811 mapbits = arena_mapbits_get(chunk, pageind);
812 assert((mapbits & CHUNK_MAP_DECOMMITTED) == 0 || (mapbits &
813 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
814 return (mapbits & CHUNK_MAP_DIRTY);
815}
816
817JEMALLOC_ALWAYS_INLINE size_t
861{
862 size_t mapbits;
863
864 mapbits = arena_mapbits_get(chunk, pageind);
865 assert((mapbits & CHUNK_MAP_DECOMMITTED) == 0 || (mapbits &
866 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
867 return (mapbits & CHUNK_MAP_DIRTY);
868}
869
870JEMALLOC_ALWAYS_INLINE size_t
818arena_mapbits_unzeroed_get(arena_chunk_t *chunk, size_t pageind)
871arena_mapbits_unzeroed_get(const arena_chunk_t *chunk, size_t pageind)
819{
820 size_t mapbits;
821
822 mapbits = arena_mapbits_get(chunk, pageind);
823 assert((mapbits & CHUNK_MAP_DECOMMITTED) == 0 || (mapbits &
824 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
825 return (mapbits & CHUNK_MAP_UNZEROED);
826}
827
828JEMALLOC_ALWAYS_INLINE size_t
872{
873 size_t mapbits;
874
875 mapbits = arena_mapbits_get(chunk, pageind);
876 assert((mapbits & CHUNK_MAP_DECOMMITTED) == 0 || (mapbits &
877 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
878 return (mapbits & CHUNK_MAP_UNZEROED);
879}
880
881JEMALLOC_ALWAYS_INLINE size_t
829arena_mapbits_decommitted_get(arena_chunk_t *chunk, size_t pageind)
882arena_mapbits_decommitted_get(const arena_chunk_t *chunk, size_t pageind)
830{
831 size_t mapbits;
832
833 mapbits = arena_mapbits_get(chunk, pageind);
834 assert((mapbits & CHUNK_MAP_DECOMMITTED) == 0 || (mapbits &
835 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
836 return (mapbits & CHUNK_MAP_DECOMMITTED);
837}
838
839JEMALLOC_ALWAYS_INLINE size_t
883{
884 size_t mapbits;
885
886 mapbits = arena_mapbits_get(chunk, pageind);
887 assert((mapbits & CHUNK_MAP_DECOMMITTED) == 0 || (mapbits &
888 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
889 return (mapbits & CHUNK_MAP_DECOMMITTED);
890}
891
892JEMALLOC_ALWAYS_INLINE size_t
840arena_mapbits_large_get(arena_chunk_t *chunk, size_t pageind)
893arena_mapbits_large_get(const arena_chunk_t *chunk, size_t pageind)
841{
842 size_t mapbits;
843
844 mapbits = arena_mapbits_get(chunk, pageind);
845 return (mapbits & CHUNK_MAP_LARGE);
846}
847
848JEMALLOC_ALWAYS_INLINE size_t
894{
895 size_t mapbits;
896
897 mapbits = arena_mapbits_get(chunk, pageind);
898 return (mapbits & CHUNK_MAP_LARGE);
899}
900
901JEMALLOC_ALWAYS_INLINE size_t
849arena_mapbits_allocated_get(arena_chunk_t *chunk, size_t pageind)
902arena_mapbits_allocated_get(const arena_chunk_t *chunk, size_t pageind)
850{
851 size_t mapbits;
852
853 mapbits = arena_mapbits_get(chunk, pageind);
854 return (mapbits & CHUNK_MAP_ALLOCATED);
855}
856
857JEMALLOC_ALWAYS_INLINE void

--- 19 unchanged lines hidden ---

877 assert((mapbits & ~CHUNK_MAP_SIZE_MASK) == 0);
878 return (mapbits);
879}
880
881JEMALLOC_ALWAYS_INLINE void
882arena_mapbits_unallocated_set(arena_chunk_t *chunk, size_t pageind, size_t size,
883 size_t flags)
884{
903{
904 size_t mapbits;
905
906 mapbits = arena_mapbits_get(chunk, pageind);
907 return (mapbits & CHUNK_MAP_ALLOCATED);
908}
909
910JEMALLOC_ALWAYS_INLINE void

--- 19 unchanged lines hidden ---

930 assert((mapbits & ~CHUNK_MAP_SIZE_MASK) == 0);
931 return (mapbits);
932}
933
934JEMALLOC_ALWAYS_INLINE void
935arena_mapbits_unallocated_set(arena_chunk_t *chunk, size_t pageind, size_t size,
936 size_t flags)
937{
885 size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
938 size_t *mapbitsp = arena_mapbitsp_get_mutable(chunk, pageind);
886
887 assert((size & PAGE_MASK) == 0);
888 assert((flags & CHUNK_MAP_FLAGS_MASK) == flags);
889 assert((flags & CHUNK_MAP_DECOMMITTED) == 0 || (flags &
890 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
891 arena_mapbitsp_write(mapbitsp, arena_mapbits_size_encode(size) |
892 CHUNK_MAP_BININD_INVALID | flags);
893}
894
895JEMALLOC_ALWAYS_INLINE void
896arena_mapbits_unallocated_size_set(arena_chunk_t *chunk, size_t pageind,
897 size_t size)
898{
939
940 assert((size & PAGE_MASK) == 0);
941 assert((flags & CHUNK_MAP_FLAGS_MASK) == flags);
942 assert((flags & CHUNK_MAP_DECOMMITTED) == 0 || (flags &
943 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
944 arena_mapbitsp_write(mapbitsp, arena_mapbits_size_encode(size) |
945 CHUNK_MAP_BININD_INVALID | flags);
946}
947
948JEMALLOC_ALWAYS_INLINE void
949arena_mapbits_unallocated_size_set(arena_chunk_t *chunk, size_t pageind,
950 size_t size)
951{
899 size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
952 size_t *mapbitsp = arena_mapbitsp_get_mutable(chunk, pageind);
900 size_t mapbits = arena_mapbitsp_read(mapbitsp);
901
902 assert((size & PAGE_MASK) == 0);
903 assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) == 0);
904 arena_mapbitsp_write(mapbitsp, arena_mapbits_size_encode(size) |
905 (mapbits & ~CHUNK_MAP_SIZE_MASK));
906}
907
908JEMALLOC_ALWAYS_INLINE void
909arena_mapbits_internal_set(arena_chunk_t *chunk, size_t pageind, size_t flags)
910{
953 size_t mapbits = arena_mapbitsp_read(mapbitsp);
954
955 assert((size & PAGE_MASK) == 0);
956 assert((mapbits & (CHUNK_MAP_LARGE|CHUNK_MAP_ALLOCATED)) == 0);
957 arena_mapbitsp_write(mapbitsp, arena_mapbits_size_encode(size) |
958 (mapbits & ~CHUNK_MAP_SIZE_MASK));
959}
960
961JEMALLOC_ALWAYS_INLINE void
962arena_mapbits_internal_set(arena_chunk_t *chunk, size_t pageind, size_t flags)
963{
911 size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
964 size_t *mapbitsp = arena_mapbitsp_get_mutable(chunk, pageind);
912
913 assert((flags & CHUNK_MAP_UNZEROED) == flags);
914 arena_mapbitsp_write(mapbitsp, flags);
915}
916
917JEMALLOC_ALWAYS_INLINE void
918arena_mapbits_large_set(arena_chunk_t *chunk, size_t pageind, size_t size,
919 size_t flags)
920{
965
966 assert((flags & CHUNK_MAP_UNZEROED) == flags);
967 arena_mapbitsp_write(mapbitsp, flags);
968}
969
970JEMALLOC_ALWAYS_INLINE void
971arena_mapbits_large_set(arena_chunk_t *chunk, size_t pageind, size_t size,
972 size_t flags)
973{
921 size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
974 size_t *mapbitsp = arena_mapbitsp_get_mutable(chunk, pageind);
922
923 assert((size & PAGE_MASK) == 0);
924 assert((flags & CHUNK_MAP_FLAGS_MASK) == flags);
925 assert((flags & CHUNK_MAP_DECOMMITTED) == 0 || (flags &
926 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
927 arena_mapbitsp_write(mapbitsp, arena_mapbits_size_encode(size) |
928 CHUNK_MAP_BININD_INVALID | flags | CHUNK_MAP_LARGE |
929 CHUNK_MAP_ALLOCATED);
930}
931
932JEMALLOC_ALWAYS_INLINE void
933arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
934 szind_t binind)
935{
975
976 assert((size & PAGE_MASK) == 0);
977 assert((flags & CHUNK_MAP_FLAGS_MASK) == flags);
978 assert((flags & CHUNK_MAP_DECOMMITTED) == 0 || (flags &
979 (CHUNK_MAP_DIRTY|CHUNK_MAP_UNZEROED)) == 0);
980 arena_mapbitsp_write(mapbitsp, arena_mapbits_size_encode(size) |
981 CHUNK_MAP_BININD_INVALID | flags | CHUNK_MAP_LARGE |
982 CHUNK_MAP_ALLOCATED);
983}
984
985JEMALLOC_ALWAYS_INLINE void
986arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
987 szind_t binind)
988{
936 size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
989 size_t *mapbitsp = arena_mapbitsp_get_mutable(chunk, pageind);
937 size_t mapbits = arena_mapbitsp_read(mapbitsp);
938
939 assert(binind <= BININD_INVALID);
940 assert(arena_mapbits_large_size_get(chunk, pageind) == LARGE_MINCLASS +
941 large_pad);
942 arena_mapbitsp_write(mapbitsp, (mapbits & ~CHUNK_MAP_BININD_MASK) |
943 (binind << CHUNK_MAP_BININD_SHIFT));
944}
945
946JEMALLOC_ALWAYS_INLINE void
947arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind, size_t runind,
948 szind_t binind, size_t flags)
949{
990 size_t mapbits = arena_mapbitsp_read(mapbitsp);
991
992 assert(binind <= BININD_INVALID);
993 assert(arena_mapbits_large_size_get(chunk, pageind) == LARGE_MINCLASS +
994 large_pad);
995 arena_mapbitsp_write(mapbitsp, (mapbits & ~CHUNK_MAP_BININD_MASK) |
996 (binind << CHUNK_MAP_BININD_SHIFT));
997}
998
999JEMALLOC_ALWAYS_INLINE void
1000arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind, size_t runind,
1001 szind_t binind, size_t flags)
1002{
950 size_t *mapbitsp = arena_mapbitsp_get(chunk, pageind);
1003 size_t *mapbitsp = arena_mapbitsp_get_mutable(chunk, pageind);
951
952 assert(binind < BININD_INVALID);
953 assert(pageind - runind >= map_bias);
954 assert((flags & CHUNK_MAP_UNZEROED) == flags);
955 arena_mapbitsp_write(mapbitsp, (runind << CHUNK_MAP_RUNIND_SHIFT) |
956 (binind << CHUNK_MAP_BININD_SHIFT) | flags | CHUNK_MAP_ALLOCATED);
957}
958

--- 40 unchanged lines hidden ---

999 cassert(config_prof);
1000
1001 if (likely(prof_interval == 0))
1002 return (false);
1003 return (arena_prof_accum_impl(arena, accumbytes));
1004}
1005
1006JEMALLOC_INLINE bool
1004
1005 assert(binind < BININD_INVALID);
1006 assert(pageind - runind >= map_bias);
1007 assert((flags & CHUNK_MAP_UNZEROED) == flags);
1008 arena_mapbitsp_write(mapbitsp, (runind << CHUNK_MAP_RUNIND_SHIFT) |
1009 (binind << CHUNK_MAP_BININD_SHIFT) | flags | CHUNK_MAP_ALLOCATED);
1010}
1011

--- 40 unchanged lines hidden ---

1052 cassert(config_prof);
1053
1054 if (likely(prof_interval == 0))
1055 return (false);
1056 return (arena_prof_accum_impl(arena, accumbytes));
1057}
1058
1059JEMALLOC_INLINE bool
1007arena_prof_accum(arena_t *arena, uint64_t accumbytes)
1060arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes)
1008{
1009
1010 cassert(config_prof);
1011
1012 if (likely(prof_interval == 0))
1013 return (false);
1014
1015 {
1016 bool ret;
1017
1061{
1062
1063 cassert(config_prof);
1064
1065 if (likely(prof_interval == 0))
1066 return (false);
1067
1068 {
1069 bool ret;
1070
1018 malloc_mutex_lock(&arena->lock);
1071 malloc_mutex_lock(tsdn, &arena->lock);
1019 ret = arena_prof_accum_impl(arena, accumbytes);
1072 ret = arena_prof_accum_impl(arena, accumbytes);
1020 malloc_mutex_unlock(&arena->lock);
1073 malloc_mutex_unlock(tsdn, &arena->lock);
1021 return (ret);
1022 }
1023}
1024
1025JEMALLOC_ALWAYS_INLINE szind_t
1026arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
1027{
1028 szind_t binind;
1029
1030 binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;
1031
1032 if (config_debug) {
1033 arena_chunk_t *chunk;
1034 arena_t *arena;
1035 size_t pageind;
1036 size_t actual_mapbits;
1037 size_t rpages_ind;
1074 return (ret);
1075 }
1076}
1077
1078JEMALLOC_ALWAYS_INLINE szind_t
1079arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
1080{
1081 szind_t binind;
1082
1083 binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;
1084
1085 if (config_debug) {
1086 arena_chunk_t *chunk;
1087 arena_t *arena;
1088 size_t pageind;
1089 size_t actual_mapbits;
1090 size_t rpages_ind;
1038 arena_run_t *run;
1091 const arena_run_t *run;
1039 arena_bin_t *bin;
1040 szind_t run_binind, actual_binind;
1041 arena_bin_info_t *bin_info;
1092 arena_bin_t *bin;
1093 szind_t run_binind, actual_binind;
1094 arena_bin_info_t *bin_info;
1042 arena_chunk_map_misc_t *miscelm;
1043 void *rpages;
1095 const arena_chunk_map_misc_t *miscelm;
1096 const void *rpages;
1044
1045 assert(binind != BININD_INVALID);
1046 assert(binind < NBINS);
1047 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1048 arena = extent_node_arena_get(&chunk->node);
1049 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
1050 actual_mapbits = arena_mapbits_get(chunk, pageind);
1051 assert(mapbits == actual_mapbits);
1052 assert(arena_mapbits_large_get(chunk, pageind) == 0);
1053 assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
1054 rpages_ind = pageind - arena_mapbits_small_runind_get(chunk,
1055 pageind);
1097
1098 assert(binind != BININD_INVALID);
1099 assert(binind < NBINS);
1100 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1101 arena = extent_node_arena_get(&chunk->node);
1102 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
1103 actual_mapbits = arena_mapbits_get(chunk, pageind);
1104 assert(mapbits == actual_mapbits);
1105 assert(arena_mapbits_large_get(chunk, pageind) == 0);
1106 assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
1107 rpages_ind = pageind - arena_mapbits_small_runind_get(chunk,
1108 pageind);
1056 miscelm = arena_miscelm_get(chunk, rpages_ind);
1109 miscelm = arena_miscelm_get_const(chunk, rpages_ind);
1057 run = &miscelm->run;
1058 run_binind = run->binind;
1059 bin = &arena->bins[run_binind];
1060 actual_binind = (szind_t)(bin - arena->bins);
1061 assert(run_binind == actual_binind);
1062 bin_info = &arena_bin_info[actual_binind];
1063 rpages = arena_miscelm_to_rpages(miscelm);
1064 assert(((uintptr_t)ptr - ((uintptr_t)rpages +

--- 83 unchanged lines hidden ---

1148 }
1149 assert(diff == regind * interval);
1150 assert(regind < bin_info->nregs);
1151
1152 return (regind);
1153}
1154
1155JEMALLOC_INLINE prof_tctx_t *
1110 run = &miscelm->run;
1111 run_binind = run->binind;
1112 bin = &arena->bins[run_binind];
1113 actual_binind = (szind_t)(bin - arena->bins);
1114 assert(run_binind == actual_binind);
1115 bin_info = &arena_bin_info[actual_binind];
1116 rpages = arena_miscelm_to_rpages(miscelm);
1117 assert(((uintptr_t)ptr - ((uintptr_t)rpages +

--- 83 unchanged lines hidden ---

1201 }
1202 assert(diff == regind * interval);
1203 assert(regind < bin_info->nregs);
1204
1205 return (regind);
1206}
1207
1208JEMALLOC_INLINE prof_tctx_t *
1156arena_prof_tctx_get(const void *ptr)
1209arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr)
1157{
1158 prof_tctx_t *ret;
1159 arena_chunk_t *chunk;
1160
1161 cassert(config_prof);
1162 assert(ptr != NULL);
1163
1164 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1165 if (likely(chunk != ptr)) {
1166 size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
1167 size_t mapbits = arena_mapbits_get(chunk, pageind);
1168 assert((mapbits & CHUNK_MAP_ALLOCATED) != 0);
1169 if (likely((mapbits & CHUNK_MAP_LARGE) == 0))
1170 ret = (prof_tctx_t *)(uintptr_t)1U;
1171 else {
1210{
1211 prof_tctx_t *ret;
1212 arena_chunk_t *chunk;
1213
1214 cassert(config_prof);
1215 assert(ptr != NULL);
1216
1217 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1218 if (likely(chunk != ptr)) {
1219 size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
1220 size_t mapbits = arena_mapbits_get(chunk, pageind);
1221 assert((mapbits & CHUNK_MAP_ALLOCATED) != 0);
1222 if (likely((mapbits & CHUNK_MAP_LARGE) == 0))
1223 ret = (prof_tctx_t *)(uintptr_t)1U;
1224 else {
1172 arena_chunk_map_misc_t *elm = arena_miscelm_get(chunk,
1173 pageind);
1225 arena_chunk_map_misc_t *elm =
1226 arena_miscelm_get_mutable(chunk, pageind);
1174 ret = atomic_read_p(&elm->prof_tctx_pun);
1175 }
1176 } else
1227 ret = atomic_read_p(&elm->prof_tctx_pun);
1228 }
1229 } else
1177 ret = huge_prof_tctx_get(ptr);
1230 ret = huge_prof_tctx_get(tsdn, ptr);
1178
1179 return (ret);
1180}
1181
1182JEMALLOC_INLINE void
1231
1232 return (ret);
1233}
1234
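Note: the prof_tctx_pun union member (declared in the misc map struct near the top) exists so this load can go through the void * atomics while call sites keep a typed view; both members alias the same storage. A hypothetical wrapper making that explicit (tctx_load() is not from the patch):

	static prof_tctx_t *
	tctx_load(arena_chunk_map_misc_t *elm)
	{
		/* Atomic load via the pun; the typed member names the same bits. */
		return ((prof_tctx_t *)atomic_read_p(&elm->prof_tctx_pun));
	}
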
1235JEMALLOC_INLINE void
1183arena_prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx)
1236arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
1237 prof_tctx_t *tctx)
1184{
1185 arena_chunk_t *chunk;
1186
1187 cassert(config_prof);
1188 assert(ptr != NULL);
1189
1190 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1191 if (likely(chunk != ptr)) {
1192 size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
1193
1194 assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
1195
1196 if (unlikely(usize > SMALL_MAXCLASS || (uintptr_t)tctx >
1197 (uintptr_t)1U)) {
1198 arena_chunk_map_misc_t *elm;
1199
1200 assert(arena_mapbits_large_get(chunk, pageind) != 0);
1201
1238{
1239 arena_chunk_t *chunk;
1240
1241 cassert(config_prof);
1242 assert(ptr != NULL);
1243
1244 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1245 if (likely(chunk != ptr)) {
1246 size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
1247
1248 assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
1249
1250 if (unlikely(usize > SMALL_MAXCLASS || (uintptr_t)tctx >
1251 (uintptr_t)1U)) {
1252 arena_chunk_map_misc_t *elm;
1253
1254 assert(arena_mapbits_large_get(chunk, pageind) != 0);
1255
1202 elm = arena_miscelm_get(chunk, pageind);
1256 elm = arena_miscelm_get_mutable(chunk, pageind);
1203 atomic_write_p(&elm->prof_tctx_pun, tctx);
1204 } else {
1205 /*
1206 * tctx must always be initialized for large runs.
1207 * Assert that the surrounding conditional logic is
1208 * equivalent to checking whether ptr refers to a large
1209 * run.
1210 */
1211 assert(arena_mapbits_large_get(chunk, pageind) == 0);
1212 }
1213 } else
1257 atomic_write_p(&elm->prof_tctx_pun, tctx);
1258 } else {
1259 /*
1260 * tctx must always be initialized for large runs.
1261 * Assert that the surrounding conditional logic is
1262 * equivalent to checking whether ptr refers to a large
1263 * run.
1264 */
1265 assert(arena_mapbits_large_get(chunk, pageind) == 0);
1266 }
1267 } else
1214 huge_prof_tctx_set(ptr, tctx);
1268 huge_prof_tctx_set(tsdn, ptr, tctx);
1215}
1216
1217JEMALLOC_INLINE void
1269}
1270
1271JEMALLOC_INLINE void
1218arena_prof_tctx_reset(const void *ptr, size_t usize, const void *old_ptr,
1219 prof_tctx_t *old_tctx)
1272arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,
1273 const void *old_ptr, prof_tctx_t *old_tctx)
1220{
1221
1222 cassert(config_prof);
1223 assert(ptr != NULL);
1224
1225 if (unlikely(usize > SMALL_MAXCLASS || (ptr == old_ptr &&
1226 (uintptr_t)old_tctx > (uintptr_t)1U))) {
1227 arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1228 if (likely(chunk != ptr)) {
1229 size_t pageind;
1230 arena_chunk_map_misc_t *elm;
1231
1232 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
1233 LG_PAGE;
1234 assert(arena_mapbits_allocated_get(chunk, pageind) !=
1235 0);
1236 assert(arena_mapbits_large_get(chunk, pageind) != 0);
1237
1274{
1275
1276 cassert(config_prof);
1277 assert(ptr != NULL);
1278
1279 if (unlikely(usize > SMALL_MAXCLASS || (ptr == old_ptr &&
1280 (uintptr_t)old_tctx > (uintptr_t)1U))) {
1281 arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1282 if (likely(chunk != ptr)) {
1283 size_t pageind;
1284 arena_chunk_map_misc_t *elm;
1285
1286 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
1287 LG_PAGE;
1288 assert(arena_mapbits_allocated_get(chunk, pageind) !=
1289 0);
1290 assert(arena_mapbits_large_get(chunk, pageind) != 0);
1291
1238 elm = arena_miscelm_get(chunk, pageind);
1292 elm = arena_miscelm_get_mutable(chunk, pageind);
1239 atomic_write_p(&elm->prof_tctx_pun,
1240 (prof_tctx_t *)(uintptr_t)1U);
1241 } else
1293 atomic_write_p(&elm->prof_tctx_pun,
1294 (prof_tctx_t *)(uintptr_t)1U);
1295 } else
1242 huge_prof_tctx_reset(ptr);
1296 huge_prof_tctx_reset(tsdn, ptr);
1243 }
1244}
1245
1246JEMALLOC_ALWAYS_INLINE void
1297 }
1298}
1299
1300JEMALLOC_ALWAYS_INLINE void
1247arena_decay_ticks(tsd_t *tsd, arena_t *arena, unsigned nticks)
1301arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks)
1248{
1302{
1303 tsd_t *tsd;
1249 ticker_t *decay_ticker;
1250
1304 ticker_t *decay_ticker;
1305
1251 if (unlikely(tsd == NULL))
1306 if (unlikely(tsdn_null(tsdn)))
1252 return;
1307 return;
1308 tsd = tsdn_tsd(tsdn);
1253 decay_ticker = decay_ticker_get(tsd, arena->ind);
1254 if (unlikely(decay_ticker == NULL))
1255 return;
1256 if (unlikely(ticker_ticks(decay_ticker, nticks)))
1309 decay_ticker = decay_ticker_get(tsd, arena->ind);
1310 if (unlikely(decay_ticker == NULL))
1311 return;
1312 if (unlikely(ticker_ticks(decay_ticker, nticks)))
1257 arena_purge(arena, false);
1313 arena_purge(tsdn, arena, false);
1258}
1259
1260JEMALLOC_ALWAYS_INLINE void
1314}
1315
1316JEMALLOC_ALWAYS_INLINE void
1261arena_decay_tick(tsd_t *tsd, arena_t *arena)
1317arena_decay_tick(tsdn_t *tsdn, arena_t *arena)
1262{
1263
1318{
1319
1264 arena_decay_ticks(tsd, arena, 1);
1320 arena_decay_ticks(tsdn, arena, 1);
1265}
1266
1267JEMALLOC_ALWAYS_INLINE void *
1321}
1322
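Note: the two inlines above are where DECAY_NTICKS_PER_UPDATE from the top of this header takes effect: the per-arena ticker (presumably initialized with that period) fires after the configured number of event ticks, at which point arena_purge() runs. A hypothetical call site:

	static void
	on_alloc_event(tsdn_t *tsdn, arena_t *arena)
	{
		/* One event; purging triggers once the ticker period elapses. */
		arena_decay_tick(tsdn, arena);
	}
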
1323JEMALLOC_ALWAYS_INLINE void *
1268arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, szind_t ind, bool zero,
1324arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
1269 tcache_t *tcache, bool slow_path)
1270{
1271
1325 tcache_t *tcache, bool slow_path)
1326{
1327
1328 assert(!tsdn_null(tsdn) || tcache == NULL);
1272 assert(size != 0);
1273
1274 if (likely(tcache != NULL)) {
1275 if (likely(size <= SMALL_MAXCLASS)) {
1329 assert(size != 0);
1330
1331 if (likely(tcache != NULL)) {
1332 if (likely(size <= SMALL_MAXCLASS)) {
1276 return (tcache_alloc_small(tsd, arena, tcache, size,
1277 ind, zero, slow_path));
1333 return (tcache_alloc_small(tsdn_tsd(tsdn), arena,
1334 tcache, size, ind, zero, slow_path));
1278 }
1279 if (likely(size <= tcache_maxclass)) {
1335 }
1336 if (likely(size <= tcache_maxclass)) {
1280 return (tcache_alloc_large(tsd, arena, tcache, size,
1281 ind, zero, slow_path));
1337 return (tcache_alloc_large(tsdn_tsd(tsdn), arena,
1338 tcache, size, ind, zero, slow_path));
1282 }
1283 /* (size > tcache_maxclass) case falls through. */
1284 assert(size > tcache_maxclass);
1285 }
1286
1339 }
1340 /* (size > tcache_maxclass) case falls through. */
1341 assert(size > tcache_maxclass);
1342 }
1343
1287 return (arena_malloc_hard(tsd, arena, size, ind, zero, tcache));
1344 return (arena_malloc_hard(tsdn, arena, size, ind, zero));
1288}
1289
1290JEMALLOC_ALWAYS_INLINE arena_t *
1291arena_aalloc(const void *ptr)
1292{
1293 arena_chunk_t *chunk;
1294
1295 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1296 if (likely(chunk != ptr))
1297 return (extent_node_arena_get(&chunk->node));
1298 else
1299 return (huge_aalloc(ptr));
1300}
1301
1302/* Return the size of the allocation pointed to by ptr. */
1303JEMALLOC_ALWAYS_INLINE size_t
1345}
1346
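arena_malloc() gains the precondition assert(!tsdn_null(tsdn) || tcache
== NULL): the tcache fast paths require a full tsd_t, so a NULL tsdn is
legal only for tcache-free calls, and arena_malloc_hard() accordingly
drops its tcache parameter. An illustrative caller honoring that
invariant, assuming this header's declarations (the wrapper name is
hypothetical):

/*
 * Allocate without a thread cache; passing a NULL tsdn is permitted
 * here precisely because tcache is NULL.
 */
static void *
alloc_no_tcache(arena_t *arena, size_t size, szind_t ind)
{

	return (arena_malloc(NULL, arena, size, ind, false, NULL, true));
}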
1347JEMALLOC_ALWAYS_INLINE arena_t *
1348arena_aalloc(const void *ptr)
1349{
1350 arena_chunk_t *chunk;
1351
1352 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1353 if (likely(chunk != ptr))
1354 return (extent_node_arena_get(&chunk->node));
1355 else
1356 return (huge_aalloc(ptr));
1357}
1358
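arena_aalloc() is unchanged, but it illustrates the dispatch idiom this
whole block relies on: CHUNK_ADDR2BASE() masks a pointer down to its
chunk base, and a pointer that equals its own base is a huge allocation
rather than an interior arena allocation. A sketch of the mask under an
assumed 2 MiB chunk size (jemalloc computes the real mask from its
configured chunk size; the names here are illustrative):

#include <stdint.h>

#define SKETCH_CHUNK ((uintptr_t)1 << 21)	/* assumed 2 MiB chunks */
#define SKETCH_ADDR2BASE(a)						\
	((void *)((uintptr_t)(a) & ~(SKETCH_CHUNK - 1)))

static int
ptr_is_huge(const void *ptr)
{

	/* Huge allocations begin exactly at a chunk boundary. */
	return (SKETCH_ADDR2BASE(ptr) == ptr);
}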
1359/* Return the size of the allocation pointed to by ptr. */
1360JEMALLOC_ALWAYS_INLINE size_t
1304arena_salloc(const void *ptr, bool demote)
1361arena_salloc(tsdn_t *tsdn, const void *ptr, bool demote)
1305{
1306 size_t ret;
1307 arena_chunk_t *chunk;
1308 size_t pageind;
1309 szind_t binind;
1310
1311 assert(ptr != NULL);
1312

--- 26 unchanged lines hidden (view full) ---

1339 * object).
1340 */
1341 assert(arena_mapbits_large_get(chunk, pageind) != 0 ||
1342 arena_ptr_small_binind_get(ptr,
1343 arena_mapbits_get(chunk, pageind)) == binind);
1344 ret = index2size(binind);
1345 }
1346 } else
1362{
1363 size_t ret;
1364 arena_chunk_t *chunk;
1365 size_t pageind;
1366 szind_t binind;
1367
1368 assert(ptr != NULL);
1369

--- 26 unchanged lines hidden (view full) ---

1396 * object).
1397 */
1398 assert(arena_mapbits_large_get(chunk, pageind) != 0 ||
1399 arena_ptr_small_binind_get(ptr,
1400 arena_mapbits_get(chunk, pageind)) == binind);
1401 ret = index2size(binind);
1402 }
1403 } else
1347 ret = huge_salloc(ptr);
1404 ret = huge_salloc(tsdn, ptr);
1348
1349 return (ret);
1350}
1351
1352JEMALLOC_ALWAYS_INLINE void
1405
1406 return (ret);
1407}
1408
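arena_salloc() threads the tsdn through solely so the huge path can
reach huge_salloc(tsdn, ptr); the chunk-resident cases still read the
map bits without it. Illustrative usage under that signature (the
wrapper name is hypothetical):

/*
 * Report the usable size of a live allocation. demote=false returns
 * the size as currently mapped, i.e. without undoing heap profiling's
 * small-to-large size-class promotion.
 */
static size_t
usable_size(tsdn_t *tsdn, const void *ptr)
{

	return (arena_salloc(tsdn, ptr, false));
}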
1409JEMALLOC_ALWAYS_INLINE void
1353arena_dalloc(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path)
1410arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path)
1354{
1355 arena_chunk_t *chunk;
1356 size_t pageind, mapbits;
1357
1411{
1412 arena_chunk_t *chunk;
1413 size_t pageind, mapbits;
1414
1415 assert(!tsdn_null(tsdn) || tcache == NULL);
1358 assert(ptr != NULL);
1359
1360 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1361 if (likely(chunk != ptr)) {
1362 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
1363 mapbits = arena_mapbits_get(chunk, pageind);
1364 assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
1365 if (likely((mapbits & CHUNK_MAP_LARGE) == 0)) {
1366 /* Small allocation. */
1367 if (likely(tcache != NULL)) {
1368 szind_t binind = arena_ptr_small_binind_get(ptr,
1369 mapbits);
1416 assert(ptr != NULL);
1417
1418 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1419 if (likely(chunk != ptr)) {
1420 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
1421 mapbits = arena_mapbits_get(chunk, pageind);
1422 assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
1423 if (likely((mapbits & CHUNK_MAP_LARGE) == 0)) {
1424 /* Small allocation. */
1425 if (likely(tcache != NULL)) {
1426 szind_t binind = arena_ptr_small_binind_get(ptr,
1427 mapbits);
1370 tcache_dalloc_small(tsd, tcache, ptr, binind,
1371 slow_path);
1428 tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr,
1429 binind, slow_path);
1372 } else {
1430 } else {
1373 arena_dalloc_small(tsd, extent_node_arena_get(
1374 &chunk->node), chunk, ptr, pageind);
1431 arena_dalloc_small(tsdn,
1432 extent_node_arena_get(&chunk->node), chunk,
1433 ptr, pageind);
1375 }
1376 } else {
1377 size_t size = arena_mapbits_large_size_get(chunk,
1378 pageind);
1379
1380 assert(config_cache_oblivious || ((uintptr_t)ptr &
1381 PAGE_MASK) == 0);
1382
1383 if (likely(tcache != NULL) && size - large_pad <=
1384 tcache_maxclass) {
1434 }
1435 } else {
1436 size_t size = arena_mapbits_large_size_get(chunk,
1437 pageind);
1438
1439 assert(config_cache_oblivious || ((uintptr_t)ptr &
1440 PAGE_MASK) == 0);
1441
1442 if (likely(tcache != NULL) && size - large_pad <=
1443 tcache_maxclass) {
1385 tcache_dalloc_large(tsd, tcache, ptr, size -
1386 large_pad, slow_path);
1444 tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr,
1445 size - large_pad, slow_path);
1387 } else {
1446 } else {
1388 arena_dalloc_large(tsd, extent_node_arena_get(
1389 &chunk->node), chunk, ptr);
1447 arena_dalloc_large(tsdn,
1448 extent_node_arena_get(&chunk->node), chunk,
1449 ptr);
1390 }
1391 }
1392 } else
1450 }
1451 }
1452 } else
1393 huge_dalloc(tsd, ptr, tcache);
1453 huge_dalloc(tsdn, ptr);
1394}
1395
1396JEMALLOC_ALWAYS_INLINE void
1454}
1455
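arena_dalloc() mirrors the allocation-side contract with the same
assert(!tsdn_null(tsdn) || tcache == NULL), unwrapping the tsd via
tsdn_tsd() only on tcache hits. The huge path simplifies to
huge_dalloc(tsdn, ptr): the tcache argument is gone, since huge objects
are never cached in a tcache. An illustrative tcache-free call
(hypothetical wrapper name):

/* Free without a thread cache; legal even when no tsdn is available. */
static void
dalloc_no_tcache(void *ptr)
{

	arena_dalloc(NULL, ptr, NULL, true);
}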
1456JEMALLOC_ALWAYS_INLINE void
1397arena_sdalloc(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache)
1457arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
1458 bool slow_path)
1398{
1399 arena_chunk_t *chunk;
1400
1459{
1460 arena_chunk_t *chunk;
1461
1462 assert(!tsdn_null(tsdn) || tcache == NULL);
1463
1401 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1402 if (likely(chunk != ptr)) {
1403 if (config_prof && opt_prof) {
1404 size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
1405 LG_PAGE;
1406 assert(arena_mapbits_allocated_get(chunk, pageind) !=
1407 0);
1408 if (arena_mapbits_large_get(chunk, pageind) != 0) {
1409 /*
1410 * Make sure to use promoted size, not request
1411 * size.
1412 */
1413 size = arena_mapbits_large_size_get(chunk,
1414 pageind) - large_pad;
1415 }
1416 }
1464 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1465 if (likely(chunk != ptr)) {
1466 if (config_prof && opt_prof) {
1467 size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
1468 LG_PAGE;
1469 assert(arena_mapbits_allocated_get(chunk, pageind) !=
1470 0);
1471 if (arena_mapbits_large_get(chunk, pageind) != 0) {
1472 /*
1473 * Make sure to use promoted size, not request
1474 * size.
1475 */
1476 size = arena_mapbits_large_size_get(chunk,
1477 pageind) - large_pad;
1478 }
1479 }
1417 assert(s2u(size) == s2u(arena_salloc(ptr, false)));
1480 assert(s2u(size) == s2u(arena_salloc(tsdn, ptr, false)));
1418
1419 if (likely(size <= SMALL_MAXCLASS)) {
1420 /* Small allocation. */
1421 if (likely(tcache != NULL)) {
1422 szind_t binind = size2index(size);
1481
1482 if (likely(size <= SMALL_MAXCLASS)) {
1483 /* Small allocation. */
1484 if (likely(tcache != NULL)) {
1485 szind_t binind = size2index(size);
1423 tcache_dalloc_small(tsd, tcache, ptr, binind,
1424 true);
1486 tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr,
1487 binind, slow_path);
1425 } else {
1426 size_t pageind = ((uintptr_t)ptr -
1427 (uintptr_t)chunk) >> LG_PAGE;
1488 } else {
1489 size_t pageind = ((uintptr_t)ptr -
1490 (uintptr_t)chunk) >> LG_PAGE;
1428 arena_dalloc_small(tsd, extent_node_arena_get(
1429 &chunk->node), chunk, ptr, pageind);
1491 arena_dalloc_small(tsdn,
1492 extent_node_arena_get(&chunk->node), chunk,
1493 ptr, pageind);
1430 }
1431 } else {
1432 assert(config_cache_oblivious || ((uintptr_t)ptr &
1433 PAGE_MASK) == 0);
1434
1435 if (likely(tcache != NULL) && size <= tcache_maxclass) {
1494 }
1495 } else {
1496 assert(config_cache_oblivious || ((uintptr_t)ptr &
1497 PAGE_MASK) == 0);
1498
1499 if (likely(tcache != NULL) && size <= tcache_maxclass) {
1436 tcache_dalloc_large(tsd, tcache, ptr, size,
1437 true);
1500 tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr,
1501 size, slow_path);
1438 } else {
1502 } else {
1439 arena_dalloc_large(tsd, extent_node_arena_get(
1440 &chunk->node), chunk, ptr);
1503 arena_dalloc_large(tsdn,
1504 extent_node_arena_get(&chunk->node), chunk,
1505 ptr);
1441 }
1442 }
1443 } else
1506 }
1507 }
1508 } else
1444 huge_dalloc(tsd, ptr, tcache);
1509 huge_dalloc(tsdn, ptr);
1445}
1446# endif /* JEMALLOC_ARENA_INLINE_B */
1447#endif
1448
1449#endif /* JEMALLOC_H_INLINES */
1450/******************************************************************************/
1510}
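Besides the tsdn plumbing, arena_sdalloc() picks up an explicit
slow_path parameter and forwards it to the tcache_dalloc_*() calls,
where the old code hard-wired true; sized deallocation on a hot path
can now stay on the tcache fast path. An illustrative sized-free
wrapper under the new signature (the name is hypothetical):

/*
 * Sized deallocation from a fast path: the caller vouches for size,
 * and slow_path=false skips slow-path-only bookkeeping in the tcache.
 */
static void
sized_free_fast(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache)
{

	arena_sdalloc(tsdn, ptr, size, tcache, false);
}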
1511# endif /* JEMALLOC_ARENA_INLINE_B */
1512#endif
1513
1514#endif /* JEMALLOC_H_INLINES */
1515/******************************************************************************/