Side-by-side diff of jemalloc's arena.c: left column = deleted (revision 234370), right column = added (revision 234543). Viewer mode: full / compact.
1#define JEMALLOC_ARENA_C_
2#include "jemalloc/internal/jemalloc_internal.h"
3
4/******************************************************************************/
5/* Data. */
6
/*
 * Runtime option controlling dirty-page purging; initialized from
 * LG_DIRTY_MULT_DEFAULT. Presumably the log2 active:dirty page ratio
 * (jemalloc "opt.lg_dirty_mult") -- confirm against jemalloc docs.
 */
7ssize_t opt_lg_dirty_mult = LG_DIRTY_MULT_DEFAULT;
/* Per-size-class bin metadata table, one entry per small bin (NBINS). */
8arena_bin_info_t arena_bin_info[NBINS];

--- 662 unchanged lines hidden (view full) ---

671 sizeof(arena_chunk_map_t)) + map_bias;
672 size_t npages = mapelm->bits >> LG_PAGE;
673
674 assert(pageind + npages <= chunk_npages);
675 assert(ndirty >= npages);
676 if (config_debug)
677 ndirty -= npages;
678
1#define JEMALLOC_ARENA_C_
2#include "jemalloc/internal/jemalloc_internal.h"
3
4/******************************************************************************/
5/* Data. */
6
7ssize_t opt_lg_dirty_mult = LG_DIRTY_MULT_DEFAULT;
8arena_bin_info_t arena_bin_info[NBINS];

--- 662 unchanged lines hidden (view full) ---

671 sizeof(arena_chunk_map_t)) + map_bias;
672 size_t npages = mapelm->bits >> LG_PAGE;
673
674 assert(pageind + npages <= chunk_npages);
675 assert(ndirty >= npages);
676 if (config_debug)
677 ndirty -= npages;
678
679 madvise((void *)((uintptr_t)chunk + (pageind << LG_PAGE)),
680 (npages << LG_PAGE), JEMALLOC_MADV_PURGE);
679 pages_purge((void *)((uintptr_t)chunk + (pageind << LG_PAGE)),
680 (npages << LG_PAGE));
681 if (config_stats)
682 nmadvise++;
683 }
684 assert(ndirty == 0);
685 malloc_mutex_lock(&arena->lock);
686 if (config_stats)
687 arena->stats.nmadvise += nmadvise;
688

--- 519 unchanged lines hidden (view full) ---

1208
1209 return (arena_run_reg_alloc(bin->runcur, bin_info));
1210}
1211
1212void
1213arena_prof_accum(arena_t *arena, uint64_t accumbytes)
1214{
1215
681 if (config_stats)
682 nmadvise++;
683 }
684 assert(ndirty == 0);
685 malloc_mutex_lock(&arena->lock);
686 if (config_stats)
687 arena->stats.nmadvise += nmadvise;
688

--- 519 unchanged lines hidden (view full) ---

1208
1209 return (arena_run_reg_alloc(bin->runcur, bin_info));
1210}
1211
1212void
1213arena_prof_accum(arena_t *arena, uint64_t accumbytes)
1214{
1215
1216 if (prof_interval != 0) {
1216 cassert(config_prof);
1217
1218 if (config_prof && prof_interval != 0) {
1217 arena->prof_accumbytes += accumbytes;
1218 if (arena->prof_accumbytes >= prof_interval) {
1219 prof_idump();
1220 arena->prof_accumbytes -= prof_interval;
1221 }
1222 }
1223}
1224

--- 233 unchanged lines hidden (view full) ---

1458 if (opt_junk)
1459 memset(ret, 0xa5, size);
1460 else if (opt_zero)
1461 memset(ret, 0, size);
1462 }
1463 return (ret);
1464}
1465
1219 arena->prof_accumbytes += accumbytes;
1220 if (arena->prof_accumbytes >= prof_interval) {
1221 prof_idump();
1222 arena->prof_accumbytes -= prof_interval;
1223 }
1224 }
1225}
1226

--- 233 unchanged lines hidden (view full) ---

1460 if (opt_junk)
1461 memset(ret, 0xa5, size);
1462 else if (opt_zero)
1463 memset(ret, 0, size);
1464 }
1465 return (ret);
1466}
1467
1466/* Return the size of the allocation pointed to by ptr. */
1467size_t
1468arena_salloc(const void *ptr, bool demote)
1469{
1470 size_t ret;
1471 arena_chunk_t *chunk;
1472 size_t pageind, mapbits;
1473
/* ptr must be a non-NULL interior pointer, never the chunk base itself. */
1474 assert(ptr != NULL);
1475 assert(CHUNK_ADDR2BASE(ptr) != ptr);
1476
/*
 * Locate the containing chunk, then the page-map entry for ptr's page;
 * mapbits encodes the allocation's state and size class.
 */
1477 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1478 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
1479 mapbits = chunk->map[pageind-map_bias].bits;
1480 assert((mapbits & CHUNK_MAP_ALLOCATED) != 0);
1481 if ((mapbits & CHUNK_MAP_LARGE) == 0) {
/*
 * Small allocation: mapbits >> LG_PAGE holds the page offset of this
 * page within its run, so subtracting it from pageind recovers the
 * run header. The size is the run's bin's fixed region size.
 */
1482 arena_run_t *run = (arena_run_t *)((uintptr_t)chunk +
1483 (uintptr_t)((pageind - (mapbits >> LG_PAGE)) << LG_PAGE));
1484 size_t binind = arena_bin_index(chunk->arena, run->bin);
1485 arena_bin_info_t *bin_info = &arena_bin_info[binind];
/* ptr must land exactly on a region boundary within the run. */
1486 assert(((uintptr_t)ptr - ((uintptr_t)run +
1487 (uintptr_t)bin_info->reg0_offset)) % bin_info->reg_interval
1488 == 0);
1489 ret = bin_info->reg_size;
1490 } else {
/*
 * Large allocation: page-aligned, and the size is stored directly in
 * mapbits (low PAGE_MASK bits hold flags).
 */
1491 assert(((uintptr_t)ptr & PAGE_MASK) == 0);
1492 ret = mapbits & ~PAGE_MASK;
/*
 * With heap profiling, a small request may have been promoted to a
 * one-page "large" allocation; when demote is true, report the
 * original small size class recorded in CHUNK_MAP_CLASS bits
 * (stored as binind+1 so that 0 means "no class recorded").
 */
1493 if (demote && prof_promote && ret == PAGE && (mapbits &
1494 CHUNK_MAP_CLASS_MASK) != 0) {
1495 size_t binind = ((mapbits & CHUNK_MAP_CLASS_MASK) >>
1496 CHUNK_MAP_CLASS_SHIFT) - 1;
1497 assert(binind < NBINS);
1498 ret = arena_bin_info[binind].reg_size;
1499 }
1500 assert(ret != 0);
1501 }
1502
1503 return (ret);
1504}
1505
1506void
1507arena_prof_promoted(const void *ptr, size_t size)
1508{
1509 arena_chunk_t *chunk;
1510 size_t pageind, binind;
1511
1468void
1469arena_prof_promoted(const void *ptr, size_t size)
1470{
1471 arena_chunk_t *chunk;
1472 size_t pageind, binind;
1473
1512 assert(config_prof);
1474 cassert(config_prof);
1513 assert(ptr != NULL);
1514 assert(CHUNK_ADDR2BASE(ptr) != ptr);
1515 assert(isalloc(ptr, false) == PAGE);
1516 assert(isalloc(ptr, true) == PAGE);
1517 assert(size <= SMALL_MAXCLASS);
1518
1519 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1520 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;

--- 728 unchanged lines hidden ---
1475 assert(ptr != NULL);
1476 assert(CHUNK_ADDR2BASE(ptr) != ptr);
1477 assert(isalloc(ptr, false) == PAGE);
1478 assert(isalloc(ptr, true) == PAGE);
1479 assert(size <= SMALL_MAXCLASS);
1480
1481 chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1482 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;

--- 728 unchanged lines hidden ---