Lines Matching refs:map_bias

213 	if (pageind-1 < map_bias)
526 assert(arena_mapbits_allocated_get(chunk, map_bias) == 0);
528 assert(arena_mapbits_unallocated_size_get(chunk, map_bias) ==
532 assert(arena_mapbits_dirty_get(chunk, map_bias) ==
565 arena_mapbits_unallocated_set(chunk, map_bias, arena_maxclass,
572 for (i = map_bias+1; i < chunk_npages-1; i++)
576 (void *)arena_mapp_get(chunk, map_bias+1),
579 - (uintptr_t)arena_mapp_get(chunk, map_bias+1)));
580 for (i = map_bias+1; i < chunk_npages-1; i++) {
590 arena_avail_insert(arena, chunk, map_bias, chunk_npages-map_bias,
599 assert(arena_mapbits_allocated_get(chunk, map_bias) == 0);
601 assert(arena_mapbits_unallocated_size_get(chunk, map_bias) ==
605 assert(arena_mapbits_dirty_get(chunk, map_bias) ==
612 arena_avail_remove(arena, chunk, map_bias, chunk_npages-map_bias,
641 + map_bias;
674 run = (arena_run_t *)((uintptr_t)chunk + (map_bias << LG_PAGE));
737 assert(arena_mapbits_dirty_get(chunk, map_bias) != 0);
758 for (pageind = map_bias; pageind < chunk_npages; pageind += npages) {
813 sizeof(arena_chunk_map_t)) + map_bias;
848 sizeof(arena_chunk_map_t)) + map_bias;
976 assert(run_ind >= map_bias);
1053 if (run_ind > map_bias && arena_mapbits_allocated_get(chunk, run_ind-1)
1088 assert(run_ind == map_bias);
1180 sizeof(arena_chunk_map_t))) + map_bias;
2316 * 1) Compute safe header_size and map_bias values that include enough
2318 * 2) Refine map_bias based on (1) to omit the header pages in the page
2319 * map. The resulting map_bias may be one too small.
2320 * 3) Refine map_bias based on (2). The result will be >= the result
2323 map_bias = 0;
2326 (sizeof(arena_chunk_map_t) * (chunk_npages-map_bias));
2327 map_bias = (header_size >> LG_PAGE) + ((header_size & PAGE_MASK)
2330 assert(map_bias > 0);
2332 arena_maxclass = chunksize - (map_bias << LG_PAGE);
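
The comment block at lines 2316-2332 describes an iterative bootstrap of map_bias: start from zero, size the chunk header (the fixed fields plus the per-page map, which itself shrinks as map_bias grows), round the header up to whole pages, repeat until the value settles, and finally derive arena_maxclass from the pages left over. Below is a minimal standalone sketch of that arithmetic; the page/chunk constants, the arena_chunk_t and arena_chunk_map_t stand-ins, and the fixed three-pass loop are illustrative assumptions consistent with the fragments above, not the library's actual definitions.

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical stand-ins for the jemalloc constants referenced above. */
#define LG_PAGE		12			/* 4 KiB pages */
#define PAGE		((size_t)1 << LG_PAGE)
#define PAGE_MASK	(PAGE - 1)
#define LG_CHUNK	22			/* 4 MiB chunks */

typedef struct { unsigned char bits[16]; } arena_chunk_map_t; /* stand-in */
typedef struct {
	void			*arena;		/* stand-in header field */
	arena_chunk_map_t	map[1];		/* per-page map, header pages omitted */
} arena_chunk_t;

int
main(void)
{
	size_t chunksize = (size_t)1 << LG_CHUNK;
	size_t chunk_npages = chunksize >> LG_PAGE;
	size_t map_bias = 0;
	size_t header_size, arena_maxclass, i;

	/*
	 * Three passes, per steps 1-3 of the comment: pass 1 sizes an
	 * unbiased page map, pass 2 may come out one page too small, and
	 * pass 3 settles on a value that is >= the pass-2 result.
	 */
	for (i = 0; i < 3; i++) {
		header_size = offsetof(arena_chunk_t, map) +
		    sizeof(arena_chunk_map_t) * (chunk_npages - map_bias);
		map_bias = (header_size >> LG_PAGE) +
		    ((header_size & PAGE_MASK) != 0);
	}
	assert(map_bias > 0);

	/* Usable space in a chunk: everything past the header pages. */
	arena_maxclass = chunksize - (map_bias << LG_PAGE);
	printf("map_bias=%zu arena_maxclass=%zu\n", map_bias, arena_maxclass);
	return 0;
}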
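
Several other hits (lines 641, 674, 813, 848, and 1180) show the second role of map_bias: because the page map omits entries for the header pages, converting a map element back to its page index adds map_bias, and page map_bias is the first byte of usable run space in the chunk. The toy program below illustrates only that arithmetic; the map_bias value, the chunk base address, and the map[] array are hypothetical, not jemalloc's.

#include <stdint.h>
#include <stdio.h>

#define LG_PAGE 12				/* assumed 4 KiB pages */

typedef struct { unsigned char bits[16]; } arena_chunk_map_t; /* stand-in */

int
main(void)
{
	arena_chunk_map_t map[8];		/* map[0] describes page map_bias */
	size_t map_bias = 4;			/* e.g. a value computed as above */
	uintptr_t chunk_base = 0x400000;	/* hypothetical chunk address */
	arena_chunk_map_t *mapelm = &map[2];

	/* Map element -> page index: undo the header bias (cf. lines 813, 848, 1180). */
	size_t pageind = ((uintptr_t)mapelm - (uintptr_t)map) /
	    sizeof(arena_chunk_map_t) + map_bias;

	/* Page index -> address within the chunk (cf. lines 674 and 641). */
	uintptr_t run_addr = chunk_base + (pageind << LG_PAGE);

	printf("pageind=%zu run_addr=%#lx\n", pageind, (unsigned long)run_addr);
	return 0;
}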