Lines Matching refs:av

2245 static void malloc_init_state(mstate av)
2247 static void malloc_init_state(av) mstate av;
2255 bin = bin_at(av,i);
2259 set_noncontiguous(av);
2261 set_max_fast(av, DEFAULT_MXFAST);
2263 av->top = initial_top(av);
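
The malloc_init_state matches above show arena setup: every normal bin is made an empty circular list, the arena is flagged (non)contiguous, max_fast gets DEFAULT_MXFAST, and top points at the initial pseudo-bin. A minimal sketch of that shape of initialization, using simplified hypothetical types rather than glibc's bin_at/set_max_fast macros:

    #include <stddef.h>

    /* Hypothetical, simplified stand-ins for glibc's mstate and bin macros. */
    #define NBINS          128
    #define DEFAULT_MXFAST 64

    typedef struct chunk { struct chunk *fd, *bk; } chunk;

    typedef struct arena {
        chunk  bins[NBINS];    /* each bin header doubles as a list node     */
        size_t max_fast;       /* requests at or below this use the fastbins */
        chunk *top;            /* topmost free chunk, never kept in any bin  */
        int    noncontiguous;  /* set when the heap is not one single region */
    } arena;

    static void arena_init(arena *av, chunk *initial_top, int noncontiguous)
    {
        /* An empty bin is a circular list pointing at itself, which is what
           the bin_at() loop in malloc_init_state arranges. */
        for (int i = 0; i < NBINS; i++)
            av->bins[i].fd = av->bins[i].bk = &av->bins[i];

        av->noncontiguous = noncontiguous;   /* set_noncontiguous(av)      */
        av->max_fast      = DEFAULT_MXFAST;  /* set_max_fast(av, ...)      */
        av->top           = initial_top;     /* av->top = initial_top(av)  */
    }
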
2316 static void do_check_chunk(mstate av, mchunkptr p)
2318 static void do_check_chunk(av, p) mstate av; mchunkptr p;
2323 char* max_address = (char*)(av->top) + chunksize(av->top);
2324 char* min_address = max_address - av->system_mem;
2329 if (p != av->top) {
2330 if (contiguous(av)) {
2332 assert(((char*)p + sz) <= ((char*)(av->top)));
2346 if (contiguous(av) && av->top != initial_top(av)) {
2365 static void do_check_free_chunk(mstate av, mchunkptr p)
2367 static void do_check_free_chunk(av, p) mstate av; mchunkptr p;
2373 do_check_chunk(av, p);
2388 assert (next == av->top || inuse(next));
2403 static void do_check_inuse_chunk(mstate av, mchunkptr p)
2405 static void do_check_inuse_chunk(av, p) mstate av; mchunkptr p;
2410 do_check_chunk(av, p);
2412 assert(av == arena_for_chunk(p));
2429 do_check_free_chunk(av, prv);
2432 if (next == av->top) {
2437 do_check_free_chunk(av, next);
2445 static void do_check_remalloced_chunk(mstate av, mchunkptr p, INTERNAL_SIZE_T s)
2447 static void do_check_remalloced_chunk(av, p, s)
2448 mstate av; mchunkptr p; INTERNAL_SIZE_T s;
2454 assert(av == arena_for_chunk(p));
2457 do_check_inuse_chunk(av, p);
2474 static void do_check_malloced_chunk(mstate av, mchunkptr p, INTERNAL_SIZE_T s)
2476 static void do_check_malloced_chunk(av, p, s)
2477 mstate av; mchunkptr p; INTERNAL_SIZE_T s;
2481 do_check_remalloced_chunk(av, p, s);
2508 static void do_check_malloc_state(mstate av)
2528 if (av->top == 0 || av->top == initial_top(av))
2535 assert((av->max_fast & ~1) <= request2size(MAX_FAST_SIZE));
2537 max_fast_bin = fastbin_index(av->max_fast);
2540 p = av->fastbins[i];
2548 do_check_inuse_chunk(av, p);
2557 assert(have_fastchunks(av));
2558 else if (!have_fastchunks(av))
2563 b = bin_at(av,i);
2567 binbit = get_binmap(av,i);
2577 do_check_free_chunk(av, p);
2593 (q != av->top && inuse(q) &&
2596 do_check_inuse_chunk(av, q);
2601 check_chunk(av, av->top);
2606 assert((unsigned long)(av->system_mem) <=
2607 (unsigned long)(av->max_system_mem));
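
The do_check_* matches implement the debug-build invariants: every chunk must lie inside the arena's address range bounded by av->top and av->system_mem, free chunks must have consistent neighbours, and system_mem may never exceed max_system_mem. A small sketch of just the address-range test, with hypothetical field names standing in for the real macros:

    #include <assert.h>
    #include <stddef.h>

    /* Hypothetical, flattened view of the fields the checks read. */
    typedef struct {
        char  *top;         /* start of the top chunk               */
        size_t top_size;    /* chunksize(av->top)                   */
        size_t system_mem;  /* bytes currently obtained from the OS */
    } arena_view;

    /* Rough analogue of do_check_chunk's range test for a contiguous arena:
       a chunk of sz bytes at p must lie between the arena's lowest usable
       address and the end of the top chunk. */
    static void check_chunk_in_range(const arena_view *av, const char *p, size_t sz)
    {
        char *max_address = av->top + av->top_size;
        char *min_address = max_address - av->system_mem;

        assert(p >= min_address);
        assert(p + sz <= max_address);
    }
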
2871 mstate av;
2908 _int_malloc(mstate av, size_t bytes)
2943 This code is safe to execute even if av is not yet initialized, so we
2947 if ((unsigned long)(nb) <= (unsigned long)(av->max_fast)) {
2948 fb = &(av->fastbins[(fastbin_index(nb))]);
2951 check_remalloced_chunk(av, victim, nb);
2952 set_arena_for_chunk(victim, av);
2967 bin = bin_at(av,idx);
2971 malloc_consolidate(av);
2978 set_arena_for_chunk(victim, av);
2979 check_malloced_chunk(av, victim, nb);
2998 if (have_fastchunks(av))
2999 malloc_consolidate(av);
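
In _int_malloc the matches above cover the opening phases: requests no larger than av->max_fast are served from the fastbins by popping the head of a LIFO singly linked list, small requests fall through to their exact small bin, and large requests trigger malloc_consolidate before the unsorted-bin scan that follows. A standalone sketch of the fastbin pop, on hypothetical simplified types:

    #include <stddef.h>

    typedef struct fchunk { struct fchunk *fd; } fchunk;  /* single link only */

    #define NFASTBINS 10

    typedef struct {
        size_t  max_fast;
        fchunk *fastbins[NFASTBINS];
    } fast_arena;

    /* Hypothetical size-to-index map; glibc's fastbin_index() is a shift too. */
    static size_t fast_index(size_t nb) { return nb >> 4; }

    /* Analogue of the fastbin fast path: pop the list head if the request
       qualifies, otherwise return NULL and let the caller try the bins. */
    static void *fast_alloc(fast_arena *av, size_t nb)
    {
        if (nb <= av->max_fast) {
            fchunk **fb     = &av->fastbins[fast_index(nb)];
            fchunk  *victim = *fb;
            if (victim != NULL) {
                *fb = victim->fd;   /* unlink the head */
                return victim;      /* the real code returns chunk2mem(victim) */
            }
        }
        return NULL;
    }
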
3017 while ( (victim = unsorted_chunks(av)->bk) != unsorted_chunks(av)) {
3030 bck == unsorted_chunks(av) &&
3031 victim == av->last_remainder &&
3037 unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
3038 av->last_remainder = remainder;
3039 remainder->bk = remainder->fd = unsorted_chunks(av);
3045 set_arena_for_chunk(victim, av);
3046 check_malloced_chunk(av, victim, nb);
3051 unsorted_chunks(av)->bk = bck;
3052 bck->fd = unsorted_chunks(av);
3058 set_arena_for_chunk(victim, av);
3059 check_malloced_chunk(av, victim, nb);
3067 bck = bin_at(av, victim_index);
3072 bck = bin_at(av, victim_index);
3094 mark_bin(av, victim_index);
3109 bin = bin_at(av, idx);
3121 set_arena_for_chunk(victim, av);
3122 check_malloced_chunk(av, victim, nb);
3128 unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
3129 remainder->bk = remainder->fd = unsorted_chunks(av);
3133 set_arena_for_chunk(victim, av);
3134 check_malloced_chunk(av, victim, nb);
3153 bin = bin_at(av,idx);
3155 map = av->binmap[block];
3165 } while ( (map = av->binmap[block]) == 0);
3167 bin = bin_at(av, (block << BINMAPSHIFT));
3183 av->binmap[block] = map &= ~bit; /* Write through */
3204 set_arena_for_chunk(victim, av);
3205 check_malloced_chunk(av, victim, nb);
3213 unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
3214 remainder->bk = remainder->fd = unsorted_chunks(av);
3217 av->last_remainder = remainder;
3222 set_arena_for_chunk(victim, av);
3223 check_malloced_chunk(av, victim, nb);
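
The binmap matches show how the search skips empty bins: one bit per bin, packed into word-sized blocks, scanned block by block, with stale bits written back out ("Write through") when a marked bin turns out to be empty. A compact sketch of that bookkeeping over a hypothetical 32-bit-per-block map (uses the GCC/Clang __builtin_ctz intrinsic):

    #include <stdint.h>

    #define BINMAPSHIFT 5                     /* 32 bins per map word     */
    #define BITSPERMAP  (1U << BINMAPSHIFT)
    #define BINMAPSIZE  4                     /* enough for 128 bins here */

    typedef struct { uint32_t binmap[BINMAPSIZE]; } binmap_t;

    static void mark_bin(binmap_t *m, unsigned i)
    {
        m->binmap[i >> BINMAPSHIFT] |= 1U << (i & (BITSPERMAP - 1));
    }

    static void unmark_bin(binmap_t *m, unsigned i)   /* the "write through" */
    {
        m->binmap[i >> BINMAPSHIFT] &= ~(1U << (i & (BITSPERMAP - 1)));
    }

    /* Find the first marked bin at or above idx, or -1 if none: shift the
       current word past idx, then walk whole words, in the same
       block-then-bit order as the matched lines. */
    static int next_marked_bin(const binmap_t *m, unsigned idx)
    {
        unsigned block = idx >> BINMAPSHIFT;
        uint32_t map   = m->binmap[block] >> (idx & (BITSPERMAP - 1));

        if (map != 0)
            return (int)(idx + (unsigned)__builtin_ctz(map));

        while (++block < BINMAPSIZE)
            if (m->binmap[block] != 0)
                return (int)((block << BINMAPSHIFT)
                             + (unsigned)__builtin_ctz(m->binmap[block]));

        return -1;
    }
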
3232 (held in av->top). Note that this is in accord with the best-fit
3233 search rule. In effect, av->top is treated as larger (and thus
3238 We require that av->top always exists (i.e., has size >=
3245 victim = av->top;
3251 av->top = remainder;
3255 set_arena_for_chunk(victim, av);
3256 check_malloced_chunk(av, victim, nb);
3266 else if (have_fastchunks(av)) {
3268 malloc_consolidate(av);
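
When no bin fits, the av->top matches show the fallback: top is treated as a best fit of effectively unbounded size, nb bytes are carved off its low end, and the remainder becomes the new top; if even top is too small and fastbins are non-empty, the code consolidates and retries before obtaining more memory from the system. A simplified sketch of the split:

    #include <stddef.h>

    #define MINSIZE 32   /* hypothetical minimum chunk size */

    typedef struct tchunk { size_t size; } tchunk;
    typedef struct { tchunk *top; } top_arena;

    /* Carve nb bytes off the low end of top and keep the rest as the new
       top. Returns NULL when top is too small; that is where the real code
       either consolidates fastbins and retries or grows the heap. */
    static void *alloc_from_top(top_arena *av, size_t nb)
    {
        tchunk *victim = av->top;
        size_t  size   = victim->size;

        if (size >= nb + MINSIZE) {
            tchunk *remainder = (tchunk *)((char *)victim + nb);
            remainder->size = size - nb;
            victim->size    = nb;
            av->top         = remainder;   /* av->top = remainder */
            return victim;
        }
        return NULL;
    }
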
3285 _int_free(mstate av, Void_t* mem)
3303 check_inuse_chunk(av, p);
3310 if ((unsigned long)(size) <= (unsigned long)(av->max_fast)
3317 && (chunk_at_offset(p, size) != av->top)
3321 set_fastchunks(av);
3322 fb = &(av->fastbins[fastbin_index(size)]);
3344 if (nextchunk != av->top) {
3361 bck = unsorted_chunks(av);
3371 check_free_chunk(av, p);
3382 av->top = p;
3383 check_chunk(av, p);
3400 if (have_fastchunks(av))
3401 malloc_consolidate(av);
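
The _int_free matches mirror allocation: a small chunk that is not adjacent to av->top is pushed onto its fastbin with no coalescing (set_fastchunks records that consolidation has future work), while larger chunks are coalesced with free neighbours and either linked into the unsorted bin or merged into top. A sketch of the fastbin push on the same kind of hypothetical singly linked list:

    #include <stdbool.h>
    #include <stddef.h>

    typedef struct fchunk { struct fchunk *fd; } fchunk;

    #define NFASTBINS 10

    typedef struct {
        size_t  max_fast;
        bool    have_fastchunks;
        fchunk *fastbins[NFASTBINS];
    } fast_arena;

    static size_t fast_index(size_t sz) { return sz >> 4; }  /* hypothetical */

    /* Analogue of the fastbin branch of _int_free: a LIFO push with no
       coalescing; the flag just records that consolidation has work to do. */
    static void fast_free(fast_arena *av, fchunk *p, size_t size)
    {
        if (size <= av->max_fast) {
            fchunk **fb = &av->fastbins[fast_index(size)];
            av->have_fastchunks = true;   /* set_fastchunks(av) */
            p->fd = *fb;                  /* push onto the head of the list */
            *fb   = p;
            return;
        }
        /* Larger chunks: coalesce with free neighbours, then link into the
           unsorted bin, or merge into av->top when the chunk borders it. */
    }
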
3422 static void malloc_consolidate(mstate av)
3424 static void malloc_consolidate(av) mstate av;
3444 If max_fast is 0, we know that av hasn't
3448 if (av->max_fast != 0) {
3449 clear_fastchunks(av);
3451 unsorted_bin = unsorted_chunks(av);
3461 maxfb = &(av->fastbins[fastbin_index(av->max_fast)]);
3462 fb = &(av->fastbins[0]);
3468 check_inuse_chunk(av, p);
3483 if (nextchunk != av->top) {
3505 av->top = p;
3514 malloc_init_state(av);
3515 check_malloc_state(av);
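
malloc_consolidate's matches show the cleanup pass: clear the fastchunks flag, then drain every fastbin, coalescing each chunk with its free neighbours and placing the result on the unsorted bin (or merging it into top); if max_fast is still 0 the arena has never been used and is initialized instead. A sketch of the draining loop, with the per-chunk coalescing left as a hypothetical hook:

    #include <stdbool.h>
    #include <stddef.h>

    typedef struct fchunk { struct fchunk *fd; } fchunk;

    #define NFASTBINS 10

    typedef struct {
        bool    have_fastchunks;
        fchunk *fastbins[NFASTBINS];
    } cons_arena;

    /* Hypothetical hook: the real code coalesces p with free neighbours and
       links the result into the unsorted bin, or folds it into av->top. */
    static void place_on_unsorted(fchunk *p) { (void)p; }

    static void consolidate(cons_arena *av)
    {
        av->have_fastchunks = false;      /* clear_fastchunks(av) */

        for (int i = 0; i < NFASTBINS; i++) {
            fchunk *p = av->fastbins[i];
            av->fastbins[i] = NULL;       /* empty this bin */
            while (p != NULL) {
                fchunk *next = p->fd;     /* save the link before reuse */
                place_on_unsorted(p);
                p = next;
            }
        }
    }
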
3524 _int_realloc(mstate av, Void_t* oldmem, size_t bytes)
3551 _int_free(av, oldmem);
3557 if (oldmem == 0) return _int_malloc(av, bytes);
3564 check_inuse_chunk(av, oldp);
3579 if (next == av->top &&
3583 av->top = chunk_at_offset(oldp, nb);
3584 set_head(av->top, (newsize - nb) | PREV_INUSE);
3585 check_inuse_chunk(av, oldp);
3586 set_arena_for_chunk(oldp, av);
3591 else if (next != av->top &&
3601 newmem = _int_malloc(av, nb - MALLOC_ALIGN_MASK);
3649 _int_free(av, oldmem);
3650 set_arena_for_chunk(newp, av);
3651 check_inuse_chunk(av, newp);
3673 set_arena_for_chunk(remainder, av);
3674 _int_free(av, chunk2mem(remainder));
3677 set_arena_for_chunk(newp, av);
3678 check_inuse_chunk(av, newp);
3688 check_malloc_state(av);
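
The _int_realloc matches show the cheap paths first: reuse the old chunk if it is already big enough, or, when the next chunk is av->top and the combined size covers the request, grow in place and move top forward; only then fall back to _int_malloc, copy, and _int_free. A sketch of the grow-into-top case under the same simplifying assumptions as the sketches above:

    #include <stddef.h>

    #define MINSIZE 32   /* hypothetical minimum chunk size */

    typedef struct rchunk { size_t size; } rchunk;
    typedef struct { rchunk *top; } r_arena;

    /* If oldp is immediately followed by top and their combined size covers
       the request plus a minimal new top, grow oldp in place and advance
       top. Returns nonzero on success; otherwise the real code allocates a
       new block, copies, and frees the old one. */
    static int grow_into_top(r_arena *av, rchunk *oldp, size_t nb)
    {
        size_t  oldsize = oldp->size;
        rchunk *next    = (rchunk *)((char *)oldp + oldsize);

        if (next == av->top && oldsize + next->size >= nb + MINSIZE) {
            size_t  newsize = oldsize + next->size;
            rchunk *newtop  = (rchunk *)((char *)oldp + nb);

            newtop->size = newsize - nb;   /* set_head(av->top, ...) analogue */
            oldp->size   = nb;
            av->top      = newtop;
            return 1;
        }
        return 0;
    }
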
3699 _int_memalign(mstate av, size_t alignment, size_t bytes)
3714 if (alignment <= MALLOC_ALIGNMENT) return _int_malloc(av, bytes);
3737 m = (char*)(_int_malloc(av, nb + alignment + MINSIZE));
3766 set_arena_for_chunk(newp, av);
3774 set_arena_for_chunk(p, av);
3775 _int_free(av, chunk2mem(p));
3790 set_arena_for_chunk(remainder, av);
3791 _int_free(av, chunk2mem(remainder));
3795 set_arena_for_chunk(p, av);
3796 check_inuse_chunk(av, p);
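
_int_memalign's matches show the standard over-allocate-and-trim approach: alignments up to MALLOC_ALIGNMENT just go to _int_malloc; otherwise the code allocates nb + alignment + MINSIZE, advances to an aligned boundary far enough in that the leading slack can be freed as its own chunk, and frees any oversized tail the same way. A sketch of just the alignment arithmetic (power-of-two alignment assumed, names illustrative):

    #include <stddef.h>
    #include <stdint.h>

    /* Given an over-allocated block starting at m, return the lowest
       alignment-aligned address inside it that leaves at least min_lead
       bytes in front, so the leading slack can be freed as a chunk of its
       own. Assumes alignment is a power of two. */
    static void *align_within(char *m, size_t alignment, size_t min_lead)
    {
        uintptr_t addr = (uintptr_t)m;

        if (addr % alignment == 0)
            return m;                        /* already aligned: use as-is */

        uintptr_t aligned = (addr + alignment - 1) & ~(uintptr_t)(alignment - 1);
        if (aligned - addr < min_lead)
            aligned += alignment;            /* leave room for a whole chunk */

        return (void *)aligned;
    }
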
3885 struct mallinfo mALLINFo(mstate av)
3897 if (av->top == 0) malloc_consolidate(av);
3899 check_malloc_state(av);
3902 avail = chunksize(av->top);
3910 for (p = av->fastbins[i]; p != 0; p = p->fd) {
3920 b = bin_at(av, i);
3930 mi.uordblks = av->system_mem - avail;
3931 mi.arena = av->system_mem;
3933 mi.keepcost = chunksize(av->top);
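
Finally, the mALLINFo matches total up free space: consolidate first if top is unset, start from chunksize(av->top), then add the size of every chunk sitting in the fastbins and the normal bins; uordblks is av->system_mem minus that total and keepcost is the size of top. A sketch of the accounting walk (normal bins omitted for brevity, types hypothetical):

    #include <stddef.h>

    typedef struct mchunk {
        size_t         size;
        struct mchunk *fd;   /* next chunk in the same list */
    } mchunk;

    #define NFASTBINS 10

    typedef struct {
        size_t  system_mem;             /* av->system_mem     */
        size_t  top_size;               /* chunksize(av->top) */
        mchunk *fastbins[NFASTBINS];
    } mi_arena;

    typedef struct { size_t arena, fordblks, uordblks, keepcost; } minfo;

    /* Analogue of mALLINFo's accounting: free space is top plus everything
       in the bins; in-use space is whatever remains of system_mem. */
    static minfo collect(const mi_arena *av)
    {
        size_t avail = av->top_size;

        for (int i = 0; i < NFASTBINS; i++)
            for (const mchunk *p = av->fastbins[i]; p != NULL; p = p->fd)
                avail += p->size;

        minfo mi;
        mi.arena    = av->system_mem;
        mi.fordblks = avail;
        mi.uordblks = av->system_mem - avail;
        mi.keepcost = av->top_size;
        return mi;
    }
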