Lines Matching defs:usize

1445     size_t size, size_t usize, szind_t ind) {
1471 return ipalloct(tsd_tsdn(tsd), usize, dopts->alignment,
1481 size_t usize, szind_t ind) {
1485 * For small allocations, sampling bumps the usize. If so, we allocate
1489 size_t bumped_usize = usize;
1491 if (usize <= SMALL_MAXCLASS) {
1502 ret, usize);
1504 ret = imalloc_no_sample(sopts, dopts, tsd, usize, usize, ind);
1555 * allocations, or in case of stats or profiling we need usize.
1563 size_t usize = 0;
1602 usize = index2size(ind);
1603 assert(usize > 0 && usize <= LARGE_MAXCLASS);
1606 usize = sa2u(size, dopts->alignment);
1607 if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
1623 * Note that if we're going down this path, usize must have been
1627 tsd, usize, prof_active_get_unlocked(), true);
1630 sopts, dopts, tsd, usize, usize, ind);
1637 sopts, dopts, tsd, usize, ind);
1648 allocation, usize, tctx);
1652 * If dopts->alignment > 0, then ind is still 0, but usize was
1655 * on usize).
1657 allocation = imalloc_no_sample(sopts, dopts, tsd, usize, usize,
1672 assert(usize == isalloc(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
1674 *tsd_thread_allocatedp_get(tsd) += usize;
1868 size_t old_usize, size_t usize, prof_tctx_t *tctx)
1874 if (usize <= SMALL_MAXCLASS) {
1880 usize);
1882 p = iralloc(tsd, extent, old_ptr, old_usize, usize, 0, false);
1889 size_t usize)
1898 tctx = prof_alloc_prep(tsd, usize, prof_active, true);
1901 usize, tctx);
1903 p = iralloc(tsd, old_extent, old_ptr, old_usize, usize, 0,
1911 prof_realloc(tsd, extent, p, usize, tctx, prof_active, true, old_extent,
1921 size_t usize;
1930 usize = isalloc(tsd_tsdn(tsd), extent, ptr);
1931 prof_free(tsd, extent, ptr, usize);
1933 usize = isalloc(tsd_tsdn(tsd), extent, ptr);
1935 *tsd_thread_deallocatedp_get(tsd) += usize;
1944 isfree(tsd_t *tsd, extent_t *extent, void *ptr, size_t usize, tcache_t *tcache,
1953 prof_free(tsd, extent, ptr, usize);
1955 *tsd_thread_deallocatedp_get(tsd) += usize;
1958 isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, false);
1960 isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, true);
1970 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
1998 usize = s2u(size);
1999 ret = unlikely(usize == 0 || usize > LARGE_MAXCLASS) ?
2001 usize);
2004 usize = s2u(size);
2025 assert(usize == isalloc(tsdn, iealloc(tsdn, ret), ret));
2027 *tsd_thread_allocatedp_get(tsd) += usize;
2227 size_t old_usize, size_t usize, size_t alignment, bool zero,
2234 if (usize <= SMALL_MAXCLASS) {
2239 arena_prof_promote(tsdn, iealloc(tsdn, p), p, usize);
2241 p = iralloct(tsdn, extent, old_ptr, old_usize, usize, alignment,
2250 size_t size, size_t alignment, size_t *usize, bool zero, tcache_t *tcache,
2260 tctx = prof_alloc_prep(tsd, *usize, prof_active, false);
2263 old_usize, *usize, alignment, zero, tcache, arena, tctx);
2279 * be the same as the current usize because of in-place large
2280 * reallocation. Therefore, query the actual value of usize.
2283 *usize = isalloc(tsd_tsdn(tsd), extent, p);
2286 prof_realloc(tsd, extent, p, *usize, tctx, prof_active, false,
2300 size_t usize;
2333 usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
2334 if (unlikely(usize == 0 || usize > LARGE_MAXCLASS))
2337 &usize, zero, tcache, arena);
2346 usize = isalloc(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
2353 *tsd_thread_allocatedp_get(tsd) += usize;
2373 size_t usize;
2377 usize = isalloc(tsdn, extent, ptr);
2379 return (usize);
2387 size_t usize;
2391 usize = ixallocx_helper(tsdn, extent, ptr, old_usize, size, extra,
2394 return (usize);
2401 size_t usize_max, usize;
2408 * usize isn't knowable before ixalloc() returns when extra is non-zero.
2411 * prof_realloc() will use the actual usize to decide whether to sample.
2431 usize = ixallocx_prof_sample(tsd_tsdn(tsd), extent, ptr,
2434 usize = ixallocx_helper(tsd_tsdn(tsd), extent, ptr, old_usize,
2437 if (usize == old_usize) {
2439 return (usize);
2441 prof_realloc(tsd, extent, ptr, usize, tctx, prof_active, false, extent,
2444 return (usize);
2452 size_t usize, old_usize;
2476 usize = old_usize;
2483 usize = ixallocx_prof(tsd, extent, ptr, old_usize, size, extra,
2486 usize = ixallocx_helper(tsd_tsdn(tsd), extent, ptr, old_usize,
2489 if (unlikely(usize == old_usize))
2493 *tsd_thread_allocatedp_get(tsd) += usize;
2499 return (usize);
2506 size_t usize;
2515 usize = ivsalloc(tsdn, ptr);
2517 usize = isalloc(tsdn, iealloc(tsdn, ptr), ptr);
2520 return (usize);
2553 size_t usize;
2558 usize = s2u(size);
2560 usize = sa2u(size, MALLOCX_ALIGN_GET_SPECIFIED(flags));
2562 return (usize);
2570 size_t usize;
2577 usize = inallocx(tsd_tsdn(tsd), size, flags);
2578 assert(usize == isalloc(tsd_tsdn(tsd), extent, ptr));
2591 isfree(tsd, extent, ptr, usize, tcache, false);
2593 isfree(tsd, extent, ptr, usize, tcache, true);
2601 size_t usize;
2612 usize = inallocx(tsdn, size, flags);
2613 if (unlikely(usize > LARGE_MAXCLASS))
2617 return (usize);
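
Note on the matches above: throughout these paths, usize is jemalloc's usable size, i.e. the size class a request is rounded up to by s2u()/sa2u() and reported back by isalloc()/ivsalloc(). As a minimal sketch of the same idea through jemalloc's public, documented API (nallocx, mallocx, sallocx, dallocx), the program below shows the usable size a request maps to and the usable size of the resulting allocation agreeing, much like the assert(usize == isalloc(...)) checks in the listing. This example is not part of the indexed source; it assumes jemalloc is installed and the program is linked with -ljemalloc.

/*
 * Sketch only: compare the usable size predicted for a request (nallocx)
 * with the usable size of the live allocation (sallocx).
 */
#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void) {
	size_t size = 100;
	int flags = MALLOCX_ALIGN(64);	/* request 64-byte alignment */

	/* Usable size the request will be rounded up to (cf. sa2u() above). */
	size_t usize = nallocx(size, flags);

	void *p = mallocx(size, flags);
	if (p == NULL) {
		return 1;
	}

	/* Usable size of the live allocation (cf. isalloc() above). */
	printf("requested %zu, predicted usize %zu, actual usize %zu\n",
	    size, usize, sallocx(p, flags));

	dallocx(p, flags);
	return 0;
}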