Lines Matching defs:usize

864 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
876 usize = s2u(size);
877 PROF_ALLOC_PREP(1, usize, cnt);
882 if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
886 arena_prof_promoted(ret, usize);
891 usize = s2u(size);
905 prof_malloc(ret, usize, cnt);
907 assert(usize == isalloc(ret, config_prof));
908 thread_allocated_tsd_get()->allocated += usize;
911 JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, usize, false);
928 size_t usize;
953 usize = sa2u(size, alignment);
954 if (usize == 0) {
961 PROF_ALLOC_PREP(2, usize, cnt);
967 (uintptr_t)1U && usize <= SMALL_MAXCLASS) {
974 usize);
977 result = ipalloc(usize, alignment,
982 result = ipalloc(usize, alignment, false);
1000 assert(usize == isalloc(result, config_prof));
1001 thread_allocated_tsd_get()->allocated += usize;
1004 prof_malloc(result, usize, cnt);
1038 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
1068 usize = s2u(num_size);
1069 PROF_ALLOC_PREP(1, usize, cnt);
1074 if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize
1078 arena_prof_promoted(ret, usize);
1083 usize = s2u(num_size);
1098 prof_malloc(ret, usize, cnt);
1100 assert(usize == isalloc(ret, config_prof));
1101 thread_allocated_tsd_get()->allocated += usize;
1104 JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, usize, true);
1112 size_t usize JEMALLOC_CC_SILENCE_INIT(0);
1162 usize = s2u(size);
1164 PROF_ALLOC_PREP(1, usize, cnt);
1171 usize <= SMALL_MAXCLASS) {
1175 arena_prof_promoted(ret, usize);
1185 usize = s2u(size);
1208 usize = s2u(size);
1209 PROF_ALLOC_PREP(1, usize, cnt);
1214 (uintptr_t)1U && usize <=
1219 usize);
1227 usize = s2u(size);
1244 prof_realloc(ret, usize, cnt, old_size, old_ctx);
1247 assert(usize == isalloc(ret, config_prof));
1249 ta->allocated += usize;
1253 JEMALLOC_VALGRIND_REALLOC(ret, usize, ptr, old_size, old_rzsize, false);
1263 size_t usize;
1269 usize = isalloc(ptr, config_prof);
1270 prof_free(ptr, usize);
1272 usize = isalloc(ptr, config_prof);
1274 thread_allocated_tsd_get()->deallocated += usize;
1411 iallocm(size_t usize, size_t alignment, bool zero, bool try_tcache,
1415 assert(usize == ((alignment == 0) ? s2u(usize) : sa2u(usize,
1419 return (ipallocx(usize, alignment, zero, try_tcache, arena));
1421 return (icallocx(usize, try_tcache, arena));
1423 return (imallocx(usize, try_tcache, arena));
1430 size_t usize;
1452 usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
1453 if (usize == 0)
1459 PROF_ALLOC_PREP(1, usize, cnt);
1462 if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
1472 arena_prof_promoted(p, usize);
1474 p = iallocm(usize, alignment, zero, try_tcache, arena);
1478 prof_malloc(p, usize, cnt);
1480 p = iallocm(usize, alignment, zero, try_tcache, arena);
1485 *rsize = usize;
1489 assert(usize == isalloc(p, config_prof));
1490 thread_allocated_tsd_get()->allocated += usize;
1493 JEMALLOC_VALGRIND_MALLOC(true, p, usize, zero);
1510 size_t usize;
1546 * usize isn't knowable before iralloc() returns when extra is
1549 * backtrace. prof_realloc() will use the actual usize to
1562 * Use minimum usize to determine whether promotion may happen.
1574 usize = max_usize;
1575 arena_prof_promoted(q, usize);
1577 usize = isalloc(q, config_prof);
1583 usize = isalloc(q, config_prof);
1585 prof_realloc(q, usize, cnt, old_size, old_ctx);
1587 *rsize = usize;
1602 usize = isalloc(q, config_prof);
1605 usize = isalloc(q, config_prof);
1606 *rsize = usize;
1614 ta->allocated += usize;
1618 JEMALLOC_VALGRIND_REALLOC(q, usize, p, old_size, old_rzsize, zero);
1658 size_t usize;
1675 usize = isalloc(ptr, config_prof);
1678 usize = isalloc(ptr, config_prof);
1679 prof_free(ptr, usize);
1682 thread_allocated_tsd_get()->deallocated += usize;
1694 size_t usize;
1703 usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
1704 if (usize == 0)
1708 *rsize = usize;
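
The matches above repeat one flow in every allocation path: compute the usable size up front with s2u()/sa2u(), bail out if the size class lookup returns 0, allocate, assert that usize matches isalloc() on the result, and add usize to the per-thread allocated counter. The sketch below only illustrates that flow; it is not jemalloc code. size_class(), usable_size(), tls_allocated, and sketch_malloc() are hypothetical stand-ins for s2u()/sa2u(), isalloc(), thread_allocated_tsd_get()->allocated, and the je_malloc() path, and the profiling (PROF_ALLOC_PREP/prof_malloc) and Valgrind hooks are omitted.

	/*
	 * Minimal sketch (assumptions noted above), mirroring the usize
	 * pattern visible in the matched lines:
	 *   1) map the request to a usable size class before allocating,
	 *   2) allocate,
	 *   3) assert the reported usable size agrees with the precomputed one,
	 *   4) account usize against a per-thread byte counter.
	 */
	#include <assert.h>
	#include <stddef.h>
	#include <stdlib.h>

	/* Hypothetical stand-in for thread_allocated_tsd_get()->allocated. */
	static _Thread_local size_t tls_allocated;

	/* Hypothetical stand-in for s2u(): round up to a 16-byte size class. */
	static size_t
	size_class(size_t size)
	{
		return ((size + 15) & ~(size_t)15);
	}

	/* Hypothetical stand-in for isalloc(): usable size of the request. */
	static size_t
	usable_size(size_t size)
	{
		return (size_class(size));
	}

	static void *
	sketch_malloc(size_t size)
	{
		size_t usize = size_class(size);	/* usize fixed before allocating */
		void *ret;

		if (usize == 0)
			return (NULL);			/* degenerate/overflowed request */
		ret = malloc(usize);
		if (ret != NULL) {
			assert(usize == usable_size(size));
			tls_allocated += usize;		/* per-thread accounting, as in
							 * thread_allocated_tsd_get()->allocated += usize */
		}
		return (ret);
	}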