Lines Matching refs:ind

301 arena_set(unsigned ind, arena_t *arena) {
302 atomic_store_p(&arenas[ind], arena, ATOMIC_RELEASE);
320 /* Create a new arena and insert it into the arenas array at index ind. */
322 arena_init_locked(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks) {
325 assert(ind <= narenas_total_get());
326 if (ind >= MALLOCX_ARENA_LIMIT) {
329 if (ind == narenas_total_get()) {
334 * Another thread may have already initialized arenas[ind] if it's an
337 arena = arena_get(tsdn, ind, false);
344 arena = arena_new(tsdn, ind, extent_hooks);
350 arena_new_create_background_thread(tsdn_t *tsdn, unsigned ind) {
351 if (ind == 0) {
358 if (have_background_thread && !arena_is_huge(ind)) {
359 if (background_thread_create(tsdn_tsd(tsdn), ind)) {
361 "creation for arena %u. Abort.\n", ind);
368 arena_init(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks) {
372 arena = arena_init_locked(tsdn, ind, extent_hooks);
375 arena_new_create_background_thread(tsdn, ind);
381 arena_bind(tsd_t *tsd, unsigned ind, bool internal) {
382 arena_t *arena = arena_get(tsd_tsdn(tsd), ind, false);
412 arena_unbind(tsd_t *tsd, unsigned ind, bool internal) {
415 arena = arena_get(tsd_tsdn(tsd), ind, false);
426 arena_tdata_get_hard(tsd_t *tsd, unsigned ind) {
452 narenas_tdata = (ind < narenas_actual) ? narenas_actual : ind+1;
493 tdata = &arenas_tdata[ind];
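
All of the hits up to line 493 use ind as an arena index. arena_init_locked() builds an arena for slot ind of the global arenas array (bounded by MALLOCX_ARENA_LIMIT at line 326, and tolerant of another thread having already initialized arenas[ind], line 334), arena_set() publishes the pointer with release ordering (line 302), arena_new_create_background_thread() spawns the matching background thread for non-zero, non-huge indices, and arena_bind()/arena_unbind()/arena_tdata_get_hard() tie a thread to the index it allocates from. Below is a minimal, self-contained sketch of that publish-and-look-up pattern using plain C11 atomics; the names (demo_arena_t, demo_arena_set, demo_arena_get, DEMO_ARENA_LIMIT) are invented for the sketch, and the real code adds locking, lazy creation, and tsd/tsdn plumbing.

#include <stdatomic.h>
#include <stddef.h>
#include <stdio.h>

#define DEMO_ARENA_LIMIT 4              /* stand-in for MALLOCX_ARENA_LIMIT */

typedef struct {
        unsigned ind;                   /* each arena records its own index */
} demo_arena_t;

/* Global slot table, analogous to the arenas[] array in the hits above. */
static _Atomic(demo_arena_t *) demo_arenas[DEMO_ARENA_LIMIT];

/* Publish an arena pointer so readers observe a fully constructed arena. */
static void
demo_arena_set(unsigned ind, demo_arena_t *arena) {
        atomic_store_explicit(&demo_arenas[ind], arena, memory_order_release);
}

/* Read a slot; NULL means that index has not been initialized yet. */
static demo_arena_t *
demo_arena_get(unsigned ind) {
        if (ind >= DEMO_ARENA_LIMIT) {
                return NULL;
        }
        return atomic_load_explicit(&demo_arenas[ind], memory_order_acquire);
}

int
main(void) {
        static demo_arena_t a1 = {1};

        demo_arena_set(1, &a1);
        demo_arena_t *found = demo_arena_get(1);
        printf("arena at ind=1: %p (ind=%u)\n", (void *)found, found->ind);
        printf("arena at ind=3: %p (never initialized)\n",
            (void *)demo_arena_get(3));
        return 0;
}
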
1914 /* ind is ignored if dopts->alignment > 0. */
1917 size_t size, size_t usize, szind_t ind) {
1953 return iallocztm(tsd_tsdn(tsd), size, ind, dopts->zero, tcache, false,
1959 size_t usize, szind_t ind) {
1983 ret = imalloc_no_sample(sopts, dopts, tsd, usize, usize, ind);
2038 * For unaligned allocations, we need only ind. For aligned
2046 szind_t ind = 0;
2066 ind = sz_size2index(size);
2067 if (unlikely(ind >= SC_NSIZES)) {
2071 usize = sz_index2size(ind);
2128 sopts, dopts, tsd, usize, usize, ind);
2131 * Note that ind might still be 0 here. This is fine;
2132 * imalloc_sample ignores ind if dopts->alignment > 0.
2135 sopts, dopts, tsd, usize, ind);
2148 * If dopts->alignment > 0, then ind is still 0, but usize was
2150 * alignment path, imalloc_no_sample ignores ind and size
2154 ind);
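
From line 1914 onward ind changes meaning: it is a szind_t, the size-class index of the request. The slow allocation path computes it with sz_size2index() (line 2066), rejects out-of-range values against SC_NSIZES (line 2067), and maps it back to the rounded-up usable size with sz_index2size() (line 2071); when dopts->alignment > 0 the index stays 0 and is ignored, as the comments at lines 1914 and 2131-2132 note. The following is a rough sketch of that size-to-index round trip over a made-up size-class table; demo_classes, demo_size2index() and demo_index2size() are hypothetical, whereas jemalloc derives its real classes from the sc module.

#include <stddef.h>
#include <stdio.h>

/* Toy size-class table; the spacing of the real classes is more elaborate. */
static const size_t demo_classes[] = {8, 16, 32, 48, 64, 80, 96, 128};
#define DEMO_NSIZES (sizeof(demo_classes) / sizeof(demo_classes[0]))

/* Smallest class index able to hold the request, DEMO_NSIZES on overflow. */
static unsigned
demo_size2index(size_t size) {
        for (unsigned i = 0; i < DEMO_NSIZES; i++) {
                if (size <= demo_classes[i]) {
                        return i;
                }
        }
        return (unsigned)DEMO_NSIZES;   /* too large for the toy table */
}

/* Usable size for a class index, the analogue of sz_index2size(). */
static size_t
demo_index2size(unsigned ind) {
        return demo_classes[ind];
}

int
main(void) {
        size_t request = 40;
        unsigned ind = demo_size2index(request);

        if (ind >= DEMO_NSIZES) {
                puts("request too large for the toy table");
                return 1;
        }
        /* usize is the rounded-up size the allocator actually hands out. */
        size_t usize = demo_index2size(ind);
        printf("size=%zu -> ind=%u -> usize=%zu\n", request, ind, usize);
        return 0;
}
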
2345 szind_t ind = sz_size2index_lookup(size);
2348 usize = sz_index2size(ind);
2351 assert(ind < SC_NBINS);
2373 cache_bin_t *bin = tcache_small_bin_get(tcache, ind);
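
The final group of hits (lines 2345-2373) is the small-allocation fast path: sz_size2index_lookup() resolves the size class from a lookup table, the assertion at line 2351 keeps the index within the small bins (SC_NBINS), and tcache_small_bin_get() picks the per-thread cache bin for that index. A simplified stand-alone model of "one bin per small size-class index" follows; demo_tcache_t, demo_small_bin_get() and demo_bin_pop() are invented names, and the real cache bins additionally track fill levels and refill from the arena on a miss.

#include <stddef.h>
#include <stdio.h>

#define DEMO_NBINS 4    /* stand-in for SC_NBINS: bins exist for small classes only */

/* A cache bin is a small stack of pointers ready to be handed out. */
typedef struct {
        void    *slots[8];
        int     ncached;
} demo_bin_t;

/* Per-thread cache: one bin per small size-class index. */
typedef struct {
        demo_bin_t      bins[DEMO_NBINS];
} demo_tcache_t;

/* Analogue of tcache_small_bin_get(): index the bin array by size class. */
static demo_bin_t *
demo_small_bin_get(demo_tcache_t *tcache, unsigned ind) {
        return &tcache->bins[ind];
}

/* Pop a cached allocation, or NULL when the bin is empty (the miss path). */
static void *
demo_bin_pop(demo_bin_t *bin) {
        if (bin->ncached == 0) {
                return NULL;
        }
        return bin->slots[--bin->ncached];
}

int
main(void) {
        static char buf[32];
        demo_tcache_t tcache = {0};
        unsigned ind = 2;       /* size class chosen by a size->index lookup */

        demo_bin_t *bin = demo_small_bin_get(&tcache, ind);
        bin->slots[bin->ncached++] = buf;       /* pretend a free refilled the bin */

        void *p = demo_bin_pop(bin);
        printf("fast-path hit for ind=%u: %p\n", ind, p);
        printf("next pop misses: %p\n", demo_bin_pop(bin));
        return 0;
}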