Lines Matching refs:tsd in /freebsd-12-stable/contrib/jemalloc/src/

56 bool background_thread_create(tsd_t *tsd, unsigned arena_ind) NOT_REACHED
57 bool background_threads_enable(tsd_t *tsd) NOT_REACHED
58 bool background_threads_disable(tsd_t *tsd) NOT_REACHED
313 background_threads_disable_single(tsd_t *tsd, background_thread_info_t *info) {
315 malloc_mutex_assert_owner(tsd_tsdn(tsd),
318 malloc_mutex_assert_not_owner(tsd_tsdn(tsd),
322 pre_reentrancy(tsd, NULL);
323 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
333 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
336 post_reentrancy(tsd);
341 post_reentrancy(tsd);
346 post_reentrancy(tsd);
386 check_background_thread_creation(tsd_t *tsd, unsigned *n_created,
393 tsdn_t *tsdn = tsd_tsdn(tsd);
411 pre_reentrancy(tsd, NULL);
414 post_reentrancy(tsd);
436 background_thread0_work(tsd_t *tsd) {
446 if (background_thread_pause_check(tsd_tsdn(tsd),
450 if (check_background_thread_creation(tsd, &n_created,
454 background_work_sleep_once(tsd_tsdn(tsd),
467 background_threads_disable_single(tsd, info);
469 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
477 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
485 background_work(tsd_t *tsd, unsigned ind) {
488 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
489 background_thread_wakeup_time_set(tsd_tsdn(tsd), info,
492 background_thread0_work(tsd);
495 if (background_thread_pause_check(tsd_tsdn(tsd),
499 background_work_sleep_once(tsd_tsdn(tsd), info, ind);
503 background_thread_wakeup_time_set(tsd_tsdn(tsd), info, 0);
504 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
518 * Start periodic background work. We use internal tsd which avoids
530 background_thread_init(tsd_t *tsd, background_thread_info_t *info) {
531 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
533 background_thread_info_init(tsd_tsdn(tsd), info);
539 background_thread_create(tsd_t *tsd, unsigned arena_ind) {
541 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
548 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
552 background_thread_init(tsd, info);
554 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
561 malloc_mutex_lock(tsd_tsdn(tsd), &t0->mtx);
564 malloc_mutex_unlock(tsd_tsdn(tsd), &t0->mtx);
569 pre_reentrancy(tsd, NULL);
576 post_reentrancy(tsd);
581 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
584 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
593 background_threads_enable(tsd_t *tsd) {
596 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
610 arena_get(tsd_tsdn(tsd), i, false) == NULL) {
615 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
617 background_thread_init(tsd, info);
618 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
625 return background_thread_create(tsd, 0);
629 background_threads_disable(tsd_t *tsd) {
631 malloc_mutex_assert_owner(tsd_tsdn(tsd), &background_thread_lock);
634 if (background_threads_disable_single(tsd,
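
The matches above show a consistent pattern: each background-thread entry point takes a tsd_t * (the caller's thread-specific data), derives a tsdn_t * via tsd_tsdn(tsd) wherever a mutex is asserted, locked, or unlocked, and brackets calls that may re-enter the allocator (for example, spawning a background thread) with pre_reentrancy()/post_reentrancy(). A minimal sketch of that pattern, assuming jemalloc's internal headers and using a hypothetical helper name (example_locked_op is not part of jemalloc):

    /* Hypothetical illustration of the tsd pattern above; not jemalloc code. */
    #include "jemalloc/internal/jemalloc_preamble.h"
    #include "jemalloc/internal/jemalloc_internal_includes.h"

    static void
    example_locked_op(tsd_t *tsd, background_thread_info_t *info) {
        /* Guard against reentrant allocation before calling out. */
        pre_reentrancy(tsd, NULL);
        /* Mutex APIs take a tsdn_t *, derived from the caller's tsd. */
        malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
        /* ... work on *info, possibly including an external call ... */
        malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
        post_reentrancy(tsd);
    }

The same tsd_tsdn() lock/unlock bracketing appears at lines 323/333, 488/504, and 548/554 above, and the reentrancy guard at 322/336, 411/414, and 569/576.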