Lines Matching refs:lck in /freebsd-13-stable/contrib/llvm-project/openmp/runtime/src/

860   ompt_wait_id_t lck;
865 lck = (ompt_wait_id_t)(uintptr_t)&team->t.t_ordered.dt.t_value;
867 th->th.ompt_thread_info.wait_id = lck;
874 ompt_mutex_ordered, omp_lock_hint_none, kmp_mutex_impl_spin, lck,
894 ompt_mutex_ordered, (ompt_wait_id_t)(uintptr_t)lck, codeptr_ra);
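
The fragments above (lines 860-894) build the OMPT wait id for the ordered region from the address of the ordered-section counter, store it in the thread's OMPT info, and pass it to the mutex acquire/released callbacks for ompt_mutex_ordered. A minimal tool sketch, assuming the standard OpenMP 5.x tools interface from omp-tools.h, shows where those wait_id and codeptr_ra values are delivered; the printing callbacks and the tool itself are illustrative only, not part of the runtime.

#include <omp-tools.h>
#include <cinttypes>
#include <cstdio>

// Illustrative callbacks: they receive the mutex kind (e.g. ompt_mutex_ordered,
// ompt_mutex_critical), the wait_id built from the lock address, and the
// caller's return address.
static void on_mutex_acquire(ompt_mutex_t kind, unsigned int hint,
                             unsigned int impl, ompt_wait_id_t wait_id,
                             const void *codeptr_ra) {
  std::printf("acquire  kind=%d wait_id=%" PRIu64 " ra=%p\n", (int)kind,
              (uint64_t)wait_id, codeptr_ra);
}

static void on_mutex_released(ompt_mutex_t kind, ompt_wait_id_t wait_id,
                              const void *codeptr_ra) {
  std::printf("released kind=%d wait_id=%" PRIu64 " ra=%p\n", (int)kind,
              (uint64_t)wait_id, codeptr_ra);
}

static int tool_initialize(ompt_function_lookup_t lookup,
                           int initial_device_num, ompt_data_t *tool_data) {
  ompt_set_callback_t set_callback =
      (ompt_set_callback_t)lookup("ompt_set_callback");
  set_callback(ompt_callback_mutex_acquire, (ompt_callback_t)on_mutex_acquire);
  set_callback(ompt_callback_mutex_released, (ompt_callback_t)on_mutex_released);
  return 1; // non-zero keeps the tool active
}

static void tool_finalize(ompt_data_t *tool_data) {}

// The runtime looks this symbol up by name, so it needs C linkage in C++.
extern "C" ompt_start_tool_result_t *
ompt_start_tool(unsigned int omp_version, const char *runtime_version) {
  static ompt_start_tool_result_t result = {tool_initialize, tool_finalize, {0}};
  return &result;
}

Such a tool would be built as a shared object and activated through OMP_TOOL_LIBRARIES, or linked directly into the program.
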
949 kmp_indirect_lock_t **lck;
950 lck = (kmp_indirect_lock_t **)crit;
960 int status = KMP_COMPARE_AND_STORE_PTR(lck, nullptr, ilk);
969 KMP_DEBUG_ASSERT(*lck != NULL);
1093 kmp_user_lock_p lck = (kmp_user_lock_p)TCR_PTR(*lck_pp);
1095 if (lck == NULL) {
1100 lck = __kmp_user_lock_allocate(&idx, gtid, kmp_lf_critical_section);
1101 __kmp_init_user_lock_with_checks(lck);
1102 __kmp_set_user_lock_location(lck, loc);
1104 __kmp_itt_critical_creating(lck);
1115 int status = KMP_COMPARE_AND_STORE_PTR(lck_pp, 0, lck);
1120 __kmp_itt_critical_destroyed(lck);
1124 __kmp_destroy_user_lock_with_checks(lck);
1125 __kmp_user_lock_free(&idx, gtid, lck);
1126 lck = (kmp_user_lock_p)TCR_PTR(*lck_pp);
1127 KMP_DEBUG_ASSERT(lck != NULL);
1130 return lck;
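
Lines 949-969 and 1093-1130 both implement lazy creation of the lock that backs a critical section: the pointer hidden behind the critical name is read, a lock is allocated and initialized if it is still NULL, and KMP_COMPARE_AND_STORE_PTR publishes it; a thread that loses the race destroys its own lock and re-reads the winner's. A generic sketch of that publish-or-discard pattern, using hypothetical names (my_lock, slot, get_critical_lock) and std::atomic in place of the runtime's macros:

#include <atomic>
#include <mutex>

struct my_lock { std::mutex m; };            // stand-in for a kmp user lock

// One slot per critical section, analogous to the pointer stored behind
// the critical name. Starts out null; the first thread to arrive fills it.
static std::atomic<my_lock *> slot{nullptr};

my_lock *get_critical_lock() {
  my_lock *lck = slot.load(std::memory_order_acquire);
  if (lck == nullptr) {
    my_lock *fresh = new my_lock();          // allocate + init a fresh lock
    my_lock *expected = nullptr;
    if (slot.compare_exchange_strong(expected, fresh,
                                     std::memory_order_acq_rel)) {
      lck = fresh;                           // we published our lock
    } else {
      delete fresh;                          // lost the race: discard ours...
      lck = slot.load(std::memory_order_acquire); // ...and use the winner's
    }
  }
  return lck;
}

Either way, every thread ends up returning the same lock pointer for that critical name.
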
1158 kmp_user_lock_p lck;
1168 (sizeof(lck->tas.lk.poll) <= OMP_CRITICAL_SIZE)) {
1169 lck = (kmp_user_lock_p)crit;
1173 (sizeof(lck->futex.lk.poll) <= OMP_CRITICAL_SIZE)) {
1174 lck = (kmp_user_lock_p)crit;
1178 lck = __kmp_get_critical_section_ptr(crit, loc, global_tid);
1182 __kmp_push_sync(global_tid, ct_critical, loc, lck);
1190 __kmp_itt_critical_acquiring(lck);
1199 ti.wait_id = (ompt_wait_id_t)(uintptr_t)lck;
1207 (ompt_wait_id_t)(uintptr_t)lck, codeptr_ra);
1213 __kmp_acquire_user_lock_with_checks(lck, global_tid);
1216 __kmp_itt_critical_acquired(lck);
1227 ompt_mutex_critical, (ompt_wait_id_t)(uintptr_t)lck, codeptr_ra);
1372 kmp_user_lock_p lck;
1400 lck = (kmp_user_lock_p)lk;
1402 __kmp_push_sync(global_tid, ct_critical, loc, lck,
1406 __kmp_itt_critical_acquiring(lck);
1413 ti.wait_id = (ompt_wait_id_t)(uintptr_t)lck;
1420 __ompt_get_mutex_impl_type(crit), (ompt_wait_id_t)(uintptr_t)lck,
1427 KMP_ACQUIRE_TAS_LOCK(lck, global_tid);
1431 KMP_ACQUIRE_FUTEX_LOCK(lck, global_tid);
1439 lck = ilk->lock;
1441 __kmp_push_sync(global_tid, ct_critical, loc, lck,
1445 __kmp_itt_critical_acquiring(lck);
1452 ti.wait_id = (ompt_wait_id_t)(uintptr_t)lck;
1459 __ompt_get_mutex_impl_type(0, ilk), (ompt_wait_id_t)(uintptr_t)lck,
1464 KMP_I_LOCK_FUNC(ilk, set)(lck, global_tid);
1469 __kmp_itt_critical_acquired(lck);
1480 ompt_mutex_critical, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
1502 kmp_user_lock_p lck;
1508 lck = (kmp_user_lock_p)crit;
1509 KMP_ASSERT(lck != NULL);
1514 __kmp_itt_critical_releasing(lck);
1518 KMP_RELEASE_TAS_LOCK(lck, global_tid);
1522 KMP_RELEASE_FUTEX_LOCK(lck, global_tid);
1526 KMP_D_LOCK_FUNC(lck, unset)((kmp_dyna_lock_t *)lck, global_tid);
1532 lck = ilk->lock;
1537 __kmp_itt_critical_releasing(lck);
1539 KMP_I_LOCK_FUNC(ilk, unset)(lck, global_tid);
1545 (sizeof(lck->tas.lk.poll) <= OMP_CRITICAL_SIZE)) {
1546 lck = (kmp_user_lock_p)crit;
1550 (sizeof(lck->futex.lk.poll) <= OMP_CRITICAL_SIZE)) {
1551 lck = (kmp_user_lock_p)crit;
1555 lck = (kmp_user_lock_p)TCR_PTR(*((kmp_user_lock_p *)crit));
1558 KMP_ASSERT(lck != NULL);
1564 __kmp_itt_critical_releasing(lck);
1568 __kmp_release_user_lock_with_checks(lck, global_tid);
1578 ompt_mutex_critical, (ompt_wait_id_t)(uintptr_t)lck,
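
The clusters at 1158-1227, 1372-1480 and 1502-1578 are the acquire and release paths behind critical sections (ct_critical, the __kmp_itt_critical_* hooks and ompt_mutex_critical all appear), including a hinted variant that can select a TAS or futex lock. At the source level this machinery serves the critical construct; a minimal user-level example:

#include <cstdio>
#include <omp.h>

int main() {
  long sum = 0;
  #pragma omp parallel for
  for (int i = 0; i < 1000; ++i) {
    // Entry and exit of this region are what the runtime's critical-section
    // acquire and release paths implement.
    #pragma omp critical
    sum += i;
  }
  std::printf("%ld\n", sum);
  return 0;
}
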
2263 kmp_user_lock_p lck;
2275 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) {
2276 lck = (kmp_user_lock_p)user_lock;
2280 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) {
2281 lck = (kmp_user_lock_p)user_lock;
2285 lck = __kmp_user_lock_allocate(user_lock, gtid, 0);
2287 INIT_LOCK(lck);
2288 __kmp_set_user_lock_location(lck, loc);
2303 __kmp_itt_lock_creating(lck);
2335 kmp_user_lock_p lck;
2347 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <=
2349 lck = (kmp_user_lock_p)user_lock;
2353 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <=
2355 lck = (kmp_user_lock_p)user_lock;
2359 lck = __kmp_user_lock_allocate(user_lock, gtid, 0);
2362 INIT_NESTED_LOCK(lck);
2363 __kmp_set_user_lock_location(lck, loc);
2378 __kmp_itt_lock_creating(lck);
2388 kmp_user_lock_p lck;
2390 lck = ((kmp_indirect_lock_t *)KMP_LOOKUP_I_LOCK(user_lock))->lock;
2392 lck = (kmp_user_lock_p)user_lock;
2394 __kmp_itt_lock_destroyed(lck);
2402 kmp_user_lock_p lck;
2404 lck = ((kmp_indirect_lock_t *)KMP_LOOKUP_I_LOCK(user_lock))->lock;
2406 lck = (kmp_user_lock_p)user_lock;
2414 kmp_user_lock_p lck;
2417 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) {
2418 lck = (kmp_user_lock_p)user_lock;
2422 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) {
2423 lck = (kmp_user_lock_p)user_lock;
2427 lck = __kmp_lookup_user_lock(user_lock, "omp_destroy_lock");
2442 __kmp_itt_lock_destroyed(lck);
2444 DESTROY_LOCK(lck);
2447 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) {
2452 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) {
2457 __kmp_user_lock_free(user_lock, gtid, lck);
2484 kmp_user_lock_p lck;
2487 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <=
2489 lck = (kmp_user_lock_p)user_lock;
2493 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <=
2495 lck = (kmp_user_lock_p)user_lock;
2499 lck = __kmp_lookup_user_lock(user_lock, "omp_destroy_nest_lock");
2514 __kmp_itt_lock_destroyed(lck);
2517 DESTROY_NESTED_LOCK(lck);
2520 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <=
2526 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <=
2532 __kmp_user_lock_free(user_lock, gtid, lck);
2582 kmp_user_lock_p lck;
2585 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) {
2586 lck = (kmp_user_lock_p)user_lock;
2590 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) {
2591 lck = (kmp_user_lock_p)user_lock;
2595 lck = __kmp_lookup_user_lock(user_lock, "omp_set_lock");
2599 __kmp_itt_lock_acquiring(lck);
2609 (ompt_wait_id_t)(uintptr_t)lck, codeptr);
2613 ACQUIRE_LOCK(lck, gtid);
2616 __kmp_itt_lock_acquired(lck);
2622 ompt_mutex_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
2677 kmp_user_lock_p lck;
2680 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <=
2682 lck = (kmp_user_lock_p)user_lock;
2686 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <=
2688 lck = (kmp_user_lock_p)user_lock;
2692 lck = __kmp_lookup_user_lock(user_lock, "omp_set_nest_lock");
2696 __kmp_itt_lock_acquiring(lck);
2707 __ompt_get_mutex_impl_type(), (ompt_wait_id_t)(uintptr_t)lck,
2713 ACQUIRE_NESTED_LOCK(lck, gtid, &acquire_status);
2716 __kmp_itt_lock_acquired(lck);
2725 ompt_mutex_nest_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
2731 ompt_scope_begin, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
2773 kmp_user_lock_p lck;
2779 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) {
2796 ompt_mutex_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
2802 lck = (kmp_user_lock_p)user_lock;
2807 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) {
2808 lck = (kmp_user_lock_p)user_lock;
2812 lck = __kmp_lookup_user_lock(user_lock, "omp_unset_lock");
2816 __kmp_itt_lock_releasing(lck);
2819 RELEASE_LOCK(lck, gtid);
2828 ompt_mutex_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
2869 kmp_user_lock_p lck;
2874 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <=
2906 ompt_mutex_nest_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
2911 ompt_mutex_scope_end, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
2918 lck = (kmp_user_lock_p)user_lock;
2923 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <=
2925 lck = (kmp_user_lock_p)user_lock;
2929 lck = __kmp_lookup_user_lock(user_lock, "omp_unset_nest_lock");
2933 __kmp_itt_lock_releasing(lck);
2937 release_status = RELEASE_NESTED_LOCK(lck, gtid);
2948 ompt_mutex_nest_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
2953 ompt_mutex_scope_end, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
3015 kmp_user_lock_p lck;
3019 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) {
3020 lck = (kmp_user_lock_p)user_lock;
3024 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) {
3025 lck = (kmp_user_lock_p)user_lock;
3029 lck = __kmp_lookup_user_lock(user_lock, "omp_test_lock");
3033 __kmp_itt_lock_acquiring(lck);
3043 (ompt_wait_id_t)(uintptr_t)lck, codeptr);
3047 rc = TEST_LOCK(lck, gtid);
3050 __kmp_itt_lock_acquired(lck);
3052 __kmp_itt_lock_cancelled(lck);
3058 ompt_mutex_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
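
The strings passed to __kmp_lookup_user_lock in the clusters above ("omp_destroy_lock", "omp_set_lock", "omp_unset_lock", "omp_test_lock") identify them as the runtime side of the standard simple-lock routines from omp.h. A short lifecycle example of that user-facing API, with comments pointing back at the matching clusters:

#include <cstdio>
#include <omp.h>

int main() {
  omp_lock_t lock;
  omp_init_lock(&lock);              // lock-creation path around line 2263

  #pragma omp parallel
  {
    if (omp_test_lock(&lock)) {      // non-blocking attempt (test path, ~3015)
      std::printf("thread %d got the lock without waiting\n",
                  omp_get_thread_num());
      omp_unset_lock(&lock);         // release path, ~2773
    } else {
      omp_set_lock(&lock);           // blocking acquire (set path, ~2582)
      std::printf("thread %d waited for the lock\n", omp_get_thread_num());
      omp_unset_lock(&lock);
    }
  }

  omp_destroy_lock(&lock);           // destroy path around line 2414
  return 0;
}
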
3118 kmp_user_lock_p lck;
3122 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <=
3124 lck = (kmp_user_lock_p)user_lock;
3128 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <=
3130 lck = (kmp_user_lock_p)user_lock;
3134 lck = __kmp_lookup_user_lock(user_lock, "omp_test_nest_lock");
3138 __kmp_itt_lock_acquiring(lck);
3150 __ompt_get_mutex_impl_type(), (ompt_wait_id_t)(uintptr_t)lck,
3155 rc = TEST_NESTED_LOCK(lck, gtid);
3158 __kmp_itt_lock_acquired(lck);
3160 __kmp_itt_lock_cancelled(lck);
3169 ompt_mutex_nest_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
3175 ompt_mutex_scope_begin, (ompt_wait_id_t)(uintptr_t)lck, codeptr);
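
The nested-lock variants differ mainly in that the size checks also count the lock's depth_locked field and the set/unset paths track an acquire or release status. At the user level this corresponds to omp_nest_lock_t, which the owning thread may re-acquire, with omp_test_nest_lock returning the new nesting depth:

#include <cstdio>
#include <omp.h>

int main() {
  omp_nest_lock_t nlock;
  omp_init_nest_lock(&nlock);

  omp_set_nest_lock(&nlock);              // depth 1
  int depth = omp_test_nest_lock(&nlock); // owner may re-enter; returns 2
  std::printf("nesting depth after re-entry: %d\n", depth);

  omp_unset_nest_lock(&nlock);            // back to depth 1
  omp_unset_nest_lock(&nlock);            // fully released
  omp_destroy_nest_lock(&nlock);
  return 0;
}
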
3212 kmp_user_lock_p lck;
3231 lck = (kmp_user_lock_p)lk;
3232 KMP_DEBUG_ASSERT(lck != NULL);
3234 __kmp_push_sync(global_tid, ct_critical, loc, lck, __kmp_user_lock_seq);
3239 lck = ilk->lock;
3240 KMP_DEBUG_ASSERT(lck != NULL);
3242 __kmp_push_sync(global_tid, ct_critical, loc, lck, __kmp_user_lock_seq);
3244 KMP_I_LOCK_FUNC(ilk, set)(lck, global_tid);
3253 lck = (kmp_user_lock_p)crit;
3255 lck = __kmp_get_critical_section_ptr(crit, loc, global_tid);
3257 KMP_DEBUG_ASSERT(lck != NULL);
3260 __kmp_push_sync(global_tid, ct_critical, loc, lck);
3262 __kmp_acquire_user_lock_with_checks(lck, global_tid);
3272 kmp_user_lock_p lck;
3277 lck = (kmp_user_lock_p)crit;
3280 KMP_D_LOCK_FUNC(lck, unset)((kmp_dyna_lock_t *)lck, global_tid);
3295 lck = *((kmp_user_lock_p *)crit);
3296 KMP_ASSERT(lck != NULL);
3298 lck = (kmp_user_lock_p)crit;
3304 __kmp_release_user_lock_with_checks(lck, global_tid);
3354 @param lck pointer to the unique lock data structure
3364 kmp_critical_name *lck) {
3414 loc, global_tid, num_vars, reduce_size, reduce_data, reduce_func, lck);
3422 __kmp_enter_critical_section_reduce_block(loc, global_tid, lck);
3516 @param lck pointer to the unique lock data structure
3521 kmp_critical_name *lck) {
3533 __kmp_end_critical_section_reduce_block(loc, global_tid, lck);
3582 @param lck pointer to the unique lock data structure
3591 kmp_critical_name *lck) {
3625 loc, global_tid, num_vars, reduce_size, reduce_data, reduce_func, lck);
3633 __kmp_enter_critical_section_reduce_block(loc, global_tid, lck);
3703 @param lck pointer to the unique lock data structure
3706 The <tt>lck</tt> pointer must be the same as that used in the corresponding
3710 kmp_critical_name *lck) {
3729 __kmp_end_critical_section_reduce_block(loc, global_tid, lck);
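
Lines 3354-3729 belong to the reduction entry points: each takes a kmp_critical_name *lck, may fall back to __kmp_enter_critical_section_reduce_block / __kmp_end_critical_section_reduce_block, and the comment at 3706 requires the end call to pass the same lck as the corresponding start call. With this runtime that pairing is normally generated by the compiler when it lowers a reduction clause; a user-level example of the construct involved:

#include <cstdio>
#include <omp.h>

int main() {
  double sum = 0.0;
  // The lck argument never appears in user code: the compiler supplies a
  // statically allocated kmp_critical_name and passes the same one to the
  // begin and end calls emitted for this reduction clause.
  #pragma omp parallel for reduction(+ : sum)
  for (int i = 1; i <= 100; ++i)
    sum += 1.0 / i;
  std::printf("harmonic(100) = %f\n", sum);
  return 0;
}
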