Searched refs:smp_cond_load_acquire (Results 1 - 8 of 8) sorted by path

/linux-master/arch/arm64/include/asm/
barrier.h:202 #define smp_cond_load_acquire(ptr, cond_expr) \
mte.h:87 smp_cond_load_acquire(&page->flags, VAL & (1UL << PG_mte_tagged));
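
Note: the mte.h hit spin-waits for another CPU to finish tagging a page before its tags may be read. A minimal sketch of that pattern, with a hypothetical wrapper name (the real arm64 caller is shaped differently):

    /* Spin until PG_mte_tagged appears in page->flags. VAL is the
     * variable smp_cond_load_acquire() binds to each loaded value;
     * the ACQUIRE on the final load orders subsequent tag reads
     * after the tag-setter's stores. */
    static inline void mte_wait_for_tagging(struct page *page) /* hypothetical */
    {
        smp_cond_load_acquire(&page->flags, VAL & (1UL << PG_mte_tagged));
    }
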
/linux-master/include/asm-generic/
barrier.h:260 * smp_cond_load_acquire() - (Spin) wait for cond with ACQUIRE ordering
267 #ifndef smp_cond_load_acquire
268 #define smp_cond_load_acquire(ptr, cond_expr) ({ \
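
Note: the asm-generic fallback is a READ_ONCE() spin loop whose successful exit is upgraded to ACQUIRE ordering. A condensed sketch of its shape (the in-tree macro additionally routes through smp_cond_load_relaxed() and __unqual_scalar_typeof()):

    #define smp_cond_load_acquire(ptr, cond_expr) ({            \
        typeof(ptr) __PTR = (ptr);                               \
        typeof(*ptr) VAL;                                        \
        for (;;) {                                               \
            VAL = READ_ONCE(*__PTR);                             \
            if (cond_expr)                                       \
                break;                                           \
            cpu_relax();    /* yield to SMT siblings */          \
        }                                                        \
        smp_acquire__after_ctrl_dep(); /* ctrl-dep -> ACQUIRE */ \
        VAL;                                                     \
    })
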
/linux-master/include/linux/
atomic.h:28 #define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
31 #define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
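
Note: the atomic.h wrappers simply aim the macro at the embedded counter, so callers can condition-wait on an atomic_t directly. A usage sketch, with hypothetical names:

    static atomic_t ready = ATOMIC_INIT(0);    /* hypothetical flag */

    void producer(void)    /* hypothetical */
    {
        /* publish data, then flip the flag with RELEASE */
        atomic_set_release(&ready, 1);
    }

    void consumer(void)    /* hypothetical */
    {
        /* spin until nonzero; the final load is ACQUIRE, pairing
         * with the producer's release store */
        int seen = atomic_cond_read_acquire(&ready, VAL != 0);
        (void)seen;
    }
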
/linux-master/kernel/locking/
mcs_spinlock.h:26 * Using smp_cond_load_acquire() provides the acquire semantics
30 * spinning, and smp_cond_load_acquire() provides that behavior.
34 smp_cond_load_acquire(l, VAL); \
qspinlock.c:383 smp_cond_load_acquire(&lock->locked, !VAL);
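
Note: both locking hits are the same spin-until-handover idiom with opposite condition polarity; sketched together (context abbreviated):

    /* mcs_spinlock.h: a queued waiter spins until its predecessor
     * stores a nonzero value into its node, handing it the lock. */
    #define arch_mcs_spin_lock_contended(l)                  \
    do {                                                     \
        smp_cond_load_acquire(l, VAL);                       \
    } while (0)

    /* qspinlock.c: a pending waiter spins until the owner's locked
     * byte drops to zero, then owns the lock with ACQUIRE semantics. */
    smp_cond_load_acquire(&lock->locked, !VAL);
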
/linux-master/kernel/sched/
core.c:2766 * smp_cond_load_acquire(&p->on_cpu, !VAL), and we're in __schedule()
3890 smp_cond_load_acquire(&p->on_cpu, !VAL);
4160 * 2) smp_cond_load_acquire(!X->on_cpu) -- try_to_wake_up()
4171 * smp_cond_load_acquire(&X->on_cpu, !VAL);
4272 * in smp_cond_load_acquire() below.
4342 * smp_mb__after_spin_lock() smp_cond_load_acquire(&p->on_cpu)
4361 smp_cond_load_acquire(&p->on_cpu, !VAL);
4415 smp_cond_load_acquire(&p->on_cpu, !VAL);
5031 * Pairs with the smp_cond_load_acquire() in try_to_wake_up().
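
Note: the core.c hits all guard one invariant: try_to_wake_up() must not place a task on a new runqueue until its old CPU has completely finished with it. Condensed from the hits above (surrounding context elided):

    /*
     * ->on_cpu is cleared with a RELEASE store when the previous CPU
     * finishes the context switch, so this ACQUIRE guarantees we see
     * the task's saved state before selecting a CPU for it.
     */
    smp_cond_load_acquire(&p->on_cpu, !VAL);
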
/linux-master/kernel/
smp.c:311 smp_cond_load_acquire(&csd->node.u_flags, !(VAL & CSD_FLAG_LOCK));
320 smp_cond_load_acquire(&csd->node.u_flags, !(VAL & CSD_FLAG_LOCK));
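
Note: both smp.c hits implement the same wait: a CPU reusing a call_single_data slot must spin until the previous cross-CPU call releases it. A sketch matching the hits above (signature abbreviated):

    /* Spin until the prior owner drops CSD_FLAG_LOCK; the ACQUIRE
     * orders our later writes to the csd after that unlock. */
    static void csd_lock_wait(struct __call_single_data *csd)
    {
        smp_cond_load_acquire(&csd->node.u_flags,
                              !(VAL & CSD_FLAG_LOCK));
    }
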

Completed in 292 milliseconds