Searched refs:smp_mb__after_spinlock (Results 1 - 12 of 12) sorted by relevance

/linux-master/arch/csky/include/asm/
H A Dspinlock.h10 #define smp_mb__after_spinlock() smp_mb() macro
/linux-master/arch/xtensa/include/asm/
H A Dspinlock.h18 #define smp_mb__after_spinlock() smp_mb() macro
/linux-master/arch/arm64/include/asm/
H A Dspinlock.h12 #define smp_mb__after_spinlock() smp_mb() macro
/linux-master/arch/powerpc/include/asm/
H A Dspinlock.h14 #define smp_mb__after_spinlock() smp_mb() macro
/linux-master/arch/riscv/include/asm/
H A Dbarrier.h54 * smp_mb__after_spinlock()
71 #define smp_mb__after_spinlock() RISCV_FENCE(iorw, iorw) macro
/linux-master/kernel/kcsan/
H A Dselftest.c148 KCSAN_CHECK_READ_BARRIER(smp_mb__after_spinlock());
177 KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_spinlock());
209 KCSAN_CHECK_RW_BARRIER(smp_mb__after_spinlock());
H A Dkcsan_test.c578 KCSAN_EXPECT_READ_BARRIER(smp_mb__after_spinlock(), true);
623 KCSAN_EXPECT_WRITE_BARRIER(smp_mb__after_spinlock(), true);
668 KCSAN_EXPECT_RW_BARRIER(smp_mb__after_spinlock(), true);
/linux-master/include/linux/
H A Dspinlock.h125 * smp_mb__after_spinlock() provides the equivalent of a full memory barrier
139 * smp_mb__after_spinlock(); r1 = READ_ONCE(X);
145 * preceding the call to smp_mb__after_spinlock() in __schedule() and in
155 * WRITE_ONCE(X, 1); smp_mb__after_spinlock(); smp_rmb();
175 #ifndef smp_mb__after_spinlock
176 #define smp_mb__after_spinlock() kcsan_mb() macro
/linux-master/kernel/
H A Dkthread.c1489 smp_mb__after_spinlock();
H A Dexit.c560 smp_mb__after_spinlock();
/linux-master/kernel/rcu/
H A Dtree_nocb.h1061 smp_mb__after_spinlock(); /* Timer expire before wakeup. */
/linux-master/kernel/sched/
H A Dcore.c1820 * // link thread smp_mb__after_spinlock()
1830 smp_mb__after_spinlock();
4263 smp_mb__after_spinlock();
4280 * smp_mb__after_spinlock();
4286 * Pairs with the LOCK+smp_mb__after_spinlock() on rq->lock in
4287 * __schedule(). See the comment for smp_mb__after_spinlock().
4309 * smp_mb__after_spinlock();
4312 * Pairs with the LOCK+smp_mb__after_spinlock() on rq->lock in
4313 * __schedule(). See the comment for smp_mb__after_spinlock().
6646 * smp_mb__after_spinlock() smp_mb__after_spinlock()
[all...]

Completed in 298 milliseconds