Searched refs:arch_spin_unlock (Results 1 - 25 of 48) sorted by relevance

1 2

/linux-master/arch/parisc/include/asm/
H A D spinlock.h 49 static inline void arch_spin_unlock(arch_spinlock_t *x) function
96 arch_spin_unlock(&(rw->lock_mutex));
121 arch_spin_unlock(&(rw->lock_mutex));
146 arch_spin_unlock(&(rw->lock_mutex));
157 arch_spin_unlock(&(rw->lock_mutex));
H A D futex.h 30 arch_spin_unlock(s);
/linux-master/include/linux/
H A D spinlock_up.h 45 static inline void arch_spin_unlock(arch_spinlock_t *lock) function
65 # define arch_spin_unlock(lock) do { barrier(); (void)(lock); } while (0) macro
/linux-master/arch/arc/include/asm/
H A D spinlock.h 67 static inline void arch_spin_unlock(arch_spinlock_t *lock) function
261 static inline void arch_spin_unlock(arch_spinlock_t *lock) function
315 arch_spin_unlock(&(rw->lock_mutex));
340 arch_spin_unlock(&(rw->lock_mutex));
365 arch_spin_unlock(&(rw->lock_mutex));
376 arch_spin_unlock(&(rw->lock_mutex));
H A D smp.h 117 arch_spin_unlock(&smp_atomic_ops_lock); \
/linux-master/kernel/locking/
H A D qrwlock.c 56 arch_spin_unlock(&lock->wait_lock);
88 arch_spin_unlock(&lock->wait_lock);
/linux-master/tools/include/linux/
H A D spinlock.h 31 static inline void arch_spin_unlock(arch_spinlock_t *mutex) function
/linux-master/arch/alpha/include/asm/
H A D spinlock.h 24 static inline void arch_spin_unlock(arch_spinlock_t * lock) function
/linux-master/arch/sh/include/asm/
H A D spinlock-llsc.h 46 static inline void arch_spin_unlock(arch_spinlock_t *lock) function
53 "mov #1, %0 ! arch_spin_unlock \n\t"
H A D spinlock-cas.h 33 static inline void arch_spin_unlock(arch_spinlock_t *lock) function
/linux-master/arch/hexagon/include/asm/
H A D spinlock.h 130 static inline void arch_spin_unlock(arch_spinlock_t *lock) function
/linux-master/kernel/trace/
H A D trace_clock.c 139 arch_spin_unlock(&trace_clock_struct.lock);
H A D trace_sched_switch.c 277 arch_spin_unlock(&trace_cmdline_lock);
316 arch_spin_unlock(&trace_cmdline_lock);
558 arch_spin_unlock(&trace_cmdline_lock);
607 arch_spin_unlock(&trace_cmdline_lock);
630 arch_spin_unlock(&trace_cmdline_lock);
H A D trace_stack.c 282 arch_spin_unlock(&stack_trace_max_lock);
360 arch_spin_unlock(&stack_trace_max_lock);
410 arch_spin_unlock(&stack_trace_max_lock);
/linux-master/include/asm-generic/
H A D spinlock.h 63 static __always_inline void arch_spin_unlock(arch_spinlock_t *lock) function
H A D qspinlock.h 148 #define arch_spin_unlock(l) queued_spin_unlock(l) macro
/linux-master/arch/arm/common/
H A D mcpm_entry.c 232 arch_spin_unlock(&mcpm_lock);
268 arch_spin_unlock(&mcpm_lock);
274 arch_spin_unlock(&mcpm_lock);
335 arch_spin_unlock(&mcpm_lock);
365 arch_spin_unlock(&mcpm_lock);
/linux-master/arch/sparc/include/asm/
H A D spinlock_32.h 48 static inline void arch_spin_unlock(arch_spinlock_t *lock) function
/linux-master/arch/s390/include/asm/
H A D spinlock.h 77 static inline void arch_spin_unlock(arch_spinlock_t *lp) function
/linux-master/arch/powerpc/include/asm/
H A D qspinlock.h 166 #define arch_spin_unlock(l) queued_spin_unlock(l) macro
H A D simple_spinlock.h 128 static inline void arch_spin_unlock(arch_spinlock_t *lock) function
131 __asm__ __volatile__("# arch_spin_unlock\n\t"
/linux-master/kernel/kcsan/
H A D selftest.c 166 KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock));
195 KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock));
227 KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock));
/linux-master/arch/arm/include/asm/
H A D spinlock.h 107 static inline void arch_spin_unlock(arch_spinlock_t *lock) function
/linux-master/arch/x86/mm/
H A D kmmio.c 356 arch_spin_unlock(&kmmio_lock);
467 arch_spin_unlock(&kmmio_lock);
516 arch_spin_unlock(&kmmio_lock);
559 arch_spin_unlock(&kmmio_lock);
/linux-master/arch/s390/lib/
H A D spinlock.c 286 arch_spin_unlock(&rw->wait);
309 arch_spin_unlock(&rw->wait);

Completed in 959 milliseconds

1 2