Searched refs:lock (Results 1 - 25 of 6514) sorted by relevance


/linux-master/include/linux/
spinlock_api_up.h 19 #define assert_raw_spin_locked(lock) do { (void)(lock); } while (0)
24 * flags straight, to suppress compiler warnings of unused lock
27 #define ___LOCK(lock) \
28 do { __acquire(lock); (void)(lock); } while (0)
30 #define __LOCK(lock) \
31 do { preempt_disable(); ___LOCK(lock); } while (0)
33 #define __LOCK_BH(lock) \
34 do { __local_bh_disable_ip(_THIS_IP_, SOFTIRQ_LOCK_OFFSET); ___LOCK(lock); } while (0)
[all...]
spinlock_up.h 29 static inline void arch_spin_lock(arch_spinlock_t *lock) argument
31 lock->slock = 0;
35 static inline int arch_spin_trylock(arch_spinlock_t *lock) argument
37 char oldval = lock->slock;
39 lock->slock = 0;
45 static inline void arch_spin_unlock(arch_spinlock_t *lock) argument
48 lock->slock = 1;
54 #define arch_read_lock(lock) do { barrier(); (void)(lock); } while (0)
55 #define arch_write_lock(lock) do { barrier(); (void)(lock); } while (0)
[all...]
local_lock.h 8 * local_lock_init - Runtime initialize a lock instance
10 #define local_lock_init(lock) __local_lock_init(lock)
13 * local_lock - Acquire a per CPU local lock
14 * @lock: The lock variable
16 #define local_lock(lock) __local_lock(lock)
19 * local_lock_irq - Acquire a per CPU local lock and disable interrupts
20 * @lock: The lock variable
[all...]
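
The local_lock API above guards per-CPU data: on non-PREEMPT_RT kernels local_lock() boils down to disabling preemption (or interrupts for the _irq variants), while on PREEMPT_RT it becomes a real per-CPU lock that lockdep can track. A minimal usage sketch; the struct, field names and counter are hypothetical:

    #include <linux/local_lock.h>
    #include <linux/percpu.h>
    #include <linux/types.h>

    /* Hypothetical per-CPU structure; the local_lock_t guards the other fields. */
    struct net_stats {
        local_lock_t lock;
        u64          packets;
    };

    static DEFINE_PER_CPU(struct net_stats, net_stats) = {
        .lock = INIT_LOCAL_LOCK(lock),
    };

    static void net_stats_inc(void)
    {
        /* Disables preemption on non-RT kernels; takes a per-CPU lock on PREEMPT_RT. */
        local_lock(&net_stats.lock);
        this_cpu_inc(net_stats.packets);
        local_unlock(&net_stats.lock);
    }
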
rwlock_api_smp.h 18 void __lockfunc _raw_read_lock(rwlock_t *lock) __acquires(lock); variable
19 void __lockfunc _raw_write_lock(rwlock_t *lock) __acquires(lock); variable
20 void __lockfunc _raw_write_lock_nested(rwlock_t *lock, int subclass) __acquires(lock); variable
21 void __lockfunc _raw_read_lock_bh(rwlock_t *lock) __acquires(lock); variable
22 void __lockfunc _raw_write_lock_bh(rwlock_t *lock) __acquires(lock); variable
23 void __lockfunc _raw_read_lock_irq(rwlock_t *lock) __acquires(lock); variable
24 void __lockfunc _raw_write_lock_irq(rwlock_t *lock) __acquires(lock); variable
26 __acquires(lock); variable
28 __acquires(lock); variable
31 void __lockfunc _raw_read_unlock(rwlock_t *lock) __releases(lock); variable
32 void __lockfunc _raw_write_unlock(rwlock_t *lock) __releases(lock); variable
33 void __lockfunc _raw_read_unlock_bh(rwlock_t *lock) __releases(lock); variable
34 void __lockfunc _raw_write_unlock_bh(rwlock_t *lock) __releases(lock); variable
35 void __lockfunc _raw_read_unlock_irq(rwlock_t *lock) __releases(lock); variable
36 void __lockfunc _raw_write_unlock_irq(rwlock_t *lock) __releases(lock); variable
39 __releases(lock); variable
42 __releases(lock); variable
118 __raw_read_trylock(rwlock_t *lock) argument
129 __raw_write_trylock(rwlock_t *lock) argument
147 __raw_read_lock(rwlock_t *lock) argument
154 __raw_read_lock_irqsave(rwlock_t *lock) argument
165 __raw_read_lock_irq(rwlock_t *lock) argument
173 __raw_read_lock_bh(rwlock_t *lock) argument
180 __raw_write_lock_irqsave(rwlock_t *lock) argument
191 __raw_write_lock_irq(rwlock_t *lock) argument
199 __raw_write_lock_bh(rwlock_t *lock) argument
206 __raw_write_lock(rwlock_t *lock) argument
213 __raw_write_lock_nested(rwlock_t *lock, int subclass) argument
222 __raw_write_unlock(rwlock_t *lock) argument
229 __raw_read_unlock(rwlock_t *lock) argument
237 __raw_read_unlock_irqrestore(rwlock_t *lock, unsigned long flags) argument
245 __raw_read_unlock_irq(rwlock_t *lock) argument
253 __raw_read_unlock_bh(rwlock_t *lock) argument
260 __raw_write_unlock_irqrestore(rwlock_t *lock, unsigned long flags) argument
269 __raw_write_unlock_irq(rwlock_t *lock) argument
277 __raw_write_unlock_bh(rwlock_t *lock) argument
[all...]
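
These are the out-of-line entry points behind the read_lock()/write_lock() family on SMP builds. A short, hypothetical example of the reader/writer pattern they implement:

    #include <linux/spinlock.h>

    static DEFINE_RWLOCK(table_lock);   /* hypothetical lock and table */
    static int table[16];

    static int table_read(int idx)
    {
        int v;

        read_lock(&table_lock);         /* many readers may hold this at once */
        v = table[idx];
        read_unlock(&table_lock);
        return v;
    }

    static void table_write(int idx, int val)
    {
        write_lock(&table_lock);        /* writers exclude readers and other writers */
        table[idx] = val;
        write_unlock(&table_lock);
    }
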
spinlock_api_smp.h 22 void __lockfunc _raw_spin_lock(raw_spinlock_t *lock) __acquires(lock); variable
23 void __lockfunc _raw_spin_lock_nested(raw_spinlock_t *lock, int subclass)
24 __acquires(lock); variable
26 _raw_spin_lock_nest_lock(raw_spinlock_t *lock, struct lockdep_map *map)
27 __acquires(lock); variable
28 void __lockfunc _raw_spin_lock_bh(raw_spinlock_t *lock) __acquires(lock); variable
29 void __lockfunc _raw_spin_lock_irq(raw_spinlock_t *lock)
30 __acquires(lock); variable
33 __acquires(lock); variable
36 __acquires(lock); variable
39 void __lockfunc _raw_spin_unlock(raw_spinlock_t *lock) __releases(lock); variable
40 void __lockfunc _raw_spin_unlock_bh(raw_spinlock_t *lock) __releases(lock); variable
41 void __lockfunc _raw_spin_unlock_irq(raw_spinlock_t *lock) __releases(lock); variable
44 __releases(lock); variable
86 __raw_spin_trylock(raw_spinlock_t *lock) argument
104 __raw_spin_lock_irqsave(raw_spinlock_t *lock) argument
115 __raw_spin_lock_irq(raw_spinlock_t *lock) argument
123 __raw_spin_lock_bh(raw_spinlock_t *lock) argument
130 __raw_spin_lock(raw_spinlock_t *lock) argument
139 __raw_spin_unlock(raw_spinlock_t *lock) argument
146 __raw_spin_unlock_irqrestore(raw_spinlock_t *lock, unsigned long flags) argument
155 __raw_spin_unlock_irq(raw_spinlock_t *lock) argument
163 __raw_spin_unlock_bh(raw_spinlock_t *lock) argument
170 __raw_spin_trylock_bh(raw_spinlock_t *lock) argument
[all...]
rwlock.h 18 extern void __rwlock_init(rwlock_t *lock, const char *name,
20 # define rwlock_init(lock) \
24 __rwlock_init((lock), #lock, &__key); \
27 # define rwlock_init(lock) \
28 do { *(lock) = __RW_LOCK_UNLOCKED(lock); } while (0)
32 extern void do_raw_read_lock(rwlock_t *lock) __acquires(lock); variable
33 extern int do_raw_read_trylock(rwlock_t *lock);
34 extern void do_raw_read_unlock(rwlock_t *lock) __releases(lock); variable
35 extern void do_raw_write_lock(rwlock_t *lock) __acquires(lock); variable
37 extern void do_raw_write_unlock(rwlock_t *lock) __releases(lock); variable
[all...]
spinlock_rt.h 10 extern void __rt_spin_lock_init(spinlock_t *lock, const char *name,
13 static inline void __rt_spin_lock_init(spinlock_t *lock, const char *name, argument
23 rt_mutex_base_init(&(slock)->lock); \
31 rt_mutex_base_init(&(slock)->lock); \
35 extern void rt_spin_lock(spinlock_t *lock);
36 extern void rt_spin_lock_nested(spinlock_t *lock, int subclass);
37 extern void rt_spin_lock_nest_lock(spinlock_t *lock, struct lockdep_map *nest_lock);
38 extern void rt_spin_unlock(spinlock_t *lock);
39 extern void rt_spin_lock_unlock(spinlock_t *lock);
40 extern int rt_spin_trylock_bh(spinlock_t *lock);
43 spin_lock(spinlock_t *lock) argument
85 spin_lock_bh(spinlock_t *lock) argument
92 spin_lock_irq(spinlock_t *lock) argument
104 spin_unlock(spinlock_t *lock) argument
109 spin_unlock_bh(spinlock_t *lock) argument
115 spin_unlock_irq(spinlock_t *lock) argument
120 spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags) argument
150 spin_is_locked(spinlock_t *lock) argument
[all...]
osq_lock.h 6 * An MCS like lock especially tailored for optimistic spinning for sleeping
7 * lock implementations (mutex, rwsem, etc).
23 static inline void osq_lock_init(struct optimistic_spin_queue *lock) argument
25 atomic_set(&lock->tail, OSQ_UNLOCKED_VAL);
28 extern bool osq_lock(struct optimistic_spin_queue *lock);
29 extern void osq_unlock(struct optimistic_spin_queue *lock);
31 static inline bool osq_is_locked(struct optimistic_spin_queue *lock) argument
33 return atomic_read(&lock->tail) != OSQ_UNLOCKED_VAL;
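
osq_lock() is not a general-purpose lock: it serialises the optimistic-spinning phase of sleeping locks (mutex, rwsem) so only one waiter at a time spins on the owner while the rest queue on per-CPU MCS nodes. A rough sketch of that pattern; struct my_mutex and its owner encoding are hypothetical, not the kernel's mutex implementation:

    #include <linux/osq_lock.h>
    #include <linux/atomic.h>
    #include <linux/preempt.h>
    #include <linux/sched.h>

    /* Hypothetical sleeping lock that embeds an OSQ for its spinning phase. */
    struct my_mutex {
        atomic_t                     owner;  /* 0 == unlocked */
        struct optimistic_spin_queue osq;
    };

    static bool my_mutex_optimistic_spin(struct my_mutex *m)
    {
        bool acquired = false;

        preempt_disable();              /* the OSQ uses per-CPU queue nodes */

        /* Only the OSQ holder spins on the owner field; others queue behind it. */
        if (!osq_lock(&m->osq))
            goto out;

        while (!need_resched()) {
            if (atomic_cmpxchg_acquire(&m->owner, 0, 1) == 0) {
                acquired = true;
                break;
            }
            cpu_relax();
        }

        osq_unlock(&m->osq);
    out:
        preempt_enable();
        return acquired;
    }
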
spinlock.h 72 #define LOCK_SECTION_NAME ".text..lock."KBUILD_BASENAME
101 extern void __raw_spin_lock_init(raw_spinlock_t *lock, const char *name,
104 # define raw_spin_lock_init(lock) \
108 __raw_spin_lock_init((lock), #lock, &__key, LD_WAIT_SPIN); \
112 # define raw_spin_lock_init(lock) \
113 do { *(lock) = __RAW_SPIN_LOCK_UNLOCKED(lock); } while (0)
116 #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
180 extern void do_raw_spin_lock(raw_spinlock_t *lock) __acquires(lock); variable
182 extern void do_raw_spin_unlock(raw_spinlock_t *lock) __releases(lock); variable
191 do_raw_spin_trylock(raw_spinlock_t *lock) argument
324 spinlock_check(spinlock_t *lock) argument
349 spin_lock(spinlock_t *lock) argument
354 spin_lock_bh(spinlock_t *lock) argument
359 spin_trylock(spinlock_t *lock) argument
374 spin_lock_irq(spinlock_t *lock) argument
389 spin_unlock(spinlock_t *lock) argument
394 spin_unlock_bh(spinlock_t *lock) argument
399 spin_unlock_irq(spinlock_t *lock) argument
404 spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags) argument
409 spin_trylock_bh(spinlock_t *lock) argument
414 spin_trylock_irq(spinlock_t *lock) argument
442 spin_is_locked(spinlock_t *lock) argument
447 spin_is_contended(spinlock_t *lock) argument
463 spin_needbreak(spinlock_t *lock) argument
480 rwlock_needbreak(rwlock_t *lock) argument
[all...]
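
This is the main spinlock_t interface; the variants differ only in what else they exclude while the lock is held (_bh: softirqs, _irq/_irqsave: hardirqs). A minimal, hypothetical example of the common case of data shared with interrupt context:

    #include <linux/spinlock.h>

    static DEFINE_SPINLOCK(counter_lock);   /* hypothetical */
    static unsigned long counter;

    /* Safe from any context: saves and restores the caller's IRQ state. */
    static void counter_add(unsigned long n)
    {
        unsigned long flags;

        spin_lock_irqsave(&counter_lock, flags);
        counter += n;
        spin_unlock_irqrestore(&counter_lock, flags);
    }
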
/linux-master/fs/bcachefs/
two_state_shared_lock.c 5 void __bch2_two_state_lock(two_state_lock_t *lock, int s) argument
7 __wait_event(lock->wait, bch2_two_state_trylock(lock, s));
seqmutex.h 8 struct mutex lock; member in struct:seqmutex
12 #define seqmutex_init(_lock) mutex_init(&(_lock)->lock)
14 static inline bool seqmutex_trylock(struct seqmutex *lock) argument
16 return mutex_trylock(&lock->lock);
19 static inline void seqmutex_lock(struct seqmutex *lock) argument
21 mutex_lock(&lock->lock);
24 static inline void seqmutex_unlock(struct seqmutex *lock) argument
26 lock
30 seqmutex_seq(struct seqmutex *lock) argument
35 seqmutex_relock(struct seqmutex *lock, u32 seq) argument
[all...]
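
seqmutex is a small bcachefs helper: a mutex paired with a sequence number, so a caller can drop the lock, do something slow, and then use seqmutex_relock() to retake it only if the sequence still matches (i.e. nobody else took the mutex in between). A rough sketch of the intended pattern; the protected list and restart handling are hypothetical:

    static void walk_list_slowly(struct seqmutex *m)
    {
        u32 seq;
    restart:
        seqmutex_lock(m);
        /* ... examine part of the list protected by 'm' ... */
        seq = seqmutex_seq(m);
        seqmutex_unlock(m);

        /* ... slow work that must not run under the mutex ... */

        if (!seqmutex_relock(m, seq))
            goto restart;       /* someone else took the mutex: start over */

        /* ... continue from the saved position ... */
        seqmutex_unlock(m);
    }
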
two_state_shared_lock.h 12 * Two-state lock - can be taken for add or block - both states are shared,
20 static inline void two_state_lock_init(two_state_lock_t *lock) argument
22 atomic_long_set(&lock->v, 0);
23 init_waitqueue_head(&lock->wait);
26 static inline void bch2_two_state_unlock(two_state_lock_t *lock, int s) argument
30 EBUG_ON(atomic_long_read(&lock->v) == 0);
32 if (atomic_long_sub_return_release(i, &lock->v) == 0)
33 wake_up_all(&lock->wait);
36 static inline bool bch2_two_state_trylock(two_state_lock_t *lock, int s) argument
39 long v = atomic_long_read(&lock->v);
53 bch2_two_state_lock(two_state_lock_t *lock, int s) argument
[all...]
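
The two-state lock behaves like a reader lock with two classes: any number of holders may share one state, but the two states exclude each other (bcachefs uses this for its page-cache "add" vs "block" lock). A hedged usage sketch with hypothetical names:

    #include "two_state_shared_lock.h"  /* bcachefs-local header */

    static two_state_lock_t pagecache_lock;     /* hypothetical instance */

    static void setup(void)
    {
        two_state_lock_init(&pagecache_lock);
    }

    static void add_pages(void)
    {
        bch2_two_state_lock(&pagecache_lock, 0);    /* shared with other state-0 holders */
        /* ... add pages ... */
        bch2_two_state_unlock(&pagecache_lock, 0);
    }

    static void block_additions(void)
    {
        bch2_two_state_lock(&pagecache_lock, 1);    /* waits out all state-0 holders */
        /* ... run with additions blocked ... */
        bch2_two_state_unlock(&pagecache_lock, 1);
    }
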
/linux-master/arch/alpha/include/asm/
spinlock.h 11 * Simple spin lock operations. There are two variants, one clears IRQ's
17 #define arch_spin_is_locked(x) ((x)->lock != 0)
19 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) argument
21 return lock.lock == 0;
24 static inline void arch_spin_unlock(arch_spinlock_t * lock) argument
27 lock->lock = 0;
30 static inline void arch_spin_lock(arch_spinlock_t * lock) argument
46 : "=&r" (tmp), "=m" (lock
50 arch_spin_trylock(arch_spinlock_t *lock) argument
57 arch_read_lock(arch_rwlock_t *lock) argument
77 arch_write_lock(arch_rwlock_t *lock) argument
97 arch_read_trylock(arch_rwlock_t * lock) argument
119 arch_write_trylock(arch_rwlock_t * lock) argument
141 arch_read_unlock(arch_rwlock_t * lock) argument
157 arch_write_unlock(arch_rwlock_t * lock) argument
[all...]
spinlock_types.h 10 volatile unsigned int lock; member in struct:__anon1
16 volatile unsigned int lock; member in struct:__anon2
/linux-master/tools/virtio/linux/
spinlock.h 8 static inline void spin_lock_init(spinlock_t *lock) argument
10 int r = pthread_spin_init(lock, 0);
14 static inline void spin_lock(spinlock_t *lock) argument
16 int ret = pthread_spin_lock(lock);
20 static inline void spin_unlock(spinlock_t *lock) argument
22 int ret = pthread_spin_unlock(lock);
26 static inline void spin_lock_bh(spinlock_t *lock) argument
28 spin_lock(lock);
31 static inline void spin_unlock_bh(spinlock_t *lock) argument
33 spin_unlock(lock);
36 spin_lock_irq(spinlock_t *lock) argument
41 spin_unlock_irq(spinlock_t *lock) argument
46 spin_lock_irqsave(spinlock_t *lock, unsigned long f) argument
51 spin_unlock_irqrestore(spinlock_t *lock, unsigned long f) argument
[all...]
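
This header is a userspace shim for the virtio tools: the kernel spin_lock() API is mapped onto POSIX pthread_spin_* calls so kernel code can be compiled and exercised as an ordinary program. The equivalent plain-pthread code looks like this (standalone sketch with a hypothetical counter; build with -pthread):

    #include <pthread.h>
    #include <stdio.h>

    static pthread_spinlock_t lock;
    static long counter;

    static void *worker(void *arg)
    {
        for (int i = 0; i < 100000; i++) {
            pthread_spin_lock(&lock);   /* busy-waits, like a kernel spinlock */
            counter++;
            pthread_spin_unlock(&lock);
        }
        return NULL;
    }

    int main(void)
    {
        pthread_t a, b;

        pthread_spin_init(&lock, PTHREAD_PROCESS_PRIVATE);
        pthread_create(&a, NULL, worker, NULL);
        pthread_create(&b, NULL, worker, NULL);
        pthread_join(a, NULL);
        pthread_join(b, NULL);
        pthread_spin_destroy(&lock);
        printf("counter = %ld\n", counter);
        return 0;
    }
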
/linux-master/arch/hexagon/include/asm/
spinlock.h 22 * - load the lock value
24 * - if the lock value is still negative, go back and try again.
26 * - successful store new lock value if positive -> lock acquired
28 static inline void arch_read_lock(arch_rwlock_t *lock) argument
37 : "r" (&lock->lock)
43 static inline void arch_read_unlock(arch_rwlock_t *lock) argument
51 : "r" (&lock->lock)
58 arch_read_trylock(arch_rwlock_t *lock) argument
76 arch_write_lock(arch_rwlock_t *lock) argument
91 arch_write_trylock(arch_rwlock_t *lock) argument
109 arch_write_unlock(arch_rwlock_t *lock) argument
115 arch_spin_lock(arch_spinlock_t *lock) argument
130 arch_spin_unlock(arch_spinlock_t *lock) argument
136 arch_spin_trylock(arch_spinlock_t *lock) argument
[all...]
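
The comment block above describes the reader path: load the lock word, retry while it is negative (write-locked), otherwise store it back incremented. A portable C11-atomics sketch of that same scheme (not the Hexagon assembly; type and function names are hypothetical):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <limits.h>

    typedef struct { atomic_int v; } my_rwlock_t;   /* >0: readers, <0: writer */

    static void my_read_lock(my_rwlock_t *rw)
    {
        int old;

        for (;;) {
            old = atomic_load_explicit(&rw->v, memory_order_relaxed);
            if (old < 0)
                continue;               /* writer holds it: try again */
            /* bump the reader count only if the word did not change under us */
            if (atomic_compare_exchange_weak_explicit(&rw->v, &old, old + 1,
                                                      memory_order_acquire,
                                                      memory_order_relaxed))
                return;
        }
    }

    static void my_read_unlock(my_rwlock_t *rw)
    {
        atomic_fetch_sub_explicit(&rw->v, 1, memory_order_release);
    }

    static bool my_write_trylock(my_rwlock_t *rw)
    {
        int expected = 0;

        /* A writer only gets in when there are no readers and no writer. */
        return atomic_compare_exchange_strong_explicit(&rw->v, &expected, INT_MIN,
                                                       memory_order_acquire,
                                                       memory_order_relaxed);
    }

    static void my_write_unlock(my_rwlock_t *rw)
    {
        atomic_store_explicit(&rw->v, 0, memory_order_release);
    }
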
spinlock_types.h 16 volatile unsigned int lock; member in struct:__anon9
22 volatile unsigned int lock; member in struct:__anon10
/linux-master/drivers/acpi/acpica/
utlock.c 4 * Module Name: utlock - Reader/Writer lock interfaces
21 * PARAMETERS: lock - Pointer to a valid RW lock
25 * DESCRIPTION: Reader/writer lock creation and deletion interfaces.
28 acpi_status acpi_ut_create_rw_lock(struct acpi_rw_lock *lock) argument
32 lock->num_readers = 0;
33 status = acpi_os_create_mutex(&lock->reader_mutex);
38 status = acpi_os_create_mutex(&lock->writer_mutex);
42 void acpi_ut_delete_rw_lock(struct acpi_rw_lock *lock) argument
45 acpi_os_delete_mutex(lock
71 acpi_ut_acquire_read_lock(struct acpi_rw_lock *lock) argument
93 acpi_ut_release_read_lock(struct acpi_rw_lock *lock) argument
129 acpi_ut_acquire_write_lock(struct acpi_rw_lock *lock) argument
137 acpi_ut_release_write_lock(struct acpi_rw_lock *lock) argument
[all...]
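
ACPICA builds a reader/writer lock out of two mutexes: readers update num_readers under reader_mutex, the first/last reader acquires/releases writer_mutex on behalf of the whole group, and writers simply take writer_mutex. A portable sketch of the same scheme using POSIX primitives (names hypothetical); a binary semaphore stands in for writer_mutex because the reader that releases it may not be the one that acquired it:

    #include <pthread.h>
    #include <semaphore.h>

    struct rw_from_mutexes {
        pthread_mutex_t reader_mutex;   /* protects num_readers */
        sem_t           writer_sem;     /* free == no writer and no readers */
        int             num_readers;
    };

    static void rw_init(struct rw_from_mutexes *l)
    {
        pthread_mutex_init(&l->reader_mutex, NULL);
        sem_init(&l->writer_sem, 0, 1);
        l->num_readers = 0;
    }

    static void rw_acquire_read(struct rw_from_mutexes *l)
    {
        pthread_mutex_lock(&l->reader_mutex);
        if (++l->num_readers == 1)
            sem_wait(&l->writer_sem);   /* first reader locks writers out */
        pthread_mutex_unlock(&l->reader_mutex);
    }

    static void rw_release_read(struct rw_from_mutexes *l)
    {
        pthread_mutex_lock(&l->reader_mutex);
        if (--l->num_readers == 0)
            sem_post(&l->writer_sem);   /* last reader lets writers back in */
        pthread_mutex_unlock(&l->reader_mutex);
    }

    static void rw_acquire_write(struct rw_from_mutexes *l)
    {
        sem_wait(&l->writer_sem);       /* excludes readers and other writers */
    }

    static void rw_release_write(struct rw_from_mutexes *l)
    {
        sem_post(&l->writer_sem);
    }
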
/linux-master/kernel/locking/
spinlock_debug.c 17 void __raw_spin_lock_init(raw_spinlock_t *lock, const char *name, argument
22 * Make sure we are not reinitializing a held lock:
24 debug_check_no_locks_freed((void *)lock, sizeof(*lock));
25 lockdep_init_map_wait(&lock->dep_map, name, key, 0, inner);
27 lock->raw_lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED;
28 lock->magic = SPINLOCK_MAGIC;
29 lock->owner = SPINLOCK_OWNER_INIT;
30 lock->owner_cpu = -1;
36 void __rwlock_init(rwlock_t *lock, const char *name, argument
55 spin_dump(raw_spinlock_t *lock, const char *msg) argument
73 spin_bug(raw_spinlock_t *lock, const char *msg) argument
84 debug_spin_lock_before(raw_spinlock_t *lock) argument
92 debug_spin_lock_after(raw_spinlock_t *lock) argument
98 debug_spin_unlock(raw_spinlock_t *lock) argument
113 do_raw_spin_lock(raw_spinlock_t *lock) argument
121 do_raw_spin_trylock(raw_spinlock_t *lock) argument
138 do_raw_spin_unlock(raw_spinlock_t *lock) argument
146 rwlock_bug(rwlock_t *lock, const char *msg) argument
159 do_raw_read_lock(rwlock_t *lock) argument
165 do_raw_read_trylock(rwlock_t *lock) argument
178 do_raw_read_unlock(rwlock_t *lock) argument
184 debug_write_lock_before(rwlock_t *lock) argument
192 debug_write_lock_after(rwlock_t *lock) argument
198 debug_write_unlock(rwlock_t *lock) argument
208 do_raw_write_lock(rwlock_t *lock) argument
215 do_raw_write_trylock(rwlock_t *lock) argument
230 do_raw_write_unlock(rwlock_t *lock) argument
[all...]
qrwlock.c 15 #include <trace/events/lock.h>
18 * queued_read_lock_slowpath - acquire read lock of a queued rwlock
19 * @lock: Pointer to queued rwlock structure
21 void __lockfunc queued_read_lock_slowpath(struct qrwlock *lock) argument
24 * Readers come here when they cannot get the lock without waiting
28 * Readers in interrupt context will get the lock immediately
29 * if the writer is just waiting (not holding the lock yet),
30 * so spin with ACQUIRE semantics until the lock is available
33 atomic_cond_read_acquire(&lock->cnts, !(VAL & _QW_LOCKED));
36 atomic_sub(_QR_BIAS, &lock->cnts);
66 queued_write_lock_slowpath(struct qrwlock *lock) argument
[all...]
spinlock.c 35 * not re-enabled during lock-acquire (which the preempt-spin-ops do):
46 * Some architectures can relax in favour of the CPU owning the lock.
63 * This could be a long-held lock. We both prepare to spin for a long
65 * towards that other CPU that it should break the lock ASAP.
68 void __lockfunc __raw_##op##_lock(locktype##_t *lock) \
72 if (likely(do_raw_##op##_trylock(lock))) \
76 arch_##op##_relax(&lock->raw_lock); \
80 unsigned long __lockfunc __raw_##op##_lock_irqsave(locktype##_t *lock) \
87 if (likely(do_raw_##op##_trylock(lock))) \
92 arch_##op##_relax(&lock->raw_lock); \
136 _raw_spin_trylock(raw_spinlock_t *lock) argument
144 _raw_spin_trylock_bh(raw_spinlock_t *lock) argument
152 _raw_spin_lock(raw_spinlock_t *lock) argument
160 _raw_spin_lock_irqsave(raw_spinlock_t *lock) argument
168 _raw_spin_lock_irq(raw_spinlock_t *lock) argument
176 _raw_spin_lock_bh(raw_spinlock_t *lock) argument
184 _raw_spin_unlock(raw_spinlock_t *lock) argument
192 _raw_spin_unlock_irqrestore(raw_spinlock_t *lock, unsigned long flags) argument
200 _raw_spin_unlock_irq(raw_spinlock_t *lock) argument
208 _raw_spin_unlock_bh(raw_spinlock_t *lock) argument
218 _raw_read_trylock(rwlock_t *lock) argument
226 _raw_read_lock(rwlock_t *lock) argument
234 _raw_read_lock_irqsave(rwlock_t *lock) argument
242 _raw_read_lock_irq(rwlock_t *lock) argument
250 _raw_read_lock_bh(rwlock_t *lock) argument
258 _raw_read_unlock(rwlock_t *lock) argument
266 _raw_read_unlock_irqrestore(rwlock_t *lock, unsigned long flags) argument
274 _raw_read_unlock_irq(rwlock_t *lock) argument
282 _raw_read_unlock_bh(rwlock_t *lock) argument
290 _raw_write_trylock(rwlock_t *lock) argument
298 _raw_write_lock(rwlock_t *lock) argument
308 _raw_write_lock_nested(rwlock_t *lock, int subclass) argument
316 _raw_write_lock_irqsave(rwlock_t *lock) argument
324 _raw_write_lock_irq(rwlock_t *lock) argument
332 _raw_write_lock_bh(rwlock_t *lock) argument
340 _raw_write_unlock(rwlock_t *lock) argument
348 _raw_write_unlock_irqrestore(rwlock_t *lock, unsigned long flags) argument
356 _raw_write_unlock_irq(rwlock_t *lock) argument
364 _raw_write_unlock_bh(rwlock_t *lock) argument
375 _raw_spin_lock_nested(raw_spinlock_t *lock, int subclass) argument
383 _raw_spin_lock_irqsave_nested(raw_spinlock_t *lock, int subclass) argument
396 _raw_spin_lock_nest_lock(raw_spinlock_t *lock, struct lockdep_map *nest_lock) argument
[all...]
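
The BUILD_LOCK_OPS() macro above generates the preemptible-spin variants of these functions for spin, read and write locks. Roughly, the spin case expands to the following shape (a simplified sketch of the expansion, not the exact preprocessed output):

    void __lockfunc __raw_spin_lock(raw_spinlock_t *lock)
    {
        for (;;) {
            preempt_disable();
            if (likely(do_raw_spin_trylock(lock)))
                break;              /* acquired: return with preemption disabled */
            preempt_enable();       /* let this CPU be preempted between attempts */

            arch_spin_relax(&lock->raw_lock);   /* e.g. yield to the current owner */
        }
    }
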
/linux-master/arch/arm/include/asm/
mcs_spinlock.h 9 #define arch_mcs_spin_lock_contended(lock) \
13 while (!(smp_load_acquire(lock))) \
17 #define arch_mcs_spin_unlock_contended(lock) \
19 smp_store_release(lock, 1); \
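
These ARM helpers cover only the contended hand-off of an MCS lock: each waiter spins on its own node's flag, and the previous holder passes the lock on with a store-release. A simplified sketch of the generic MCS queue that uses them, loosely following kernel/locking/mcs_spinlock.h (fields and barriers reduced):

    #include <linux/atomic.h>
    #include <linux/compiler.h>

    struct mcs_spinlock {
        struct mcs_spinlock *next;
        int locked;                     /* becomes 1 when the lock is handed to us */
    };

    static inline void mcs_spin_lock(struct mcs_spinlock **lock,
                                     struct mcs_spinlock *node)
    {
        struct mcs_spinlock *prev;

        node->locked = 0;
        node->next = NULL;

        prev = xchg(lock, node);        /* queue ourselves at the tail */
        if (!prev)
            return;                     /* queue was empty: lock acquired */

        WRITE_ONCE(prev->next, node);
        arch_mcs_spin_lock_contended(&node->locked);    /* spin on our own node */
    }

    static inline void mcs_spin_unlock(struct mcs_spinlock **lock,
                                       struct mcs_spinlock *node)
    {
        struct mcs_spinlock *next = READ_ONCE(node->next);

        if (!next) {
            /* No successor visible: try to reset the tail to empty. */
            if (cmpxchg_release(lock, node, NULL) == node)
                return;
            while (!(next = READ_ONCE(node->next)))
                cpu_relax();            /* successor is still enqueueing */
        }
        arch_mcs_spin_unlock_contended(&next->locked);  /* hand the lock over */
    }
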
/linux-master/tools/testing/selftests/kvm/lib/aarch64/
spinlock.c 9 void spin_lock(struct spinlock *lock) argument
20 : "r" (&lock->v)
24 void spin_unlock(struct spinlock *lock) argument
26 asm volatile("stlr wzr, [%0]\n" : : "r" (&lock->v) : "memory");
/linux-master/tools/testing/selftests/kvm/include/aarch64/
spinlock.h 10 extern void spin_lock(struct spinlock *lock);
11 extern void spin_unlock(struct spinlock *lock);
/linux-master/include/asm-generic/bitops/
ext2-atomic.h 9 #define ext2_set_bit_atomic(lock, nr, addr) \
12 spin_lock(lock); \
14 spin_unlock(lock); \
18 #define ext2_clear_bit_atomic(lock, nr, addr) \
21 spin_lock(lock); \
23 spin_unlock(lock); \
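
These asm-generic fallbacks make the non-atomic __test_and_set_bit()/__test_and_clear_bit() safe by bracketing them with a caller-supplied spinlock, for architectures without a cheaper atomic bitop. A hypothetical usage sketch:

    #include <linux/spinlock.h>
    #include <linux/bitops.h>

    static DEFINE_SPINLOCK(bitmap_lock);                    /* hypothetical */
    static unsigned long inode_bitmap[BITS_TO_LONGS(1024)];

    /* Returns the previous bit value; the spinlock makes the read-modify-write atomic. */
    static int mark_inode_used(int ino)
    {
        return ext2_set_bit_atomic(&bitmap_lock, ino, inode_bitmap);
    }

    static int mark_inode_free(int ino)
    {
        return ext2_clear_bit_atomic(&bitmap_lock, ino, inode_bitmap);
    }
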
