Searched refs:lock (Results 1 - 25 of 6105) sorted by relevance


/linux-master/include/linux/
spinlock_api_up.h
19 #define assert_raw_spin_locked(lock) do { (void)(lock); } while (0)
24 * flags straight, to suppress compiler warnings of unused lock
27 #define ___LOCK(lock) \
28 do { __acquire(lock); (void)(lock); } while (0)
30 #define __LOCK(lock) \
31 do { preempt_disable(); ___LOCK(lock); } while (0)
33 #define __LOCK_BH(lock) \
34 do { __local_bh_disable_ip(_THIS_IP_, SOFTIRQ_LOCK_OFFSET); ___LOCK(lock); } while (0)
[all...]
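
On a uniprocessor (SMP=n) build, the macros above make spin_lock()/spin_unlock() collapse into preemption control plus sparse annotations; the (void)(lock) cast only keeps the lock expression "used", as the comment excerpt above says. A minimal sketch of the effective expansion, assuming kernel context; the *_sketch names are hypothetical and not part of the header.

static inline void up_spin_lock_sketch(raw_spinlock_t *lock)
{
	preempt_disable();	/* __LOCK(): no atomic access to the lock word */
	__acquire(lock);	/* ___LOCK(): annotation for sparse only */
	(void)(lock);		/* keep "lock" used, suppressing compiler warnings */
}

static inline void up_spin_unlock_sketch(raw_spinlock_t *lock)
{
	preempt_enable();	/* __UNLOCK() */
	__release(lock);	/* ___UNLOCK(): annotation for sparse only */
	(void)(lock);
}
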
spinlock_up.h
29 static inline void arch_spin_lock(arch_spinlock_t *lock) argument
31 lock->slock = 0;
35 static inline int arch_spin_trylock(arch_spinlock_t *lock) argument
37 char oldval = lock->slock;
39 lock->slock = 0;
45 static inline void arch_spin_unlock(arch_spinlock_t *lock) argument
48 lock->slock = 1;
54 #define arch_read_lock(lock) do { barrier(); (void)(lock); } while (0)
55 #define arch_write_lock(lock) do { barrier(); (void)(lock); } while (0)
[all...]
local_lock.h
8 * local_lock_init - Runtime initialize a lock instance
10 #define local_lock_init(lock) __local_lock_init(lock)
13 * local_lock - Acquire a per CPU local lock
14 * @lock: The lock variable
16 #define local_lock(lock) __local_lock(lock)
19 * local_lock_irq - Acquire a per CPU local lock and disable interrupts
20 @lock: The lock variable
[all...]
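
The local_lock API documented above protects per-CPU data against preemption (and, with the _irq variants, interrupts) on the local CPU. A minimal usage sketch, assuming kernel context; struct cpu_stats, the stats variable and count_hit() are illustrative names, not from the header.

#include <linux/local_lock.h>
#include <linux/percpu.h>

struct cpu_stats {
	local_lock_t	lock;
	unsigned long	hits;
};

static DEFINE_PER_CPU(struct cpu_stats, stats) = {
	.lock = INIT_LOCAL_LOCK(lock),
};

static void count_hit(void)
{
	/* serializes only against other contexts on this CPU */
	local_lock(&stats.lock);
	this_cpu_inc(stats.hits);
	local_unlock(&stats.lock);
}
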
rwlock_api_smp.h
18 void __lockfunc _raw_read_lock(rwlock_t *lock) __acquires(lock); variable
19 void __lockfunc _raw_write_lock(rwlock_t *lock) __acquires(lock); variable
20 void __lockfunc _raw_write_lock_nested(rwlock_t *lock, int subclass) __acquires(lock); variable
21 void __lockfunc _raw_read_lock_bh(rwlock_t *lock) __acquires(lock); variable
22 void __lockfunc _raw_write_lock_bh(rwlock_t *lock) __acquires(lock); variable
23 void __lockfunc _raw_read_lock_irq(rwlock_t *lock) __acquires(lock); variable
24 void __lockfunc _raw_write_lock_irq(rwlock_t *lock) __acquires(lock); variable
26 __acquires(lock); variable
28 __acquires(lock); variable
31 void __lockfunc _raw_read_unlock(rwlock_t *lock) __releases(lock); variable
32 void __lockfunc _raw_write_unlock(rwlock_t *lock) __releases(lock); variable
33 void __lockfunc _raw_read_unlock_bh(rwlock_t *lock) __releases(lock); variable
34 void __lockfunc _raw_write_unlock_bh(rwlock_t *lock) __releases(lock); variable
35 void __lockfunc _raw_read_unlock_irq(rwlock_t *lock) __releases(lock); variable
36 void __lockfunc _raw_write_unlock_irq(rwlock_t *lock) __releases(lock); variable
39 __releases(lock); variable
42 __releases(lock); variable
118 __raw_read_trylock(rwlock_t *lock) argument
129 __raw_write_trylock(rwlock_t *lock) argument
147 __raw_read_lock(rwlock_t *lock) argument
154 __raw_read_lock_irqsave(rwlock_t *lock) argument
165 __raw_read_lock_irq(rwlock_t *lock) argument
173 __raw_read_lock_bh(rwlock_t *lock) argument
180 __raw_write_lock_irqsave(rwlock_t *lock) argument
191 __raw_write_lock_irq(rwlock_t *lock) argument
199 __raw_write_lock_bh(rwlock_t *lock) argument
206 __raw_write_lock(rwlock_t *lock) argument
213 __raw_write_lock_nested(rwlock_t *lock, int subclass) argument
222 __raw_write_unlock(rwlock_t *lock) argument
229 __raw_read_unlock(rwlock_t *lock) argument
237 __raw_read_unlock_irqrestore(rwlock_t *lock, unsigned long flags) argument
245 __raw_read_unlock_irq(rwlock_t *lock) argument
253 __raw_read_unlock_bh(rwlock_t *lock) argument
260 __raw_write_unlock_irqrestore(rwlock_t *lock, unsigned long flags) argument
269 __raw_write_unlock_irq(rwlock_t *lock) argument
277 __raw_write_unlock_bh(rwlock_t *lock) argument
[all...]
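
The _raw_read_*/_raw_write_* entry points above back the read_lock()/write_lock() family: many concurrent readers, one exclusive writer. A minimal usage sketch, assuming kernel context; cfg_lock, cfg_value and the two helpers are illustrative.

#include <linux/spinlock.h>

static DEFINE_RWLOCK(cfg_lock);
static int cfg_value;

static int cfg_read(void)
{
	int v;

	read_lock(&cfg_lock);		/* shared: readers may run concurrently */
	v = cfg_value;
	read_unlock(&cfg_lock);
	return v;
}

static void cfg_write(int v)
{
	write_lock_irq(&cfg_lock);	/* exclusive, local interrupts disabled */
	cfg_value = v;
	write_unlock_irq(&cfg_lock);
}
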
spinlock.h
70 #define LOCK_SECTION_NAME ".text..lock."KBUILD_BASENAME
99 extern void __raw_spin_lock_init(raw_spinlock_t *lock, const char *name,
102 # define raw_spin_lock_init(lock) \
106 __raw_spin_lock_init((lock), #lock, &__key, LD_WAIT_SPIN); \
110 # define raw_spin_lock_init(lock) \
111 do { *(lock) = __RAW_SPIN_LOCK_UNLOCKED(lock); } while (0)
114 #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
178 extern void do_raw_spin_lock(raw_spinlock_t *lock) __acquires(lock); variable
180 extern void do_raw_spin_unlock(raw_spinlock_t *lock) __releases(lock); variable
189 do_raw_spin_trylock(raw_spinlock_t *lock) argument
322 spinlock_check(spinlock_t *lock) argument
347 spin_lock(spinlock_t *lock) argument
352 spin_lock_bh(spinlock_t *lock) argument
357 spin_trylock(spinlock_t *lock) argument
372 spin_lock_irq(spinlock_t *lock) argument
387 spin_unlock(spinlock_t *lock) argument
392 spin_unlock_bh(spinlock_t *lock) argument
397 spin_unlock_irq(spinlock_t *lock) argument
402 spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags) argument
407 spin_trylock_bh(spinlock_t *lock) argument
412 spin_trylock_irq(spinlock_t *lock) argument
440 spin_is_locked(spinlock_t *lock) argument
445 spin_is_contended(spinlock_t *lock) argument
[all...]
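
The spin_lock()/spin_unlock() wrappers listed above form the generic kernel spinlock API. A minimal usage sketch of the _irqsave form, which is the safe choice when the same lock can also be taken from an interrupt handler; kernel context assumed, dev_lock/dev_count are illustrative names.

#include <linux/spinlock.h>

static DEFINE_SPINLOCK(dev_lock);
static unsigned int dev_count;

static void bump_dev_count(void)
{
	unsigned long flags;

	/* saves and disables local IRQs so an interrupt handler cannot deadlock on dev_lock */
	spin_lock_irqsave(&dev_lock, flags);
	dev_count++;
	spin_unlock_irqrestore(&dev_lock, flags);
}
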
spinlock_api_smp.h
22 void __lockfunc _raw_spin_lock(raw_spinlock_t *lock) __acquires(lock); variable
23 void __lockfunc _raw_spin_lock_nested(raw_spinlock_t *lock, int subclass)
24 __acquires(lock); variable
26 _raw_spin_lock_nest_lock(raw_spinlock_t *lock, struct lockdep_map *map)
27 __acquires(lock); variable
28 void __lockfunc _raw_spin_lock_bh(raw_spinlock_t *lock) __acquires(lock); variable
29 void __lockfunc _raw_spin_lock_irq(raw_spinlock_t *lock)
30 __acquires(lock); variable
33 __acquires(lock); variable
36 __acquires(lock); variable
39 void __lockfunc _raw_spin_unlock(raw_spinlock_t *lock) __releases(lock); variable
40 void __lockfunc _raw_spin_unlock_bh(raw_spinlock_t *lock) __releases(lock); variable
41 void __lockfunc _raw_spin_unlock_irq(raw_spinlock_t *lock) __releases(lock); variable
44 __releases(lock); variable
86 __raw_spin_trylock(raw_spinlock_t *lock) argument
104 __raw_spin_lock_irqsave(raw_spinlock_t *lock) argument
115 __raw_spin_lock_irq(raw_spinlock_t *lock) argument
123 __raw_spin_lock_bh(raw_spinlock_t *lock) argument
130 __raw_spin_lock(raw_spinlock_t *lock) argument
139 __raw_spin_unlock(raw_spinlock_t *lock) argument
146 __raw_spin_unlock_irqrestore(raw_spinlock_t *lock, unsigned long flags) argument
155 __raw_spin_unlock_irq(raw_spinlock_t *lock) argument
163 __raw_spin_unlock_bh(raw_spinlock_t *lock) argument
170 __raw_spin_trylock_bh(raw_spinlock_t *lock) argument
[all...]
rwlock.h
18 extern void __rwlock_init(rwlock_t *lock, const char *name,
20 # define rwlock_init(lock) \
24 __rwlock_init((lock), #lock, &__key); \
27 # define rwlock_init(lock) \
28 do { *(lock) = __RW_LOCK_UNLOCKED(lock); } while (0)
32 extern void do_raw_read_lock(rwlock_t *lock) __acquires(lock); variable
33 extern int do_raw_read_trylock(rwlock_t *lock);
34 extern void do_raw_read_unlock(rwlock_t *lock) __releases(lock); variable
35 extern void do_raw_write_lock(rwlock_t *lock) __acquires(lock); variable
37 extern void do_raw_write_unlock(rwlock_t *lock) __releases(lock); variable
[all...]
spinlock_rt.h
10 extern void __rt_spin_lock_init(spinlock_t *lock, const char *name,
13 static inline void __rt_spin_lock_init(spinlock_t *lock, const char *name, argument
23 rt_mutex_base_init(&(slock)->lock); \
31 rt_mutex_base_init(&(slock)->lock); \
35 extern void rt_spin_lock(spinlock_t *lock);
36 extern void rt_spin_lock_nested(spinlock_t *lock, int subclass);
37 extern void rt_spin_lock_nest_lock(spinlock_t *lock, struct lockdep_map *nest_lock);
38 extern void rt_spin_unlock(spinlock_t *lock);
39 extern void rt_spin_lock_unlock(spinlock_t *lock);
40 extern int rt_spin_trylock_bh(spinlock_t *lock);
43 spin_lock(spinlock_t *lock) argument
85 spin_lock_bh(spinlock_t *lock) argument
92 spin_lock_irq(spinlock_t *lock) argument
104 spin_unlock(spinlock_t *lock) argument
109 spin_unlock_bh(spinlock_t *lock) argument
115 spin_unlock_irq(spinlock_t *lock) argument
120 spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags) argument
150 spin_is_locked(spinlock_t *lock) argument
[all...]
/linux-master/arch/alpha/include/asm/
spinlock.h
11 * Simple spin lock operations. There are two variants, one clears IRQ's
17 #define arch_spin_is_locked(x) ((x)->lock != 0)
19 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) argument
21 return lock.lock == 0;
24 static inline void arch_spin_unlock(arch_spinlock_t * lock) argument
27 lock->lock = 0;
30 static inline void arch_spin_lock(arch_spinlock_t * lock) argument
46 : "=&r" (tmp), "=m" (lock
50 arch_spin_trylock(arch_spinlock_t *lock) argument
57 arch_read_lock(arch_rwlock_t *lock) argument
77 arch_write_lock(arch_rwlock_t *lock) argument
97 arch_read_trylock(arch_rwlock_t * lock) argument
119 arch_write_trylock(arch_rwlock_t * lock) argument
141 arch_read_unlock(arch_rwlock_t * lock) argument
157 arch_write_unlock(arch_rwlock_t * lock) argument
[all...]
spinlock_types.h
10 volatile unsigned int lock; member in struct:__anon1
16 volatile unsigned int lock; member in struct:__anon2
/linux-master/tools/virtio/linux/
spinlock.h
8 static inline void spin_lock_init(spinlock_t *lock) argument
10 int r = pthread_spin_init(lock, 0);
14 static inline void spin_lock(spinlock_t *lock) argument
16 int ret = pthread_spin_lock(lock);
20 static inline void spin_unlock(spinlock_t *lock) argument
22 int ret = pthread_spin_unlock(lock);
26 static inline void spin_lock_bh(spinlock_t *lock) argument
28 spin_lock(lock);
31 static inline void spin_unlock_bh(spinlock_t *lock) argument
33 spin_unlock(lock);
36 spin_lock_irq(spinlock_t *lock) argument
41 spin_unlock_irq(spinlock_t *lock) argument
46 spin_lock_irqsave(spinlock_t *lock, unsigned long f) argument
51 spin_unlock_irqrestore(spinlock_t *lock, unsigned long f) argument
[all...]
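
This shim lets virtio test code run in userspace by mapping spin_lock() onto pthread spinlocks. The same pattern written directly against pthreads, as a standalone, compilable example (build with -pthread; error checking trimmed for brevity):

#include <pthread.h>
#include <stdio.h>

static pthread_spinlock_t lock;
static long counter;

static void *worker(void *arg)
{
	for (int i = 0; i < 100000; i++) {
		pthread_spin_lock(&lock);
		counter++;			/* protected increment */
		pthread_spin_unlock(&lock);
	}
	return NULL;
}

int main(void)
{
	pthread_t t[4];

	pthread_spin_init(&lock, PTHREAD_PROCESS_PRIVATE);
	for (int i = 0; i < 4; i++)
		pthread_create(&t[i], NULL, worker, NULL);
	for (int i = 0; i < 4; i++)
		pthread_join(t[i], NULL);
	printf("counter = %ld\n", counter);	/* expect 400000 */
	return 0;
}
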
/linux-master/arch/riscv/include/asm/
spinlock.h
15 * Simple spin lock operations. These provide no fairness guarantees.
18 /* FIXME: Replace this with a ticket lock, like MIPS. */
20 #define arch_spin_is_locked(x) (READ_ONCE((x)->lock) != 0)
22 static inline void arch_spin_unlock(arch_spinlock_t *lock) argument
24 smp_store_release(&lock->lock, 0);
27 static inline int arch_spin_trylock(arch_spinlock_t *lock) argument
34 : "=r" (busy), "+A" (lock->lock)
41 static inline void arch_spin_lock(arch_spinlock_t *lock) argument
54 arch_read_lock(arch_rwlock_t *lock) argument
69 arch_write_lock(arch_rwlock_t *lock) argument
84 arch_read_trylock(arch_rwlock_t *lock) argument
102 arch_write_trylock(arch_rwlock_t *lock) argument
120 arch_read_unlock(arch_rwlock_t *lock) argument
130 arch_write_unlock(arch_rwlock_t *lock) argument
[all...]
spinlock_types.h
14 volatile unsigned int lock; member in struct:__anon19
20 volatile unsigned int lock; member in struct:__anon20
/linux-master/arch/hexagon/include/asm/
spinlock.h
22 * - load the lock value
24 * - if the lock value is still negative, go back and try again.
26 * - successful store new lock value if positive -> lock acquired
28 static inline void arch_read_lock(arch_rwlock_t *lock) argument
37 : "r" (&lock->lock)
43 static inline void arch_read_unlock(arch_rwlock_t *lock) argument
51 : "r" (&lock->lock)
58 arch_read_trylock(arch_rwlock_t *lock) argument
76 arch_write_lock(arch_rwlock_t *lock) argument
91 arch_write_trylock(arch_rwlock_t *lock) argument
109 arch_write_unlock(arch_rwlock_t *lock) argument
115 arch_spin_lock(arch_spinlock_t *lock) argument
130 arch_spin_unlock(arch_spinlock_t *lock) argument
136 arch_spin_trylock(arch_spinlock_t *lock) argument
[all...]
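
The comments in this header describe the reader path: load the lock word, retry while it is negative (a writer holds it), otherwise store the incremented value. A portable C11 sketch of that retry loop, purely to illustrate the algorithm; the real implementation is the Hexagon LL/SC assembly and the sketch_* names are made up.

#include <stdatomic.h>

struct sketch_rwlock {
	_Atomic int lock;	/* < 0: writer holds it, >= 0: number of readers */
};

static void sketch_read_lock(struct sketch_rwlock *rw)
{
	int old;

	for (;;) {
		old = atomic_load_explicit(&rw->lock, memory_order_relaxed);
		if (old < 0)
			continue;	/* writer active: go back and try again */
		/* publish old + 1 only if nobody changed the word under us */
		if (atomic_compare_exchange_weak_explicit(&rw->lock, &old, old + 1,
							  memory_order_acquire,
							  memory_order_relaxed))
			break;		/* lock acquired */
	}
}

static void sketch_read_unlock(struct sketch_rwlock *rw)
{
	atomic_fetch_sub_explicit(&rw->lock, 1, memory_order_release);
}
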
spinlock_types.h
16 volatile unsigned int lock; member in struct:__anon9
22 volatile unsigned int lock; member in struct:__anon10
/linux-master/drivers/acpi/acpica/
utlock.c
4 * Module Name: utlock - Reader/Writer lock interfaces
21 * PARAMETERS: lock - Pointer to a valid RW lock
25 * DESCRIPTION: Reader/writer lock creation and deletion interfaces.
28 acpi_status acpi_ut_create_rw_lock(struct acpi_rw_lock *lock) argument
32 lock->num_readers = 0;
33 status = acpi_os_create_mutex(&lock->reader_mutex);
38 status = acpi_os_create_mutex(&lock->writer_mutex);
42 void acpi_ut_delete_rw_lock(struct acpi_rw_lock *lock) argument
45 acpi_os_delete_mutex(lock
71 acpi_ut_acquire_read_lock(struct acpi_rw_lock *lock) argument
93 acpi_ut_release_read_lock(struct acpi_rw_lock *lock) argument
129 acpi_ut_acquire_write_lock(struct acpi_rw_lock *lock) argument
137 acpi_ut_release_write_lock(struct acpi_rw_lock *lock) argument
[all...]
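
ACPICA builds its reader/writer lock out of two OS mutexes plus a reader count, as the snippet shows. A hedged call-sequence sketch using the functions above, assuming the ACPICA build environment; the surrounding function and variable are illustrative and error handling is abbreviated.

#include "acpi.h"
#include "accommon.h"

static struct acpi_rw_lock table_lock;

static acpi_status read_side_example(void)
{
	acpi_status status;

	status = acpi_ut_create_rw_lock(&table_lock);
	if (ACPI_FAILURE(status))
		return status;

	status = acpi_ut_acquire_read_lock(&table_lock);
	if (ACPI_SUCCESS(status)) {
		/* ... read-only work; other readers may hold the lock concurrently ... */
		acpi_ut_release_read_lock(&table_lock);
	}

	acpi_ut_delete_rw_lock(&table_lock);
	return status;
}
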
/linux-master/kernel/locking/
spinlock_debug.c
16 void __raw_spin_lock_init(raw_spinlock_t *lock, const char *name, argument
21 * Make sure we are not reinitializing a held lock:
23 debug_check_no_locks_freed((void *)lock, sizeof(*lock));
24 lockdep_init_map_wait(&lock->dep_map, name, key, 0, inner);
26 lock->raw_lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED;
27 lock->magic = SPINLOCK_MAGIC;
28 lock->owner = SPINLOCK_OWNER_INIT;
29 lock->owner_cpu = -1;
35 void __rwlock_init(rwlock_t *lock, cons argument
54 spin_dump(raw_spinlock_t *lock, const char *msg) argument
72 spin_bug(raw_spinlock_t *lock, const char *msg) argument
83 debug_spin_lock_before(raw_spinlock_t *lock) argument
91 debug_spin_lock_after(raw_spinlock_t *lock) argument
97 debug_spin_unlock(raw_spinlock_t *lock) argument
112 do_raw_spin_lock(raw_spinlock_t *lock) argument
120 do_raw_spin_trylock(raw_spinlock_t *lock) argument
137 do_raw_spin_unlock(raw_spinlock_t *lock) argument
145 rwlock_bug(rwlock_t *lock, const char *msg) argument
158 do_raw_read_lock(rwlock_t *lock) argument
164 do_raw_read_trylock(rwlock_t *lock) argument
177 do_raw_read_unlock(rwlock_t *lock) argument
183 debug_write_lock_before(rwlock_t *lock) argument
191 debug_write_lock_after(rwlock_t *lock) argument
197 debug_write_unlock(rwlock_t *lock) argument
207 do_raw_write_lock(rwlock_t *lock) argument
214 do_raw_write_trylock(rwlock_t *lock) argument
229 do_raw_write_unlock(rwlock_t *lock) argument
[all...]
spinlock.c
35 * not re-enabled during lock-acquire (which the preempt-spin-ops do):
46 * Some architectures can relax in favour of the CPU owning the lock.
63 * This could be a long-held lock. We both prepare to spin for a long
65 * towards that other CPU that it should break the lock ASAP.
68 void __lockfunc __raw_##op##_lock(locktype##_t *lock) \
72 if (likely(do_raw_##op##_trylock(lock))) \
76 arch_##op##_relax(&lock->raw_lock); \
80 unsigned long __lockfunc __raw_##op##_lock_irqsave(locktype##_t *lock) \
87 if (likely(do_raw_##op##_trylock(lock))) \
92 arch_##op##_relax(&lock->raw_lock); \
136 _raw_spin_trylock(raw_spinlock_t *lock) argument
144 _raw_spin_trylock_bh(raw_spinlock_t *lock) argument
152 _raw_spin_lock(raw_spinlock_t *lock) argument
160 _raw_spin_lock_irqsave(raw_spinlock_t *lock) argument
168 _raw_spin_lock_irq(raw_spinlock_t *lock) argument
176 _raw_spin_lock_bh(raw_spinlock_t *lock) argument
184 _raw_spin_unlock(raw_spinlock_t *lock) argument
192 _raw_spin_unlock_irqrestore(raw_spinlock_t *lock, unsigned long flags) argument
200 _raw_spin_unlock_irq(raw_spinlock_t *lock) argument
208 _raw_spin_unlock_bh(raw_spinlock_t *lock) argument
218 _raw_read_trylock(rwlock_t *lock) argument
226 _raw_read_lock(rwlock_t *lock) argument
234 _raw_read_lock_irqsave(rwlock_t *lock) argument
242 _raw_read_lock_irq(rwlock_t *lock) argument
250 _raw_read_lock_bh(rwlock_t *lock) argument
258 _raw_read_unlock(rwlock_t *lock) argument
266 _raw_read_unlock_irqrestore(rwlock_t *lock, unsigned long flags) argument
274 _raw_read_unlock_irq(rwlock_t *lock) argument
282 _raw_read_unlock_bh(rwlock_t *lock) argument
290 _raw_write_trylock(rwlock_t *lock) argument
298 _raw_write_lock(rwlock_t *lock) argument
308 _raw_write_lock_nested(rwlock_t *lock, int subclass) argument
316 _raw_write_lock_irqsave(rwlock_t *lock) argument
324 _raw_write_lock_irq(rwlock_t *lock) argument
332 _raw_write_lock_bh(rwlock_t *lock) argument
340 _raw_write_unlock(rwlock_t *lock) argument
348 _raw_write_unlock_irqrestore(rwlock_t *lock, unsigned long flags) argument
356 _raw_write_unlock_irq(rwlock_t *lock) argument
364 _raw_write_unlock_bh(rwlock_t *lock) argument
375 _raw_spin_lock_nested(raw_spinlock_t *lock, int subclass) argument
383 _raw_spin_lock_irqsave_nested(raw_spinlock_t *lock, int subclass) argument
396 _raw_spin_lock_nest_lock(raw_spinlock_t *lock, struct lockdep_map *nest_lock) argument
[all...]
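
The BUILD_LOCK_OPS fragment above generates the out-of-line contended paths: try the lock with preemption disabled, and on failure re-enable preemption while spinning, so this CPU stays schedulable and the comment's "break the lock ASAP" hint can reach the owner. A hedged paraphrase of the generated __raw_spin_lock() body (lockdep and architecture details ignored; __raw_spin_lock_sketch is a made-up name, kernel context assumed):

static void __raw_spin_lock_sketch(raw_spinlock_t *lock)
{
	for (;;) {
		preempt_disable();
		if (likely(do_raw_spin_trylock(lock)))
			break;			/* return with preemption disabled */
		preempt_enable();		/* long wait: stay preemptible */

		arch_spin_relax(&lock->raw_lock);	/* cpu_relax() unless the arch overrides it */
	}
}
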
/linux-master/arch/arm/include/asm/
mcs_spinlock.h
9 #define arch_mcs_spin_lock_contended(lock) \
13 while (!(smp_load_acquire(lock))) \
17 #define arch_mcs_spin_unlock_contended(lock) \
19 smp_store_release(lock, 1); \
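
These two macros are the contended slow path of an MCS queue lock: each waiter spins on a flag in its own queue node (using WFE on arm), and the lock holder hands the lock over with a release store. A generic C11 sketch of the MCS idea, for illustration only; it is not the kernel's mcs_spinlock code and the names are made up.

#include <stdatomic.h>
#include <stddef.h>

struct mcs_node {
	struct mcs_node *_Atomic next;
	atomic_int locked;			/* 0 = keep waiting, 1 = lock handed over */
};

struct mcs_lock {
	struct mcs_node *_Atomic tail;
};

static void mcs_acquire(struct mcs_lock *l, struct mcs_node *me)
{
	struct mcs_node *prev;

	atomic_store_explicit(&me->next, NULL, memory_order_relaxed);
	atomic_store_explicit(&me->locked, 0, memory_order_relaxed);

	prev = atomic_exchange_explicit(&l->tail, me, memory_order_acq_rel);
	if (!prev)
		return;				/* queue was empty: lock acquired */

	atomic_store_explicit(&prev->next, me, memory_order_release);
	/* arch_mcs_spin_lock_contended(): spin on our own node's flag */
	while (!atomic_load_explicit(&me->locked, memory_order_acquire))
		;				/* the kernel would wfe()/cpu_relax() here */
}

static void mcs_release(struct mcs_lock *l, struct mcs_node *me)
{
	struct mcs_node *next = atomic_load_explicit(&me->next, memory_order_acquire);

	if (!next) {
		struct mcs_node *expected = me;

		/* no visible successor: try to reset the queue to empty */
		if (atomic_compare_exchange_strong_explicit(&l->tail, &expected, NULL,
							    memory_order_acq_rel,
							    memory_order_acquire))
			return;
		/* a successor is enqueueing: wait until it links itself */
		while (!(next = atomic_load_explicit(&me->next, memory_order_acquire)))
			;
	}
	/* arch_mcs_spin_unlock_contended(): hand the lock to the next waiter */
	atomic_store_explicit(&next->locked, 1, memory_order_release);
}
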
/linux-master/tools/testing/selftests/kvm/lib/aarch64/
spinlock.c
9 void spin_lock(struct spinlock *lock) argument
20 : "r" (&lock->v)
24 void spin_unlock(struct spinlock *lock) argument
26 asm volatile("stlr wzr, [%0]\n" : : "r" (&lock->v) : "memory");
/linux-master/tools/testing/selftests/kvm/include/aarch64/
spinlock.h
10 extern void spin_lock(struct spinlock *lock);
11 extern void spin_unlock(struct spinlock *lock);
/linux-master/include/asm-generic/bitops/
ext2-atomic.h
9 #define ext2_set_bit_atomic(lock, nr, addr) \
12 spin_lock(lock); \
14 spin_unlock(lock); \
18 #define ext2_clear_bit_atomic(lock, nr, addr) \
21 spin_lock(lock); \
23 spin_unlock(lock); \
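
ext2_set_bit_atomic()/ext2_clear_bit_atomic() simply bracket a non-atomic bitop with a caller-supplied spinlock. A hedged sketch of how a filesystem would call it, assuming kernel context and that the macro above is in scope via the architecture's bitops headers; bitmap_lock, block_bitmap and mark_block_used() are illustrative names.

#include <linux/spinlock.h>
#include <linux/bitops.h>

static DEFINE_SPINLOCK(bitmap_lock);
static unsigned long block_bitmap[BITS_TO_LONGS(1024)];

static int mark_block_used(unsigned int blk)
{
	/* takes the spinlock, does the non-atomic test-and-set, drops the lock,
	 * and returns the bit's previous value */
	return ext2_set_bit_atomic(&bitmap_lock, blk, block_bitmap);
}
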
/linux-master/include/trace/events/
lock.h
3 #define TRACE_SYSTEM lock
15 TP_PROTO(struct lockdep_map *lock, unsigned int subclass,
19 TP_ARGS(lock, subclass, trylock, read, check, next_lock, ip),
23 __string(name, lock->name)
29 __assign_str(name, lock->name);
30 __entry->lockdep_addr = lock;
39 DECLARE_EVENT_CLASS(lock,
41 TP_PROTO(struct lockdep_map *lock, unsigned long ip),
43 TP_ARGS(lock, ip),
46 __string( name, lock->name )
[all...]
/linux-master/tools/testing/selftests/rcutorture/formal/srcu-cbmc/src/
locks.h
20 /* Only use one lock mechanism. Select which one. */
26 static inline void lock_impl_lock(struct lock_impl *lock) argument
28 BUG_ON(pthread_mutex_lock(&lock->mutex));
31 static inline void lock_impl_unlock(struct lock_impl *lock) argument
33 BUG_ON(pthread_mutex_unlock(&lock->mutex));
36 static inline bool lock_impl_trylock(struct lock_impl *lock) argument
38 int err = pthread_mutex_trylock(&lock->mutex);
47 static inline void lock_impl_init(struct lock_impl *lock) argument
49 pthread_mutex_init(&lock->mutex, NULL);
55 /* Spinlock that assumes that it always gets the lock immediately.
61 lock_impl_trylock(struct lock_impl *lock) argument
79 lock_impl_lock(struct lock_impl *lock) argument
93 lock_impl_unlock(struct lock_impl *lock) argument
110 lock_impl_init(struct lock_impl *lock) argument
131 spin_lock_init(spinlock_t *lock) argument
136 spin_lock(spinlock_t *lock) argument
148 spin_unlock(spinlock_t *lock) argument
166 spin_trylock(spinlock_t *lock) argument
[all...]
/linux-master/arch/powerpc/include/asm/
simple_spinlock.h
6 * Simple spin lock operations.
34 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) argument
36 return lock.slock == 0;
39 static inline int arch_spin_is_locked(arch_spinlock_t *lock) argument
41 return !arch_spin_value_unlocked(READ_ONCE(*lock));
45 * This returns the old value in the lock, so we succeeded
46 * in getting the lock if the return value is 0.
48 static inline unsigned long __arch_spin_trylock(arch_spinlock_t *lock) argument
62 : "r" (token), "r" (&lock->slock)
68 static inline int arch_spin_trylock(arch_spinlock_t *lock) argument
92 splpar_spin_yield(arch_spinlock_t *lock) argument
93 splpar_rw_yield(arch_rwlock_t *lock) argument
96 spin_yield(arch_spinlock_t *lock) argument
104 rw_yield(arch_rwlock_t *lock) argument
112 arch_spin_lock(arch_spinlock_t *lock) argument
126 arch_spin_unlock(arch_spinlock_t *lock) argument
[all...]
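
The powerpc comment above spells out the trylock convention: __arch_spin_trylock() returns the previous lock word, so a return of 0 means the caller acquired the lock. A portable C11 sketch of that convention, for illustration only (not the powerpc larx/stwcx. assembly; the sketch_* names are made up):

#include <stdatomic.h>
#include <stdbool.h>

typedef struct {
	atomic_uint slock;			/* 0 = unlocked, non-zero = owner token */
} sketch_spinlock_t;

static unsigned int sketch_trylock_old(sketch_spinlock_t *l, unsigned int token)
{
	/* swap in our token and report what was there before */
	return atomic_exchange_explicit(&l->slock, token, memory_order_acquire);
}

static bool sketch_spin_trylock(sketch_spinlock_t *l)
{
	/* we succeeded in getting the lock iff the old value was 0 */
	return sketch_trylock_old(l, 1) == 0;
}

static void sketch_spin_unlock(sketch_spinlock_t *l)
{
	atomic_store_explicit(&l->slock, 0, memory_order_release);
}
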

Completed in 255 milliseconds
