Lines Matching defs:lock (include/asm-generic/qspinlock.h)

7  * ticket-lock.h and only come looking here when you've considered all the
48 * @lock: Pointer to queued spinlock structure
51 static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
57 return atomic_read(&lock->val);
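The match elides the rest of the function body. As a rough userspace model of the check (the C11 atomics and the model_* names below are my stand-ins, not kernel API), "locked" simply means "any bit set in the lock word":

#include <stdatomic.h>

struct qspinlock_model {                /* stand-in for struct qspinlock */
	atomic_uint val;                /* entire lock state in one 32-bit word */
};

/* Mirrors the shape of queued_spin_is_locked(): any non-zero state
 * (locked byte, pending bit, or a queued waiter encoded in the tail
 * bits) counts as locked, even before _Q_LOCKED_VAL is observable. */
static inline int model_spin_is_locked(struct qspinlock_model *lock)
{
	return atomic_load_explicit(&lock->val, memory_order_relaxed) != 0;
}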
63 * @lock: queued spinlock structure
66 * N.B. Whenever there are tasks waiting for the lock, it is considered
67 * locked wrt the lockref code to avoid lock stealing by the lockref
68 * code and changing things underneath the lock. This also allows some
71 static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
73 return !lock.val.counter;
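Note the by-value parameter on line 71: the caller hands over a copied snapshot of the lock word (lockref packs the spinlock and its reference count together and works on 64-bit snapshots of the pair), so no atomic access is needed inside. A hedged sketch of that shape, with hypothetical model names:

struct qspinlock_snapshot {
	int counter;                    /* stand-in for lock.val.counter */
};

/* By-value check over a snapshot: zero means no owner, no pending bit
 * and no queued waiters. Per the N.B. above, waiters alone are enough
 * to report "locked", so lockref never mutates a contended lock. */
static inline int model_spin_value_unlocked(struct qspinlock_snapshot lock)
{
	return lock.counter == 0;
}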
77 * queued_spin_is_contended - check if the lock is contended
78 * @lock: Pointer to queued spinlock structure
79 * Return: 1 if lock contended, 0 otherwise
81 static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
83 return atomic_read(&lock->val) & ~_Q_LOCKED_MASK;
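The mask on line 83 strips the owner byte so that only waiter state remains. A sketch assuming mainline's layout, where the locked byte occupies bits 0-7 and the pending bit and waiter tail live above it (the MODEL_* names are assumptions):

#include <stdatomic.h>

#define MODEL_LOCKED_MASK 0xffU         /* assumed stand-in for _Q_LOCKED_MASK */

/* Contended means somebody is waiting: any bit outside the locked
 * byte (the pending bit or a non-empty MCS tail) is set. */
static inline int model_spin_is_contended(atomic_uint *val)
{
	return (atomic_load_explicit(val, memory_order_relaxed)
		& ~MODEL_LOCKED_MASK) != 0;
}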
87 * @lock: Pointer to queued spinlock structure
88 * Return: 1 if lock acquired, 0 if failed
90 static __always_inline int queued_spin_trylock(struct qspinlock *lock)
92 int val = atomic_read(&lock->val);
97 return likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL));
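Between lines 92 and 97 the listing skips an early exit: mainline returns 0 there as soon as the plain read sees a non-zero value, so the cmpxchg is only attempted on an apparently free lock. The whole shape, as a userspace sketch with hypothetical MODEL_* names:

#include <stdatomic.h>
#include <stdbool.h>

#define MODEL_LOCKED_VAL 1U             /* assumed stand-in for _Q_LOCKED_VAL */

/* Trylock: a cheap relaxed read filters out the held/queued case,
 * then a single acquire cmpxchg 0 -> LOCKED attempts the grab. */
static inline bool model_spin_trylock(atomic_uint *val)
{
	unsigned int old = atomic_load_explicit(val, memory_order_relaxed);

	if (old != 0)                   /* held or queued: fail fast */
		return false;

	return atomic_compare_exchange_strong_explicit(
			val, &old, MODEL_LOCKED_VAL,
			memory_order_acquire, memory_order_relaxed);
}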
100 extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
105 * @lock: Pointer to queued spinlock structure
107 static __always_inline void queued_spin_lock(struct qspinlock *lock)
111 if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)))
114 queued_spin_lock_slowpath(lock, val);
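The fastpath starts from val == 0 (initialized just above, in a line the match skipped) and falls back to the slowpath declared on line 100, handing over the value the failed cmpxchg observed. A self-contained sketch of that split, model names assumed:

#include <stdatomic.h>

#define MODEL_LOCKED_VAL 1U             /* assumed stand-in for _Q_LOCKED_VAL */

/* Queueing path (pending bit, MCS queue) deliberately elided. */
void model_spin_lock_slowpath(atomic_uint *val, unsigned int old);

/* Lock fastpath: one acquire cmpxchg 0 -> LOCKED. On contention the
 * observed value is passed down so the slowpath can decode what it
 * raced with (owner, pending bit, or queued waiters). */
static inline void model_spin_lock(atomic_uint *val)
{
	unsigned int old = 0;

	if (atomic_compare_exchange_strong_explicit(
			val, &old, MODEL_LOCKED_VAL,
			memory_order_acquire, memory_order_relaxed))
		return;

	model_spin_lock_slowpath(val, old);
}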
121 * @lock: Pointer to queued spinlock structure
123 static __always_inline void queued_spin_unlock(struct qspinlock *lock)
128 smp_store_release(&lock->locked, 0);
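Line 128 is the whole unlock: a release store of 0 to just the owner byte. Because the locked byte is its own sub-word field inside the 32-bit value, clearing it cannot disturb the pending/tail bits, so no cmpxchg is needed. A minimal C11 model of that store (the model name is mine):

#include <stdatomic.h>

/* Release ordering makes every store from the critical section
 * visible before the next owner's acquire succeeds; storing only the
 * owner byte leaves concurrent pending/tail updates in the rest of
 * the word untouched. */
static inline void model_spin_unlock(_Atomic unsigned char *locked)
{
	atomic_store_explicit(locked, 0, memory_order_release);
}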
133 static __always_inline bool virt_spin_lock(struct qspinlock *lock)
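This is the generic stub for paravirtualized environments; in mainline it simply returns false, meaning "no hypervisor override, take the normal queued slowpath". An architecture that expects vCPU preemption can provide its own version that degrades to a simple test-and-set spin, since strict FIFO queueing performs poorly when a waiting vCPU is scheduled out. A sketch of the stub's shape:

#include <stdbool.h>

struct qspinlock;                       /* opaque for this sketch */

/* Returning false declines the paravirt path; the caller then
 * proceeds with the native queued-spinlock slowpath. */
static inline bool model_virt_spin_lock(struct qspinlock *lock)
{
	(void)lock;
	return false;
}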