Lines matching refs:lock (Linux, arch/x86/include/asm/cmpxchg.h)
42 #define __xchg_op(ptr, arg, op, lock) \
47 asm volatile (lock #op "b %b0, %1\n" \
52 asm volatile (lock #op "w %w0, %1\n" \
57 asm volatile (lock #op "l %0, %1\n" \
62 asm volatile (lock #op "q %q0, %1\n" \
73 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
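__xchg_op selects one of four inline-asm templates by operand size (b/w/l/q suffixes) and splices in both the instruction mnemonic and an optional lock prefix. For xchg the prefix argument is an empty string, because, as the comment at line 73 says, the xchg instruction asserts the bus lock implicitly whenever a memory operand is involved. A minimal user-space sketch of the 32-bit arm, compilable with GCC on x86-64 (the name my_xchg32 is invented here, not the kernel's):

    #include <stdint.h>

    /* Sketch of the 32-bit case of __xchg_op: no "lock" prefix is
     * emitted because xchg locks the bus implicitly. The "cc" clobber
     * mirrors the kernel macro, which is shared with xadd (which does
     * modify the flags). */
    static inline uint32_t my_xchg32(volatile uint32_t *ptr, uint32_t val)
    {
        uint32_t ret = val;
        asm volatile("xchgl %0, %1"
                     : "+r" (ret), "+m" (*ptr)
                     : : "memory", "cc");
        return ret;   /* the value *ptr held before the swap */
    }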
85 #define __raw_cmpxchg(ptr, old, new, size, lock) \
94 asm volatile(lock "cmpxchgb %2,%1" \
103 asm volatile(lock "cmpxchgw %2,%1" \
112 asm volatile(lock "cmpxchgl %2,%1" \
121 asm volatile(lock "cmpxchgq %2,%1" \
137 __raw_cmpxchg((ptr), (old), (new), (size), "lock; ")
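__raw_cmpxchg follows the same size-switch pattern. The compare value is pinned to the accumulator ("a" constraint), as the cmpxchg instruction requires, and the wrapper at line 137 instantiates it with a hard-coded "lock; " string rather than the configurable LOCK_PREFIX. A user-space sketch of the 32-bit arm, assuming GCC on x86-64 (my_cmpxchg32 is a made-up name):

    #include <stdint.h>

    /* Sketch of the 32-bit case of __raw_cmpxchg with "lock; " spliced
     * in. cmpxchg compares *ptr with %eax (old); on a match it stores
     * new into *ptr, otherwise it loads *ptr into %eax. Either way
     * %eax ends up holding the previous value of *ptr. */
    static inline uint32_t my_cmpxchg32(volatile uint32_t *ptr,
                                        uint32_t old, uint32_t new)
    {
        uint32_t ret;
        asm volatile("lock; cmpxchgl %2, %1"
                     : "=a" (ret), "+m" (*ptr)
                     : "r" (new), "0" (old)
                     : "memory");
        return ret;   /* equals old iff the store happened */
    }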
158 #define __raw_try_cmpxchg(_ptr, _pold, _new, size, lock) \
168 asm volatile(lock "cmpxchgb %[new], %[ptr]" \
180 asm volatile(lock "cmpxchgw %[new], %[ptr]" \
192 asm volatile(lock "cmpxchgl %[new], %[ptr]" \
204 asm volatile(lock "cmpxchgq %[new], %[ptr]" \
225 __raw_try_cmpxchg((ptr), (pold), (new), (size), "lock; ")
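__raw_try_cmpxchg is the try_cmpxchg flavour: instead of returning the old value, it returns a success flag taken straight from ZF (via CC_SET(z)/CC_OUT(z) constraint lines not shown in the matches above) and, on failure, writes the value it actually observed back through pold. A sketch of the 64-bit arm using GCC's "=@ccz" flag-output constraint (GCC 6+); my_try_cmpxchg64 is an invented name:

    #include <stdbool.h>
    #include <stdint.h>

    /* Sketch of the 64-bit case of __raw_try_cmpxchg. ZF is read
     * directly as the success flag; on failure the value found in
     * *ptr is written back through old, ready for a retry. */
    static inline bool my_try_cmpxchg64(volatile uint64_t *ptr,
                                        uint64_t *old, uint64_t new)
    {
        bool success;
        uint64_t seen = *old;

        asm volatile("lock; cmpxchgq %[new], %[ptr]"
                     : "=@ccz" (success),
                       [ptr] "+m" (*ptr),
                       [old] "+a" (seen)
                     : [new] "r" (new)
                     : "memory");
        if (!success)
            *old = seen;
        return success;
    }

A typical caller retries in a loop, recomputing new from the updated old: do { new = f(old); } while (!my_try_cmpxchg64(&v, &old, new)); this avoids the extra compare that a plain cmpxchg-based loop needs.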
245 #define __xadd(ptr, inc, lock) __xchg_op((ptr), (inc), xadd, lock)
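__xadd at line 245 reuses __xchg_op with the xadd mnemonic, giving an atomic fetch-and-add; the lock argument is passed through, so the caller chooses the prefix. A standalone sketch of the locked 32-bit case under the same GCC/x86-64 assumptions (my_xadd32 is a made-up name):

    #include <stdint.h>

    /* Sketch of __xadd for the 32-bit case: lock xadd atomically adds
     * inc to *ptr and returns the value *ptr held beforehand. Unlike
     * xchg, xadd needs an explicit lock prefix to be atomic on SMP,
     * and it modifies the flags, hence the "cc" clobber. */
    static inline uint32_t my_xadd32(volatile uint32_t *ptr, uint32_t inc)
    {
        uint32_t ret = inc;
        asm volatile("lock; xaddl %0, %1"
                     : "+r" (ret), "+m" (*ptr)
                     : : "memory", "cc");
        return ret;   /* old value; *ptr now holds old + inc */
    }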