Lines Matching defs:ptr
22 asm volatile(_lock "cmpxchg8b %[ptr]" \
23 : [ptr] "+m" (*(_ptr)), \
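Lines 22-23 sit inside the __arch_cmpxchg64() macro of the 32-bit x86 cmpxchg header. For context, a sketch of the whole macro, assuming the union __u64_halves low/high split these helpers use; everything outside the two matched lines is reconstructed, not quoted:

#define __arch_cmpxchg64(_ptr, _old, _new, _lock)			\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	/* cmpxchg8b compares %edx:%eax with the memory operand: */	\
	/* on match it stores %ecx:%ebx, otherwise it loads the  */	\
	/* current memory value back into %edx:%eax.              */	\
	asm volatile(_lock "cmpxchg8b %[ptr]"				\
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	o.full;								\
})

The macro evaluates to the value that was in *(_ptr) before the attempt, which is what the wrappers below return.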
32 static __always_inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
34 return __arch_cmpxchg64(ptr, old, new, LOCK_PREFIX);
37 static __always_inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
39 return __arch_cmpxchg64(ptr, old, new,);
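The two wrappers at lines 32-39 differ only in the final macro argument: __cmpxchg64() passes LOCK_PREFIX so the instruction is locked on SMP kernels, while the trailing comma in __cmpxchg64_local() deliberately passes an empty _lock argument and yields a plain, CPU-local cmpxchg8b. A minimal, hypothetical caller (publish_once() is made up for illustration) checks the returned old value against what it expected:

/* Install 'new' only if *ptr still holds 'expected'. */
static bool publish_once(volatile u64 *ptr, u64 expected, u64 new)
{
	/* __cmpxchg64() returns the prior contents of *ptr;
	 * the exchange happened iff that matches 'expected'. */
	return __cmpxchg64(ptr, expected, new) == expected;
}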
48 asm volatile(_lock "cmpxchg8b %[ptr]" \
51 [ptr] "+m" (*(_ptr)), \
62 static __always_inline bool __try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
64 return __arch_try_cmpxchg64(ptr, oldp, new, LOCK_PREFIX);
67 static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
69 return __arch_try_cmpxchg64(ptr, oldp, new,);
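The try_ variants at lines 48-69 follow the same locked/local split but return a bool and, on failure, write the value they actually observed back through *oldp, so a retry loop does not need a separate reload. A minimal, hypothetical example (counter_add() is not from the header):

/* Atomically add 'delta' to a 64-bit counter in the try_cmpxchg style. */
static void counter_add(volatile u64 *counter, u64 delta)
{
	u64 old = *counter;

	/* On failure 'old' has already been refreshed with the winning value. */
	while (!__try_cmpxchg64(counter, &old, old + delta))
		;
}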
96 _lock "cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
97 : [ptr] "+m" (*(_ptr)), \
105 static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
107 return __arch_cmpxchg64_emu(ptr, old, new, LOCK_PREFIX_HERE, "lock; ");
111 static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
113 return __arch_cmpxchg64_emu(ptr, old, new, ,);
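Lines 96-113 belong to the variants built for kernels that must also run on CPUs without cmpxchg8b (no X86_FEATURE_CX8, i.e. 486-class parts): an ALTERNATIVE() site lets capable CPUs execute the native, optionally locked instruction while older ones call an out-of-line emulation helper. As before, the ", ," in arch_cmpxchg64_local() passes empty lock arguments to drop the lock prefix. A sketch of the enclosing macro, reconstructed around the two matched lines; the ALTERNATIVE() body, the cmpxchg8b_emu helper and the "S" constraint (the helper takes the pointer in %esi) are assumptions, not quoted text:

#define __arch_cmpxchg64_emu(_ptr, _old, _new, _lock_loc, _lock)	\
({									\
	union __u64_halves o = { .full = (_old), },			\
			   n = { .full = (_new), };			\
									\
	asm volatile(ALTERNATIVE(_lock_loc				\
				 "call cmpxchg8b_emu",			\
				 _lock "cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high), "S" (_ptr)		\
		     : "memory");					\
									\
	o.full;								\
})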
125 _lock "cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
128 [ptr] "+m" (*(_ptr)), \
139 static __always_inline bool arch_try_cmpxchg64(volatile u64 *ptr, u64 *oldp, u64 new)
141 return __arch_try_cmpxchg64_emu(ptr, oldp, new, LOCK_PREFIX_HERE, "lock; ");
145 static __always_inline bool arch_try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp, u64 new)
147 return __arch_try_cmpxchg64_emu(ptr, oldp, new, ,);
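For readers without a 32-bit x86 tree at hand, here is a small, self-contained user-space analogue of the try_cmpxchg64 contract. It is not kernel code: it uses the GCC/Clang __atomic_compare_exchange_n builtin instead of inline cmpxchg8b and the names are invented, but the semantics match (boolean result, expected value refreshed on failure):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool try_cmpxchg64(uint64_t *ptr, uint64_t *oldp, uint64_t new_val)
{
	/* On failure the builtin writes the observed value into *oldp,
	 * mirroring what the kernel macros do with _oldp. */
	return __atomic_compare_exchange_n(ptr, oldp, new_val, false,
					   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

int main(void)
{
	uint64_t v = 5, old = 5;

	if (try_cmpxchg64(&v, &old, 9))
		printf("exchanged: v = %llu\n", (unsigned long long)v);

	old = 1;	/* stale expectation */
	if (!try_cmpxchg64(&v, &old, 7))
		printf("failed: observed %llu\n", (unsigned long long)old);

	return 0;
}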