// Inline wrappers around the GCC/Clang __atomic builtins. Each helper takes a
// value argument `val` and returns the contents of *ptr before the update.
#include <stdint.h>

// Sequentially consistent read-modify-write operations on int.
static inline int atomic_swap(volatile int* ptr, int val) {
    return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
}
static inline int atomic_add(volatile int* ptr, int val) {
    return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
}
static inline int atomic_and(volatile int* ptr, int val) {
    return __atomic_fetch_and(ptr, val, __ATOMIC_SEQ_CST);
}
static inline int atomic_or(volatile int* ptr, int val) {
    return __atomic_fetch_or(ptr, val, __ATOMIC_SEQ_CST);
}
static inline int atomic_xor(volatile int* ptr, int val) {
    return __atomic_fetch_xor(ptr, val, __ATOMIC_SEQ_CST);
}
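
As a usage sketch (the names lock_taken, try_lock, unlock, and hits below are
illustrative, not part of the header above), the sequentially consistent swap
can implement a simple test-and-set flag, and atomic_add a shared counter:

// Hypothetical test-and-set flag built on atomic_swap.
static volatile int lock_taken = 0;

static int try_lock(void) {
    // atomic_swap returns the previous value: 0 means we acquired the flag.
    return atomic_swap(&lock_taken, 1) == 0;
}

static void unlock(void) {
    atomic_swap(&lock_taken, 0);
}

// Hypothetical shared counter; the pre-increment value is returned.
static volatile int hits = 0;

static int record_hit(void) {
    return atomic_add(&hits, 1);
}
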
// Relaxed variants: atomic, but with no ordering guarantees relative to other
// memory accesses.
static inline int atomic_swap_relaxed(volatile int* ptr, int val) {
    return __atomic_exchange_n(ptr, val, __ATOMIC_RELAXED);
}
static inline int atomic_add_relaxed(volatile int* ptr, int val) {
    return __atomic_fetch_add(ptr, val, __ATOMIC_RELAXED);
}
static inline int atomic_and_relaxed(volatile int* ptr, int val) {
    return __atomic_fetch_and(ptr, val, __ATOMIC_RELAXED);
}
static inline int atomic_or_relaxed(volatile int* ptr, int val) {
    return __atomic_fetch_or(ptr, val, __ATOMIC_RELAXED);
}
static inline int atomic_xor_relaxed(volatile int* ptr, int val) {
    return __atomic_fetch_xor(ptr, val, __ATOMIC_RELAXED);
}
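
A sketch of where the relaxed variants fit (the net_stats struct is
illustrative): statistics counters need atomicity but no ordering with respect
to the data they describe, so relaxed increments avoid unnecessary fences:

// Hypothetical statistics block; only atomicity of each counter is required.
struct net_stats {
    volatile int rx_packets;
    volatile int tx_packets;
};

static void count_rx(struct net_stats* s) {
    // Relaxed: implies no ordering with the packet contents themselves.
    atomic_add_relaxed(&s->rx_packets, 1);
}
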
// Release-ordered add; pairs with an acquire load of the same location.
static inline int atomic_add_release(volatile int* ptr, int val) {
    return __atomic_fetch_add(ptr, val, __ATOMIC_RELEASE);
}
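
A sketch of the intended pairing, assuming the consumer reads the flag with an
acquire load (done here via the __atomic_load_n builtin, since no acquire
wrapper appears in this excerpt; publish and consume are illustrative names):

// Hypothetical producer/consumer handoff using release/acquire ordering.
static int payload;             // plain data published by the producer
static volatile int ready = 0;  // generation flag bumped with release ordering

static void publish(int value) {
    payload = value;                // 1. write the data
    atomic_add_release(&ready, 1);  // 2. release: data is visible before the bump
}

static int consume(int last_seen) {
    // The acquire load pairs with the release add above.
    if (__atomic_load_n(&ready, __ATOMIC_ACQUIRE) != last_seen) {
        return payload;  // ordered after the acquire load, so the data is visible
    }
    return -1;  // nothing new published yet
}
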
// Signed 64-bit variants (sequentially consistent).
static inline int64_t atomic_swap_64(volatile int64_t* ptr, int64_t val) {
    return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
}
static inline int64_t atomic_add_64(volatile int64_t* ptr, int64_t val) {
    return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
}
static inline int64_t atomic_and_64(volatile int64_t* ptr, int64_t val) {
    return __atomic_fetch_and(ptr, val, __ATOMIC_SEQ_CST);
}
static inline int64_t atomic_or_64(volatile int64_t* ptr, int64_t val) {
    return __atomic_fetch_or(ptr, val, __ATOMIC_SEQ_CST);
}
static inline int64_t atomic_xor_64(volatile int64_t* ptr, int64_t val) {
    return __atomic_fetch_xor(ptr, val, __ATOMIC_SEQ_CST);
}
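
A sketch of a typical use for the signed 64-bit add: a monotonically increasing
ID generator (next_id and allocate_id are illustrative names):

static volatile int64_t next_id = 0;

static int64_t allocate_id(void) {
    // Returns the pre-increment value, so IDs start at 0 and do not repeat.
    return atomic_add_64(&next_id, 1);
}
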
// Unsigned 64-bit variants (sequentially consistent).
static inline uint64_t atomic_swap_u64(volatile uint64_t* ptr, uint64_t val) {
    return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
}
static inline uint64_t atomic_add_u64(volatile uint64_t* ptr, uint64_t val) {
    return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
}
static inline uint64_t atomic_and_u64(volatile uint64_t* ptr, uint64_t val) {
    return __atomic_fetch_and(ptr, val, __ATOMIC_SEQ_CST);
}
static inline uint64_t atomic_or_u64(volatile uint64_t* ptr, uint64_t val) {
    return __atomic_fetch_or(ptr, val, __ATOMIC_SEQ_CST);
}
static inline uint64_t atomic_xor_u64(volatile uint64_t* ptr, uint64_t val) {
    return __atomic_fetch_xor(ptr, val, __ATOMIC_SEQ_CST);
}
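
A sketch using the unsigned 64-bit bitwise ops as a shared 64-slot bitmap
(slot_mask, mark_slot, and clear_slot are illustrative names):

static volatile uint64_t slot_mask = 0;

static uint64_t mark_slot(unsigned slot) {
    // Set bit `slot`; returns the mask as it was before the update.
    return atomic_or_u64(&slot_mask, UINT64_C(1) << slot);
}

static uint64_t clear_slot(unsigned slot) {
    // Clear bit `slot`; returns the previous mask.
    return atomic_and_u64(&slot_mask, ~(UINT64_C(1) << slot));
}
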
// Relaxed 64-bit adds, for counters that need atomicity but no ordering.
static inline int64_t atomic_add_64_relaxed(volatile int64_t* ptr, int64_t val) {
    return __atomic_fetch_add(ptr, val, __ATOMIC_RELAXED);
}
static inline uint64_t atomic_add_u64_relaxed(volatile uint64_t* ptr, uint64_t val) {
    return __atomic_fetch_add(ptr, val, __ATOMIC_RELAXED);
}
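
Finally, a sketch for the relaxed 64-bit add, assuming a byte counter that is
only read after the fact for reporting (bytes_sent is an illustrative name):

static volatile uint64_t bytes_sent = 0;

static void account_send(uint64_t len) {
    // Atomic but unordered; adequate for a statistic nothing synchronizes against.
    atomic_add_u64_relaxed(&bytes_sent, len);
}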