/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2014 Regents of the University of California
 */

#ifndef _ASM_RISCV_CMPXCHG_H
#define _ASM_RISCV_CMPXCHG_H

#include <linux/bug.h>

#include <asm/fence.h>

#define __xchg_relaxed(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_xchg_relaxed(ptr, x)					\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})
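
/*
 * Illustrative use of arch_xchg_relaxed(): a sketch with hypothetical
 * names, for when the old value is wanted but no ordering against
 * surrounding accesses is required.
 *
 *	static unsigned long stats_word;
 *
 *	static unsigned long stats_drain(void)
 *	{
 *		return arch_xchg_relaxed(&stats_word, 0UL);
 *	}
 */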

#define __xchg_acquire(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w %0, %2, %1\n"			\
			RISCV_ACQUIRE_BARRIER				\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d %0, %2, %1\n"			\
			RISCV_ACQUIRE_BARRIER				\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_xchg_acquire(ptr, x)					\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_acquire((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})
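
/*
 * Sketch of a test-and-set lock built on arch_xchg_acquire()
 * (hypothetical names): the RISCV_ACQUIRE_BARRIER after the AMO keeps
 * the critical section from being reordered before the lock is seen
 * held.
 *
 *	static int ts_lock;
 *
 *	static void ts_acquire(void)
 *	{
 *		while (arch_xchg_acquire(&ts_lock, 1))
 *			cpu_relax();
 *	}
 */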

#define __xchg_release(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"	amoswap.w %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"	amoswap.d %0, %2, %1\n"			\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_xchg_release(ptr, x)					\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_release((ptr),			\
					    _x_, sizeof(*(ptr)));	\
})
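
/*
 * Counterpart sketch for arch_xchg_release() (hypothetical names):
 * RISCV_RELEASE_BARRIER before the AMO makes the critical section's
 * stores visible before the lock is seen free; the returned old value
 * is simply ignored here.
 *
 *	static void ts_release(void)
 *	{
 *		arch_xchg_release(&ts_lock, 0);
 *	}
 */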

#define __arch_xchg(ptr, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(new) __new = (new);					\
	__typeof__(*(ptr)) __ret;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"	amoswap.w.aqrl %0, %2, %1\n"		\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"	amoswap.d.aqrl %0, %2, %1\n"		\
			: "=r" (__ret), "+A" (*__ptr)			\
			: "r" (__new)					\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __arch_xchg((ptr), _x_, sizeof(*(ptr)));	\
})
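
/*
 * arch_xchg() is the fully ordered variant, implemented with a single
 * .aqrl AMO.  A minimal sketch (hypothetical names) swapping a shared
 * buffer pointer:
 *
 *	static void *cur_buf;
 *
 *	static void *swap_buffer(void *new_buf)
 *	{
 *		return arch_xchg(&cur_buf, new_buf);
 *	}
 */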

#define xchg32(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4);				\
	arch_xchg((ptr), (x));						\
})

#define xchg64(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_xchg((ptr), (x));						\
})
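
/*
 * The sized wrappers only add a compile-time width check, e.g.
 * (hypothetical name):
 *
 *	static u32 state32;
 *
 *	u32 old = xchg32(&state32, 0);
 *
 * which fails to build if state32 is not 32 bits wide.
 */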

/*
 * Atomic compare and exchange.  Compare OLD with MEM; if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __cmpxchg_relaxed(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_cmpxchg_relaxed(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})
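
/*
 * The canonical cmpxchg retry loop (a sketch, hypothetical names):
 * reuse the returned value as the next "old" instead of re-reading,
 * with no ordering required on either outcome.
 *
 *	static unsigned int seq;
 *
 *	static unsigned int seq_bump(void)
 *	{
 *		unsigned int tmp, old = READ_ONCE(seq);
 *
 *		while ((tmp = arch_cmpxchg_relaxed(&seq, old, old + 1)) != old)
 *			old = tmp;
 *		return old;
 *	}
 */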

#define __cmpxchg_acquire(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_ACQUIRE_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_ACQUIRE_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_cmpxchg_acquire(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_acquire((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})
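
/*
 * Acquire-variant sketch, a trylock (hypothetical names).  Note from
 * the asm above that the bne to 1f skips RISCV_ACQUIRE_BARRIER, so the
 * barrier only executes when the compare succeeds.
 *
 *	static int owner;
 *
 *	static bool try_claim(int me)
 *	{
 *		return arch_cmpxchg_acquire(&owner, 0, me) == 0;
 *	}
 */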

#define __cmpxchg_release(ptr, old, new, size)				\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			RISCV_RELEASE_BARRIER				\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_cmpxchg_release(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_release((ptr),			\
					_o_, _n_, sizeof(*(ptr)));	\
})
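
/*
 * Release-variant sketch (hypothetical names).  Unlike the acquire
 * case, RISCV_RELEASE_BARRIER precedes the LR/SC sequence and is
 * executed whether or not the compare succeeds.
 *
 *	static void release_claim(int me)
 *	{
 *		arch_cmpxchg_release(&owner, me, 0);
 *	}
 */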

#define __cmpxchg(ptr, old, new, size)					\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __ret;					\
	register unsigned int __rc;					\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__ (					\
			"0:	lr.w %0, %2\n"				\
			"	bne  %0, %z3, 1f\n"			\
			"	sc.w.rl %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_FULL_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" ((long)__old), "rJ" (__new)		\
			: "memory");					\
		break;							\
	case 8:								\
		__asm__ __volatile__ (					\
			"0:	lr.d %0, %2\n"				\
			"	bne %0, %z3, 1f\n"			\
			"	sc.d.rl %1, %z4, %2\n"			\
			"	bnez %1, 0b\n"				\
			RISCV_FULL_BARRIER				\
			"1:\n"						\
			: "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr)	\
			: "rJ" (__old), "rJ" (__new)			\
			: "memory");					\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	__ret;								\
})

#define arch_cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr),				\
				       _o_, _n_, sizeof(*(ptr)));	\
})
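
/*
 * Fully ordered use, following the comment above __cmpxchg_relaxed():
 * success is detected by comparing the return value with the old
 * value.  A sketch with hypothetical names:
 *
 *	static long shared;
 *
 *	static bool publish_once(long val)
 *	{
 *		return arch_cmpxchg(&shared, 0L, val) == 0L;
 *	}
 */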

#define arch_cmpxchg_local(ptr, o, n)					\
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))

#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})

#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_relaxed((ptr), (o), (n));				\
})
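
/*
 * The _local and 64-bit wrappers add no new ordering of their own: the
 * _local forms map to the relaxed implementation, and the 64-bit forms
 * merely assert at build time that *(ptr) is 8 bytes wide, e.g.
 * (hypothetical names):
 *
 *	static u64 tstamp;
 *
 *	static u64 tstamp_update(u64 old, u64 new)
 *	{
 *		return arch_cmpxchg64(&tstamp, old, new);
 *	}
 */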

#endif /* _ASM_RISCV_CMPXCHG_H */