/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_CSKY_CMPXCHG_H
#define __ASM_CSKY_CMPXCHG_H

#ifdef CONFIG_SMP
#include <linux/bug.h>
#include <asm/barrier.h>

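/*
 * __xchg_relaxed() - atomically swap *ptr with @new; no ordering is implied.
 *
 * A 2-byte exchange is emulated on the containing aligned word: the
 * ldex.w/stex.w loop masks out the target halfword, ORs in the new value,
 * and the old halfword is then extracted from the word that was read.
 * A 4-byte exchange uses the ldex.w/stex.w loop directly.
 */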
#define __xchg_relaxed(new, ptr, size)				\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(*(ptr)) __ret;				\
	unsigned long tmp;					\
	switch (size) {						\
	case 2: {						\
		u32 ret;					\
		u32 shif = ((ulong)__ptr & 2) ? 16 : 0;		\
		u32 mask = 0xffff << shif;			\
		__ptr = (__typeof__(ptr))((ulong)__ptr & ~2);	\
		__asm__ __volatile__ (				\
			"1:	ldex.w %0, (%4)\n"		\
			"	and    %1, %0, %2\n"		\
			"	or     %1, %1, %3\n"		\
			"	stex.w %1, (%4)\n"		\
			"	bez    %1, 1b\n"		\
			: "=&r" (ret), "=&r" (tmp)		\
			: "r" (~mask),				\
			  "r" ((u32)__new << shif),		\
			  "r" (__ptr)				\
			: "memory");				\
		__ret = (__typeof__(*(ptr)))			\
			((ret & mask) >> shif);			\
		break;						\
	}							\
	case 4:							\
		asm volatile (					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
			: "=&r" (__ret), "=&r" (tmp)		\
			: "r" (__new), "r"(__ptr)		\
			:);					\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	__ret;							\
})

#define arch_xchg_relaxed(ptr, x) \
		(__xchg_relaxed((x), (ptr), sizeof(*(ptr))))

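/*
 * __cmpxchg_relaxed() - compare *ptr with @old and, if they are equal,
 * store @new. Returns the value originally read from *ptr; no ordering
 * is implied. Only 4-byte operands are supported.
 */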
#define __cmpxchg_relaxed(ptr, old, new, size)			\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 4:							\
		asm volatile (					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	cmpne		%0, %4   \n"		\
		"	bt		2f       \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
		"2:				 \n"		\
			: "=&r" (__ret), "=&r" (__tmp)		\
			: "r" (__new), "r"(__ptr), "r"(__old)	\
			:);					\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	__ret;							\
})

#define arch_cmpxchg_relaxed(ptr, o, n) \
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))

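/*
 * __cmpxchg_acquire() - as __cmpxchg_relaxed(), but ACQUIRE_FENCE (from
 * <asm/barrier.h>) is placed after a successful store so that later
 * accesses cannot be reordered before the exchange.
 */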
#define __cmpxchg_acquire(ptr, old, new, size)			\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 4:							\
		asm volatile (					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	cmpne		%0, %4   \n"		\
		"	bt		2f       \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
		ACQUIRE_FENCE					\
		"2:				 \n"		\
			: "=&r" (__ret), "=&r" (__tmp)		\
			: "r" (__new), "r"(__ptr), "r"(__old)	\
			:);					\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	__ret;							\
})

#define arch_cmpxchg_acquire(ptr, o, n) \
	(__cmpxchg_acquire((ptr), (o), (n), sizeof(*(ptr))))

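/*
 * __cmpxchg() - fully ordered compare-and-exchange: RELEASE_FENCE before
 * the ldex.w/stex.w loop and FULL_FENCE after a successful store give the
 * operation full barrier semantics.
 */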
#define __cmpxchg(ptr, old, new, size)				\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 4:							\
		asm volatile (					\
		RELEASE_FENCE					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	cmpne		%0, %4   \n"		\
		"	bt		2f       \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
		FULL_FENCE					\
		"2:				 \n"		\
			: "=&r" (__ret), "=&r" (__tmp)		\
			: "r" (__new), "r"(__ptr), "r"(__old)	\
			:);					\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	__ret;							\
})

#define arch_cmpxchg(ptr, o, n)					\
	(__cmpxchg((ptr), (o), (n), sizeof(*(ptr))))

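/*
 * cmpxchg_local() only needs to be atomic with respect to the local CPU,
 * so the relaxed variant is sufficient here.
 */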
#define arch_cmpxchg_local(ptr, o, n)				\
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
#else
#include <asm-generic/cmpxchg.h>
#endif

#endif /* __ASM_CSKY_CMPXCHG_H */