/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_CMPXCHG_GRB_H
#define __ASM_SH_CMPXCHG_GRB_H

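/*
 * gRB (gUSA register bank) based xchg/cmpxchg, implemented as roll-back
 * atomic sequences rather than by disabling interrupts.
 *
 * Every helper below wraps its load/modify/store in the same bracket:
 *
 *	mova	1f, r0		! r0  = address of the end of the sequence
 *	mov	r15, r1		! r1  = saved stack pointer
 *	mov	#-n, r15	! LOGIN: r15 = -(critical section size in bytes)
 *	...			! n bytes of load/modify/store
 * 1:	mov	r1, r15		! LOGOUT: restore the real stack pointer
 *
 * While r15 holds the small negative marker, the exception/interrupt entry
 * code recognises an atomic sequence in progress and rolls the saved PC
 * back to the start of the sequence before handling the event, so the
 * sequence is simply re-executed from the load.  This gives ll/sc-style
 * atomicity on UP without masking interrupts.  r0 and r1 are clobbered,
 * no operand may live in r15 (hence the "inhibit r15 overloading"
 * constraints), and the .align/nop padding keeps the 1: label 4-byte
 * aligned, since mova can only generate 4-byte aligned addresses.
 */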
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-4,   r15     \n\t" /* LOGIN */
		"   mov.l  @%1,   %0      \n\t" /* load  old value */
		"   mov.l   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

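/*
 * 16-bit exchange: the old halfword is zero-extended (extu.w) before it
 * is returned, and the critical section is three 2-byte instructions,
 * hence the #-6 LOGIN constant.
 */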
static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN */
		"   mov.w  @%1,   %0      \n\t" /* load  old value */
		"   extu.w  %0,   %0      \n\t" /* extend as unsigned */
		"   mov.w   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

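/*
 * 8-bit exchange: same shape as xchg_u16, with the old byte zero-extended
 * via extu.b; three 2-byte instructions again give a #-6 LOGIN constant.
 */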
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN */
		"   mov.b  @%1,   %0      \n\t" /* load  old value */
		"   extu.b  %0,   %0      \n\t" /* extend as unsigned */
		"   mov.b   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

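/*
 * 32-bit compare-and-exchange: cmp/eq sets the T bit and bf branches past
 * the store when the current value does not equal 'old', so *m is only
 * written on a successful compare.  Four 2-byte instructions form the
 * critical section, hence #-8, and the T bit is listed as clobbered.
 * The value read from *m is returned in either case.
 */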
static inline unsigned long __cmpxchg_u32(volatile int *m, unsigned long old,
					  unsigned long new)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-8,   r15     \n\t" /* LOGIN */
		"   mov.l  @%3,   %0      \n\t" /* load  old value */
		"   cmp/eq  %0,   %1      \n\t"
		"   bf            1f      \n\t" /* if not equal */
		"   mov.l   %2,   @%3     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (old), "+r"  (new) /* old or new can be r15 */
		:  "r"  (m)
		: "memory", "r0", "r1", "t");

	return retval;
}

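/*
 * These helpers are meant to sit behind the generic xchg()/cmpxchg()
 * wrappers in <asm/cmpxchg.h>, which are expected to dispatch on
 * sizeof(*ptr) roughly as follows (illustrative sketch only, not part of
 * this header):
 *
 *	switch (size) {
 *	case 1:	return xchg_u8(ptr, x);
 *	case 2:	return xchg_u16(ptr, x);
 *	case 4:	return xchg_u32(ptr, x);
 *	}
 */
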
#endif /* __ASM_SH_CMPXCHG_GRB_H */