/* Linux-specific atomic operations for PA Linux.
   Copyright (C) 2008-2018 Free Software Foundation, Inc.
   Based on code contributed by CodeSourcery for ARM EABI Linux.
   Modifications for PA Linux by Helge Deller <deller@gmx.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */

#define EFAULT  14
#define EBUSY   16
#define ENOSYS 251

/* PA-RISC 2.0 supports out-of-order execution for loads and stores.
   Thus, we need to synchronize memory accesses.  For more info, see:
   "Advanced Performance Features of the 64-bit PA-8000" by Doug Hunt.

   We implement byte, short, int and 64-bit versions of each atomic
   operation using the kernel compare-and-exchange helpers defined
   below.  */

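/* Illustrative sketch (not part of this file): on hppa-linux GCC expands
   the generic __sync builtins into calls to the size-suffixed entry
   points defined below.  Ordinary code such as

     static unsigned short counter;

     unsigned short
     bump_counter (void)
     {
       return __sync_fetch_and_add (&counter, 1);
     }

   therefore ends up calling __sync_fetch_and_add_2 from this file, which
   loops on the kernel compare-and-exchange helpers.  The function and
   variable names in the snippet are made up for the example.  */
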
/* Determine kernel LWS function call (0=32-bit, 1=64-bit userspace).  */
#define LWS_CAS (sizeof(long) == 4 ? 0 : 1)

/* Kernel helper for compare-and-exchange a 32-bit value.  */
static inline long
__kernel_cmpxchg (volatile void *mem, int oldval, int newval)
{
  register unsigned long lws_mem asm("r26") = (unsigned long) (mem);
  register int lws_old asm("r25") = oldval;
  register int lws_new asm("r24") = newval;
  register long lws_ret   asm("r28");
  register long lws_errno asm("r21");
  asm volatile (	"ble	0xb0(%%sr2, %%r0)	\n\t"
			"ldi	%2, %%r20		\n\t"
	: "=r" (lws_ret), "=r" (lws_errno)
	: "i" (LWS_CAS), "r" (lws_mem), "r" (lws_old), "r" (lws_new)
	: "r1", "r20", "r22", "r23", "r29", "r31", "memory"
  );

  /* If the kernel LWS call succeeded (lws_errno == 0), lws_ret contains
     the old value from memory.  If this value is equal to OLDVAL, the
     new value was written to memory.  If not, return -EBUSY.  */
  if (!lws_errno && lws_ret != oldval)
    return -EBUSY;

  return lws_errno;
}

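/* Illustrative sketch of how the helper above is used by the operations
   below: retry the light-weight syscall (gateway page call with the LWS
   index in %r20) until the compare-and-exchange succeeds.  The helper
   returns 0 when the word was updated, -EBUSY when another thread changed
   it first, and a kernel errno such as -EFAULT or -ENOSYS on a hard
   failure.  The names here are only for illustration:

     static unsigned int
     atomic_add_one (volatile unsigned int *p)
     {
       unsigned int old;
       long failure;

       do
	 {
	   old = *p;
	   failure = __kernel_cmpxchg (p, old, old + 1);
	 }
       while (failure != 0);

       return old;
     }
*/
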
/* Kernel helper for compare-and-exchange a variable-sized value.  */
static inline long
__kernel_cmpxchg2 (volatile void *mem, const void *oldval, const void *newval,
		   int val_size)
{
  register unsigned long lws_mem asm("r26") = (unsigned long) (mem);
  register unsigned long lws_old asm("r25") = (unsigned long) oldval;
  register unsigned long lws_new asm("r24") = (unsigned long) newval;
  register int lws_size asm("r23") = val_size;
  register long lws_ret   asm("r28");
  register long lws_errno asm("r21");
  asm volatile (	"ble	0xb0(%%sr2, %%r0)	\n\t"
			"ldi	%6, %%r20		\n\t"
	: "=r" (lws_ret), "=r" (lws_errno), "+r" (lws_mem),
	  "+r" (lws_old), "+r" (lws_new), "+r" (lws_size)
	: "i" (2)
	: "r1", "r20", "r22", "r29", "r31", "fr4", "memory"
  );

  /* If the kernel LWS call is successful, lws_ret contains 0.  */
  if (__builtin_expect (lws_ret == 0, 1))
    return 0;

  /* If the kernel LWS call fails with no error, return -EBUSY.  */
  if (__builtin_expect (!lws_errno, 0))
    return -EBUSY;

  return lws_errno;
}
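
/* Illustrative sketch (not compiled here): the variable-size helper takes
   the old and new values by address, plus a size index that matches the
   instantiations further down (0 = 1 byte, 1 = 2 bytes, 3 = 8 bytes).
   A 16-bit compare-and-exchange would look roughly like:

     static long
     cas_short (volatile unsigned short *p, unsigned short expected,
		unsigned short desired)
     {
       return __kernel_cmpxchg2 (p, &expected, &desired, 1);
     }

   As with __kernel_cmpxchg, 0 means the store happened, -EBUSY means the
   value no longer matched EXPECTED, and anything else is a kernel errno.  */
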
#define HIDDEN __attribute__ ((visibility ("hidden")))

/* Big endian masks.  */
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16

#define MASK_1 0xffu
#define MASK_2 0xffffu

#define FETCH_AND_OP_2(OP, PFX_OP, INF_OP, TYPE, WIDTH, INDEX)		\
  TYPE HIDDEN								\
  __sync_fetch_and_##OP##_##WIDTH (volatile void *ptr, TYPE val)	\
  {									\
    TYPE tmp, newval;							\
    long failure;							\
									\
    do {								\
      tmp = __atomic_load_n ((volatile TYPE *)ptr, __ATOMIC_RELAXED);	\
      newval = PFX_OP (tmp INF_OP val);					\
      failure = __kernel_cmpxchg2 (ptr, &tmp, &newval, INDEX);		\
    } while (failure != 0);						\
									\
    return tmp;								\
  }

FETCH_AND_OP_2 (add,   , +, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (sub,   , -, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (or,    , |, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (and,   , &, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (xor,   , ^, long long unsigned int, 8, 3)
FETCH_AND_OP_2 (nand, ~, &, long long unsigned int, 8, 3)

FETCH_AND_OP_2 (add,   , +, short unsigned int, 2, 1)
FETCH_AND_OP_2 (sub,   , -, short unsigned int, 2, 1)
FETCH_AND_OP_2 (or,    , |, short unsigned int, 2, 1)
FETCH_AND_OP_2 (and,   , &, short unsigned int, 2, 1)
FETCH_AND_OP_2 (xor,   , ^, short unsigned int, 2, 1)
FETCH_AND_OP_2 (nand, ~, &, short unsigned int, 2, 1)

FETCH_AND_OP_2 (add,   , +, unsigned char, 1, 0)
FETCH_AND_OP_2 (sub,   , -, unsigned char, 1, 0)
FETCH_AND_OP_2 (or,    , |, unsigned char, 1, 0)
FETCH_AND_OP_2 (and,   , &, unsigned char, 1, 0)
FETCH_AND_OP_2 (xor,   , ^, unsigned char, 1, 0)
FETCH_AND_OP_2 (nand, ~, &, unsigned char, 1, 0)

#define OP_AND_FETCH_2(OP, PFX_OP, INF_OP, TYPE, WIDTH, INDEX)		\
  TYPE HIDDEN								\
  __sync_##OP##_and_fetch_##WIDTH (volatile void *ptr, TYPE val)	\
  {									\
    TYPE tmp, newval;							\
    long failure;							\
									\
    do {								\
      tmp = __atomic_load_n ((volatile TYPE *)ptr, __ATOMIC_RELAXED);	\
      newval = PFX_OP (tmp INF_OP val);					\
      failure = __kernel_cmpxchg2 (ptr, &tmp, &newval, INDEX);		\
    } while (failure != 0);						\
									\
    return PFX_OP (tmp INF_OP val);					\
  }

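/* Illustrative note: OP_AND_FETCH_2 differs from FETCH_AND_OP_2 only in
   the value handed back to the caller.  Starting from a 16-bit variable
   holding 5, a sketch like

     unsigned short x = 5;
     unsigned short a = __sync_fetch_and_add_2 (&x, 3);
     unsigned short b = __sync_add_and_fetch_2 (&x, 3);

   leaves A holding the old value 5, B holding the new value 11, and X
   holding 11.  */
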
OP_AND_FETCH_2 (add,   , +, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (sub,   , -, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (or,    , |, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (and,   , &, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (xor,   , ^, long long unsigned int, 8, 3)
OP_AND_FETCH_2 (nand, ~, &, long long unsigned int, 8, 3)

OP_AND_FETCH_2 (add,   , +, short unsigned int, 2, 1)
OP_AND_FETCH_2 (sub,   , -, short unsigned int, 2, 1)
OP_AND_FETCH_2 (or,    , |, short unsigned int, 2, 1)
OP_AND_FETCH_2 (and,   , &, short unsigned int, 2, 1)
OP_AND_FETCH_2 (xor,   , ^, short unsigned int, 2, 1)
OP_AND_FETCH_2 (nand, ~, &, short unsigned int, 2, 1)

OP_AND_FETCH_2 (add,   , +, unsigned char, 1, 0)
OP_AND_FETCH_2 (sub,   , -, unsigned char, 1, 0)
OP_AND_FETCH_2 (or,    , |, unsigned char, 1, 0)
OP_AND_FETCH_2 (and,   , &, unsigned char, 1, 0)
OP_AND_FETCH_2 (xor,   , ^, unsigned char, 1, 0)
OP_AND_FETCH_2 (nand, ~, &, unsigned char, 1, 0)

#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP)				\
  unsigned int HIDDEN							\
  __sync_fetch_and_##OP##_4 (volatile void *ptr, unsigned int val)	\
  {									\
    unsigned int tmp;							\
    long failure;							\
									\
    do {								\
      tmp = __atomic_load_n ((volatile unsigned int *)ptr,		\
			     __ATOMIC_RELAXED);				\
      failure = __kernel_cmpxchg (ptr, tmp, PFX_OP (tmp INF_OP val));	\
    } while (failure != 0);						\
									\
    return tmp;								\
  }

FETCH_AND_OP_WORD (add,   , +)
FETCH_AND_OP_WORD (sub,   , -)
FETCH_AND_OP_WORD (or,    , |)
FETCH_AND_OP_WORD (and,   , &)
FETCH_AND_OP_WORD (xor,   , ^)
FETCH_AND_OP_WORD (nand, ~, &)

#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP)				\
  unsigned int HIDDEN							\
  __sync_##OP##_and_fetch_4 (volatile void *ptr, unsigned int val)	\
  {									\
    unsigned int tmp;							\
    long failure;							\
									\
    do {								\
      tmp = __atomic_load_n ((volatile unsigned int *)ptr,		\
			     __ATOMIC_RELAXED);				\
      failure = __kernel_cmpxchg (ptr, tmp, PFX_OP (tmp INF_OP val));	\
    } while (failure != 0);						\
									\
    return PFX_OP (tmp INF_OP val);					\
  }

OP_AND_FETCH_WORD (add,   , +)
OP_AND_FETCH_WORD (sub,   , -)
OP_AND_FETCH_WORD (or,    , |)
OP_AND_FETCH_WORD (and,   , &)
OP_AND_FETCH_WORD (xor,   , ^)
OP_AND_FETCH_WORD (nand, ~, &)

typedef unsigned char bool;

#define COMPARE_AND_SWAP_2(TYPE, WIDTH, INDEX)				\
  TYPE HIDDEN								\
  __sync_val_compare_and_swap_##WIDTH (volatile void *ptr, TYPE oldval,	\
				       TYPE newval)			\
  {									\
    TYPE actual_oldval;							\
    long fail;								\
									\
    while (1)								\
      {									\
	actual_oldval = __atomic_load_n ((volatile TYPE *)ptr,		\
					 __ATOMIC_RELAXED);		\
									\
	if (__builtin_expect (oldval != actual_oldval, 0))		\
	  return actual_oldval;						\
									\
	fail = __kernel_cmpxchg2 (ptr, &actual_oldval, &newval, INDEX);	\
									\
	if (__builtin_expect (!fail, 1))				\
	  return actual_oldval;						\
      }									\
  }									\
									\
  _Bool HIDDEN								\
  __sync_bool_compare_and_swap_##WIDTH (volatile void *ptr,		\
					TYPE oldval, TYPE newval)	\
  {									\
    long failure = __kernel_cmpxchg2 (ptr, &oldval, &newval, INDEX);	\
    return (failure == 0);						\
  }

COMPARE_AND_SWAP_2 (long long unsigned int, 8, 3)
COMPARE_AND_SWAP_2 (short unsigned int, 2, 1)
COMPARE_AND_SWAP_2 (unsigned char, 1, 0)

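/* Illustrative note: the two entry points generated above differ only in
   what they report.  With a 16-bit variable V holding 1, a sketch like

     unsigned short v = 1;
     unsigned short old = __sync_val_compare_and_swap_2 (&v, 1, 2);
     _Bool ok = __sync_bool_compare_and_swap_2 (&v, 2, 3);

   sets OLD to the previous value 1 (and V to 2), then sets OK to 1
   because V still held 2 when the second exchange was attempted.  */
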
unsigned int HIDDEN
__sync_val_compare_and_swap_4 (volatile void *ptr, unsigned int oldval,
			       unsigned int newval)
{
  long fail;
  unsigned int actual_oldval;

  while (1)
    {
      actual_oldval = __atomic_load_n ((volatile unsigned int *)ptr,
				       __ATOMIC_RELAXED);

      if (__builtin_expect (oldval != actual_oldval, 0))
	return actual_oldval;

      fail = __kernel_cmpxchg (ptr, actual_oldval, newval);

      if (__builtin_expect (!fail, 1))
	return actual_oldval;
    }
}

_Bool HIDDEN
__sync_bool_compare_and_swap_4 (volatile void *ptr, unsigned int oldval,
				unsigned int newval)
{
  long failure = __kernel_cmpxchg (ptr, oldval, newval);
  return (failure == 0);
}

#define SYNC_LOCK_TEST_AND_SET_2(TYPE, WIDTH, INDEX)			\
  TYPE HIDDEN								\
  __sync_lock_test_and_set_##WIDTH (volatile void *ptr, TYPE val)	\
  {									\
    TYPE oldval;							\
    long failure;							\
									\
    do {								\
      oldval = __atomic_load_n ((volatile TYPE *)ptr,			\
				__ATOMIC_RELAXED);			\
      failure = __kernel_cmpxchg2 (ptr, &oldval, &val, INDEX);		\
    } while (failure != 0);						\
									\
    return oldval;							\
  }

SYNC_LOCK_TEST_AND_SET_2 (long long unsigned int, 8, 3)
SYNC_LOCK_TEST_AND_SET_2 (short unsigned int, 2, 1)
SYNC_LOCK_TEST_AND_SET_2 (unsigned char, 1, 0)

unsigned int HIDDEN
__sync_lock_test_and_set_4 (volatile void *ptr, unsigned int val)
{
  long failure;
  unsigned int oldval;

  do {
    oldval = __atomic_load_n ((volatile unsigned int *)ptr, __ATOMIC_RELAXED);
    failure = __kernel_cmpxchg (ptr, oldval, val);
  } while (failure != 0);

  return oldval;
}

#define SYNC_LOCK_RELEASE_1(TYPE, WIDTH, INDEX)			\
  void HIDDEN							\
  __sync_lock_release_##WIDTH (volatile void *ptr)		\
  {								\
    TYPE oldval, val = 0;					\
    long failure;						\
								\
    do {							\
      oldval = __atomic_load_n ((volatile TYPE *)ptr,		\
				__ATOMIC_RELAXED);		\
      failure = __kernel_cmpxchg2 (ptr, &oldval, &val, INDEX);	\
    } while (failure != 0);					\
  }

SYNC_LOCK_RELEASE_1 (long long unsigned int, 8, 3)
SYNC_LOCK_RELEASE_1 (short unsigned int, 2, 1)
SYNC_LOCK_RELEASE_1 (unsigned char, 1, 0)

void HIDDEN
__sync_lock_release_4 (volatile void *ptr)
{
  long failure;
  unsigned int oldval;

  do {
    oldval = __atomic_load_n ((volatile unsigned int *)ptr, __ATOMIC_RELAXED);
    failure = __kernel_cmpxchg (ptr, oldval, 0);
  } while (failure != 0);
}
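
/* Illustrative sketch (not part of libgcc): the test-and-set and release
   entry points above are what GCC uses when expanding
   __sync_lock_test_and_set and __sync_lock_release, which is enough for
   a minimal spin lock.  The lock variable and function names here are
   made up for the example:

     static volatile unsigned int lock_word;

     static void
     spin_acquire (void)
     {
       while (__sync_lock_test_and_set_4 (&lock_word, 1) != 0)
	 continue;
     }

     static void
     spin_release (void)
     {
       __sync_lock_release_4 (&lock_word);
     }

   __sync_lock_test_and_set_4 returns the previous value, so a zero return
   means the lock was free and has now been taken.  */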