/* Linux-specific atomic operations for ARM EABI.
   Copyright (C) 2008-2015 Free Software Foundation, Inc.
   Contributed by CodeSourcery.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */

/* Kernel helper for compare-and-exchange.  */
typedef int (__kernel_cmpxchg_t) (int oldval, int newval, int *ptr);
#define __kernel_cmpxchg (*(__kernel_cmpxchg_t *) 0xffff0fc0)
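/* __kernel_cmpxchg lives at a fixed address in the kernel-provided user
   helper page (see the kernel's kernel_user_helpers documentation).  It
   atomically stores NEWVAL in *PTR if *PTR still equals OLDVAL, returning
   zero on success and nonzero on failure; every loop below simply retries
   until it returns zero.  */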

/* Kernel helper for memory barrier.  */
typedef void (__kernel_dmb_t) (void);
#define __kernel_dmb (*(__kernel_dmb_t *) 0xffff0fa0)
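/* __kernel_dmb is the matching user helper for a full data memory barrier.
   Callers may treat it as a full barrier; on uniprocessor kernels the
   kernel is free to implement it more cheaply.  */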

/* Note: we implement byte, short and int versions of atomic operations using
   the above kernel helpers; see linux-atomic-64bit.c for "long long" (64-bit)
   operations.  */

#define HIDDEN __attribute__ ((visibility ("hidden")))

#ifdef __ARMEL__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif
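
/* The sub-word helpers below operate on the aligned 32-bit word containing
   the byte or halfword.  On little-endian (__ARMEL__) targets the shift
   computed from the low address bits locates the operand directly; on
   big-endian targets the INVERT_MASK_* values flip that shift so the same
   arithmetic selects the correct lane of the word.  */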

#define MASK_1 0xffu
#define MASK_2 0xffffu

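/* Each word-sized primitive follows the same pattern: read the current
   value, compute the desired new value, and ask __kernel_cmpxchg to
   install it.  If another thread modified the word in the meantime, the
   helper fails and the loop recomputes and retries.  */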
#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP)                           \
  int HIDDEN                                                            \
  __sync_fetch_and_##OP##_4 (int *ptr, int val)                         \
  {                                                                     \
    int failure, tmp;                                                   \
                                                                        \
    do {                                                                \
      tmp = *ptr;                                                       \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr);   \
    } while (failure != 0);                                             \
                                                                        \
    return tmp;                                                         \
  }

FETCH_AND_OP_WORD (add,   , +)
FETCH_AND_OP_WORD (sub,   , -)
FETCH_AND_OP_WORD (or,    , |)
FETCH_AND_OP_WORD (and,   , &)
FETCH_AND_OP_WORD (xor,   , ^)
FETCH_AND_OP_WORD (nand, ~, &)

#define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
#define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH

/* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
   subword-sized quantities.  */
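
/* SUBWORD_SYNC_OP pastes NAME_ together with its RETURN argument to pick
   the function name: NAME_oldval yields __sync_fetch_and_<op>_<width> and
   NAME_newval yields __sync_<op>_and_fetch_<width>.  The same RETURN token
   then selects whether the old or the new word is masked and returned.  */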

#define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN)        \
  TYPE HIDDEN                                                           \
  NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val)                     \
  {                                                                     \
    int *wordptr = (int *) ((unsigned int) ptr & ~3);                   \
    unsigned int mask, shift, oldval, newval;                           \
    int failure;                                                        \
                                                                        \
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;      \
    mask = MASK_##WIDTH << shift;                                       \
                                                                        \
    do {                                                                \
      oldval = *wordptr;                                                \
      newval = ((PFX_OP (((oldval & mask) >> shift)                     \
                         INF_OP (unsigned int) val)) << shift) & mask;  \
      newval |= oldval & ~mask;                                         \
      failure = __kernel_cmpxchg (oldval, newval, wordptr);             \
    } while (failure != 0);                                             \
                                                                        \
    return (RETURN & mask) >> shift;                                    \
  }
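
/* Worked example (illustrative): for a 16-bit operand at byte offset 2 of
   its containing word on a little-endian target, ((unsigned int) ptr & 3)
   is 2, so shift = 2 << 3 = 16 and mask = 0xffff << 16 = 0xffff0000.  The
   halfword is extracted with (oldval & mask) >> shift, updated, and merged
   back into the untouched bytes before the cmpxchg attempt.  */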

SUBWORD_SYNC_OP (add,   , +, short, 2, oldval)
SUBWORD_SYNC_OP (sub,   , -, short, 2, oldval)
SUBWORD_SYNC_OP (or,    , |, short, 2, oldval)
SUBWORD_SYNC_OP (and,   , &, short, 2, oldval)
SUBWORD_SYNC_OP (xor,   , ^, short, 2, oldval)
SUBWORD_SYNC_OP (nand, ~, &, short, 2, oldval)

SUBWORD_SYNC_OP (add,   , +, signed char, 1, oldval)
SUBWORD_SYNC_OP (sub,   , -, signed char, 1, oldval)
SUBWORD_SYNC_OP (or,    , |, signed char, 1, oldval)
SUBWORD_SYNC_OP (and,   , &, signed char, 1, oldval)
SUBWORD_SYNC_OP (xor,   , ^, signed char, 1, oldval)
SUBWORD_SYNC_OP (nand, ~, &, signed char, 1, oldval)

#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP)                           \
  int HIDDEN                                                            \
  __sync_##OP##_and_fetch_4 (int *ptr, int val)                         \
  {                                                                     \
    int tmp, failure;                                                   \
                                                                        \
    do {                                                                \
      tmp = *ptr;                                                       \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr);   \
    } while (failure != 0);                                             \
                                                                        \
    return PFX_OP (tmp INF_OP val);                                     \
  }

OP_AND_FETCH_WORD (add,   , +)
OP_AND_FETCH_WORD (sub,   , -)
OP_AND_FETCH_WORD (or,    , |)
OP_AND_FETCH_WORD (and,   , &)
OP_AND_FETCH_WORD (xor,   , ^)
OP_AND_FETCH_WORD (nand, ~, &)

SUBWORD_SYNC_OP (add,   , +, short, 2, newval)
SUBWORD_SYNC_OP (sub,   , -, short, 2, newval)
SUBWORD_SYNC_OP (or,    , |, short, 2, newval)
SUBWORD_SYNC_OP (and,   , &, short, 2, newval)
SUBWORD_SYNC_OP (xor,   , ^, short, 2, newval)
SUBWORD_SYNC_OP (nand, ~, &, short, 2, newval)

SUBWORD_SYNC_OP (add,   , +, signed char, 1, newval)
SUBWORD_SYNC_OP (sub,   , -, signed char, 1, newval)
SUBWORD_SYNC_OP (or,    , |, signed char, 1, newval)
SUBWORD_SYNC_OP (and,   , &, signed char, 1, newval)
SUBWORD_SYNC_OP (xor,   , ^, signed char, 1, newval)
SUBWORD_SYNC_OP (nand, ~, &, signed char, 1, newval)

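/* Return the value actually found in *PTR; the swap happens only when that
   value equals OLDVAL.  The outer loop covers the case where
   __kernel_cmpxchg fails even though the value we read matched, e.g.
   because the word changed and changed back before the helper ran.  */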
int HIDDEN
__sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int actual_oldval, fail;

  while (1)
    {
      actual_oldval = *ptr;

      if (__builtin_expect (oldval != actual_oldval, 0))
        return actual_oldval;

      fail = __kernel_cmpxchg (actual_oldval, newval, ptr);

      if (__builtin_expect (!fail, 1))
        return oldval;
    }
}

#define SUBWORD_VAL_CAS(TYPE, WIDTH)                                    \
  TYPE HIDDEN                                                           \
  __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval,          \
                                       TYPE newval)                     \
  {                                                                     \
    int *wordptr = (int *)((unsigned int) ptr & ~3), fail;              \
    unsigned int mask, shift, actual_oldval, actual_newval;             \
                                                                        \
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;      \
    mask = MASK_##WIDTH << shift;                                       \
                                                                        \
    while (1)                                                           \
      {                                                                 \
        actual_oldval = *wordptr;                                       \
                                                                        \
        if (__builtin_expect (((actual_oldval & mask) >> shift) !=      \
                              ((unsigned int) oldval & MASK_##WIDTH), 0)) \
          return (actual_oldval & mask) >> shift;                       \
                                                                        \
        actual_newval = (actual_oldval & ~mask)                         \
                        | (((unsigned int) newval << shift) & mask);    \
                                                                        \
        fail = __kernel_cmpxchg (actual_oldval, actual_newval,          \
                                 wordptr);                              \
                                                                        \
        if (__builtin_expect (!fail, 1))                                \
          return oldval;                                                \
      }                                                                 \
  }

SUBWORD_VAL_CAS (short,       2)
SUBWORD_VAL_CAS (signed char, 1)

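/* The "bool" variants report whether the swap took place instead of
   returning the previous value.  A local typedef stands in for _Bool so
   that no header needs to be included here.  */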
typedef unsigned char bool;

bool HIDDEN
__sync_bool_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int failure = __kernel_cmpxchg (oldval, newval, ptr);
  return (failure == 0);
}

#define SUBWORD_BOOL_CAS(TYPE, WIDTH)                                   \
  bool HIDDEN                                                           \
  __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval,         \
                                        TYPE newval)                    \
  {                                                                     \
    TYPE actual_oldval                                                  \
      = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval);      \
    return (oldval == actual_oldval);                                   \
  }

SUBWORD_BOOL_CAS (short,       2)
SUBWORD_BOOL_CAS (signed char, 1)

void HIDDEN
__sync_synchronize (void)
{
  __kernel_dmb ();
}

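/* __sync_lock_test_and_set is an atomic exchange: store VAL and return the
   previous contents of *PTR.  */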
int HIDDEN
__sync_lock_test_and_set_4 (int *ptr, int val)
{
  int failure, oldval;

  do {
    oldval = *ptr;
    failure = __kernel_cmpxchg (oldval, val, ptr);
  } while (failure != 0);

  return oldval;
}

#define SUBWORD_TEST_AND_SET(TYPE, WIDTH)                               \
  TYPE HIDDEN                                                           \
  __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val)                \
  {                                                                     \
    int failure;                                                        \
    unsigned int oldval, newval, shift, mask;                           \
    int *wordptr = (int *) ((unsigned int) ptr & ~3);                   \
                                                                        \
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;      \
    mask = MASK_##WIDTH << shift;                                       \
                                                                        \
    do {                                                                \
      oldval = *wordptr;                                                \
      newval = (oldval & ~mask)                                         \
               | (((unsigned int) val << shift) & mask);                \
      failure = __kernel_cmpxchg (oldval, newval, wordptr);             \
    } while (failure != 0);                                             \
                                                                        \
    return (oldval & mask) >> shift;                                    \
  }

SUBWORD_TEST_AND_SET (short,       2)
SUBWORD_TEST_AND_SET (signed char, 1)

#define SYNC_LOCK_RELEASE(TYPE, WIDTH)                                  \
  void HIDDEN                                                           \
  __sync_lock_release_##WIDTH (TYPE *ptr)                               \
  {                                                                     \
    /* All writes before this point must be seen before we release      \
       the lock itself.  */                                             \
    __kernel_dmb ();                                                    \
    *ptr = 0;                                                           \
  }

SYNC_LOCK_RELEASE (long long,   8)
SYNC_LOCK_RELEASE (int,   4)
SYNC_LOCK_RELEASE (short, 2)
SYNC_LOCK_RELEASE (char,  1)