/* $NetBSD: atomic.h,v 1.1 2002/10/19 12:22:34 bsh Exp $ */

/*-
 * Copyright (C) 2003-2004 Olivier Houchard
 * Copyright (C) 1994-1997 Mark Brinicombe
 * Copyright (C) 1994 Brini
 * All rights reserved.
 *
 * This code is derived from software written for Brini by Mark Brinicombe
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by Brini.
 * 4. The name of Brini may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL BRINI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: stable/11/sys/arm/include/atomic-v4.h 331643 2018-03-27 18:52:27Z dim $
 */

#ifndef _MACHINE_ATOMIC_V4_H_
#define	_MACHINE_ATOMIC_V4_H_

#ifndef _MACHINE_ATOMIC_H_
#error Do not include this file directly, use <machine/atomic.h>
#endif

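/*
 * Memory barriers.  Pre-ARMv6 CPUs have no dedicated barrier instructions;
 * the isb()/dsb() macros below use the CP15 c7 operations historically used
 * for the same purpose (flush prefetch buffer, drain write buffer), and
 * dmb() simply reuses dsb().
 */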
#if __ARM_ARCH <= 5
#define isb()  __asm __volatile("mcr p15, 0, %0, c7, c5, 4" : : "r" (0) : "memory")
#define dsb()  __asm __volatile("mcr p15, 0, %0, c7, c10, 4" : : "r" (0) : "memory")
#define dmb()  dsb()
#else
#error Only use this file with ARMv5 and earlier
#endif

#define mb()   dmb()
#define wmb()  dmb()
#define rmb()  dmb()

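/*
 * ARMv5 has no load/store-exclusive instructions and these CPUs are
 * uniprocessor only.  In the kernel, atomicity is therefore obtained by
 * masking IRQ and FIQ in the CPSR around a plain C read-modify-write;
 * the macro below wraps an arbitrary expression or block that way.
 */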
#define __with_interrupts_disabled(expr) \
	do {						\
		u_int cpsr_save, tmp;			\
							\
		__asm __volatile(			\
			"mrs  %0, cpsr;"		\
			"orr  %1, %0, %2;"		\
			"msr  cpsr_fsxc, %1;"		\
			: "=r" (cpsr_save), "=r" (tmp)	\
			: "I" (PSR_I | PSR_F)		\
			: "cc" );			\
		(expr);					\
		__asm __volatile(			\
			"msr  cpsr_fsxc, %0"		\
			: /* no output */		\
			: "r" (cpsr_save)		\
			: "cc" );			\
	} while (0)

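/*
 * The SWP instruction atomically exchanges a register with a word in
 * memory and is essentially the only atomic primitive these CPUs provide;
 * it backs atomic_swap_32() and atomic_readandclear_32() below.
 */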
static __inline uint32_t
__swp(uint32_t val, volatile uint32_t *ptr)
{
	__asm __volatile("swp	%0, %2, [%3]"
	    : "=&r" (val), "=m" (*ptr)
	    : "r" (val), "r" (ptr), "m" (*ptr)
	    : "memory");
	return (val);
}

#ifdef _KERNEL
#define	ARM_HAVE_ATOMIC64

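/*
 * Advertise 64-bit atomics to <machine/atomic.h>: with interrupts disabled,
 * the plain two-word accesses below are atomic with respect to everything
 * else running on a uniprocessor.
 */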
static __inline void
atomic_add_32(volatile u_int32_t *p, u_int32_t val)
{
	__with_interrupts_disabled(*p += val);
}

static __inline void
atomic_add_64(volatile u_int64_t *p, u_int64_t val)
{
	__with_interrupts_disabled(*p += val);
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	__with_interrupts_disabled(*address &= ~clearmask);
}

static __inline void
atomic_clear_64(volatile uint64_t *address, uint64_t clearmask)
{
	__with_interrupts_disabled(*address &= ~clearmask);
}

static __inline int
atomic_fcmpset_32(volatile u_int32_t *p, volatile u_int32_t *cmpval, volatile u_int32_t newval)
{
	u_int32_t ret;

	__with_interrupts_disabled(
	 {
		if (*p == *cmpval) {
			*p = newval;
			ret = 1;
		} else {
			*cmpval = *p;
			ret = 0;
		}
	});
	return (ret);
}

static __inline int
atomic_fcmpset_64(volatile u_int64_t *p, volatile u_int64_t *cmpval, volatile u_int64_t newval)
{
	u_int64_t ret;

	__with_interrupts_disabled(
	 {
		if (*p == *cmpval) {
			*p = newval;
			ret = 1;
		} else {
			*cmpval = *p;
			ret = 0;
		}
	});
	return (ret);
}

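/*
 * Illustrative use of atomic_fcmpset_32() (a hypothetical caller, not part
 * of this header): on failure the current value of *p is written back into
 * *cmpval, so a retry loop does not need to re-read the target explicitly,
 * e.g. to increment a counter:
 *
 *	uint32_t old = *counter;
 *
 *	while (atomic_fcmpset_32(counter, &old, old + 1) == 0)
 *		continue;	// 'old' now holds the refreshed value
 */
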
static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	int ret;

	__with_interrupts_disabled(
	 {
		if (*p == cmpval) {
			*p = newval;
			ret = 1;
		} else {
			ret = 0;
		}
	});
	return (ret);
}

static __inline u_int64_t
atomic_cmpset_64(volatile u_int64_t *p, volatile u_int64_t cmpval, volatile u_int64_t newval)
{
	int ret;

	__with_interrupts_disabled(
	 {
		if (*p == cmpval) {
			*p = newval;
			ret = 1;
		} else {
			ret = 0;
		}
	});
	return (ret);
}

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t value;

	__with_interrupts_disabled(
	{
		value = *p;
		*p += v;
	});
	return (value);
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t value;

	__with_interrupts_disabled(
	{
		value = *p;
		*p += v;
	});
	return (value);
}

static __inline uint64_t
atomic_load_64(volatile uint64_t *p)
{
	uint64_t value;

	__with_interrupts_disabled(value = *p);
	return (value);
}

static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	__with_interrupts_disabled(*address |= setmask);
}

static __inline void
atomic_set_64(volatile uint64_t *address, uint64_t setmask)
{
	__with_interrupts_disabled(*address |= setmask);
}

static __inline void
atomic_store_64(volatile uint64_t *p, uint64_t value)
{
	__with_interrupts_disabled(*p = value);
}

static __inline void
atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
{
	__with_interrupts_disabled(*p -= val);
}

static __inline void
atomic_subtract_64(volatile u_int64_t *p, u_int64_t val)
{
	__with_interrupts_disabled(*p -= val);
}

#else /* !_KERNEL */

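/*
 * Userland cannot disable interrupts, so the routines below rely on the
 * kernel's restartable atomic sequence (RAS) support: each sequence first
 * stores its start and end addresses at ARM_RAS_START and ARM_RAS_START + 4
 * (a reserved per-thread location shared with the kernel).  If the thread
 * is interrupted while its PC lies inside the registered window, the kernel
 * restarts it at the beginning, so the load/modify/store completes as if it
 * were atomic.  The trailing stores of 0 and 0xffffffff deregister the
 * sequence.
 */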
static __inline void
atomic_add_32(volatile u_int32_t *p, u_int32_t val)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "add	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
	    : : "memory");
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "bic	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (clearmask)
	    : : "memory");
}

static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	int done, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "cmp	%1, %3\n"
	    "streq	%4, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    "moveq	%1, #1\n"
	    "movne	%1, #0\n"
	    : "+r" (ras_start), "=r" (done)
	    ,"+r" (p), "+r" (cmpval), "+r" (newval) : : "cc", "memory");
	return (done);
}

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t start, tmp, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%3]\n"
	    "mov	%2, %1\n"
	    "add	%2, %2, %4\n"
	    "str	%2, [%3]\n"
	    "2:\n"
	    "mov	%2, #0\n"
	    "str	%2, [%0]\n"
	    "mov	%2, #0xffffffff\n"
	    "str	%2, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "=r" (tmp), "+r" (p), "+r" (v)
	    : : "memory");
	return (start);
}

static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "orr	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (setmask)
	    : : "memory");
}

static __inline void
atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "sub	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
	    : : "memory");
}

#endif /* _KERNEL */

static __inline uint32_t
atomic_readandclear_32(volatile u_int32_t *p)
{

	return (__swp(0, p));
}

static __inline uint32_t
atomic_swap_32(volatile u_int32_t *p, u_int32_t v)
{

	return (__swp(v, p));
}

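/*
 * On a uniprocessor there is nothing for acquire/release ordering to order
 * against, so all _acq/_rel variants map directly to the plain operations.
 */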
#define atomic_fcmpset_rel_32	atomic_fcmpset_32
#define atomic_fcmpset_acq_32	atomic_fcmpset_32
#define atomic_fcmpset_rel_64	atomic_fcmpset_64
#define atomic_fcmpset_acq_64	atomic_fcmpset_64
#define atomic_fcmpset_acq_long	atomic_fcmpset_long
#define atomic_fcmpset_rel_long	atomic_fcmpset_long
#define atomic_cmpset_rel_32	atomic_cmpset_32
#define atomic_cmpset_acq_32	atomic_cmpset_32
#define atomic_cmpset_rel_64	atomic_cmpset_64
#define atomic_cmpset_acq_64	atomic_cmpset_64
#define atomic_set_rel_32	atomic_set_32
#define atomic_set_acq_32	atomic_set_32
#define atomic_clear_rel_32	atomic_clear_32
#define atomic_clear_acq_32	atomic_clear_32
#define atomic_add_rel_32	atomic_add_32
#define atomic_add_acq_32	atomic_add_32
#define atomic_subtract_rel_32	atomic_subtract_32
#define atomic_subtract_acq_32	atomic_subtract_32
#define atomic_store_rel_32	atomic_store_32
#define atomic_store_rel_long	atomic_store_long
#define atomic_load_acq_32	atomic_load_32
#define atomic_load_acq_long	atomic_load_long
#define atomic_add_acq_long		atomic_add_long
#define atomic_add_rel_long		atomic_add_long
#define atomic_subtract_acq_long	atomic_subtract_long
#define atomic_subtract_rel_long	atomic_subtract_long
#define atomic_clear_acq_long		atomic_clear_long
#define atomic_clear_rel_long		atomic_clear_long
#define atomic_set_acq_long		atomic_set_long
#define atomic_set_rel_long		atomic_set_long
#define atomic_cmpset_acq_long		atomic_cmpset_long
#define atomic_cmpset_rel_long		atomic_cmpset_long
#undef __with_interrupts_disabled

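/*
 * On 32-bit ARM, u_long is the same width as uint32_t, so the _long
 * operations simply forward to the 32-bit primitives.
 */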
static __inline void
atomic_add_long(volatile u_long *p, u_long v)
{

	atomic_add_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_clear_long(volatile u_long *p, u_long v)
{

	atomic_clear_32((volatile uint32_t *)p, v);
}

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long old, u_long newe)
{

	return (atomic_cmpset_32((volatile uint32_t *)dst, old, newe));
}

#ifdef _KERNEL
/* atomic_fcmpset_32 is only defined for the kernel */
static __inline u_long
atomic_fcmpset_long(volatile u_long *dst, u_long *old, u_long newe)
{

	return (atomic_fcmpset_32((volatile uint32_t *)dst,
	    (uint32_t *)old, newe));
}
#endif

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_32((volatile uint32_t *)p, v));
}

static __inline void
atomic_readandclear_long(volatile u_long *p)
{

	atomic_readandclear_32((volatile uint32_t *)p);
}

static __inline void
atomic_set_long(volatile u_long *p, u_long v)
{

	atomic_set_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_subtract_long(volatile u_long *p, u_long v)
{

	atomic_subtract_32((volatile uint32_t *)p, v);
}

/*
 * ARMv5 does not support SMP.  For both kernel and user modes, only a
 * compiler barrier is needed for fences, since the CPU is always
 * self-consistent.
 */
static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__compiler_membar();
}

#endif /* _MACHINE_ATOMIC_V4_H_ */
539