/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 *	from: src/sys/alpha/include/atomic.h,v 1.21.2.3 2005/10/06 18:12:05 jhb
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * Note: All the 64-bit atomic operations are only atomic when running
 * in 64-bit mode.  It is assumed that code compiled for n32 and n64
 * fits into this definition and no further safeties are needed.
 *
 * It is also assumed that the add, subtract and other arithmetic is
 * done on numbers, not pointers.  Pointers following the special n32
 * rules do not have atomic operations defined for them, but they
 * generally should not need atomic operations anyway.
 */
#ifndef __MIPS_PLATFORM_SYNC_NOPS
#define __MIPS_PLATFORM_SYNC_NOPS ""
#endif

static __inline  void
mips_sync(void)
{
	__asm __volatile (".set noreorder\n"
			"\tsync\n"
			__MIPS_PLATFORM_SYNC_NOPS
			".set reorder\n"
			: : : "memory");
}

#define mb()	mips_sync()
#define wmb()	mips_sync()
#define rmb()	mips_sync()
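
/*
 * Illustrative sketch (editor's example, not part of the MD API): the
 * barriers above all expand to a full SYNC.  A producer typically stores
 * its payload, issues wmb(), and then sets a flag; a consumer that
 * observes the flag issues rmb() before reading the payload.  The symbol
 * names below are hypothetical.
 */
static __inline void
mips_example_publish(__volatile uint32_t *datap, __volatile uint32_t *flagp,
    uint32_t v)
{

	*datap = v;		/* store the payload */
	wmb();			/* order the payload store before the flag */
	*flagp = 1;		/* make the payload visible to consumers */
}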

/*
 * Various simple arithmetic operations on memory which are atomic in
 * the presence of interrupts and SMP safe.
 */

void atomic_set_8(__volatile uint8_t *, uint8_t);
void atomic_clear_8(__volatile uint8_t *, uint8_t);
void atomic_add_8(__volatile uint8_t *, uint8_t);
void atomic_subtract_8(__volatile uint8_t *, uint8_t);

void atomic_set_16(__volatile uint16_t *, uint16_t);
void atomic_clear_16(__volatile uint16_t *, uint16_t);
void atomic_add_16(__volatile uint16_t *, uint16_t);
void atomic_subtract_16(__volatile uint16_t *, uint16_t);

static __inline void
atomic_set_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"or	%0, %2, %0\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");

}

static __inline void
atomic_clear_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;
	v = ~v;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"and	%0, %2, %0\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_add_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"addu	%0, %2, %0\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_subtract_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"subu	%0, %2\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline uint32_t
atomic_readandclear_32(__volatile uint32_t *addr)
{
	uint32_t result, temp;

	__asm __volatile (
		"1:\tll	 %0,%3\n\t"	/* load current value, asserting lock */
		"li	 %1,0\n\t"		/* value to store */
		"sc	 %1,%2\n\t"	/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "m" (*addr)
		: "memory");

	return (result);
}
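
/*
 * Illustrative sketch (editor's example, not part of the MD API):
 * atomic_readandclear_32() is handy for draining a pending-event
 * bitmask, since the caller obtains every bit that was set and the
 * word is left at zero in a single atomic step.  The names below are
 * hypothetical.
 */
static __inline uint32_t
atomic_example_drain_events(__volatile uint32_t *pending)
{

	return (atomic_readandclear_32(pending));
}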

static __inline uint32_t
atomic_readandset_32(__volatile uint32_t *addr, uint32_t value)
{
	uint32_t result, temp;

	__asm __volatile (
		"1:\tll	 %0,%3\n\t"	/* load current value, asserting lock */
		"or      %1,$0,%4\n\t"	/* value to store */
		"sc	 %1,%2\n\t"	/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "m" (*addr), "r" (value)
		: "memory");

	return (result);
}

#if defined(__mips_n64) || defined(__mips_n32)
static __inline void
atomic_set_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"or	%0, %2, %0\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");

}

static __inline void
atomic_clear_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;
	v = ~v;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"and	%0, %2, %0\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_add_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"daddu	%0, %2, %0\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_subtract_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"dsubu	%0, %2\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline uint64_t
atomic_readandclear_64(__volatile uint64_t *addr)
{
	uint64_t result, temp;

	__asm __volatile (
		"1:\n\t"
		"lld	 %0, %3\n\t"		/* load old value */
		"li	 %1, 0\n\t"		/* value to store */
		"scd	 %1, %2\n\t"		/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "m" (*addr)
		: "memory");

	return (result);
}

static __inline uint64_t
atomic_readandset_64(__volatile uint64_t *addr, uint64_t value)
{
	uint64_t result, temp;

	__asm __volatile (
		"1:\n\t"
		"lld	 %0,%3\n\t"		/* load old value */
		"or      %1,$0,%4\n\t"		/* value to store */
		"scd	 %1,%2\n\t"		/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "m" (*addr), "r" (value)
		: "memory");

	return (result);
}
#endif

#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline  void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	mips_sync();							\
}									\
									\
static __inline  void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	mips_sync();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}

/* Variants of simple arithmetic with memory barriers. */
ATOMIC_ACQ_REL(set, 8)
ATOMIC_ACQ_REL(clear, 8)
ATOMIC_ACQ_REL(add, 8)
ATOMIC_ACQ_REL(subtract, 8)
ATOMIC_ACQ_REL(set, 16)
ATOMIC_ACQ_REL(clear, 16)
ATOMIC_ACQ_REL(add, 16)
ATOMIC_ACQ_REL(subtract, 16)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
#if defined(__mips_n64) || defined(__mips_n32)
ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)
#endif

#undef ATOMIC_ACQ_REL
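
/*
 * Illustrative sketch (editor's example, not part of the MD API): the
 * _rel_ variants issue a full sync before the operation, so setting a
 * "valid" bit with atomic_set_rel_32() publishes any stores that
 * initialized the object beforehand; the _acq_ variants sync after the
 * operation instead.  The names below are hypothetical.
 */
static __inline void
atomic_example_mark_valid(__volatile uint32_t *flagsp, uint32_t validbit)
{

	atomic_set_rel_32(flagsp, validbit);	/* sync, then set the bit */
}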

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(WIDTH)			\
static __inline  uint##WIDTH##_t			\
atomic_load_acq_##WIDTH(__volatile uint##WIDTH##_t *p)	\
{							\
	uint##WIDTH##_t v;				\
							\
	v = *p;						\
	mips_sync();					\
	return (v);					\
}							\
							\
static __inline  void					\
atomic_store_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{							\
	mips_sync();					\
	*p = v;						\
}

ATOMIC_STORE_LOAD(32)
ATOMIC_STORE_LOAD(64)
#if !defined(__mips_n64) && !defined(__mips_n32)
void atomic_store_64(__volatile uint64_t *, uint64_t *);
void atomic_load_64(__volatile uint64_t *, uint64_t *);
#else
static __inline void
atomic_store_64(__volatile uint64_t *p, uint64_t *v)
{
	*p = *v;
}

static __inline void
atomic_load_64(__volatile uint64_t *p, uint64_t *v)
{
	*v = *p;
}
#endif

#undef ATOMIC_STORE_LOAD
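
/*
 * Illustrative sketch (editor's example, not part of the MD API):
 * atomic_store_rel_32() syncs and then stores, so it can publish a
 * value into a shared word; atomic_load_acq_32() loads and then syncs,
 * so a reader's subsequent accesses are ordered after the load.  The
 * two sides would normally run on different CPUs; the name below is
 * hypothetical.
 */
static __inline uint32_t
atomic_example_mailbox(__volatile uint32_t *mailbox, uint32_t v)
{

	atomic_store_rel_32(mailbox, v);	/* writer side */
	return (atomic_load_acq_32(mailbox));	/* reader side */
}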

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint32_t
atomic_cmpset_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t ret;

	__asm __volatile (
		"1:\tll	%0, %4\n\t"		/* load old value */
		"bne %0, %2, 2f\n\t"		/* compare */
		"move %0, %3\n\t"		/* value to store */
		"sc %0, %1\n\t"			/* attempt to store */
		"beqz %0, 1b\n\t"		/* if it failed, spin */
		"j 3f\n\t"
		"2:\n\t"
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "=m" (*p)
		: "r" (cmpval), "r" (newval), "m" (*p)
		: "memory");

	return (ret);
}
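
/*
 * Illustrative sketch (editor's example, not part of the MD API): the
 * usual pattern is to re-read the target and retry until the compare
 * and swap succeeds.  Here a 32-bit running maximum is updated; the
 * name is hypothetical.
 */
static __inline void
atomic_example_max_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t old;

	do {
		old = *p;
		if (old >= v)
			break;			/* already large enough */
	} while (atomic_cmpset_32(p, old, v) == 0);
}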

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint32_t
atomic_cmpset_acq_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	int retval;

	retval = atomic_cmpset_32(p, cmpval, newval);
	mips_sync();
	return (retval);
}

static __inline uint32_t
atomic_cmpset_rel_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	mips_sync();
	return (atomic_cmpset_32(p, cmpval, newval));
}

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline uint32_t
atomic_fetchadd_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t value, temp;

	__asm __volatile (
		"1:\tll %0, %1\n\t"		/* load old value */
		"addu %2, %3, %0\n\t"		/* calculate new value */
		"sc %2, %1\n\t"			/* attempt to store */
		"beqz %2, 1b\n\t"		/* spin if failed */
		: "=&r" (value), "=m" (*p), "=&r" (temp)
		: "r" (v), "m" (*p));
	return (value);
}
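
/*
 * Illustrative sketch (editor's example, not part of the MD API):
 * because atomic_fetchadd_32() returns the value held *before* the
 * addition, it can hand out unique tickets or sequence numbers; the
 * name is hypothetical.
 */
static __inline uint32_t
atomic_example_next_ticket(__volatile uint32_t *next)
{

	return (atomic_fetchadd_32(next, 1));
}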

#if defined(__mips_n64) || defined(__mips_n32)
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint64_t
atomic_cmpset_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t ret;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %4\n\t"		/* load old value */
		"bne	%0, %2, 2f\n\t"		/* compare */
		"move	%0, %3\n\t"		/* value to store */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* if it failed, spin */
		"j	3f\n\t"
		"2:\n\t"
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "=m" (*p)
		: "r" (cmpval), "r" (newval), "m" (*p)
		: "memory");

	return (ret);
}

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint64_t
atomic_cmpset_acq_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	int retval;

	retval = atomic_cmpset_64(p, cmpval, newval);
	mips_sync();
	return (retval);
}

static __inline uint64_t
atomic_cmpset_rel_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	mips_sync();
	return (atomic_cmpset_64(p, cmpval, newval));
}

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline uint64_t
atomic_fetchadd_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t value, temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %1\n\t"		/* load old value */
		"daddu	%2, %3, %0\n\t"		/* calculate new value */
		"scd	%2, %1\n\t"		/* attempt to store */
		"beqz	%2, 1b\n\t"		/* spin if failed */
		: "=&r" (value), "=m" (*p), "=&r" (temp)
		: "r" (v), "m" (*p));
	return (value);
}
#endif

/* Operations on chars. */
#define	atomic_set_char		atomic_set_8
#define	atomic_set_acq_char	atomic_set_acq_8
#define	atomic_set_rel_char	atomic_set_rel_8
#define	atomic_clear_char	atomic_clear_8
#define	atomic_clear_acq_char	atomic_clear_acq_8
#define	atomic_clear_rel_char	atomic_clear_rel_8
#define	atomic_add_char		atomic_add_8
#define	atomic_add_acq_char	atomic_add_acq_8
#define	atomic_add_rel_char	atomic_add_rel_8
#define	atomic_subtract_char	atomic_subtract_8
#define	atomic_subtract_acq_char	atomic_subtract_acq_8
#define	atomic_subtract_rel_char	atomic_subtract_rel_8

/* Operations on shorts. */
#define	atomic_set_short	atomic_set_16
#define	atomic_set_acq_short	atomic_set_acq_16
#define	atomic_set_rel_short	atomic_set_rel_16
#define	atomic_clear_short	atomic_clear_16
#define	atomic_clear_acq_short	atomic_clear_acq_16
#define	atomic_clear_rel_short	atomic_clear_rel_16
#define	atomic_add_short	atomic_add_16
#define	atomic_add_acq_short	atomic_add_acq_16
#define	atomic_add_rel_short	atomic_add_rel_16
#define	atomic_subtract_short	atomic_subtract_16
#define	atomic_subtract_acq_short	atomic_subtract_acq_16
#define	atomic_subtract_rel_short	atomic_subtract_rel_16

/* Operations on ints. */
#define	atomic_set_int		atomic_set_32
#define	atomic_set_acq_int	atomic_set_acq_32
#define	atomic_set_rel_int	atomic_set_rel_32
#define	atomic_clear_int	atomic_clear_32
#define	atomic_clear_acq_int	atomic_clear_acq_32
#define	atomic_clear_rel_int	atomic_clear_rel_32
#define	atomic_add_int		atomic_add_32
#define	atomic_add_acq_int	atomic_add_acq_32
#define	atomic_add_rel_int	atomic_add_rel_32
#define	atomic_subtract_int	atomic_subtract_32
#define	atomic_subtract_acq_int	atomic_subtract_acq_32
#define	atomic_subtract_rel_int	atomic_subtract_rel_32
#define	atomic_cmpset_int	atomic_cmpset_32
#define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
#define	atomic_load_acq_int	atomic_load_acq_32
#define	atomic_store_rel_int	atomic_store_rel_32
#define	atomic_readandclear_int	atomic_readandclear_32
#define	atomic_readandset_int	atomic_readandset_32
#define	atomic_fetchadd_int	atomic_fetchadd_32

/*
 * I think the following is right, even for n32.  n32 pointers are
 * still 32 bits, so we need to operate on them as 32-bit quantities,
 * even though they are sign-extended in use.  For longs there is no
 * question, since they are always 32 bits outside of n64.
 */
#ifdef __mips_n64
/* Operations on longs. */
#define	atomic_set_long		atomic_set_64
#define	atomic_set_acq_long	atomic_set_acq_64
#define	atomic_set_rel_long	atomic_set_rel_64
#define	atomic_clear_long	atomic_clear_64
#define	atomic_clear_acq_long	atomic_clear_acq_64
#define	atomic_clear_rel_long	atomic_clear_rel_64
#define	atomic_add_long		atomic_add_64
#define	atomic_add_acq_long	atomic_add_acq_64
#define	atomic_add_rel_long	atomic_add_rel_64
#define	atomic_subtract_long	atomic_subtract_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
#define	atomic_cmpset_long	atomic_cmpset_64
#define	atomic_cmpset_acq_long	atomic_cmpset_acq_64
#define	atomic_cmpset_rel_long	atomic_cmpset_rel_64
#define	atomic_load_acq_long	atomic_load_acq_64
#define	atomic_store_rel_long	atomic_store_rel_64
#define	atomic_fetchadd_long	atomic_fetchadd_64
#define	atomic_readandclear_long	atomic_readandclear_64

#else /* !__mips_n64 */

/* Operations on longs. */
#define	atomic_set_long(p, v)						\
	atomic_set_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_long(p, v)					\
	atomic_set_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_long(p, v)					\
	atomic_set_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_long(p, v)						\
	atomic_clear_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_long(p, v)					\
	atomic_clear_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_long(p, v)					\
	atomic_clear_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_long(p, v)						\
	atomic_add_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_long(p, v)					\
	atomic_add_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_long(p, v)					\
	atomic_add_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_long(p, v)					\
	atomic_subtract_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_long(p, v)					\
	atomic_subtract_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_long(p, v)					\
	atomic_subtract_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_cmpset_long(p, cmpval, newval)				\
	atomic_cmpset_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_cmpset_acq_long(p, cmpval, newval)			\
	atomic_cmpset_acq_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_cmpset_rel_long(p, cmpval, newval)			\
	atomic_cmpset_rel_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_load_acq_long(p)						\
	(u_long)atomic_load_acq_32((volatile u_int *)(p))
#define	atomic_store_rel_long(p, v)					\
	atomic_store_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_fetchadd_long(p, v)					\
	atomic_fetchadd_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_long(p)					\
	atomic_readandclear_32((volatile u_int *)(p))

#endif /* __mips_n64 */

/* Operations on pointers. */
#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#define	atomic_readandclear_ptr	atomic_readandclear_long
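
/*
 * Illustrative sketch (editor's example, not part of the MD API): a
 * lock-free single-linked push built on atomic_cmpset_ptr().  The list
 * head is kept as a u_long so that the same code works for both the
 * 32-bit and 64-bit long mappings above; the type and names are
 * hypothetical.
 */
struct atomic_example_node {
	struct atomic_example_node *next;
};

static __inline void
atomic_example_push(__volatile u_long *headp, struct atomic_example_node *n)
{

	do {
		n->next = (struct atomic_example_node *)*headp;
	} while (atomic_cmpset_ptr(headp, (u_long)n->next, (u_long)n) == 0);
}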

#endif /* ! _MACHINE_ATOMIC_H_ */