/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 *	from: src/sys/alpha/include/atomic.h,v 1.21.2.3 2005/10/06 18:12:05 jhb
 * $FreeBSD: stable/11/sys/mips/include/atomic.h 327195 2017-12-26 10:07:17Z kib $
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#include <sys/atomic_common.h>

/*
 * Note: all the 64-bit atomic operations are only atomic when running
 * in 64-bit mode.  It is assumed that code compiled for n32 and n64
 * fits into this definition and no further safeties are needed.
 *
 * It is also assumed that the add, subtract and other arithmetic is
 * done on numbers, not pointers.  Pointers under n32 follow special
 * rules and have no atomic operations defined for them here, but they
 * generally should not need any.
 */
#ifndef __MIPS_PLATFORM_SYNC_NOPS
#define __MIPS_PLATFORM_SYNC_NOPS ""
#endif

static __inline  void
mips_sync(void)
{
	__asm __volatile (".set noreorder\n"
			"\tsync\n"
			__MIPS_PLATFORM_SYNC_NOPS
			".set reorder\n"
			: : : "memory");
}

#define mb()	mips_sync()
#define wmb()	mips_sync()
#define rmb()	mips_sync()

/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and SMP safe.
 */

void atomic_set_8(__volatile uint8_t *, uint8_t);
void atomic_clear_8(__volatile uint8_t *, uint8_t);
void atomic_add_8(__volatile uint8_t *, uint8_t);
void atomic_subtract_8(__volatile uint8_t *, uint8_t);

void atomic_set_16(__volatile uint16_t *, uint16_t);
void atomic_clear_16(__volatile uint16_t *, uint16_t);
void atomic_add_16(__volatile uint16_t *, uint16_t);
void atomic_subtract_16(__volatile uint16_t *, uint16_t);

static __inline void
atomic_set_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"or	%0, %2, %0\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_clear_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	v = ~v;
	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"and	%0, %2, %0\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_add_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"addu	%0, %2, %0\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_subtract_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"subu	%0, %2\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline uint32_t
atomic_readandclear_32(__volatile uint32_t *addr)
{
	uint32_t result, temp;

	__asm __volatile (
		"1:\tll	 %0, %3\n\t"		/* load current value, asserting lock */
		"li	 %1, 0\n\t"		/* value to store */
		"sc	 %1, %2\n\t"		/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "m" (*addr)
		: "memory");

	return (result);
}

static __inline uint32_t
atomic_readandset_32(__volatile uint32_t *addr, uint32_t value)
{
	uint32_t result, temp;

	__asm __volatile (
		"1:\tll	 %0, %3\n\t"		/* load current value, asserting lock */
		"or	 %1, $0, %4\n\t"	/* value to store */
		"sc	 %1, %2\n\t"		/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "m" (*addr), "r" (value)
		: "memory");

	return (result);
}
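
/*
 * Illustrative only, not part of this interface: atomic_readandset_32
 * can serve as a crude test-and-set primitive.  The name "flag" below
 * is hypothetical.
 *
 *	volatile uint32_t flag = 0;
 *	...
 *	if (atomic_readandset_32(&flag, 1) == 0) {
 *		... this caller was the first to set the flag ...
 *	}
 *
 * Note that readandset executes no sync instruction, so by itself it
 * orders nothing; pair it with mips_sync() or the acq/rel operations
 * below when ordering matters.
 */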

#if defined(__mips_n64) || defined(__mips_n32)
static __inline void
atomic_set_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"or	%0, %2, %0\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_clear_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	v = ~v;
	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"and	%0, %2, %0\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_add_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"daddu	%0, %2, %0\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_subtract_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"dsubu	%0, %2\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline uint64_t
atomic_readandclear_64(__volatile uint64_t *addr)
{
	uint64_t result, temp;

	__asm __volatile (
		"1:\n\t"
		"lld	 %0, %3\n\t"		/* load old value */
		"li	 %1, 0\n\t"		/* value to store */
		"scd	 %1, %2\n\t"		/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "m" (*addr)
		: "memory");

	return (result);
}

static __inline uint64_t
atomic_readandset_64(__volatile uint64_t *addr, uint64_t value)
{
	uint64_t result, temp;

	__asm __volatile (
		"1:\n\t"
		"lld	 %0, %3\n\t"		/* load old value */
		"or	 %1, $0, %4\n\t"	/* value to store */
		"scd	 %1, %2\n\t"		/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "m" (*addr), "r" (value)
		: "memory");

	return (result);
}
#endif

#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline  void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	mips_sync();							\
}									\
									\
static __inline  void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	mips_sync();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}

/* Variants of simple arithmetic with memory barriers. */
ATOMIC_ACQ_REL(set, 8)
ATOMIC_ACQ_REL(clear, 8)
ATOMIC_ACQ_REL(add, 8)
ATOMIC_ACQ_REL(subtract, 8)
ATOMIC_ACQ_REL(set, 16)
ATOMIC_ACQ_REL(clear, 16)
ATOMIC_ACQ_REL(add, 16)
ATOMIC_ACQ_REL(subtract, 16)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
#if defined(__mips_n64) || defined(__mips_n32)
ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)
#endif

#undef ATOMIC_ACQ_REL
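
/*
 * For reference, ATOMIC_ACQ_REL(add, 32) above expands to (roughly):
 *
 *	static __inline void
 *	atomic_add_acq_32(__volatile uint32_t *p, uint32_t v)
 *	{
 *		atomic_add_32(p, v);
 *		mips_sync();
 *	}
 *
 * plus the matching _rel_ variant with the mips_sync() before the
 * operation, i.e. both acquire and release semantics are obtained from
 * the full sync barrier rather than anything finer grained.
 */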

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(WIDTH)			\
static __inline  uint##WIDTH##_t			\
atomic_load_acq_##WIDTH(__volatile uint##WIDTH##_t *p)	\
{							\
	uint##WIDTH##_t v;				\
							\
	v = *p;						\
	mips_sync();					\
	return (v);					\
}							\
							\
static __inline  void					\
atomic_store_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{							\
	mips_sync();					\
	*p = v;						\
}

ATOMIC_STORE_LOAD(32)
ATOMIC_STORE_LOAD(64)
#undef ATOMIC_STORE_LOAD
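
/*
 * Illustrative only: a release store paired with an acquire load gives
 * a simple producer/consumer handoff.  The variables "data" and
 * "ready" are hypothetical.
 *
 *	producer:
 *		data = 42;
 *		atomic_store_rel_32(&ready, 1);
 *
 *	consumer:
 *		while (atomic_load_acq_32(&ready) == 0)
 *			continue;
 *		... data is now guaranteed to read as 42 ...
 *
 * The release store orders the write of data before the store to
 * ready; the acquire load orders the load of ready before the
 * subsequent read of data.
 */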

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint32_t
atomic_cmpset_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t ret;

	__asm __volatile (
		"1:\tll	%0, %4\n\t"		/* load old value */
		"bne	%0, %2, 2f\n\t"		/* compare */
		"move	%0, %3\n\t"		/* value to store */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* if it failed, spin */
		"j	3f\n\t"
		"2:\n\t"
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "=m" (*p)
		: "r" (cmpval), "r" (newval), "m" (*p)
		: "memory");

	return (ret);
}
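
/*
 * Illustrative only: the usual compare-and-swap update loop.  The
 * variable "counter" and the saturation check are hypothetical.
 *
 *	uint32_t old;
 *
 *	do {
 *		old = *counter;
 *		if (old == UINT32_MAX)
 *			break;
 *	} while (atomic_cmpset_32(counter, old, old + 1) == 0);
 *
 * The loop re-reads *counter on every failed attempt; atomic_fcmpset_32
 * below avoids that extra read by handing back the observed value.
 */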
static __inline uint32_t
atomic_cmpset_acq_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	int retval;

	retval = atomic_cmpset_32(p, cmpval, newval);
	mips_sync();
	return (retval);
}

static __inline uint32_t
atomic_cmpset_rel_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	mips_sync();
	return (atomic_cmpset_32(p, cmpval, newval));
}
static __inline uint32_t
atomic_fcmpset_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	uint32_t ret;

	__asm __volatile (
		"1:\n\t"
		"ll	%0, %1\n\t"		/* load old value */
		"bne	%0, %4, 2f\n\t"		/* compare */
		"move	%0, %3\n\t"		/* value to store */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* if it failed, spin */
		"j	3f\n\t"
		"2:\n\t"
		"sw	%0, %2\n\t"		/* save old value */
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "+m" (*p), "=m" (*cmpval)
		: "r" (newval), "r" (*cmpval)
		: "memory");

	return (ret);
}

static __inline uint32_t
atomic_fcmpset_acq_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	int retval;

	retval = atomic_fcmpset_32(p, cmpval, newval);
	mips_sync();
	return (retval);
}

static __inline uint32_t
atomic_fcmpset_rel_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	mips_sync();
	return (atomic_fcmpset_32(p, cmpval, newval));
}
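
/*
 * Illustrative only: on failure the fcmpset variant writes the value
 * it observed back into *cmpval, so a retry loop need not re-read the
 * target.  The variable "counter" is hypothetical.
 *
 *	uint32_t old = *counter;
 *
 *	while (atomic_fcmpset_32(counter, &old, old + 1) == 0)
 *		continue;
 *
 * On each failed iteration "old" has already been refreshed with the
 * current contents of *counter by atomic_fcmpset_32 itself.
 */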

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline uint32_t
atomic_fetchadd_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t value, temp;

	__asm __volatile (
		"1:\tll	%0, %1\n\t"		/* load old value */
		"addu	%2, %3, %0\n\t"		/* calculate new value */
		"sc	%2, %1\n\t"		/* attempt to store */
		"beqz	%2, 1b\n\t"		/* spin if failed */
		: "=&r" (value), "=m" (*p), "=&r" (temp)
		: "r" (v), "m" (*p));
	return (value);
}
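
/*
 * Illustrative only: fetchadd returns the pre-increment value, so it
 * can hand out unique values.  The variable "next_ticket" is
 * hypothetical.
 *
 *	uint32_t mine;
 *
 *	mine = atomic_fetchadd_32(&next_ticket, 1);
 *
 * Two racing callers are guaranteed to receive different tickets.
 */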

#if defined(__mips_n64) || defined(__mips_n32)
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint64_t
atomic_cmpset_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t ret;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %4\n\t"		/* load old value */
		"bne	%0, %2, 2f\n\t"		/* compare */
		"move	%0, %3\n\t"		/* value to store */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* if it failed, spin */
		"j	3f\n\t"
		"2:\n\t"
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "=m" (*p)
		: "r" (cmpval), "r" (newval), "m" (*p)
		: "memory");

	return (ret);
}

static __inline uint64_t
atomic_cmpset_acq_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	int retval;

	retval = atomic_cmpset_64(p, cmpval, newval);
	mips_sync();
	return (retval);
}

static __inline uint64_t
atomic_cmpset_rel_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	mips_sync();
	return (atomic_cmpset_64(p, cmpval, newval));
}
static __inline uint64_t
atomic_fcmpset_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
	uint64_t ret;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %1\n\t"		/* load old value */
		"bne	%0, %4, 2f\n\t"		/* compare */
		"move	%0, %3\n\t"		/* value to store */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* if it failed, spin */
		"j	3f\n\t"
		"2:\n\t"
		"sd	%0, %2\n\t"		/* save old value */
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "+m" (*p), "=m" (*cmpval)
		: "r" (newval), "r" (*cmpval)
		: "memory");

	return (ret);
}

static __inline uint64_t
atomic_fcmpset_acq_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
	int retval;

	retval = atomic_fcmpset_64(p, cmpval, newval);
	mips_sync();
	return (retval);
}

static __inline uint64_t
atomic_fcmpset_rel_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
	mips_sync();
	return (atomic_fcmpset_64(p, cmpval, newval));
}

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline uint64_t
atomic_fetchadd_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t value, temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %1\n\t"		/* load old value */
		"daddu	%2, %3, %0\n\t"		/* calculate new value */
		"scd	%2, %1\n\t"		/* attempt to store */
		"beqz	%2, 1b\n\t"		/* spin if failed */
		: "=&r" (value), "=m" (*p), "=&r" (temp)
		: "r" (v), "m" (*p));
	return (value);
}
#endif

static __inline void
atomic_thread_fence_acq(void)
{

	mips_sync();
}

static __inline void
atomic_thread_fence_rel(void)
{

	mips_sync();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	mips_sync();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	mips_sync();
}

/* Operations on chars. */
#define	atomic_set_char		atomic_set_8
#define	atomic_set_acq_char	atomic_set_acq_8
#define	atomic_set_rel_char	atomic_set_rel_8
#define	atomic_clear_char	atomic_clear_8
#define	atomic_clear_acq_char	atomic_clear_acq_8
#define	atomic_clear_rel_char	atomic_clear_rel_8
#define	atomic_add_char		atomic_add_8
#define	atomic_add_acq_char	atomic_add_acq_8
#define	atomic_add_rel_char	atomic_add_rel_8
#define	atomic_subtract_char	atomic_subtract_8
#define	atomic_subtract_acq_char	atomic_subtract_acq_8
#define	atomic_subtract_rel_char	atomic_subtract_rel_8

/* Operations on shorts. */
#define	atomic_set_short	atomic_set_16
#define	atomic_set_acq_short	atomic_set_acq_16
#define	atomic_set_rel_short	atomic_set_rel_16
#define	atomic_clear_short	atomic_clear_16
#define	atomic_clear_acq_short	atomic_clear_acq_16
#define	atomic_clear_rel_short	atomic_clear_rel_16
#define	atomic_add_short	atomic_add_16
#define	atomic_add_acq_short	atomic_add_acq_16
#define	atomic_add_rel_short	atomic_add_rel_16
#define	atomic_subtract_short	atomic_subtract_16
#define	atomic_subtract_acq_short	atomic_subtract_acq_16
#define	atomic_subtract_rel_short	atomic_subtract_rel_16

/* Operations on ints. */
#define	atomic_set_int		atomic_set_32
#define	atomic_set_acq_int	atomic_set_acq_32
#define	atomic_set_rel_int	atomic_set_rel_32
#define	atomic_clear_int	atomic_clear_32
#define	atomic_clear_acq_int	atomic_clear_acq_32
#define	atomic_clear_rel_int	atomic_clear_rel_32
#define	atomic_add_int		atomic_add_32
#define	atomic_add_acq_int	atomic_add_acq_32
#define	atomic_add_rel_int	atomic_add_rel_32
#define	atomic_subtract_int	atomic_subtract_32
#define	atomic_subtract_acq_int	atomic_subtract_acq_32
#define	atomic_subtract_rel_int	atomic_subtract_rel_32
#define	atomic_cmpset_int	atomic_cmpset_32
#define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
#define	atomic_fcmpset_int	atomic_fcmpset_32
#define	atomic_fcmpset_acq_int	atomic_fcmpset_acq_32
#define	atomic_fcmpset_rel_int	atomic_fcmpset_rel_32
#define	atomic_load_acq_int	atomic_load_acq_32
#define	atomic_store_rel_int	atomic_store_rel_32
#define	atomic_readandclear_int	atomic_readandclear_32
#define	atomic_readandset_int	atomic_readandset_32
#define	atomic_fetchadd_int	atomic_fetchadd_32

/*
 * The following is believed right even for n32.  Under n32, pointers
 * are still 32 bits wide, so they must be operated on as 32-bit
 * quantities even though they are sign-extended in operation.  Longs
 * raise no such question: they are 64 bits under n64 and 32 bits
 * everywhere else, matching the mappings below.
 */
#ifdef __mips_n64
/* Operations on longs. */
#define	atomic_set_long		atomic_set_64
#define	atomic_set_acq_long	atomic_set_acq_64
#define	atomic_set_rel_long	atomic_set_rel_64
#define	atomic_clear_long	atomic_clear_64
#define	atomic_clear_acq_long	atomic_clear_acq_64
#define	atomic_clear_rel_long	atomic_clear_rel_64
#define	atomic_add_long		atomic_add_64
#define	atomic_add_acq_long	atomic_add_acq_64
#define	atomic_add_rel_long	atomic_add_rel_64
#define	atomic_subtract_long	atomic_subtract_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
#define	atomic_cmpset_long	atomic_cmpset_64
#define	atomic_cmpset_acq_long	atomic_cmpset_acq_64
#define	atomic_cmpset_rel_long	atomic_cmpset_rel_64
#define	atomic_fcmpset_long	atomic_fcmpset_64
#define	atomic_fcmpset_acq_long	atomic_fcmpset_acq_64
#define	atomic_fcmpset_rel_long	atomic_fcmpset_rel_64
#define	atomic_load_acq_long	atomic_load_acq_64
#define	atomic_store_rel_long	atomic_store_rel_64
#define	atomic_fetchadd_long	atomic_fetchadd_64
#define	atomic_readandclear_long	atomic_readandclear_64

#else /* !__mips_n64 */

/* Operations on longs. */
#define	atomic_set_long(p, v)						\
	atomic_set_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_long(p, v)					\
	atomic_set_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_long(p, v)					\
	atomic_set_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_long(p, v)						\
	atomic_clear_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_long(p, v)					\
	atomic_clear_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_long(p, v)					\
	atomic_clear_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_long(p, v)						\
	atomic_add_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_long(p, v)					\
	atomic_add_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_long(p, v)					\
	atomic_add_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_long(p, v)					\
	atomic_subtract_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_long(p, v)					\
	atomic_subtract_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_long(p, v)					\
	atomic_subtract_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_cmpset_long(p, cmpval, newval)				\
	atomic_cmpset_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_cmpset_acq_long(p, cmpval, newval)			\
	atomic_cmpset_acq_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_cmpset_rel_long(p, cmpval, newval)			\
	atomic_cmpset_rel_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_fcmpset_long(p, cmpval, newval)				\
	atomic_fcmpset_32((volatile u_int *)(p), (u_int *)(cmpval),	\
	    (u_int)(newval))
#define	atomic_fcmpset_acq_long(p, cmpval, newval)			\
	atomic_fcmpset_acq_32((volatile u_int *)(p), (u_int *)(cmpval),	\
	    (u_int)(newval))
#define	atomic_fcmpset_rel_long(p, cmpval, newval)			\
	atomic_fcmpset_rel_32((volatile u_int *)(p), (u_int *)(cmpval),	\
	    (u_int)(newval))
#define	atomic_load_acq_long(p)						\
	(u_long)atomic_load_acq_32((volatile u_int *)(p))
#define	atomic_store_rel_long(p, v)					\
	atomic_store_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_fetchadd_long(p, v)					\
	atomic_fetchadd_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_long(p)					\
	atomic_readandclear_32((volatile u_int *)(p))

#endif /* __mips_n64 */

/* Operations on pointers. */
#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#define	atomic_readandclear_ptr	atomic_readandclear_long

#endif /* ! _MACHINE_ATOMIC_H_ */