/*-
 * Copyright (C) 2015 Andrew Turner
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#include <machine/asm.h>

/*
 * uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
 */
ENTRY(atomic_add_64_nv)
1:	ldxr	x2, [x0]	/* Load *target */
	add	x2, x2, x1	/* x2 = x2 + delta */
	stxr	w3, x2, [x0]	/* Try to store the new value */
	cbnz	w3, 1b		/* Retry if the store failed */
	mov	x0, x2		/* Return the new value */
	ret
END(atomic_add_64_nv)
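
/*
 * For reference, a C sketch of the semantics implemented above.  The
 * __atomic builtin form is an assumption for illustration only, not how
 * this file is built; relaxed ordering matches the plain ldxr/stxr loop,
 * which implies no memory barrier:
 *
 *	uint64_t
 *	atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
 *	{
 *		return (__atomic_add_fetch(target, delta, __ATOMIC_RELAXED));
 *	}
 */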

/*
 * uint32_t
 * atomic_cas_32(volatile uint32_t *target, uint32_t cmp, uint32_t newval)
 */
ENTRY(atomic_cas_32)
1:	ldxr	w3, [x0]	/* Load *target */
	cmp	w3, w1		/* Does *target == cmp? */
	b.ne	2f		/* If not, exit */
	stxr	w4, w2, [x0]	/* Store newval to *target */
	cbnz	w4, 1b		/* Retry if the store failed */
2:	mov	w0, w3		/* Return the old value */
	ret
END(atomic_cas_32)

/*
 * uint64_t
 * atomic_cas_64(volatile uint64_t *target, uint64_t cmp, uint64_t newval)
 */
ENTRY(atomic_cas_64)
1:	ldxr	x3, [x0]	/* Load *target */
	cmp	x3, x1		/* Does *target == cmp? */
	b.ne	2f		/* If not, exit */
	stxr	w4, x2, [x0]	/* Store newval to *target */
	cbnz	w4, 1b		/* Retry if the store failed */
2:	mov	x0, x3		/* Return the old value */
	ret
END(atomic_cas_64)
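
/*
 * For reference, a C sketch of the compare-and-swap semantics above,
 * shown for the 32-bit variant; the 64-bit variant is identical with
 * uint64_t.  The __atomic builtin form is an assumption for illustration
 * only.  Both variants return the value found in *target, which equals
 * cmp exactly when the swap happened:
 *
 *	uint32_t
 *	atomic_cas_32(volatile uint32_t *target, uint32_t cmp,
 *	    uint32_t newval)
 *	{
 *		uint32_t old = cmp;
 *
 *		__atomic_compare_exchange_n(target, &old, newval, 0,
 *		    __ATOMIC_RELAXED, __ATOMIC_RELAXED);
 *		return (old);
 *	}
 */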

/*
 * uint8_t atomic_or_8_nv(volatile uint8_t *target, uint8_t value)
 */
ENTRY(atomic_or_8_nv)
1:	ldxrb	w2, [x0]	/* Load *target */
	orr	w2, w2, w1	/* w2 = w2 | value */
	stxrb	w3, w2, [x0]	/* Try to store the new value */
	cbnz	w3, 1b		/* Retry if the store failed */
	mov	w0, w2		/* Return the new value */
	ret
END(atomic_or_8_nv)
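
/*
 * For reference, a C sketch of the semantics above; the __atomic builtin
 * form is an assumption for illustration only:
 *
 *	uint8_t
 *	atomic_or_8_nv(volatile uint8_t *target, uint8_t value)
 *	{
 *		return (__atomic_or_fetch(target, value, __ATOMIC_RELAXED));
 *	}
 */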

ENTRY(membar_producer)
	dmb	ish
	ret
END(membar_producer)
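
/*
 * Note: dmb ish is a full inner-shareable barrier, which is stronger
 * than the store-store ordering membar_producer() requires; a dmb ishst
 * would likely suffice, so the full barrier here reads as the
 * conservative choice.
 */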