1/*
2** Copyright 2003, Axel Dörfler, axeld@pinc-software.de. All rights reserved.
3** Distributed under the terms of the OpenBeOS License.
4*/
5
6#include <asm_defs.h>
7
8
9.text
10
11/* int atomic_add(int *value, int increment)
12 */
/* int atomic_add(int *value, int increment)
 *
 * Atomically performs *value += increment and returns the value that
 * was stored at *value before the addition.
 * In:  r0 = value pointer, r1 = increment
 * Out: r0 = previous value
 */
FUNCTION(atomic_add):
#if __ARM_ARCH__ >= 6
	/* load-linked/store-conditional retry loop */
1:
	ldrex	r12, [r0]		/* r12 = old value, address marked exclusive */
	add	r2, r12, r1		/* r2 = old + increment */
	strex	r3, r2, [r0]		/* try the store; r3 = 0 on success */
	teq	r3, #0
	bne	1b			/* lost exclusivity -- retry */
	mov	r0, r12			/* return the previous value */
	bx	lr
#else
	/* No ldrex/strex before ARMv6: mask IRQ and FIQ around the update. */
	mrs	r2, cpsr		/* r2 = current program status */
	mov	r12, r2			/* keep a copy to restore later */
	orr	r2, r2, #(3<<6)		/* set I (bit 7) and F (bit 6) */
	msr	cpsr_c, r2		/* interrupts now off */

	mov	r3, r0			/* r3 = pointer; free r0 for the result */
	ldr	r0, [r3]		/* r0 = previous value (the return value) */
	add	r2, r0, r1
	str	r2, [r3]		/* *value = old + increment */

	msr	cpsr_c, r12		/* restore the saved interrupt state */
	bx	lr
#endif
FUNCTION_END(atomic_add)
42
43
44
45
46/* int atomic_and(int *value, int andValue)
47 */
/* int atomic_and(int *value, int andValue)
 *
 * Atomically performs *value &= andValue and returns the value that
 * was stored at *value before the AND.
 * In:  r0 = value pointer, r1 = mask
 * Out: r0 = previous value
 */
FUNCTION(atomic_and):
#if __ARM_ARCH__ >= 6
	/* load-linked/store-conditional retry loop */
1:
	ldrex	r12, [r0]		/* r12 = old value, address marked exclusive */
	and	r2, r12, r1		/* r2 = old & mask */
	strex	r3, r2, [r0]		/* try the store; r3 = 0 on success */
	teq	r3, #0
	bne	1b			/* lost exclusivity -- retry */
	mov	r0, r12			/* return the previous value */
	bx	lr
#else
	/* No ldrex/strex before ARMv6: mask IRQ and FIQ around the update. */
	mrs	r2, cpsr		/* r2 = current program status */
	mov	r12, r2			/* keep a copy to restore later */
	orr	r2, r2, #(3<<6)		/* set I (bit 7) and F (bit 6) */
	msr	cpsr_c, r2		/* interrupts now off */

	mov	r3, r0			/* r3 = pointer; free r0 for the result */
	ldr	r0, [r3]		/* r0 = previous value (the return value) */
	and	r2, r0, r1
	str	r2, [r3]		/* *value = old & mask */

	msr	cpsr_c, r12		/* restore the saved interrupt state */
	bx	lr
#endif
FUNCTION_END(atomic_and)
78
79/* int atomic_or(int *value, int orValue)
80 */
/* int atomic_or(int *value, int orValue)
 *
 * Atomically performs *value |= orValue and returns the value that
 * was stored at *value before the OR.
 * In:  r0 = value pointer, r1 = bits to set
 * Out: r0 = previous value
 *
 * BUGFIX: the ARMv6 ldrex/strex path previously used "eor" (exclusive
 * or), which computed XOR instead of OR -- inconsistent with both the
 * function's contract and the pre-v6 fallback below, which correctly
 * uses "orr".
 */
FUNCTION(atomic_or):
#if __ARM_ARCH__ >= 6
	/* load-linked/store-conditional retry loop */
1:
	ldrex	r12, [r0]		/* r12 = old value, address marked exclusive */
	orr	r2, r12, r1		/* r2 = old | orValue (was wrongly "eor") */
	strex	r3, r2, [r0]		/* try the store; r3 = 0 on success */
	teq	r3, #0
	bne	1b			/* lost exclusivity -- retry */
	mov	r0, r12			/* return the previous value */
	bx	lr
#else
	/* No ldrex/strex before ARMv6: mask IRQ and FIQ around the update. */
	mrs	r2, cpsr		/* r2 = current program status */
	mov	r12, r2			/* keep a copy to restore later */
	orr	r2, r2, #(3<<6)		/* set I (bit 7) and F (bit 6) */
	msr	cpsr_c, r2		/* interrupts now off */

	mov	r3, r0			/* r3 = pointer; free r0 for the result */
	ldr	r0, [r3]		/* r0 = previous value (the return value) */
	orr	r2, r0, r1
	str	r2, [r3]		/* *value = old | orValue */

	msr	cpsr_c, r12		/* restore the saved interrupt state */
	bx	lr
#endif
FUNCTION_END(atomic_or)
112
113/* int atomic_set(int *value, int setTo)
114 */
/* int atomic_set(int *value, int setTo)
 *
 * Atomically stores setTo at *value.
 * In:  r0 = value pointer, r1 = new value
 * Out: r0 = previous value on the pre-v6 (swp) path; on the ARMv6
 *      path r0 is left holding the pointer, matching the original
 *      implementation.
 */
FUNCTION(atomic_set):
#if __ARM_ARCH__ >= 6
	/* load-linked/store-conditional retry loop */
1:
	ldrex	r12, [r0]		/* take exclusive access to the word */
	strex	r3, r1, [r0]		/* try to store setTo; r3 = 0 on success */
	teq	r3, #0
	bne	1b			/* lost exclusivity -- retry */
	bx	lr
#else
	/* Pre-v6: swp exchanges a register with memory atomically. */
	mov	r3, r0			/* r3 = pointer; free r0 for the result */
	swp	r0, r1, [r3]		/* r0 = old value, *value = setTo */
	bx	lr
#endif
FUNCTION_END(atomic_set)
128
129/* int atomic_test_and_set(int *value, int setTo, int testValue)
130 */
/* int atomic_test_and_set(int *value, int setTo, int testValue)
 *
 * Compare-and-swap: if *value == testValue, store setTo at *value.
 * In:  r0 = value pointer, r1 = new value, r2 = comparand
 * Out: r0 = previous value (on the ARMv6 path; the pre-v6 path
 *      returns the loaded value in r0 as well)
 */
FUNCTION(atomic_test_and_set):
#if __ARM_ARCH__ >= 6
miss5:  ldrex       r12, [r0]                       @ load from the address and mark it exclusive
        cmp         r12, r2                         @ compare the value with the comparand (r2)
        strexeq     r3,  r1,  [r0]                  @ if they were equal, attempt to store the new value (r1)
        bne         differ                          @ if they were not equal, jump to (differ), which clears the exclusive tag and returns
        cmp         r3,  #1                         @ check the status of the store (returned in r3)
        beq         miss5                           @ go back to the start if it failed (0=success, 1=failure)
        bne         same                            @ if it succeeded, jump to (same) and return; no clrex is needed after a successful strex
differ: clrex                           	    @ drop the exclusive tag; NOTE(review): clrex is ARMv6K+/ARMv7 -- confirm for plain ARMv6 targets
same:   mov         r0,  r12                        @ return the previously loaded value
#else
        /* Pre-v6 fallback: disable interrupts, and save state in r12 */
        mrs     r3, cpsr
        mov     r12, r3
        orr     r3, r3, #(3<<6)                     /* set I (bit 7) and F (bit 6) */
        msr     cpsr_c, r3

	mov	r3, r0                              /* r3 = pointer; free r0 for the result */
	ldr	r0, [r3]                            /* r0 = previous value (the return value) */
	cmp	r0, r2                              /* equal to the comparand? */
	streq	r1, [r3]                            /* only then store the new value */

        /* restore interrupts and exit */
        msr     cpsr_c, r12
#endif
        bx      lr
FUNCTION_END(atomic_test_and_set)
159
160/* int atomic_get(int *value)
161 */
/* int atomic_get(int *value)
 *
 * Returns the current contents of *value. A single aligned word load
 * is naturally atomic on ARM, so no locking or exclusive access is
 * needed. NOTE(review): no memory barrier is issued -- callers needing
 * ordering must add one themselves; confirm this matches the intended
 * contract.
 * In:  r0 = value pointer
 * Out: r0 = *value
 */
FUNCTION(atomic_get):
	ldr	r0, [r0]
        bx	lr
FUNCTION_END(atomic_get)
166
167
168/* int64	atomic_add64(vint64 *value, int64 addValue) */
169//FUNCTION(atomic_add64):
170//		movem.l	%d2-%d3/%a2,-(%a7)
171//		move.l	(4,%a7),%a2
172//		lea.l	(4,%a2),%a1
173//		// addValue
174//		move.l	(12,%a7),%d3	/*LSB*/
175//		move.l	(8,%a7),%d2		/*MSB*/
176//miss5:	// old value
177//		move.l	(%a1),%d1		/*LSB*/
178//		move.l	(%a2),%d0		/*MSB*/
179//		add.l	%d1,%d3
180//		addx.l	%d0,%d2
181//		cas2.l	%d0:%d1,%d2:%d3,(%a2):(%a1)
182//		bne		miss5
183//		// return value d0:d1
184//		movem.l	(%a7)+,%d2-%d3/%a2
185//		rts
186//FUNCTION_END(atomic_add64)
187
188/* int64	atomic_and64(vint64 *value, int64 andValue) */
189//FUNCTION(atomic_and64):
190//FUNCTION_END(atomic_and64)
191
192/* int64	atomic_or64(vint64 *value, int64 orValue) */
193//FUNCTION(atomic_or64):
194//FUNCTION_END(atomic_or64)
195
196/* int64	atomic_set64(vint64 *value, int64 newValue) */
197//FUNCTION(atomic_set64):
198//		movem.l	%d2-%d3/%a2,-(%a7)
199//		move.l	(4,%a7),%a2
200//		lea.l	(4,%a2),%a1
201//		// new value
202//		move.l	(12,%a7),%d3	/*LSB*/
203//		move.l	(8,%a7),%d2		/*MSB*/
204//		// old value
205//		move.l	(%a1),%d1		/*LSB*/
206//		move.l	(%a2),%d0		/*MSB*/
207//miss8:	cas2.l	%d0:%d1,%d2:%d3,(%a2):(%a1)
208//		bne		miss8
209//		// return value d0:d1
210//		movem.l	(%a7)+,%d2-%d3/%a2
211//		rts
212//FUNCTION_END(atomic_set64)
213
214/* int64	atomic_test_and_set64(vint64 *value, int64 newValue, int64 testAgainst) */
215//FUNCTION(atomic_test_and_set64):
216//		movem.l	%d2-%d3/%a2,-(%a7)
217//		move.l	(4,%a7),%a2
218//		lea.l	(4,%a2),%a1
219//		// new value
220//		move.l	(12,%a7),%d3	/*LSB*/
221//		move.l	(8,%a7),%d2		/*MSB*/
222//		// test against value
223//		move.l	(20,%a7),%d1	/*LSB*/
224//		move.l	(16,%a7),%d0	/*MSB*/
225//		cas2.l	%d0:%d1,%d2:%d3,(%a2):(%a1)
226//		// return value d0:d1
227//		movem.l	(%a7)+,%d2-%d3/%a2
228//		rts
229//FUNCTION_END(atomic_test_and_set64)
230
231/* int64	atomic_get64(vint64 *value) */
232//FUNCTION(atomic_get64):
233//		movem.l	%d2-%d3/%a2,-(%a7)
234//		move.l	(4,%a7),%a2
235//		lea.l	(4,%a2),%a1
236//		move.l	(%a1),%d1	/*LSB*/
237//		move.l	(%a2),%d0		/*MSB*/
238//		move.l	%d1,%d3
239//		move.l	%d0,%d2
240//		// we must use cas... so we change to the same value if matching,
241//		// else we get the correct one anyway
242//		cas2.l	%d0:%d1,%d2:%d3,(%a2):(%a1)
243//		// return value
244//		movem.l	(%a7)+,%d2-%d3/%a2
245//		rts
246//FUNCTION_END(atomic_get64)
247