atomic.h (70741) vs. atomic.h (78342)
1/*-
2 * Copyright (c) 2001 Benno Rice
3 * Copyright (c) 2001 David E. O'Brien
4 * Copyright (c) 1998 Doug Rabson
5 * All rights reserved.
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions

--- 11 unchanged lines hidden ---

20 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE.
27 *
28 * $FreeBSD: head/sys/powerpc/include/atomic.h 70741 2001-01-07 03:46:01Z benno $
28 * $FreeBSD: head/sys/powerpc/include/atomic.h 78342 2001-06-16 07:14:07Z benno $
29 */
30
31#ifndef _MACHINE_ATOMIC_H_
32#define _MACHINE_ATOMIC_H_
33
34#include <machine/cpufunc.h>
35
36/*
37 * Various simple arithmetic on memory which is atomic in the presence
38 * of interrupts and SMP safe.
39 */
40
41void atomic_set_8(volatile u_int8_t *, u_int8_t);
42void atomic_clear_8(volatile u_int8_t *, u_int8_t);
43void atomic_add_8(volatile u_int8_t *, u_int8_t);
44void atomic_subtract_8(volatile u_int8_t *, u_int8_t);
45
46void atomic_set_16(volatile u_int16_t *, u_int16_t);
47void atomic_clear_16(volatile u_int16_t *, u_int16_t);
48void atomic_add_16(volatile u_int16_t *, u_int16_t);
49void atomic_subtract_16(volatile u_int16_t *, u_int16_t);
50
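The declarations above provide out-of-line 8- and 16-bit variants, while the 32-bit operations that follow are inlined. A short usage sketch of the 32-bit set/clear pair, assuming the usual FreeBSD semantics in which atomic_set_* ORs a mask into the target word and atomic_clear_* clears those bits; the device flag and variable names are illustrative only:

#include <sys/types.h>
#include <machine/atomic.h>

#define MYDEV_BUSY      0x00000001              /* illustrative flag bit */

static volatile u_int32_t mydev_flags;

static __inline void
mydev_mark_busy(void)
{
        atomic_set_32(&mydev_flags, MYDEV_BUSY);        /* OR the bit in atomically */
}

static __inline void
mydev_mark_idle(void)
{
        atomic_clear_32(&mydev_flags, MYDEV_BUSY);      /* clear the bit atomically */
}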
51static __inline void
52atomic_set_32(volatile u_int32_t *p, u_int32_t v)
53{
54 u_int32_t temp;
55
56 __asm __volatile (
57 "1:\tlwarx %0, 0, %2\n\t" /* load old value */

--- 154 unchanged lines hidden ---

212#define atomic_set_int atomic_set_32
213#define atomic_clear_int atomic_clear_32
214#define atomic_add_int atomic_add_32
215#define atomic_subtract_int atomic_subtract_32
216#define atomic_readandclear_int atomic_readandclear_32
217
218#define atomic_set_long atomic_set_32
219#define atomic_clear_long atomic_clear_32
220#define atomic_add_long atomic_add_32
220#define atomic_add_long(p, v) atomic_add_32((u_int32_t *)p, (u_int32_t)v)
221#define atomic_subtract_long atomic_subtract_32
222#define atomic_readandclear_long atomic_readandclear_32
223
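The _int and _long names are aliases for the 32-bit primitives, which is exact on 32-bit PowerPC where int, long, and pointers are all 32 bits wide; the newer revision turns atomic_add_long into a casting macro, presumably so that callers passing long-typed pointers compile without pointer-type warnings. A minimal sketch showing that both spellings reach the same 32-bit operation (the counters are illustrative):

#include <sys/types.h>
#include <machine/atomic.h>

static volatile u_int32_t packets;              /* illustrative counters */
static volatile u_long bytes;

static __inline void
account_packet(u_long len)
{
        atomic_add_32(&packets, 1);             /* 32-bit primitive directly */
        atomic_add_long(&bytes, len);           /* expands to atomic_add_32 with casts */
}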
224#if 0
225
226/* See above. */
227
228#define atomic_set_long_long atomic_set_64

--- 100 unchanged lines hidden ---

329/*
330 * Atomically compare the value stored at *p with cmpval and if the
331 * two values are equal, update the value of *p with newval. Returns
332 * zero if the compare failed, nonzero otherwise.
333 */
334static __inline u_int32_t
335atomic_cmpset_32(volatile u_int32_t* p, u_int32_t cmpval, u_int32_t newval)
336{
337 u_int32_t ret;
338
339 ret = 0;
340
339 __asm __volatile (
340 "1:\tlwarx %0, 0, %4\n\t" /* load old value */
341 "cmplw 0, %2, %0\n\t" /* compare */
342 "bne 2\n\t" /* exit if not equal */
343 "mr %0, %3\n\t" /* value to store */
344 "stwcx. %0, 0, %1\n\t" /* attempt to store */
345 "bne- 1\n\t" /* spin if failed */
342 "1:\tlwarx %0, 0, %3\n\t" /* load old value */
343 "cmplw 0, %1, %0\n\t" /* compare */
344 "bne 2f\n\t" /* exit if not equal */
345 "mr %0, %2\n\t" /* value to store */
346 "stwcx. %0, 0, %3\n\t" /* attempt to store */
347 "bne- 1b\n\t" /* spin if failed */
346 "eieio\n" /* memory barrier */
347 "2:\t\n"
348 "eieio\n" /* memory barrier */
349 "2:\t\n"
348 : "=&r" (ret), "=r" (*p)
349 : "r" (cmpval), "r" (newval), "r" (*p)
350 : "=r" (ret)
351 : "r" (cmpval), "r" (newval), "r" (p)
350 : "memory");
351
352 return ret;
353}
354
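As the comment above states, atomic_cmpset_32 returns zero when the compare fails, so lock-free callers normally retry until their store succeeds. A minimal retry-loop sketch built on that interface (the counter and function names are illustrative):

#include <sys/types.h>
#include <machine/atomic.h>

static volatile u_int32_t shared_count;

static __inline u_int32_t
fetch_and_add_one(void)
{
        u_int32_t old;

        do {
                old = shared_count;             /* snapshot the current value */
        } while (atomic_cmpset_32(&shared_count, old, old + 1) == 0);

        return (old);                           /* value before the increment */
}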
355#if 0
356
357/*

--- 53 unchanged lines hidden ---

411#define atomic_cmpset_rel_int atomic_cmpset_rel_32
412#define atomic_cmpset_acq_long atomic_cmpset_acq_32
413#define atomic_cmpset_rel_long atomic_cmpset_rel_32
414
415static __inline int
416atomic_cmpset_acq_ptr(volatile void *dst, void *exp, void *src)
417{
418
352 : "memory");
353
354 return ret;
355}
356
357#if 0
358
359/*

--- 53 unchanged lines hidden (view full) ---

413#define atomic_cmpset_rel_int atomic_cmpset_rel_32
414#define atomic_cmpset_acq_long atomic_cmpset_acq_32
415#define atomic_cmpset_rel_long atomic_cmpset_rel_32
416
417static __inline int
418atomic_cmpset_acq_ptr(volatile void *dst, void *exp, void *src)
419{
420
419 return (atomic_cmpset_acq_long((volatile u_int32_t *)dst,
421 return (atomic_cmpset_acq_32((volatile u_int32_t *)dst,
420 (u_int32_t)exp, (u_int32_t)src));
421}
422
423static __inline int
424atomic_cmpset_rel_ptr(volatile void *dst, void *exp, void *src)
425{
426
427 return (atomic_cmpset_rel_long((volatile u_int32_t *)dst,
429 return (atomic_cmpset_rel_32((volatile u_int32_t *)dst,
428 (u_int32_t)exp, (u_int32_t)src));
429}
430
431static __inline void *
432atomic_load_acq_ptr(volatile void *p)
433{
434
435 return (void *)atomic_load_acq_long((volatile u_int32_t *)p);
437 return (void *)atomic_load_acq_32((volatile u_int32_t *)p);
436}
437
438static __inline void
439atomic_store_rel_ptr(volatile void *p, void *v)
440{
441
442 atomic_store_rel_long((volatile u_int32_t *)p, (u_int32_t)v);
444 atomic_store_rel_32((volatile u_int32_t *)p, (u_int32_t)v);
443}
444
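Both revisions expose the same pointer-sized wrappers; the newer revision simply forwards them to the 32-bit primitives instead of the _long aliases. An illustrative caller that claims a shared slot with the acquire variant (all names here are invented for the sketch):

#include <sys/types.h>
#include <machine/atomic.h>

static void *volatile owner_slot;

static __inline int
try_claim(void *me)
{
        /* Succeeds (returns nonzero) only if the slot was still NULL. */
        return (atomic_cmpset_acq_ptr(&owner_slot, NULL, me));
}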
445#define ATOMIC_PTR(NAME) \
446static __inline void \
447atomic_##NAME##_ptr(volatile void *p, uintptr_t v) \
448{ \
449 atomic_##NAME##_long((volatile u_int32_t *)p, v); \
451 atomic_##NAME##_32((volatile u_int32_t *)p, v); \
450} \
451 \
452static __inline void \
453atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v) \
454{ \
455 atomic_##NAME##_acq_long((volatile u_int32_t *)p, v); \
457 atomic_##NAME##_acq_32((volatile u_int32_t *)p, v); \
456} \
457 \
458static __inline void \
459atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v) \
460{ \
461 atomic_##NAME##_rel_long((volatile u_int32_t *)p, v); \
463 atomic_##NAME##_rel_32((volatile u_int32_t *)p, v); \
462}
463
464ATOMIC_PTR(set)
465ATOMIC_PTR(clear)
466ATOMIC_PTR(add)
467ATOMIC_PTR(subtract)
468
469#undef ATOMIC_PTR
470#endif /* ! _MACHINE_ATOMIC_H_ */
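For reference, hand-expanding ATOMIC_PTR(set) as defined in the newer revision yields three thin wrappers over the 32-bit routines already in this header; the plain variant comes out as below, and the _acq_ptr/_rel_ptr variants differ only in which 32-bit suffix they forward to:

/* Preprocessor expansion of ATOMIC_PTR(set) from the newer revision; the
 * pointer is reinterpreted as a 32-bit word, which is exact on this port. */
static __inline void
atomic_set_ptr(volatile void *p, uintptr_t v)
{
        atomic_set_32((volatile u_int32_t *)p, v);
}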