atomic.h (302408) | atomic.h (315371) |
---|---|
1/*- 2 * Copyright (c) 1998 Doug Rabson 3 * All rights reserved. 4 * 5 * Redistribution and use in source and binary forms, with or without 6 * modification, are permitted provided that the following conditions 7 * are met: 8 * 1. Redistributions of source code must retain the above copyright --- 10 unchanged lines hidden (view full) --- 19 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 20 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 22 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 24 * SUCH DAMAGE. 25 * 26 * from: src/sys/alpha/include/atomic.h,v 1.21.2.3 2005/10/06 18:12:05 jhb | 1/*- 2 * Copyright (c) 1998 Doug Rabson 3 * All rights reserved. 4 * 5 * Redistribution and use in source and binary forms, with or without 6 * modification, are permitted provided that the following conditions 7 * are met: 8 * 1. Redistributions of source code must retain the above copyright --- 10 unchanged lines hidden (view full) --- 19 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 20 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 22 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 24 * SUCH DAMAGE. 25 * 26 * from: src/sys/alpha/include/atomic.h,v 1.21.2.3 2005/10/06 18:12:05 jhb |
27 * $FreeBSD: stable/11/sys/mips/include/atomic.h 285283 2015-07-08 18:12:24Z kib $ | 27 * $FreeBSD: stable/11/sys/mips/include/atomic.h 315371 2017-03-16 06:00:27Z mjg $ |
28 */ 29 30#ifndef _MACHINE_ATOMIC_H_ 31#define _MACHINE_ATOMIC_H_ 32 33#ifndef _SYS_CDEFS_H_ 34#error this file needs sys/cdefs.h as a prerequisite 35#endif --- 321 unchanged lines hidden (view full) --- 357#undef ATOMIC_STORE_LOAD 358 359/* 360 * Atomically compare the value stored at *p with cmpval and if the 361 * two values are equal, update the value of *p with newval. Returns 362 * zero if the compare failed, nonzero otherwise. 363 */ 364static __inline uint32_t | 28 */ 29 30#ifndef _MACHINE_ATOMIC_H_ 31#define _MACHINE_ATOMIC_H_ 32 33#ifndef _SYS_CDEFS_H_ 34#error this file needs sys/cdefs.h as a prerequisite 35#endif --- 321 unchanged lines hidden (view full) --- 357#undef ATOMIC_STORE_LOAD 358 359/* 360 * Atomically compare the value stored at *p with cmpval and if the 361 * two values are equal, update the value of *p with newval. Returns 362 * zero if the compare failed, nonzero otherwise. 363 */ 364static __inline uint32_t |
365atomic_cmpset_32(__volatile uint32_t* p, uint32_t cmpval, uint32_t newval) | 365atomic_cmpset_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval) |
366{ 367 uint32_t ret; 368 369 __asm __volatile ( 370 "1:\tll %0, %4\n\t" /* load old value */ 371 "bne %0, %2, 2f\n\t" /* compare */ 372 "move %0, %3\n\t" /* value to store */ 373 "sc %0, %1\n\t" /* attempt to store */ --- 26 unchanged lines hidden (view full) --- 400 401static __inline uint32_t 402atomic_cmpset_rel_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval) 403{ 404 mips_sync(); 405 return (atomic_cmpset_32(p, cmpval, newval)); 406} 407 | 366{ 367 uint32_t ret; 368 369 __asm __volatile ( 370 "1:\tll %0, %4\n\t" /* load old value */ 371 "bne %0, %2, 2f\n\t" /* compare */ 372 "move %0, %3\n\t" /* value to store */ 373 "sc %0, %1\n\t" /* attempt to store */ --- 26 unchanged lines hidden (view full) --- 400 401static __inline uint32_t 402atomic_cmpset_rel_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval) 403{ 404 mips_sync(); 405 return (atomic_cmpset_32(p, cmpval, newval)); 406} 407 |
/*
 * Atomically compare the value stored at *p with the value pointed to
 * by cmpval.  If they are equal, store newval into *p and return
 * nonzero.  Otherwise copy the value observed at *p back into *cmpval
 * and return zero.  Implemented with the LL/SC (load-linked /
 * store-conditional) pair; the loop retries only when the SC fails,
 * not when the compare fails.
 */
static __inline uint32_t
atomic_fcmpset_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	uint32_t ret;

	__asm __volatile (
		"1:\n\t"
		"ll	%0, %1\n\t"		/* load old value */
		"bne	%0, %4, 2f\n\t"		/* compare */
		"move	%0, %3\n\t"		/* value to store */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* if it failed, spin */
		"j	3f\n\t"
		"2:\n\t"
		"sw	%0, %2\n\t"		/* save old value */
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "+m" (*p), "=m" (*cmpval)
		: "r" (newval), "r" (*cmpval)
		: "memory");
	return ret;
}

/*
 * Acquire variant: perform the fcmpset, then issue a memory barrier.
 * NOTE(review): the barrier is issued on the failure path too, which
 * is stronger than required but harmless.
 */
static __inline uint32_t
atomic_fcmpset_acq_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	int retval;

	retval = atomic_fcmpset_32(p, cmpval, newval);
	mips_sync();
	return (retval);
}

/*
 * Release variant: issue a memory barrier, then perform the fcmpset.
 */
static __inline uint32_t
atomic_fcmpset_rel_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	mips_sync();
	return (atomic_fcmpset_32(p, cmpval, newval));
}
|
408/* 409 * Atomically add the value of v to the integer pointed to by p and return 410 * the previous value of *p. 411 */ 412static __inline uint32_t 413atomic_fetchadd_32(__volatile uint32_t *p, uint32_t v) 414{ 415 uint32_t value, temp; --- 10 unchanged lines hidden (view full) --- 426 427#if defined(__mips_n64) || defined(__mips_n32) 428/* 429 * Atomically compare the value stored at *p with cmpval and if the 430 * two values are equal, update the value of *p with newval. Returns 431 * zero if the compare failed, nonzero otherwise. 432 */ 433static __inline uint64_t | 448/* 449 * Atomically add the value of v to the integer pointed to by p and return 450 * the previous value of *p. 451 */ 452static __inline uint32_t 453atomic_fetchadd_32(__volatile uint32_t *p, uint32_t v) 454{ 455 uint32_t value, temp; --- 10 unchanged lines hidden (view full) --- 466 467#if defined(__mips_n64) || defined(__mips_n32) 468/* 469 * Atomically compare the value stored at *p with cmpval and if the 470 * two values are equal, update the value of *p with newval. Returns 471 * zero if the compare failed, nonzero otherwise. 472 */ 473static __inline uint64_t |
434atomic_cmpset_64(__volatile uint64_t* p, uint64_t cmpval, uint64_t newval) | 474atomic_cmpset_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval) |
435{ 436 uint64_t ret; 437 438 __asm __volatile ( 439 "1:\n\t" 440 "lld %0, %4\n\t" /* load old value */ 441 "bne %0, %2, 2f\n\t" /* compare */ 442 "move %0, %3\n\t" /* value to store */ --- 27 unchanged lines hidden (view full) --- 470 471static __inline uint64_t 472atomic_cmpset_rel_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval) 473{ 474 mips_sync(); 475 return (atomic_cmpset_64(p, cmpval, newval)); 476} 477 | 475{ 476 uint64_t ret; 477 478 __asm __volatile ( 479 "1:\n\t" 480 "lld %0, %4\n\t" /* load old value */ 481 "bne %0, %2, 2f\n\t" /* compare */ 482 "move %0, %3\n\t" /* value to store */ --- 27 unchanged lines hidden (view full) --- 510 511static __inline uint64_t 512atomic_cmpset_rel_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval) 513{ 514 mips_sync(); 515 return (atomic_cmpset_64(p, cmpval, newval)); 516} 517 |
/*
 * Atomically compare the 64-bit value stored at *p with the value
 * pointed to by cmpval.  If they are equal, store newval into *p and
 * return nonzero.  Otherwise copy the value observed at *p back into
 * *cmpval and return zero.  Implemented with the 64-bit LL/SC pair
 * (lld/scd); the loop retries only when the SCD fails, not when the
 * compare fails.
 *
 * Fix: the temporary carries the full 64-bit value loaded by "lld"
 * (and stored by "sd" on the failure path), so it is declared
 * uint64_t rather than uint32_t; the final result is only 0/1 and
 * still fits the uint32_t return type, so the interface is unchanged.
 */
static __inline uint32_t
atomic_fcmpset_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
	uint64_t ret;		/* holds the 64-bit value loaded by lld */

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %1\n\t"		/* load old value */
		"bne	%0, %4, 2f\n\t"		/* compare */
		"move	%0, %3\n\t"		/* value to store */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* if it failed, spin */
		"j	3f\n\t"
		"2:\n\t"
		"sd	%0, %2\n\t"		/* save old value */
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "+m" (*p), "=m" (*cmpval)
		: "r" (newval), "r" (*cmpval)
		: "memory");

	return ret;
}

/*
 * Acquire variant: perform the fcmpset, then issue a memory barrier.
 * NOTE(review): the barrier is issued on the failure path too, which
 * is stronger than required but harmless.
 */
static __inline uint64_t
atomic_fcmpset_acq_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
	uint32_t retval;	/* fcmpset result is 0/1 */

	retval = atomic_fcmpset_64(p, cmpval, newval);
	mips_sync();
	return (retval);
}

/*
 * Release variant: issue a memory barrier, then perform the fcmpset.
 */
static __inline uint64_t
atomic_fcmpset_rel_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
	mips_sync();
	return (atomic_fcmpset_64(p, cmpval, newval));
}
|
478/* 479 * Atomically add the value of v to the integer pointed to by p and return 480 * the previous value of *p. 481 */ 482static __inline uint64_t 483atomic_fetchadd_64(__volatile uint64_t *p, uint64_t v) 484{ 485 uint64_t value, temp; --- 77 unchanged lines hidden (view full) --- 563#define atomic_add_acq_int atomic_add_acq_32 564#define atomic_add_rel_int atomic_add_rel_32 565#define atomic_subtract_int atomic_subtract_32 566#define atomic_subtract_acq_int atomic_subtract_acq_32 567#define atomic_subtract_rel_int atomic_subtract_rel_32 568#define atomic_cmpset_int atomic_cmpset_32 569#define atomic_cmpset_acq_int atomic_cmpset_acq_32 570#define atomic_cmpset_rel_int atomic_cmpset_rel_32 | 559/* 560 * Atomically add the value of v to the integer pointed to by p and return 561 * the previous value of *p. 562 */ 563static __inline uint64_t 564atomic_fetchadd_64(__volatile uint64_t *p, uint64_t v) 565{ 566 uint64_t value, temp; --- 77 unchanged lines hidden (view full) --- 644#define atomic_add_acq_int atomic_add_acq_32 645#define atomic_add_rel_int atomic_add_rel_32 646#define atomic_subtract_int atomic_subtract_32 647#define atomic_subtract_acq_int atomic_subtract_acq_32 648#define atomic_subtract_rel_int atomic_subtract_rel_32 649#define atomic_cmpset_int atomic_cmpset_32 650#define atomic_cmpset_acq_int atomic_cmpset_acq_32 651#define atomic_cmpset_rel_int atomic_cmpset_rel_32 |
/* fcmpset operations on ints map directly onto the 32-bit primitives. */
#define	atomic_fcmpset_int	atomic_fcmpset_32
#define	atomic_fcmpset_acq_int	atomic_fcmpset_acq_32
#define	atomic_fcmpset_rel_int	atomic_fcmpset_rel_32
|
571#define atomic_load_acq_int atomic_load_acq_32 572#define atomic_store_rel_int atomic_store_rel_32 573#define atomic_readandclear_int atomic_readandclear_32 574#define atomic_readandset_int atomic_readandset_32 575#define atomic_fetchadd_int atomic_fetchadd_32 576 577/* 578 * I think the following is right, even for n32. For n32 the pointers --- 13 unchanged lines hidden (view full) --- 592#define atomic_add_acq_long atomic_add_acq_64 593#define atomic_add_rel_long atomic_add_rel_64 594#define atomic_subtract_long atomic_subtract_64 595#define atomic_subtract_acq_long atomic_subtract_acq_64 596#define atomic_subtract_rel_long atomic_subtract_rel_64 597#define atomic_cmpset_long atomic_cmpset_64 598#define atomic_cmpset_acq_long atomic_cmpset_acq_64 599#define atomic_cmpset_rel_long atomic_cmpset_rel_64 | 655#define atomic_load_acq_int atomic_load_acq_32 656#define atomic_store_rel_int atomic_store_rel_32 657#define atomic_readandclear_int atomic_readandclear_32 658#define atomic_readandset_int atomic_readandset_32 659#define atomic_fetchadd_int atomic_fetchadd_32 660 661/* 662 * I think the following is right, even for n32. For n32 the pointers --- 13 unchanged lines hidden (view full) --- 676#define atomic_add_acq_long atomic_add_acq_64 677#define atomic_add_rel_long atomic_add_rel_64 678#define atomic_subtract_long atomic_subtract_64 679#define atomic_subtract_acq_long atomic_subtract_acq_64 680#define atomic_subtract_rel_long atomic_subtract_rel_64 681#define atomic_cmpset_long atomic_cmpset_64 682#define atomic_cmpset_acq_long atomic_cmpset_acq_64 683#define atomic_cmpset_rel_long atomic_cmpset_rel_64 |
/*
 * fcmpset operations on longs map onto the 64-bit primitives
 * (this branch is used for the n64/n32 ABIs).
 */
#define	atomic_fcmpset_long	atomic_fcmpset_64
#define	atomic_fcmpset_acq_long	atomic_fcmpset_acq_64
#define	atomic_fcmpset_rel_long	atomic_fcmpset_rel_64
|
600#define atomic_load_acq_long atomic_load_acq_64 601#define atomic_store_rel_long atomic_store_rel_64 602#define atomic_fetchadd_long atomic_fetchadd_64 603#define atomic_readandclear_long atomic_readandclear_64 604 605#else /* !__mips_n64 */ 606 607/* Operations on longs. */ --- 25 unchanged lines hidden (view full) --- 633 atomic_cmpset_32((volatile u_int *)(p), (u_int)(cmpval), \ 634 (u_int)(newval)) 635#define atomic_cmpset_acq_long(p, cmpval, newval) \ 636 atomic_cmpset_acq_32((volatile u_int *)(p), (u_int)(cmpval), \ 637 (u_int)(newval)) 638#define atomic_cmpset_rel_long(p, cmpval, newval) \ 639 atomic_cmpset_rel_32((volatile u_int *)(p), (u_int)(cmpval), \ 640 (u_int)(newval)) | 687#define atomic_load_acq_long atomic_load_acq_64 688#define atomic_store_rel_long atomic_store_rel_64 689#define atomic_fetchadd_long atomic_fetchadd_64 690#define atomic_readandclear_long atomic_readandclear_64 691 692#else /* !__mips_n64 */ 693 694/* Operations on longs. */ --- 25 unchanged lines hidden (view full) --- 720 atomic_cmpset_32((volatile u_int *)(p), (u_int)(cmpval), \ 721 (u_int)(newval)) 722#define atomic_cmpset_acq_long(p, cmpval, newval) \ 723 atomic_cmpset_acq_32((volatile u_int *)(p), (u_int)(cmpval), \ 724 (u_int)(newval)) 725#define atomic_cmpset_rel_long(p, cmpval, newval) \ 726 atomic_cmpset_rel_32((volatile u_int *)(p), (u_int)(cmpval), \ 727 (u_int)(newval)) |
/*
 * !__mips_n64 case: longs are implemented with the 32-bit fcmpset
 * primitives via casts.  Note cmpval is a pointer and is cast to
 * (u_int *), unlike the plain cmpset wrappers which cast the value.
 */
#define	atomic_fcmpset_long(p, cmpval, newval)				\
	atomic_fcmpset_32((volatile u_int *)(p), (u_int *)(cmpval),	\
	    (u_int)(newval))
#define	atomic_fcmpset_acq_long(p, cmpval, newval)			\
	atomic_fcmpset_acq_32((volatile u_int *)(p), (u_int *)(cmpval),	\
	    (u_int)(newval))
#define	atomic_fcmpset_rel_long(p, cmpval, newval)			\
	atomic_fcmpset_rel_32((volatile u_int *)(p), (u_int *)(cmpval),	\
	    (u_int)(newval))
|
641#define atomic_load_acq_long(p) \ 642 (u_long)atomic_load_acq_32((volatile u_int *)(p)) 643#define atomic_store_rel_long(p, v) \ 644 atomic_store_rel_32((volatile u_int *)(p), (u_int)(v)) 645#define atomic_fetchadd_long(p, v) \ 646 atomic_fetchadd_32((volatile u_int *)(p), (u_int)(v)) 647#define atomic_readandclear_long(p) \ 648 atomic_readandclear_32((volatile u_int *)(p)) --- 11 unchanged lines hidden (view full) --- 660#define atomic_add_acq_ptr atomic_add_acq_long 661#define atomic_add_rel_ptr atomic_add_rel_long 662#define atomic_subtract_ptr atomic_subtract_long 663#define atomic_subtract_acq_ptr atomic_subtract_acq_long 664#define atomic_subtract_rel_ptr atomic_subtract_rel_long 665#define atomic_cmpset_ptr atomic_cmpset_long 666#define atomic_cmpset_acq_ptr atomic_cmpset_acq_long 667#define atomic_cmpset_rel_ptr atomic_cmpset_rel_long | 737#define atomic_load_acq_long(p) \ 738 (u_long)atomic_load_acq_32((volatile u_int *)(p)) 739#define atomic_store_rel_long(p, v) \ 740 atomic_store_rel_32((volatile u_int *)(p), (u_int)(v)) 741#define atomic_fetchadd_long(p, v) \ 742 atomic_fetchadd_32((volatile u_int *)(p), (u_int)(v)) 743#define atomic_readandclear_long(p) \ 744 atomic_readandclear_32((volatile u_int *)(p)) --- 11 unchanged lines hidden (view full) --- 756#define atomic_add_acq_ptr atomic_add_acq_long 757#define atomic_add_rel_ptr atomic_add_rel_long 758#define atomic_subtract_ptr atomic_subtract_long 759#define atomic_subtract_acq_ptr atomic_subtract_acq_long 760#define atomic_subtract_rel_ptr atomic_subtract_rel_long 761#define atomic_cmpset_ptr atomic_cmpset_long 762#define atomic_cmpset_acq_ptr atomic_cmpset_acq_long 763#define atomic_cmpset_rel_ptr atomic_cmpset_rel_long |
/* fcmpset operations on pointers reuse the long variants. */
#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
|
668#define atomic_load_acq_ptr atomic_load_acq_long 669#define atomic_store_rel_ptr atomic_store_rel_long 670#define atomic_readandclear_ptr atomic_readandclear_long 671 672#endif /* ! _MACHINE_ATOMIC_H_ */ | 767#define atomic_load_acq_ptr atomic_load_acq_long 768#define atomic_store_rel_ptr atomic_store_rel_long 769#define atomic_readandclear_ptr atomic_readandclear_long 770 771#endif /* ! _MACHINE_ATOMIC_H_ */ |