Lines Matching refs:lhs
(each entry below is a source line that references the identifier lhs, prefixed with its line number in the source file)

88 lhs, TYPE rhs );
92 @param lhs a pointer to the left operand
103 lhs, TYPE rhs, int flag );
107 @param lhs a pointer to the left operand
118 lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out, int flag );
136 void __kmpc_atomic_<type>_wr ( ident_t *id_ref, int gtid, TYPE * lhs, TYPE rhs
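
The signature fragments at source lines 88-136 come from the documentation block for the atomic entry points; the identifiers (__kmpc_atomic_*, ident_t, gtid) suggest this listing was generated over the LLVM OpenMP runtime's kmp_atomic.cpp, though the file name is an assumption. A minimal, self-contained sketch of the documented shape void __kmpc_atomic_<type>_<op>(ident_t *id_ref, int gtid, TYPE *lhs, TYPE rhs), using C++20 std::atomic_ref and placeholder types rather than the runtime's own primitives:

    #include <atomic>
    #include <cstdint>

    struct ident_t_stub {}; // stand-in for the runtime's source-location struct

    // Hypothetical illustration only: same parameter shape as the documented
    // entry points; the real routines choose fetch-and-add, exchange, a CAS
    // loop, or a critical section depending on TYPE and OP.
    void example_atomic_fixed4_add(ident_t_stub * /*id_ref*/, int /*gtid*/,
                                   std::int32_t *lhs, std::int32_t rhs) {
      std::atomic_ref<std::int32_t>(*lhs).fetch_add(rhs);
    }
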
609 static inline void operator+=(Quad_a4_t &lhs, Quad_a4_t &rhs) {
610 lhs.q += rhs.q;
612 static inline void operator-=(Quad_a4_t &lhs, Quad_a4_t &rhs) {
613 lhs.q -= rhs.q;
615 static inline void operator*=(Quad_a4_t &lhs, Quad_a4_t &rhs) {
616 lhs.q *= rhs.q;
618 static inline void operator/=(Quad_a4_t &lhs, Quad_a4_t &rhs) {
619 lhs.q /= rhs.q;
621 static inline bool operator<(Quad_a4_t &lhs, Quad_a4_t &rhs) {
622 return lhs.q < rhs.q;
624 static inline bool operator>(Quad_a4_t &lhs, Quad_a4_t &rhs) {
625 return lhs.q > rhs.q;
628 static inline void operator+=(Quad_a16_t &lhs, Quad_a16_t &rhs) {
629 lhs.q += rhs.q;
631 static inline void operator-=(Quad_a16_t &lhs, Quad_a16_t &rhs) {
632 lhs.q -= rhs.q;
634 static inline void operator*=(Quad_a16_t &lhs, Quad_a16_t &rhs) {
635 lhs.q *= rhs.q;
637 static inline void operator/=(Quad_a16_t &lhs, Quad_a16_t &rhs) {
638 lhs.q /= rhs.q;
640 static inline bool operator<(Quad_a16_t &lhs, Quad_a16_t &rhs) {
641 return lhs.q < rhs.q;
643 static inline bool operator>(Quad_a16_t &lhs, Quad_a16_t &rhs) {
644 return lhs.q > rhs.q;
647 static inline void operator+=(kmp_cmplx128_a4_t &lhs, kmp_cmplx128_a4_t &rhs) {
648 lhs.q += rhs.q;
650 static inline void operator-=(kmp_cmplx128_a4_t &lhs, kmp_cmplx128_a4_t &rhs) {
651 lhs.q -= rhs.q;
653 static inline void operator*=(kmp_cmplx128_a4_t &lhs, kmp_cmplx128_a4_t &rhs) {
654 lhs.q *= rhs.q;
656 static inline void operator/=(kmp_cmplx128_a4_t &lhs, kmp_cmplx128_a4_t &rhs) {
657 lhs.q /= rhs.q;
660 static inline void operator+=(kmp_cmplx128_a16_t &lhs,
662 lhs.q += rhs.q;
664 static inline void operator-=(kmp_cmplx128_a16_t &lhs,
666 lhs.q -= rhs.q;
668 static inline void operator*=(kmp_cmplx128_a16_t &lhs,
670 lhs.q *= rhs.q;
672 static inline void operator/=(kmp_cmplx128_a16_t &lhs,
674 lhs.q /= rhs.q;
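
Source lines 609-674 define compound-assignment and comparison operators for the small wrapper structs (Quad_a4_t, Quad_a16_t, kmp_cmplx128_a4_t, kmp_cmplx128_a16_t), so the macro-generated bodies can apply (*lhs) OP (rhs) uniformly to quad and complex payloads with either 4-byte or 16-byte alignment. A minimal sketch of the pattern, with long double standing in for the _Quad extension type and without the alignment attributes of the real wrappers:

    // Illustrative wrapper only; the field name q matches the listing.
    struct QuadLike {
      long double q;
    };
    static inline void operator+=(QuadLike &lhs, QuadLike &rhs) { lhs.q += rhs.q; }
    static inline void operator-=(QuadLike &lhs, QuadLike &rhs) { lhs.q -= rhs.q; }
    static inline void operator*=(QuadLike &lhs, QuadLike &rhs) { lhs.q *= rhs.q; }
    static inline void operator/=(QuadLike &lhs, QuadLike &rhs) { lhs.q /= rhs.q; }
    static inline bool operator<(QuadLike &lhs, QuadLike &rhs) { return lhs.q < rhs.q; }
    static inline bool operator>(QuadLike &lhs, QuadLike &rhs) { return lhs.q > rhs.q; }
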
682 // void __kmpc_atomic_RTYPE_OP( ident_t*, int, TYPE *lhs, TYPE rhs );
696 TYPE *lhs, TYPE rhs) { \
717 // Operation on *lhs, rhs bound by critical section
725 (*lhs) OP(rhs); \
769 // Operation on *lhs, rhs using "compare_and_store" routine
776 old_value = *(TYPE volatile *)lhs; \
779 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
783 old_value = *(TYPE volatile *)lhs; \
804 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
807 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv, \
811 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
826 /* OP used as a sign for subtraction: (lhs-rhs) --> (lhs+-rhs) */ \
827 KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
855 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
856 /* OP used as a sign for subtraction: (lhs-rhs) --> (lhs+-rhs) */ \
857 KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
868 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
882 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
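
The comments repeated through the listing ("Operation on *lhs, rhs using 'compare_and_store' routine" at source lines 769, 1387, 2051, 2187 and 2872) all describe the same retry loop: snapshot *lhs, compute the updated value, and compare-and-store it back, repeating if another thread changed the location in between. The alignment tests ((kmp_uintptr_t)lhs & 0xMASK) guard this fast path; unaligned operands fall back to a critical section. A self-contained sketch of the loop using C++20 std::atomic_ref in place of the runtime's KMP_COMPARE_AND_STORE primitives (the names here are illustrative):

    #include <atomic>

    template <typename T, typename Op>
    void cas_update(T *lhs, T rhs, Op op) {
      std::atomic_ref<T> loc(*lhs);
      T old_value = loc.load(std::memory_order_relaxed); // temp_val = *lhs
      T new_value;
      do {
        new_value = op(old_value, rhs);                  // e.g. old_value + rhs
        // compare_exchange_weak reloads old_value on failure, matching the
        // "old_value = *lhs" re-read before each retry in the macro.
      } while (!loc.compare_exchange_weak(old_value, new_value));
    }

    // Usage: cas_update(&x, 3, [](int a, int b) { return a + b; });
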
1028 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1029 OP_CRITICAL(= *lhs OP, LCK_ID) \
1038 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1047 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1048 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1052 OP_CRITICAL(= *lhs OP, LCK_ID) /* unaligned - use critical */ \
1086 if (*lhs OP rhs) { /* still need actions? */ \
1087 *lhs = rhs; \
1108 temp_val = *lhs; \
1112 (kmp_int##BITS *)lhs, \
1116 temp_val = *lhs; \
1125 if (*lhs OP rhs) { /* need actions? */ \
1138 if (*lhs OP rhs) { \
1150 if (*lhs OP rhs) { \
1152 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1225 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
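
Source lines 1086-1225 are the MIN/MAX forms: the store only happens while the condition *lhs OP rhs still holds (for a max reduction, OP is <), so the routine first checks cheaply whether any action is needed and then re-verifies under the CAS or critical section. A sketch of the lock-free variant, under the same assumptions as the previous sketch:

    #include <atomic>

    // Atomically perform *lhs = max(*lhs, rhs); no write is issued if rhs
    // would not change the value (the "still need actions?" check).
    template <typename T>
    void atomic_max(T *lhs, T rhs) {
      std::atomic_ref<T> loc(*lhs);
      T old_value = loc.load(std::memory_order_relaxed);
      while (old_value < rhs) {                       // *lhs OP rhs
        if (loc.compare_exchange_weak(old_value, rhs))
          break;                                      // rhs stored, done
        // otherwise old_value was refreshed; re-test the condition
      }
    }
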
1352 // Operation on *lhs, rhs bound by critical section
1360 (*lhs) = (rhs)OP(*lhs); \
1382 TYPE *lhs, TYPE rhs) { \
1387 // Operation on *lhs, rhs using "compare_and_store" routine
1392 // *lhs only once (w/o it the compiler reads *lhs twice)
1397 temp_val = *lhs; \
1401 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
1405 temp_val = *lhs; \
1563 /* *lhs = (TYPE)( (_Quad)(*lhs) OP rhs ) */
1569 ident_t *id_ref, int gtid, TYPE *lhs, RTYPE rhs) { \
1601 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1834 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1871 // Operation on *lhs, rhs using "compare_and_store_ret" routine
1876 // *lhs only once (w/o it the compiler reads *lhs twice)
1898 // Operation on *lhs, rhs bound by critical section
2041 KMP_XCHG_FIXED##BITS(lhs, rhs); \
2047 KMP_XCHG_REAL##BITS(lhs, rhs); \
2051 // Operation on *lhs, rhs using "compare_and_store" routine
2056 // *lhs only once (w/o it the compiler reads *lhs twice)
2061 temp_val = *lhs; \
2065 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2069 temp_val = *lhs; \
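
Source lines 2041-2069 come from the atomic write (_wr) forms, whose signature is documented at source line 136 above: rhs is stored into *lhs with a machine-width exchange (KMP_XCHG_FIXED/KMP_XCHG_REAL) where one exists, and with the compare-and-store loop otherwise. A very small sketch, again with std::atomic_ref as a stand-in:

    #include <atomic>

    // Atomic store of rhs into *lhs; the previous value returned by the
    // exchange is simply discarded for a plain write.
    template <typename T>
    void atomic_write(T *lhs, T rhs) {
      std::atomic_ref<T>(*lhs).exchange(rhs); // mirrors KMP_XCHG_FIXED##BITS(lhs, rhs)
    }
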
2151 TYPE *lhs, TYPE rhs, int flag) { \
2156 // Operation on *lhs, rhs bound by critical section
2165 (*lhs) OP rhs; \
2166 new_value = (*lhs); \
2168 new_value = (*lhs); \
2169 (*lhs) OP rhs; \
2187 // Operation on *lhs, rhs using "compare_and_store" routine
2192 // *lhs only once (w/o it the compiler reads *lhs twice)
2197 temp_val = *lhs; \
2201 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2205 temp_val = *lhs; \
2228 /* OP used as a sign for subtraction: (lhs-rhs) --> (lhs+-rhs) */ \
2229 old_value = KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
2367 ident_t *id_ref, int gtid, TYPE *lhs, RTYPE rhs, int flag) { \
2492 // Operation on *lhs, rhs bound by critical section
2503 new_value = (*lhs); \
2524 OP_GOMP_CRITICAL_L_CPT(= *lhs OP, GOMP_FLAG) \
2557 if (*lhs OP rhs) { /* still need actions? */ \
2558 old_value = *lhs; \
2559 *lhs = rhs; \
2565 new_value = *lhs; \
2586 temp_val = *lhs; \
2590 (kmp_int##BITS *)lhs, \
2594 temp_val = *lhs; \
2608 if (*lhs OP rhs) { /* need actions? */ \
2612 return *lhs; \
2618 if (*lhs OP rhs) { \
2622 return *lhs; \
2719 (*lhs) OP rhs; \
2720 (*out) = (*lhs); \
2722 (*out) = (*lhs); \
2723 (*lhs) OP rhs; \
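
Source lines 2151-2723 are the capture (_cpt) forms: the routine performs the update and also returns a value, and the trailing int flag parameter (see the signature at source line 103) selects which of the two orderings shown at lines 2165-2169 and 2719-2723 is used, capturing either the new value or the old one. A sketch of that control flow under a lock (the runtime also has CAS and fetch-and-add fast paths); std::mutex stands in for the runtime's critical section:

    #include <mutex>

    // Capture form: apply *lhs += rhs and return either the new or the old value.
    // Assumption in this sketch: a nonzero flag means capture the value after
    // the update.
    template <typename T>
    T add_capture(T *lhs, T rhs, int flag, std::mutex &lck) {
      std::lock_guard<std::mutex> guard(lck);
      T captured;
      if (flag) {
        *lhs += rhs;       // (*lhs) OP rhs;
        captured = *lhs;   // new_value = (*lhs);
      } else {
        captured = *lhs;   // new_value = (*lhs);
        *lhs += rhs;       // (*lhs) OP rhs;
      }
      return captured;
    }
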
2742 void __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid, TYPE *lhs, \
2841 // Operation on *lhs, rhs bound by critical section
2850 /*temp_val = (*lhs);*/ \
2851 (*lhs) = (rhs)OP(*lhs); \
2852 new_value = (*lhs); \
2854 new_value = (*lhs); \
2855 (*lhs) = (rhs)OP(*lhs); \
2872 // Operation on *lhs, rhs using "compare_and_store" routine
2877 // *lhs only once (w/o it the compiler reads *lhs twice)
2882 temp_val = *lhs; \
2886 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2890 temp_val = *lhs; \
3009 (*lhs) = (rhs)OP(*lhs); \
3010 (*out) = (*lhs); \
3012 (*out) = (*lhs); \
3013 (*lhs) = (rhs)OP(*lhs); \
3159 TYPE __kmpc_atomic_##TYPE_ID##_swp(ident_t *id_ref, int gtid, TYPE *lhs, \
3167 old_value = (*lhs); \
3168 (*lhs) = rhs; \
3188 old_value = KMP_XCHG_FIXED##BITS(lhs, rhs); \
3196 old_value = KMP_XCHG_REAL##BITS(lhs, rhs); \
3205 temp_val = *lhs; \
3209 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
3213 temp_val = *lhs; \
3262 void __kmpc_atomic_##TYPE_ID##_swp(ident_t *id_ref, int gtid, TYPE *lhs, \
3270 tmp = (*lhs); \
3271 (*lhs) = (rhs); \
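
Source lines 3159-3271 are the swap (_swp) forms: store rhs and return the previous value, either with a machine-width exchange (KMP_XCHG at lines 3188 and 3196), with the compare-and-store loop, or, for the remaining types, inside a critical section, which is also the general fallback used throughout the file ("Operation on *lhs, rhs bound by critical section"). A sketch of the locked fallback; std::mutex stands in for the runtime's own atomic locks:

    #include <mutex>

    // Swap under a lock: capture the old value, store the new one.
    template <typename T>
    T swap_under_lock(T *lhs, T rhs, std::mutex &lck) {
      std::lock_guard<std::mutex> guard(lck);
      T old_value = *lhs; // old_value = (*lhs);
      *lhs = rhs;         // (*lhs) = rhs;
      return old_value;
    }
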
3327 void __kmpc_atomic_1(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3340 old_value = *(kmp_int8 *)lhs;
3344 while (!KMP_COMPARE_AND_STORE_ACQ8((kmp_int8 *)lhs, *(kmp_int8 *)&old_value,
3348 old_value = *(kmp_int8 *)lhs;
3363 (*f)(lhs, lhs, rhs);
3374 void __kmpc_atomic_2(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3382 !((kmp_uintptr_t)lhs & 0x1) /* make sure address is 2-byte aligned */
3387 old_value = *(kmp_int16 *)lhs;
3392 (kmp_int16 *)lhs, *(kmp_int16 *)&old_value, *(kmp_int16 *)&new_value)) {
3395 old_value = *(kmp_int16 *)lhs;
3410 (*f)(lhs, lhs, rhs);
3421 void __kmpc_atomic_4(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3431 !((kmp_uintptr_t)lhs & 0x3) /* make sure address is 4-byte aligned */
3436 old_value = *(kmp_int32 *)lhs;
3441 (kmp_int32 *)lhs, *(kmp_int32 *)&old_value, *(kmp_int32 *)&new_value)) {
3444 old_value = *(kmp_int32 *)lhs;
3460 (*f)(lhs, lhs, rhs);
3471 void __kmpc_atomic_8(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3481 !((kmp_uintptr_t)lhs & 0x7) /* make sure address is 8-byte aligned */
3486 old_value = *(kmp_int64 *)lhs;
3490 (kmp_int64 *)lhs, *(kmp_int64 *)&old_value, *(kmp_int64 *)&new_value)) {
3493 old_value = *(kmp_int64 *)lhs;
3509 (*f)(lhs, lhs, rhs);
3520 void __kmpc_atomic_10(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3531 (*f)(lhs, lhs, rhs);
3541 void __kmpc_atomic_16(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3552 (*f)(lhs, lhs, rhs);
3562 void __kmpc_atomic_20(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3573 (*f)(lhs, lhs, rhs);
3583 void __kmpc_atomic_32(ident_t *id_ref, int gtid, void *lhs, void *rhs,
3594 (*f)(lhs, lhs, rhs);
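
Source lines 3327-3594 are the generic, size-keyed entry points (__kmpc_atomic_1, _2, _4, _8, _10, _16, _20, _32): the caller passes the operation as a callback f, and for 1-, 2-, 4- and 8-byte operands at a suitably aligned address the update is retried with compare-and-store, while every other case runs (*f)(lhs, lhs, rhs), i.e. the callback writes its result straight back into *lhs, presumably under the runtime's atomic lock. A simplified sketch of the 4-byte case (the id_ref/gtid parameters and the runtime's per-size locks are omitted; the single mutex and the function name are illustrative):

    #include <atomic>
    #include <cstdint>
    #include <mutex>

    std::mutex atomic_fallback_lock; // stand-in for the runtime's atomic lock

    void generic_atomic_4(void *lhs, void *rhs, void (*f)(void *, void *, void *)) {
      if (((std::uintptr_t)lhs & 0x3) == 0) { // 4-byte aligned: lock-free path
        auto *loc = static_cast<std::uint32_t *>(lhs);
        std::atomic_ref<std::uint32_t> a(*loc);
        // The operand is handled as raw 32-bit data; f reinterprets it as the
        // real element type, as the runtime's own fast path does.
        std::uint32_t old_value = a.load(std::memory_order_relaxed);
        std::uint32_t new_value;
        do {
          (*f)(&new_value, &old_value, rhs); // new_value = old_value OP rhs
        } while (!a.compare_exchange_weak(old_value, new_value));
      } else {                                // unaligned: serialize
        std::lock_guard<std::mutex> guard(atomic_fallback_lock);
        (*f)(lhs, lhs, rhs);                  // result written back into *lhs
      }
    }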