Lines Matching refs:dst

269 #define va_copy(dst,src)  __va_copy(dst,src)
272 #define va_copy(dst,src) \
273 do { memcpy (&(dst), &(src), sizeof (va_list)); } while (0)
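The two fallbacks above supply va_copy on pre-C99 compilers: GCC's __va_copy where it exists, otherwise a raw memcpy of the va_list object (valid only where va_list is an ordinary copyable type). A minimal standalone sketch of why the macro matters, assuming nothing from GMP; sum_twice and vsum_twice are illustrative names:

    #include <stdarg.h>
    #include <stdio.h>
    #include <string.h>

    /* Same fallback chain as above: prefer the compiler's va_copy,
       then __va_copy, then a raw copy of the va_list object. */
    #if !defined (va_copy) && defined (__va_copy)
    #define va_copy(dst,src)  __va_copy(dst,src)
    #endif
    #if !defined (va_copy)
    #define va_copy(dst,src) \
      do { memcpy (&(dst), &(src), sizeof (va_list)); } while (0)
    #endif

    /* Walks the argument list twice, which is exactly what va_copy
       exists for: a va_list may not be reusable after va_arg. */
    static int
    vsum_twice (int n, va_list ap)
    {
      va_list ap2;
      int i, s = 0;
      va_copy (ap2, ap);
      for (i = 0; i < n; i++)
        s += va_arg (ap, int);
      for (i = 0; i < n; i++)
        s += va_arg (ap2, int);
      va_end (ap2);
      return s;
    }

    static int
    sum_twice (int n, ...)
    {
      va_list ap;
      int s;
      va_start (ap, n);
      s = vsum_twice (n, ap);
      va_end (ap);
      return s;
    }

    int
    main (void)
    {
      printf ("%d\n", sum_twice (3, 1, 2, 3));  /* prints 12 */
      return 0;
    }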
1300 #define mpn_bdiv_dbm1(dst, src, size, divisor) \
1301 mpn_bdiv_dbm1c (dst, src, size, divisor, __GMP_CAST (mp_limb_t, 0))
1328 #define mpn_divexact_by3(dst,src,size) \
1329 (3 & mpn_bdiv_dbm1 (dst, src, size, __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 3)))
1333 #define mpn_divexact_by3c(dst,src,size,cy) \
1334 (3 & mpn_bdiv_dbm1c (dst, src, size, __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 3), __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 3 * cy)))
1339 #define mpn_divexact_by5(dst,src,size) \
1340 (7 & 3 * mpn_bdiv_dbm1 (dst, src, size, __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 5)))
1344 #define mpn_divexact_by7(dst,src,size) \
1345 (7 & 1 * mpn_bdiv_dbm1 (dst, src, size, __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 7)))
1349 #define mpn_divexact_by9(dst,src,size) \
1350 (15 & 7 * mpn_bdiv_dbm1 (dst, src, size, __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 9)))
1354 #define mpn_divexact_by11(dst,src,size) \
1355 (15 & 5 * mpn_bdiv_dbm1 (dst, src, size, __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 11)))
1359 #define mpn_divexact_by13(dst,src,size) \
1360 (15 & 3 * mpn_bdiv_dbm1 (dst, src, size, __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 13)))
1364 #define mpn_divexact_by15(dst,src,size) \
1365 (15 & 1 * mpn_bdiv_dbm1 (dst, src, size, __GMP_CAST (mp_limb_t, GMP_NUMB_MASK / 15)))
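Each divisor here pairs mpn_bdiv_dbm1c with the precomputed constant GMP_NUMB_MASK / d, and the small leading mask-and-multiply (3 &, 7 & 3 *, ..., 15 & 1 *) folds the routine's return value into a remainder indication. The common theme is exact division by multiplication with a precomputed constant rather than a hardware divide. A standalone sketch of the simplest form of that idea, assuming 64-bit limbs: multiply by the divisor's inverse modulo 2^64 (gmp-impl.h's binvert_limb computes such inverses in much this way; binv64 below is an illustrative name, not GMP's):

    #include <stdint.h>
    #include <stdio.h>

    /* Inverse of odd d modulo 2^64 by Newton's iteration; each step
       doubles the number of correct low bits. */
    static uint64_t
    binv64 (uint64_t d)
    {
      uint64_t inv = d;        /* d*d == 1 mod 8: 3 bits correct */
      inv *= 2 - d * inv;      /*  6 bits */
      inv *= 2 - d * inv;      /* 12 bits */
      inv *= 2 - d * inv;      /* 24 bits */
      inv *= 2 - d * inv;      /* 48 bits */
      inv *= 2 - d * inv;      /* 96 >= 64 bits */
      return inv;
    }

    int
    main (void)
    {
      uint64_t a = 3 * 123456789;      /* a known multiple of 3 */
      uint64_t q = a * binv64 (3);     /* exact quotient, one multiply */
      printf ("%llu\n", (unsigned long long) q);   /* 123456789 */
      return 0;
    }

Since d * binv64 (d) == 1 mod 2^64, the product a * binv64 (d) equals the true quotient whenever d divides a exactly; when it does not, the product is meaningless, and the masked return value in the macros above serves as that exactness check.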
1384 #define MPN_COPY_INCR(dst, src, n) \
1389 (dst)[__i] = (src)[__i]; \
1400 #define MPN_COPY_INCR(dst, src, size) \
1403 ASSERT (MPN_SAME_OR_INCR_P (dst, src, size)); \
1404 mpn_copyi (dst, src, size); \
1410 #define MPN_COPY_INCR(dst, src, n) \
1413 ASSERT (MPN_SAME_OR_INCR_P (dst, src, n)); \
1417 mp_ptr __dst = (dst); \
1437 #define MPN_COPY_DECR(dst, src, n) \
1442 (dst)[__i] = (src)[__i]; \
1453 #define MPN_COPY_DECR(dst, src, size) \
1456 ASSERT (MPN_SAME_OR_DECR_P (dst, src, size)); \
1457 mpn_copyd (dst, src, size); \
1463 #define MPN_COPY_DECR(dst, src, n) \
1466 ASSERT (MPN_SAME_OR_DECR_P (dst, src, n)); \
1470 mp_ptr __dst = (dst) + __n; \
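The INCR/DECR pairing encodes copy direction against overlap: a low-to-high copy is safe when the destination does not start above the source (MPN_SAME_OR_INCR_P), a high-to-low copy when it does not start below it (MPN_SAME_OR_DECR_P). A standalone sketch of the two loops and their guards; limb, copy_incr and copy_decr are illustrative names:

    #include <assert.h>
    #include <stddef.h>

    typedef unsigned long limb;   /* stand-in for mp_limb_t */

    /* Low-to-high copy, safe when dst <= src or the regions are
       disjoint (the MPN_SAME_OR_INCR_P condition). */
    static void
    copy_incr (limb *dst, const limb *src, size_t n)
    {
      size_t i;
      assert (dst <= src || dst >= src + n);
      for (i = 0; i < n; i++)
        dst[i] = src[i];
    }

    /* High-to-low copy, the mirror case (MPN_SAME_OR_DECR_P). */
    static void
    copy_decr (limb *dst, const limb *src, size_t n)
    {
      size_t i;
      assert (dst >= src || dst + n <= src);
      for (i = n; i > 0; i--)
        dst[i - 1] = src[i - 1];
    }

    int
    main (void)
    {
      limb b[5] = { 1, 2, 3, 4, 0 };
      copy_incr (b, b + 1, 4);      /* shift down: 2,3,4,0,0 */
      copy_decr (b + 1, b, 4);      /* shift up:   2,2,3,4,0 */
      return 0;
    }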
1498 /* Set {dst,size} to the limbs of {src,size} in reverse order. */
1499 #define MPN_REVERSE(dst, src, size) \
1501 mp_ptr __dst = (dst); \
1506 ASSERT (! MPN_OVERLAP_P (dst, size, src, size)); \
1517 /* Zero n limbs at dst.
1537 #define MPN_ZERO(dst, n) \
1542 mp_ptr __dst = (dst) - 1; \
1552 #define MPN_ZERO(dst, n) \
1557 mp_ptr __dst = (dst); \
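MPN_REVERSE copies limbs in reverse order and, unlike the directional copies above, asserts that the operands do not overlap at all; the two MPN_ZERO variants are plain clearing loops, one of them written with a pre-displaced pointer ((dst) - 1). A standalone sketch of both shapes, with illustrative names:

    #include <stddef.h>
    #include <stdio.h>

    typedef unsigned long limb;   /* stand-in for mp_limb_t */

    /* Reverse-order copy; any overlap at all would clobber source
       limbs before they are read, hence the no-overlap assertion. */
    static void
    reverse_limbs (limb *dst, const limb *src, size_t n)
    {
      size_t i;
      for (i = 0; i < n; i++)
        dst[i] = src[n - 1 - i];
    }

    /* Clear n limbs; the (dst) - 1 form above is the same loop with
       pre-increment addressing. */
    static void
    zero_limbs (limb *dst, size_t n)
    {
      size_t i;
      for (i = 0; i < n; i++)
        dst[i] = 0;
    }

    int
    main (void)
    {
      limb a[3] = { 1, 2, 3 }, b[3];
      reverse_limbs (b, a, 3);
      printf ("%lu %lu %lu\n", b[0], b[1], b[2]);   /* 3 2 1 */
      zero_limbs (b, 3);
      return 0;
    }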
1963 /* Return non-zero if dst,dsize and src,ssize are either identical or only
1964    overlapping in a way suitable for an incrementing/decrementing algorithm.
1965    Return zero if they're partially overlapping in an unsuitable fashion. */
1966 #define MPN_SAME_OR_INCR2_P(dst, dsize, src, ssize) \
1967 ((dst) <= (src) || ! MPN_OVERLAP_P (dst, dsize, src, ssize))
1968 #define MPN_SAME_OR_INCR_P(dst, src, size) \
1969 MPN_SAME_OR_INCR2_P(dst, size, src, size)
1970 #define MPN_SAME_OR_DECR2_P(dst, dsize, src, ssize) \
1971 ((dst) >= (src) || ! MPN_OVERLAP_P (dst, dsize, src, ssize))
1972 #define MPN_SAME_OR_DECR_P(dst, src, size) \
1973 MPN_SAME_OR_DECR2_P(dst, size, src, size)
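These are the predicates the copy macros assert. MPN_OVERLAP_P itself does not appear in this listing (its definition contains no dst), but it is the usual interval test: two regions overlap iff each starts before the other ends. A standalone sketch of that test and of SAME_OR_INCR built on it; the lowercase names are illustrative:

    #include <stddef.h>
    #include <stdio.h>

    /* Interval-overlap test in the style of MPN_OVERLAP_P. */
    static int
    overlap_p (const long *xp, size_t xn, const long *yp, size_t yn)
    {
      return xp + xn > yp && yp + yn > xp;
    }

    /* Identical start, destination below source, or fully disjoint:
       exactly the cases where a low-to-high copy is safe. */
    static int
    same_or_incr_p (const long *dst, const long *src, size_t n)
    {
      return dst <= src || ! overlap_p (dst, n, src, n);
    }

    int
    main (void)
    {
      long buf[10];
      printf ("%d\n", same_or_incr_p (buf, buf + 2, 5));   /* 1: dst below src */
      printf ("%d\n", same_or_incr_p (buf + 2, buf, 5));   /* 0: unsafe overlap */
      printf ("%d\n", same_or_incr_p (buf + 5, buf, 5));   /* 1: disjoint */
      return 0;
    }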
2868 #define MPN_DIVREM_OR_DIVEXACT_1(dst, src, size, divisor) \
2871 ASSERT_NOCARRY (mpn_divrem_1 (dst, (mp_size_t) 0, src, size, divisor)); \
2875 mpn_divexact_1 (dst, src, size, divisor); \
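MPN_DIVREM_OR_DIVEXACT_1 serves divisions known in advance to be exact: one arm calls the general mpn_divrem_1 wrapped in ASSERT_NOCARRY so that debug builds verify the remainder really is zero, the other calls mpn_divexact_1 (the selection logic sits on lines not matching dst, presumably a size threshold). The assertion pattern in scalar form, as a sketch with an illustrative name:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* When a quotient is known exact, check the claim in debug builds
       instead of trusting it silently. */
    static uint64_t
    divexact_checked (uint64_t a, uint64_t d)
    {
      assert (a % d == 0);    /* the ASSERT_NOCARRY analogue */
      return a / d;
    }

    int
    main (void)
    {
      printf ("%llu\n", (unsigned long long) divexact_checked (91, 7));  /* 13 */
      return 0;
    }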
3069 #define BSWAP_LIMB(dst, src) \
3078 (dst) = __tmp1 | __tmp2; /* whole */ \
3090 #define BSWAP_LIMB(dst, src) \
3092 __asm__ ("bswap %0" : "=r" (dst) : "0" (src)); \
3098 #define BSWAP_LIMB(dst, src) \
3100 __asm__ ("bswap %q0" : "=r" (dst) : "0" (src)); \
3106 #define BSWAP_LIMB(dst, src) \
3108 __asm__ ("mux1 %0 = %1, @rev" : "=r" (dst) : "r" (src)); \
3115 #define BSWAP_LIMB(dst, src) \
3121 : "=d" (dst) \
3128 #define BSWAP_LIMB(dst, src) \
3129 do { (dst) = (src); } while (0)
3132 #define BSWAP_LIMB(dst, src) \
3134 (dst) = ((src) << 8) + ((src) >> 8); \
3138 #define BSWAP_LIMB(dst, src) \
3140 (dst) = \
3148 #define BSWAP_LIMB(dst, src) \
3150 (dst) = \
3164 #define BSWAP_LIMB(dst, src) \
3174 (dst) = __dst; \
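The BSWAP_LIMB variants above range from a generic two-halves combination through single-instruction inline assembly (x86 and x86-64 bswap, ia64 mux1 @rev, and a form using a d register, apparently m68k) to pure-C fallbacks: an identity for 8-bit limbs and shift-based forms for wider ones. A standalone sketch of the portable shift-and-mask technique for 32 and 64 bits:

    #include <stdint.h>
    #include <stdio.h>

    /* Byte swap by shifting and masking, as in the generic fallbacks. */
    static uint32_t
    bswap32 (uint32_t x)
    {
      return (x << 24)
           | ((x & 0xff00) << 8)
           | ((x >> 8) & 0xff00)
           | (x >> 24);
    }

    static uint64_t
    bswap64 (uint64_t x)
    {
      x = (x >> 32) | (x << 32);                    /* swap 32-bit halves */
      x = ((x & 0x0000ffff0000ffffULL) << 16)
        | ((x >> 16) & 0x0000ffff0000ffffULL);      /* swap 16-bit pieces */
      x = ((x & 0x00ff00ff00ff00ffULL) << 8)
        | ((x >> 8) & 0x00ff00ff00ff00ffULL);       /* swap adjacent bytes */
      return x;
    }

    int
    main (void)
    {
      printf ("%08x\n", (unsigned) bswap32 (0x12345678));          /* 78563412 */
      printf ("%016llx\n",
              (unsigned long long) bswap64 (0x0102030405060708ULL));
      return 0;
    }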
3212 #define BSWAP_LIMB_STORE(dst, limb) \
3214 mp_ptr __dst = (dst); \
3224 #define BSWAP_LIMB_STORE(dst, limb) BSWAP_LIMB (*(dst), limb)
3228 /* Byte swap limbs from {src,size} and store at {dst,size}. */
3229 #define MPN_BSWAP(dst, src, size) \
3231 mp_ptr __dst = (dst); \
3236 ASSERT (MPN_SAME_OR_SEPARATE_P (dst, src, size)); \
3246 /* Byte swap limbs from {src,size} and store in reverse order at {dst,size}. */
3247 #define MPN_BSWAP_REVERSE(dst, src, size) \
3249 mp_ptr __dst = (dst); \
3254 ASSERT (! MPN_OVERLAP_P (dst, size, src, size)); \
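The two array forms differ only in limb order: MPN_BSWAP byte-swaps limb by limb in the same order (source and destination may be identical but not partially overlapping), while MPN_BSWAP_REVERSE also reverses the limbs, which reverses the whole array's byte sequence end to end. A standalone sketch assuming 64-bit limbs and a GCC/Clang toolchain (for __builtin_bswap64); the function names are illustrative:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* MPN_BSWAP shape: swap bytes within each limb, keep limb order. */
    static void
    bswap_limbs (uint64_t *dst, const uint64_t *src, size_t n)
    {
      size_t i;
      for (i = 0; i < n; i++)
        dst[i] = __builtin_bswap64 (src[i]);
    }

    /* MPN_BSWAP_REVERSE shape: swap bytes and reverse limb order. */
    static void
    bswap_reverse_limbs (uint64_t *dst, const uint64_t *src, size_t n)
    {
      size_t i;
      for (i = 0; i < n; i++)
        dst[i] = __builtin_bswap64 (src[n - 1 - i]);
    }

    int
    main (void)
    {
      uint64_t s[2] = { 0x0102030405060708ULL, 0x1112131415161718ULL };
      uint64_t d[2];
      bswap_reverse_limbs (d, s, 2);
      printf ("%016llx %016llx\n",
              (unsigned long long) d[0],
              (unsigned long long) d[1]);
      /* prints: 1817161514131211 0807060504030201 */
      return 0;
    }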