Lines Matching defs:poly64x2_t

60 typedef __Poly64x2_t poly64x2_t;
256 poly64x2_t val[2];
261 poly64x2_t val[3];
266 poly64x2_t val[4];
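The typedef and the val[] members above define the 128-bit poly64 vector type and its 2-, 3- and 4-vector aggregates. A minimal usage sketch, assuming an AArch64 toolchain with <arm_neon.h>; the helper name make_pair is illustrative, not part of the header:

#include <arm_neon.h>

/* Build a poly64x2x2_t: two Q registers, each holding two 64-bit
   polynomial lanes.  */
poly64x2x2_t
make_pair (void)
{
  poly64x2_t v = vdupq_n_p64 ((poly64_t) 0x87);  /* broadcast one value */
  poly64x2x2_t pair;
  pair.val[0] = v;   /* the x2/x3/x4 structs just wrap vectors in val[] */
  pair.val[1] = v;
  return pair;
}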
3297 vgetq_lane_p64 (poly64x2_t __a, const int __b)
3537 vreinterpretq_p8_p64 (poly64x2_t __a)
3726 vreinterpretq_p16_p64 (poly64x2_t __a)
3829 __extension__ extern __inline poly64x2_t
3833 return (poly64x2_t) __a;
3836 __extension__ extern __inline poly64x2_t
3840 return (poly64x2_t) __a;
3843 __extension__ extern __inline poly64x2_t
3847 return (poly64x2_t) __a;
3850 __extension__ extern __inline poly64x2_t
3854 return (poly64x2_t) __a;
3857 __extension__ extern __inline poly64x2_t
3861 return (poly64x2_t) __a;
3864 __extension__ extern __inline poly64x2_t
3868 return (poly64x2_t) __a;
3871 __extension__ extern __inline poly64x2_t
3875 return (poly64x2_t) __a;
3878 __extension__ extern __inline poly64x2_t
3882 return (poly64x2_t) __a;
3885 __extension__ extern __inline poly64x2_t
3889 return (poly64x2_t) __a;
3892 __extension__ extern __inline poly64x2_t
3896 return (poly64x2_t) __a;
3899 __extension__ extern __inline poly64x2_t
3903 return (poly64x2_t) __a;
3906 __extension__ extern __inline poly64x2_t
3910 return (poly64x2_t) __a;
3913 __extension__ extern __inline poly64x2_t
3917 return (poly64x2_t) __a;
3920 __extension__ extern __inline poly64x2_t
3924 return (poly64x2_t) __a;
3957 vreinterpretq_p128_p64 (poly64x2_t __a)
4202 vreinterpretq_f16_p64 (poly64x2_t __a)
4384 vreinterpretq_f32_p64 (poly64x2_t __a)
4518 vreinterpretq_f64_p64 (poly64x2_t __a)
4756 vreinterpretq_s64_p64 (poly64x2_t __a)
4945 vreinterpretq_u64_p64 (poly64x2_t __a)
5134 vreinterpretq_s8_p64 (poly64x2_t __a)
5323 vreinterpretq_s16_p64 (poly64x2_t __a)
5512 vreinterpretq_s32_p64 (poly64x2_t __a)
5701 vreinterpretq_u8_p64 (poly64x2_t __a)
5890 vreinterpretq_u16_p64 (poly64x2_t __a)
6079 vreinterpretq_u32_p64 (poly64x2_t __a)
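The vreinterpretq_*_p64 entries above, together with the unnamed poly64x2_t returns in the 3829-3924 block (which appear to be the casts in the opposite direction), are bit-for-bit reinterpretations between vector types. A short sketch, assuming an AArch64 toolchain; the function names are illustrative:

#include <arm_neon.h>

/* A reinterpret cast relabels the 128 bits; it emits no instructions.  */
uint8x16_t
p64_as_bytes (poly64x2_t v)
{
  return vreinterpretq_u8_p64 (v);   /* view two poly64 lanes as 16 bytes */
}

poly64x2_t
bytes_as_p64 (uint8x16_t b)
{
  return vreinterpretq_p64_u8 (b);   /* and back */
}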
6242 __extension__ extern __inline poly64x2_t
6244 vsetq_lane_p64 (poly64_t __elem, poly64x2_t __vec, const int __index)
6347 vget_low_p64 (poly64x2_t __a)
6452 vget_high_p64 (poly64x2_t __a)
6611 __extension__ extern __inline poly64x2_t
6615 return (poly64x2_t) __builtin_aarch64_combinedi_ppp (__a[0], __b[0]);
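vgetq_lane_p64, vsetq_lane_p64, vget_low_p64, vget_high_p64 and the vcombine_p64 body above handle lane access and splitting/joining of Q-register values. A minimal sketch (illustrative helper name; lane indices must be compile-time constants):

#include <arm_neon.h>

poly64x2_t
lane_demo (poly64x2_t v)
{
  poly64_t   e  = vgetq_lane_p64 (v, 0);      /* read lane 0 */
  poly64x2_t w  = vsetq_lane_p64 (e, v, 1);   /* write it into lane 1 */
  poly64x1_t lo = vget_low_p64 (w);           /* split into two D halves */
  poly64x1_t hi = vget_high_p64 (w);
  return vcombine_p64 (lo, hi);               /* join them back into a Q */
}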
10734 poly64x2_t b_ = (b); \
10735 poly64x2_t a_ = (a); \
10736 poly64x2_t result; \
10785 vtstq_p64 (poly64x2_t __a, poly64x2_t __b)
10911 poly64x2_t)
10995 poly64x2_t)
11084 poly64x2_t)
12006 __extension__ extern __inline poly64x2_t
12008 vbslq_p64 (uint64x2_t __a, poly64x2_t __b, poly64x2_t __c)
12686 vceqq_p64 (poly64x2_t __a, poly64x2_t __b)
12886 vceqzq_p64 (poly64x2_t __a)
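vtstq_p64, vbslq_p64, vceqq_p64 and vceqzq_p64 above compare and select on poly64 lanes; the comparisons produce uint64x2_t masks. A sketch of the usual pattern (helper name illustrative):

#include <arm_neon.h>

/* Keep lanes of a where a and b match, otherwise take the fallback lane.  */
poly64x2_t
select_equal (poly64x2_t a, poly64x2_t b, poly64x2_t fallback)
{
  uint64x2_t eq = vceqq_p64 (a, b);     /* all-ones mask where lanes compare equal */
  return vbslq_p64 (eq, a, fallback);   /* bitwise select driven by the mask */
}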
14421 poly64x2_t __b, const int __lane2)
14537 __extension__ extern __inline poly64x2_t
14539 vcopyq_lane_p64 (poly64x2_t __a, const int __lane1,
14656 __extension__ extern __inline poly64x2_t
14658 vcopyq_laneq_p64 (poly64x2_t __a, const int __lane1,
14659 poly64x2_t __b, const int __lane2)
15628 __extension__ extern __inline poly64x2_t
15632 return (poly64x2_t) {__a, __a};
15832 vdup_laneq_p64 (poly64x2_t __a, const int __b)
15930 __extension__ extern __inline poly64x2_t
16030 __extension__ extern __inline poly64x2_t
16032 vdupq_laneq_p64 (poly64x2_t __a, const int __b)
16530 __extension__ extern __inline poly64x2_t
16532 vextq_p64 (poly64x2_t __a, poly64x2_t __b, __const int __c)
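vcopyq_lane_p64 / vcopyq_laneq_p64, the vdup*_p64 entries and vextq_p64 above move whole 64-bit lanes around. A sketch, assuming constant lane arguments (helper name illustrative):

#include <arm_neon.h>

poly64x2_t
permute_demo (poly64x2_t a, poly64x2_t b)
{
  poly64x2_t c = vcopyq_laneq_p64 (a, 0, b, 1);  /* c = {b[1], a[1]} */
  poly64x2_t d = vdupq_laneq_p64 (b, 0);         /* d = {b[0], b[0]} */
  return vextq_p64 (c, d, 1);                    /* {c[1], d[0]} */
}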
17465 __i.val[0] = (poly64x2_t) __builtin_aarch64_get_qregciv2di (__o, 0);
17466 __i.val[1] = (poly64x2_t) __builtin_aarch64_get_qregciv2di (__o, 1);
17467 __i.val[2] = (poly64x2_t) __builtin_aarch64_get_qregciv2di (__o, 2);
17510 __extension__ extern __inline poly64x2_t
17514 return (poly64x2_t)
17885 ret.val[0] = (poly64x2_t) __builtin_aarch64_get_qregoiv2di (__o, 0);
17886 ret.val[1] = (poly64x2_t) __builtin_aarch64_get_qregoiv2di (__o, 1);
18333 __extension__ extern __inline poly64x2_t
18533 __extension__ extern __inline poly64x2_t
18535 vld1q_lane_p64 (const poly64_t *__src, poly64x2_t __vec, const int __lane)
18821 ret.val[0] = (poly64x2_t) __builtin_aarch64_get_qregoiv2di_pss (__o, 0);
18822 ret.val[1] = (poly64x2_t) __builtin_aarch64_get_qregoiv2di_pss (__o, 1);
19292 ret.val[0] = (poly64x2_t) __builtin_aarch64_get_qregciv2di_pss (__o, 0);
19293 ret.val[1] = (poly64x2_t) __builtin_aarch64_get_qregciv2di_pss (__o, 1);
19294 ret.val[2] = (poly64x2_t) __builtin_aarch64_get_qregciv2di_pss (__o, 2);
19683 ret.val[0] = (poly64x2_t) __builtin_aarch64_get_qregxiv2di_pss (__o, 0);
19684 ret.val[1] = (poly64x2_t) __builtin_aarch64_get_qregxiv2di_pss (__o, 1);
19685 ret.val[2] = (poly64x2_t) __builtin_aarch64_get_qregxiv2di_pss (__o, 2);
19686 ret.val[3] = (poly64x2_t) __builtin_aarch64_get_qregxiv2di_pss (__o, 3);
20031 ret.val[0] = (poly64x2_t) __builtin_aarch64_get_qregoiv2di_pss (__o, 0);
20032 ret.val[1] = (poly64x2_t) __builtin_aarch64_get_qregoiv2di_pss (__o, 1);
20394 ret.val[0] = (poly64x2_t) __builtin_aarch64_get_qregciv2di_pss (__o, 0);
20395 ret.val[1] = (poly64x2_t) __builtin_aarch64_get_qregciv2di_pss (__o, 1);
20396 ret.val[2] = (poly64x2_t) __builtin_aarch64_get_qregciv2di_pss (__o, 2);
20785 ret.val[0] = (poly64x2_t) __builtin_aarch64_get_qregxiv2di_pss (__o, 0);
20786 ret.val[1] = (poly64x2_t) __builtin_aarch64_get_qregxiv2di_pss (__o, 1);
20787 ret.val[2] = (poly64x2_t) __builtin_aarch64_get_qregxiv2di_pss (__o, 2);
20788 ret.val[3] = (poly64x2_t) __builtin_aarch64_get_qregxiv2di_pss (__o, 3);
20830 v2di_ssps, di, p64, poly64x2_t)
20871 __LD2Q_LANE_FUNC (poly64x2x2_t, poly64x2_t, poly64_t, v2di, di, p64)
20925 v2di_ssps, di, p64, poly64x2_t)
20968 __LD3Q_LANE_FUNC (poly64x2x3_t, poly64x2_t, poly64_t, v2di, di, p64)
21030 v2di_ssps, di, p64, poly64x2_t)
21075 __LD4Q_LANE_FUNC (poly64x2x4_t, poly64x2_t, poly64_t, v2di, di, p64)
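The block above is the vld1/vld2/vld3/vld4 family (including the lane and dup variants) specialized for poly64. A minimal load sketch, assuming the pointer covers enough elements (helper name illustrative):

#include <arm_neon.h>

/* p must point at 4 or more poly64_t values.  */
poly64x2_t
load_demo (const poly64_t *p, poly64x2x2_t *pair)
{
  *pair = vld2q_p64 (p);   /* de-interleaved: val[0] = {p[0], p[2]}, val[1] = {p[1], p[3]} */
  return vld1q_p64 (p);    /* contiguous: {p[0], p[1]} */
}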
22638 __extension__ extern __inline poly64x2_t
26731 vmull_high_p64 (poly64x2_t __a, poly64x2_t __b)
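vmull_high_p64 is the carry-less multiply of the two high lanes (PMULL2); it is typically paired with vmull_p64 on the low lanes in GHASH/CLMUL-style code. A sketch, assuming a toolchain with the crypto extension enabled (e.g. -march=armv8-a+crypto); the helper name is illustrative:

#include <arm_neon.h>

/* 64x64 -> 128-bit polynomial (carry-less) product of the high lanes.  */
poly128_t
clmul_high (poly64x2_t a, poly64x2_t b)
{
  return vmull_high_p64 (a, b);
}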
27325 __extension__ extern __inline poly64x2_t
27327 vsliq_n_p64 (poly64x2_t __a, poly64x2_t __b, const int __c)
27863 vst1q_p64 (poly64_t *__a, poly64x2_t __b)
27866 (poly64x2_t) __b);
28068 vst1q_lane_p64 (poly64_t *__a, poly64x2_t __b, const int __lane)
28325 (poly64x2_t) __temp.val[0], 0);
28327 (poly64x2_t) __temp.val[1], 1);
28467 (poly64x2_t) __val.val[0], 0);
28469 (poly64x2_t) __val.val[1], 1);
28680 (poly64x2_t) __temp.val[0], 0);
28682 (poly64x2_t) __temp.val[1], 1);
28684 (poly64x2_t) __temp.val[2], 2);
28837 (poly64x2_t) __val.val[0], 0);
28839 (poly64x2_t) __val.val[1], 1);
28841 (poly64x2_t) __val.val[2], 2);
29251 (poly64x2_t) __temp.val[0], 0);
29253 (poly64x2_t) __temp.val[1], 1);
29393 (poly64x2_t) __val.val[0], 0);
29395 (poly64x2_t) __val.val[1], 1);
29604 (poly64x2_t) __temp.val[0], 0);
29606 (poly64x2_t) __temp.val[1], 1);
29608 (poly64x2_t) __temp.val[2], 2);
29761 (poly64x2_t) __val.val[0], 0);
29763 (poly64x2_t) __val.val[1], 1);
29765 (poly64x2_t) __val.val[2], 2);
30001 (poly64x2_t) __temp.val[0], 0);
30003 (poly64x2_t) __temp.val[1], 1);
30005 (poly64x2_t) __temp.val[2], 2);
30007 (poly64x2_t) __temp.val[3], 3);
30173 (poly64x2_t) __val.val[0], 0);
30175 (poly64x2_t) __val.val[1], 1);
30177 (poly64x2_t) __val.val[2], 2);
30179 (poly64x2_t) __val.val[3], 3);
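The block above covers vsliq_n_p64 plus the vst1/vst2/vst3/vst4 store family for poly64. A minimal store sketch (helper name illustrative; dst must have room for two elements):

#include <arm_neon.h>

void
store_demo (poly64_t *dst, poly64x2_t v)
{
  vst1q_p64 (dst, v);          /* dst[0] = v[0], dst[1] = v[1] */
  vst1q_lane_p64 (dst, v, 1);  /* overwrite dst[0] with lane 1 only */
}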
30577 __extension__ extern __inline poly64x2_t
30579 vtrn1q_p64 (poly64x2_t __a, poly64x2_t __b)
30582 return __builtin_shuffle (__a, __b, (poly64x2_t) {3, 1});
30584 return __builtin_shuffle (__a, __b, (poly64x2_t) {0, 2});
30859 __extension__ extern __inline poly64x2_t
30861 vtrn2q_p64 (poly64x2_t __a, poly64x2_t __b)
30864 return __builtin_shuffle (__a, __b, (poly64x2_t) {2, 0});
30866 return __builtin_shuffle (__a, __b, (poly64x2_t) {1, 3});
31516 __extension__ extern __inline poly64x2_t
31518 vuzp1q_p64 (poly64x2_t __a, poly64x2_t __b)
31521 return __builtin_shuffle (__a, __b, (poly64x2_t) {3, 1});
31523 return __builtin_shuffle (__a, __b, (poly64x2_t) {0, 2});
31786 __extension__ extern __inline poly64x2_t
31788 vuzp2q_p64 (poly64x2_t __a, poly64x2_t __b)
31791 return __builtin_shuffle (__a, __b, (poly64x2_t) {2, 0});
31793 return __builtin_shuffle (__a, __b, (poly64x2_t) {1, 3});
32065 __extension__ extern __inline poly64x2_t
32067 vzip1q_p64 (poly64x2_t __a, poly64x2_t __b)
32070 return __builtin_shuffle (__a, __b, (poly64x2_t) {3, 1});
32072 return __builtin_shuffle (__a, __b, (poly64x2_t) {0, 2});
32340 __extension__ extern __inline poly64x2_t
32342 vzip2q_p64 (poly64x2_t __a, poly64x2_t __b)
32345 return __builtin_shuffle (__a, __b, (poly64x2_t) {2, 0});
32347 return __builtin_shuffle (__a, __b, (poly64x2_t) {1, 3});
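The vtrn/vuzp/vzip entries above are the lane permutes; with only two 64-bit lanes per vector, the *1q variants all produce {a[0], b[0]} and the *2q variants {a[1], b[1]}, matching the {0, 2} / {1, 3} shuffle masks listed (the {3, 1} / {2, 0} masks appear to be the big-endian forms). A one-line sketch (helper name illustrative):

#include <arm_neon.h>

poly64x2_t
interleave_low (poly64x2_t a, poly64x2_t b)
{
  return vzip1q_p64 (a, b);   /* {a[0], b[0]} */
}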
35347 vreinterpretq_bf16_p64 (poly64x2_t __a)
35569 __extension__ extern __inline poly64x2_t
35573 return (poly64x2_t) __a;
35938 __extension__ extern __inline poly64x2_t
35940 vaddq_p64 (poly64x2_t __a, poly64x2_t __b)
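vaddq_p64 closes the list: polynomial addition over GF(2), i.e. a lane-wise XOR of the two vectors. A one-line sketch (helper name illustrative):

#include <arm_neon.h>

poly64x2_t
poly_add (poly64x2_t a, poly64x2_t b)
{
  return vaddq_p64 (a, b);   /* same bits as XOR-ing the two vectors */
}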