
Lines Matching defs:poly64x1_t (in clang-include)

77 typedef __attribute__((neon_polyvector_type(1))) poly64_t poly64x1_t;
189 poly64x1_t val[2];
305 poly64x1_t val[3];
421 poly64x1_t val[4];
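
The typedef at source line 77 declares poly64x1_t itself, and the val[2]/val[3]/val[4] members at lines 189, 305 and 421 belong to the corresponding multi-vector aggregates (poly64x1x2_t, poly64x1x3_t, poly64x1x4_t). A minimal usage sketch, not part of the header, assuming a target where <arm_neon.h> provides the poly64 types (e.g. AArch64 with the crypto extension); make_p64_pair is an illustrative name:

#include <arm_neon.h>

/* Fill the two-vector aggregate whose "poly64x1_t val[2]" member appears above. */
static poly64x1x2_t make_p64_pair(poly64_t a, poly64_t b) {
    poly64x1x2_t r;
    r.val[0] = vdup_n_p64(a);  /* each element is a single-lane poly64 vector */
    r.val[1] = vdup_n_p64(b);
    return r;
}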
34085 __ai poly8x8_t vreinterpret_p8_p64(poly64x1_t __p0) {
34091 __ai poly8x8_t vreinterpret_p8_p64(poly64x1_t __p0) {
34267 __ai poly64x1_t vreinterpret_p64_p8(poly8x8_t __p0) {
34268 poly64x1_t __ret;
34269 __ret = (poly64x1_t)(__p0);
34273 __ai poly64x1_t vreinterpret_p64_p8(poly8x8_t __p0) {
34274 poly64x1_t __ret;
34275 __ret = (poly64x1_t)(__p0);
34281 __ai poly64x1_t vreinterpret_p64_p16(poly16x4_t __p0) {
34282 poly64x1_t __ret;
34283 __ret = (poly64x1_t)(__p0);
34287 __ai poly64x1_t vreinterpret_p64_p16(poly16x4_t __p0) {
34288 poly64x1_t __ret;
34289 __ret = (poly64x1_t)(__p0);
34295 __ai poly64x1_t vreinterpret_p64_u8(uint8x8_t __p0) {
34296 poly64x1_t __ret;
34297 __ret = (poly64x1_t)(__p0);
34301 __ai poly64x1_t vreinterpret_p64_u8(uint8x8_t __p0) {
34302 poly64x1_t __ret;
34303 __ret = (poly64x1_t)(__p0);
34309 __ai poly64x1_t vreinterpret_p64_u32(uint32x2_t __p0) {
34310 poly64x1_t __ret;
34311 __ret = (poly64x1_t)(__p0);
34315 __ai poly64x1_t vreinterpret_p64_u32(uint32x2_t __p0) {
34316 poly64x1_t __ret;
34317 __ret = (poly64x1_t)(__p0);
34323 __ai poly64x1_t vreinterpret_p64_u64(uint64x1_t __p0) {
34324 poly64x1_t __ret;
34325 __ret = (poly64x1_t)(__p0);
34329 __ai poly64x1_t vreinterpret_p64_u64(uint64x1_t __p0) {
34330 poly64x1_t __ret;
34331 __ret = (poly64x1_t)(__p0);
34337 __ai poly64x1_t vreinterpret_p64_u16(uint16x4_t __p0) {
34338 poly64x1_t __ret;
34339 __ret = (poly64x1_t)(__p0);
34343 __ai poly64x1_t vreinterpret_p64_u16(uint16x4_t __p0) {
34344 poly64x1_t __ret;
34345 __ret = (poly64x1_t)(__p0);
34351 __ai poly64x1_t vreinterpret_p64_s8(int8x8_t __p0) {
34352 poly64x1_t __ret;
34353 __ret = (poly64x1_t)(__p0);
34357 __ai poly64x1_t vreinterpret_p64_s8(int8x8_t __p0) {
34358 poly64x1_t __ret;
34359 __ret = (poly64x1_t)(__p0);
34365 __ai poly64x1_t vreinterpret_p64_f64(float64x1_t __p0) {
34366 poly64x1_t __ret;
34367 __ret = (poly64x1_t)(__p0);
34371 __ai poly64x1_t vreinterpret_p64_f64(float64x1_t __p0) {
34372 poly64x1_t __ret;
34373 __ret = (poly64x1_t)(__p0);
34379 __ai poly64x1_t vreinterpret_p64_f32(float32x2_t __p0) {
34380 poly64x1_t __ret;
34381 __ret = (poly64x1_t)(__p0);
34385 __ai poly64x1_t vreinterpret_p64_f32(float32x2_t __p0) {
34386 poly64x1_t __ret;
34387 __ret = (poly64x1_t)(__p0);
34393 __ai poly64x1_t vreinterpret_p64_f16(float16x4_t __p0) {
34394 poly64x1_t __ret;
34395 __ret = (poly64x1_t)(__p0);
34399 __ai poly64x1_t vreinterpret_p64_f16(float16x4_t __p0) {
34400 poly64x1_t __ret;
34401 __ret = (poly64x1_t)(__p0);
34407 __ai poly64x1_t vreinterpret_p64_s32(int32x2_t __p0) {
34408 poly64x1_t __ret;
34409 __ret = (poly64x1_t)(__p0);
34413 __ai poly64x1_t vreinterpret_p64_s32(int32x2_t __p0) {
34414 poly64x1_t __ret;
34415 __ret = (poly64x1_t)(__p0);
34421 __ai poly64x1_t vreinterpret_p64_s64(int64x1_t __p0) {
34422 poly64x1_t __ret;
34423 __ret = (poly64x1_t)(__p0);
34427 __ai poly64x1_t vreinterpret_p64_s64(int64x1_t __p0) {
34428 poly64x1_t __ret;
34429 __ret = (poly64x1_t)(__p0);
34435 __ai poly64x1_t vreinterpret_p64_s16(int16x4_t __p0) {
34436 poly64x1_t __ret;
34437 __ret = (poly64x1_t)(__p0);
34441 __ai poly64x1_t vreinterpret_p64_s16(int16x4_t __p0) {
34442 poly64x1_t __ret;
34443 __ret = (poly64x1_t)(__p0);
34463 __ai poly16x4_t vreinterpret_p16_p64(poly64x1_t __p0) {
34469 __ai poly16x4_t vreinterpret_p16_p64(poly64x1_t __p0) {
37585 __ai uint8x8_t vreinterpret_u8_p64(poly64x1_t __p0) {
37591 __ai uint8x8_t vreinterpret_u8_p64(poly64x1_t __p0) {
37767 __ai uint32x2_t vreinterpret_u32_p64(poly64x1_t __p0) {
37773 __ai uint32x2_t vreinterpret_u32_p64(poly64x1_t __p0) {
37949 __ai uint64x1_t vreinterpret_u64_p64(poly64x1_t __p0) {
37955 __ai uint64x1_t vreinterpret_u64_p64(poly64x1_t __p0) {
38131 __ai uint16x4_t vreinterpret_u16_p64(poly64x1_t __p0) {
38137 __ai uint16x4_t vreinterpret_u16_p64(poly64x1_t __p0) {
38313 __ai int8x8_t vreinterpret_s8_p64(poly64x1_t __p0) {
38319 __ai int8x8_t vreinterpret_s8_p64(poly64x1_t __p0) {
38495 __ai float64x1_t vreinterpret_f64_p64(poly64x1_t __p0) {
38501 __ai float64x1_t vreinterpret_f64_p64(poly64x1_t __p0) {
38677 __ai float32x2_t vreinterpret_f32_p64(poly64x1_t __p0) {
38683 __ai float32x2_t vreinterpret_f32_p64(poly64x1_t __p0) {
38859 __ai float16x4_t vreinterpret_f16_p64(poly64x1_t __p0) {
38865 __ai float16x4_t vreinterpret_f16_p64(poly64x1_t __p0) {
39041 __ai int32x2_t vreinterpret_s32_p64(poly64x1_t __p0) {
39047 __ai int32x2_t vreinterpret_s32_p64(poly64x1_t __p0) {
39223 __ai int64x1_t vreinterpret_s64_p64(poly64x1_t __p0) {
39229 __ai int64x1_t vreinterpret_s64_p64(poly64x1_t __p0) {
39405 __ai int16x4_t vreinterpret_s16_p64(poly64x1_t __p0) {
39411 __ai int16x4_t vreinterpret_s16_p64(poly64x1_t __p0) {
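
Each vreinterpret_p64_* / vreinterpret_*_p64 pair above is a pure bit-pattern cast between 64-bit NEON vector types; no data movement is implied. A round-trip sketch, assuming an AArch64 target with poly64 support (bits_to_poly_and_back is an illustrative name):

#include <arm_neon.h>

static uint64x1_t bits_to_poly_and_back(uint64x1_t bits) {
    poly64x1_t p = vreinterpret_p64_u64(bits);  /* same 64 bits, polynomial view */
    return vreinterpret_u64_p64(p);             /* and back to the integer view  */
}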
40549 __ai poly64x1_t vbsl_p64(uint64x1_t __p0, poly64x1_t __p1, poly64x1_t __p2) {
40550 poly64x1_t __ret;
40551 __ret = (poly64x1_t) __builtin_neon_vbsl_v((int8x8_t)__p0, (int8x8_t)__p1, (int8x8_t)__p2, 6);
40555 __ai poly64x1_t vbsl_p64(uint64x1_t __p0, poly64x1_t __p1, poly64x1_t __p2) {
40556 poly64x1_t __ret;
40557 __ret = (poly64x1_t) __builtin_neon_vbsl_v((int8x8_t)__p0, (int8x8_t)__p1, (int8x8_t)__p2, 6);
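
vbsl_p64 is a bitwise select: result bits are taken from __p1 where the uint64x1_t mask __p0 is 1 and from __p2 where it is 0. A sketch, assuming AArch64 with poly64 support (select_low_half is an illustrative name and the mask is arbitrary):

#include <arm_neon.h>

static poly64x1_t select_low_half(poly64x1_t a, poly64x1_t b) {
    uint64x1_t mask = vdup_n_u64(0x00000000FFFFFFFFULL);  /* 1 bits select from a */
    return vbsl_p64(mask, a, b);                          /* 0 bits select from b */
}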
40849 __ai uint64x1_t vceq_p64(poly64x1_t __p0, poly64x1_t __p1) {
40855 __ai uint64x1_t vceq_p64(poly64x1_t __p0, poly64x1_t __p1) {
41045 __ai uint64x1_t vceqz_p64(poly64x1_t __p0) {
41051 __ai uint64x1_t vceqz_p64(poly64x1_t __p0) {
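
vceq_p64 compares two poly64 lanes and vceqz_p64 compares against zero; both return an all-ones or all-zeros uint64x1_t lane. A sketch that collapses the lane to a C truth value, assuming these poly64 comparisons are available on the target (poly64_equal is an illustrative name):

#include <arm_neon.h>

static int poly64_equal(poly64x1_t a, poly64x1_t b) {
    uint64x1_t cmp = vceq_p64(a, b);   /* 0xFFFFFFFFFFFFFFFF if equal, 0 otherwise */
    return vget_lane_u64(cmp, 0) != 0;
}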
43009 __ai poly64x2_t vcombine_p64(poly64x1_t __p0, poly64x1_t __p1) {
43015 __ai poly64x2_t vcombine_p64(poly64x1_t __p0, poly64x1_t __p1) {
43951 __ai poly64x1_t vcreate_p64(uint64_t __p0) {
43952 poly64x1_t __ret;
43953 __ret = (poly64x1_t)(__p0);
43957 __ai poly64x1_t vcreate_p64(uint64_t __p0) {
43958 poly64x1_t __ret;
43959 __ret = (poly64x1_t)(__p0);
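
vcreate_p64 builds a one-lane polynomial vector directly from a 64-bit bit pattern. A sketch, assuming AArch64 with poly64 support; the constant is arbitrary:

#include <arm_neon.h>

static poly64x1_t example_p64_constant(void) {
    return vcreate_p64(0x0123456789ABCDEFULL);  /* argument is the raw lane value */
}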
45126 poly64x1_t __s0 = __p0; \
45127 poly64x1_t __ret; \
45133 poly64x1_t __s0 = __p0; \
45134 poly64x1_t __ret; \
45142 poly64x1_t __s0 = __p0; \
45149 poly64x1_t __s0 = __p0; \
45451 poly64x1_t __ret; \
45459 poly64x1_t __ret; \
45931 __ai poly64x1_t vdup_n_p64(poly64_t __p0) {
45932 poly64x1_t __ret;
45933 __ret = (poly64x1_t) {__p0};
45937 __ai poly64x1_t vdup_n_p64(poly64_t __p0) {
45938 poly64x1_t __ret;
45939 __ret = (poly64x1_t) {__p0};
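
vdup_n_p64 broadcasts a poly64_t scalar into the (single) lane; it differs from vcreate_p64 only in taking a poly64_t rather than a uint64_t bit pattern. A sketch, assuming AArch64 with poly64 support:

#include <arm_neon.h>

static poly64x1_t splat_p64(poly64_t x) {
    return vdup_n_p64(x);  /* result is { x } */
}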
45990 poly64x1_t __s0 = __p0; \
45991 poly64x1_t __s1 = __p1; \
45992 poly64x1_t __ret; \
45993 __ret = (poly64x1_t) __builtin_neon_vext_v((int8x8_t)__s0, (int8x8_t)__s1, __p2, 6); \
45998 poly64x1_t __s0 = __p0; \
45999 poly64x1_t __s1 = __p1; \
46000 poly64x1_t __ret; \
46001 __ret = (poly64x1_t) __builtin_neon_vext_v((int8x8_t)__s0, (int8x8_t)__s1, __p2, 6); \
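
vext_p64 extracts a one-lane vector from the concatenation of its two operands starting at lane __p2; with a single lane per vector the only legal index is 0, which returns the first operand unchanged. A sketch, assuming AArch64 with poly64 support:

#include <arm_neon.h>

static poly64x1_t ext_p64_demo(poly64x1_t a, poly64x1_t b) {
    return vext_p64(a, b, 0);  /* index must be 0 for 1-lane vectors */
}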
46919 __ai poly64x1_t vget_high_p64(poly64x2_t __p0) {
46920 poly64x1_t __ret;
46925 __ai poly64x1_t vget_high_p64(poly64x2_t __p0) {
46927 poly64x1_t __ret;
46931 __ai poly64x1_t __noswap_vget_high_p64(poly64x2_t __p0) {
46932 poly64x1_t __ret;
46955 poly64x1_t __s0 = __p0; \
46962 poly64x1_t __s0 = __p0; \
46968 poly64x1_t __s0 = __p0; \
47086 __ai poly64x1_t vget_low_p64(poly64x2_t __p0) {
47087 poly64x1_t __ret;
47092 __ai poly64x1_t vget_low_p64(poly64x2_t __p0) {
47094 poly64x1_t __ret;
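
vget_high_p64 and vget_low_p64 split a poly64x2_t into its two one-lane halves, and vget_lane_p64 extracts the scalar. A sketch that builds a pair with vcombine_p64 and takes it apart again, assuming AArch64 with poly64 support (high_lane is an illustrative name):

#include <arm_neon.h>

static poly64_t high_lane(poly64x1_t lo, poly64x1_t hi) {
    poly64x2_t pair = vcombine_p64(lo, hi);  /* lane 0 = lo, lane 1 = hi  */
    poly64x1_t h    = vget_high_p64(pair);   /* back to a one-lane vector */
    return vget_lane_p64(h, 0);              /* and then to a scalar      */
}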
47117 poly64x1_t __ret; \
47118 __ret = (poly64x1_t) __builtin_neon_vld1_v(__p0, 6); \
47123 poly64x1_t __ret; \
47124 __ret = (poly64x1_t) __builtin_neon_vld1_v(__p0, 6); \
47175 poly64x1_t __ret; \
47176 __ret = (poly64x1_t) __builtin_neon_vld1_dup_v(__p0, 6); \
47181 poly64x1_t __ret; \
47182 __ret = (poly64x1_t) __builtin_neon_vld1_dup_v(__p0, 6); \
47233 poly64x1_t __s1 = __p1; \
47234 poly64x1_t __ret; \
47235 __ret = (poly64x1_t) __builtin_neon_vld1_lane_v(__p0, (int8x8_t)__s1, __p2, 6); \
47240 poly64x1_t __s1 = __p1; \
47241 poly64x1_t __ret; \
47242 __ret = (poly64x1_t) __builtin_neon_vld1_lane_v(__p0, (int8x8_t)__s1, __p2, 6); \
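
vld1_p64 loads one poly64 lane from memory, vld1_dup_p64 loads and replicates it, and vld1_lane_p64 replaces a chosen lane of an existing vector. A sketch, assuming AArch64 with poly64 support (load_p64_demo is an illustrative name):

#include <arm_neon.h>

static poly64x1_t load_p64_demo(const poly64_t *src, poly64x1_t current) {
    poly64x1_t plain = vld1_p64(src);       /* load lane 0 from *src              */
    poly64x1_t dup   = vld1_dup_p64(src);   /* load and replicate (same, 1 lane)  */
    (void)plain; (void)dup;
    return vld1_lane_p64(src, current, 0);  /* overwrite lane 0 of current        */
}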
60128 poly64x1_t __s1 = __p1; \
60129 poly64x1_t __ret; \
60130 __ret = (poly64x1_t) __builtin_neon_vset_lane_i64(__s0, (int8x8_t)__s1, __p2); \
60136 poly64x1_t __s1 = __p1; \
60137 poly64x1_t __ret; \
60138 __ret = (poly64x1_t) __builtin_neon_vset_lane_i64(__s0, (int8x8_t)__s1, __p2); \
60143 poly64x1_t __s1 = __p1; \
60144 poly64x1_t __ret; \
60145 __ret = (poly64x1_t) __builtin_neon_vset_lane_i64(__s0, (int8x8_t)__s1, __p2); \
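
vset_lane_p64 writes a poly64_t scalar into the chosen lane of a vector (only lane 0 exists for poly64x1_t). A sketch, assuming AArch64 with poly64 support:

#include <arm_neon.h>

static poly64x1_t set_lane_demo(poly64_t x, poly64x1_t v) {
    return vset_lane_p64(x, v, 0);  /* replace lane 0 of v with x */
}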
60593 poly64x1_t __s0 = __p0; \
60594 poly64x1_t __s1 = __p1; \
60595 poly64x1_t __ret; \
60596 __ret = (poly64x1_t) __builtin_neon_vsli_n_v((int8x8_t)__s0, (int8x8_t)__s1, __p2, 6); \
60601 poly64x1_t __s0 = __p0; \
60602 poly64x1_t __s1 = __p1; \
60603 poly64x1_t __ret; \
60604 __ret = (poly64x1_t) __builtin_neon_vsli_n_v((int8x8_t)__s0, (int8x8_t)__s1, __p2, 6); \
60955 poly64x1_t __s0 = __p0; \
60956 poly64x1_t __s1 = __p1; \
60957 poly64x1_t __ret; \
60958 __ret = (poly64x1_t) __builtin_neon_vsri_n_v((int8x8_t)__s0, (int8x8_t)__s1, __p2, 6); \
60963 poly64x1_t __s0 = __p0; \
60964 poly64x1_t __s1 = __p1; \
60965 poly64x1_t __ret; \
60966 __ret = (poly64x1_t) __builtin_neon_vsri_n_v((int8x8_t)__s0, (int8x8_t)__s1, __p2, 6); \
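
vsli_n_p64 shifts the second operand left by n bits and inserts it into the first, preserving the first operand's low n bits; vsri_n_p64 is the right-shift counterpart, preserving the high n bits. A sketch, assuming AArch64 with poly64 support; the shift counts are arbitrary:

#include <arm_neon.h>

static poly64x1_t shift_insert_demo(poly64x1_t a, poly64x1_t b) {
    poly64x1_t left = vsli_n_p64(a, b, 8);  /* (b << 8) merged into a, low 8 bits of a kept */
    return vsri_n_p64(left, b, 8);          /* (b >> 8) merged in, high 8 bits of left kept */
}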
60994 poly64x1_t __s1 = __p1; \
60999 poly64x1_t __s1 = __p1; \
61044 poly64x1_t __s1 = __p1; \
61049 poly64x1_t __s1 = __p1; \
64264 __ai uint64x1_t vtst_p64(poly64x1_t __p0, poly64x1_t __p1) {
64270 __ai uint64x1_t vtst_p64(poly64x1_t __p0, poly64x1_t __p1) {
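
vtst_p64 ANDs the two operands and returns an all-ones lane if any bit survives, all zeros otherwise. A sketch, assuming this poly64 variant is available on the target (shares_set_bits is an illustrative name):

#include <arm_neon.h>

static int shares_set_bits(poly64x1_t a, poly64x1_t b) {
    uint64x1_t t = vtst_p64(a, b);   /* all-ones lane iff (a & b) != 0 */
    return vget_lane_u64(t, 0) != 0;
}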
67576 poly64x1_t __s2_250 = __p2_250; \
67584 poly64x1_t __s2_251 = __p2_251; \
67615 poly64x1_t __s0_254 = __p0_254; \
67616 poly64x1_t __s2_254 = __p2_254; \
67617 poly64x1_t __ret_254; \
67623 poly64x1_t __s0_255 = __p0_255; \
67624 poly64x1_t __s2_255 = __p2_255; \
67625 poly64x1_t __ret_255; \
67693 poly64x1_t __s0_262 = __p0_262; \
67695 poly64x1_t __ret_262; \
67701 poly64x1_t __s0_263 = __p0_263; \
67704 poly64x1_t __ret_263; \
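
The numbered temporaries above (__s0_254, __s2_254, and so on) follow the expansion pattern of the vcopy_lane_p64 family of macros, which copy one lane of a source vector into a lane of a destination vector; that attribution is an inference from the listing, not stated in it. A usage sketch on that assumption, for AArch64 with poly64 support (copy_lane_demo is an illustrative name):

#include <arm_neon.h>

static poly64x1_t copy_lane_demo(poly64x1_t dst, poly64x1_t src) {
    /* Replace lane 0 of dst with lane 0 of src; both vectors have a single lane. */
    return vcopy_lane_p64(dst, 0, src, 0);
}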