Lines Matching full:v2i64
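The entries below appear to come from the AArch64 SelectionDAG pattern definitions (most likely llvm/lib/Target/AArch64/AArch64InstrInfo.td); each entry gives the source line number followed by the line that mentions the v2i64 value type, so records spanning several lines show up only partially. For orientation, here is a minimal sketch of what a complete selection pattern in this style looks like, assuming the V128 register class and the ADDv2i64 instruction record defined elsewhere in the same file:

    // Illustrative only, not one of the matches below: lower a SelectionDAG
    // integer add whose result type is v2i64 to the NEON ADDv2i64
    // instruction.  The first dag is what the matcher looks for, the
    // second is the MachineInstr to emit.
    def : Pat<(v2i64 (add (v2i64 V128:$Rn), (v2i64 V128:$Rm))),
              (ADDv2i64 V128:$Rn, V128:$Rm)>;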
1396 defm : ScalToVecROLoadPat<ro64, load, i64, v2i64, LDRDroW, LDRDroX, dsub>;
1439 defm : VecROLoadPat<ro128, v2i64, LDRQroW, LDRQroX>;
1558 def : Pat <(v2i64 (scalar_to_vector (i64
1560 (INSERT_SUBREG (v2i64 (IMPLICIT_DEF)),
1595 def : Pat<(v2i64 (load (am_indexed128 GPR64sp:$Rn, uimm12s16:$offset))),
1747 def : Pat<(v2i64 (load (am_unscaled128 GPR64sp:$Rn, simm9:$offset))),
2064 defm : VecROStorePat<ro128, v2i64, FPR128, STRQroW, STRQroX>;
2097 defm : VecROStoreLane0Pat<ro64, store, v2i64, i64, dsub, STRDroW, STRDroX>;
2177 def : Pat<(store (v2i64 FPR128:$Rt),
2272 def : Pat<(store (v2i64 FPR128:$Rt),
2368 def : Pat<(pre_store (v2i64 FPR128:$Rt), GPR64sp:$addr, simm9:$off),
2422 def : Pat<(post_store (v2i64 FPR128:$Rt), GPR64sp:$addr, simm9:$off),
2734 def : Pat<(xor (v2i64 (AArch64vashr v2i64:$src, (i32 63))),
2735 (v2i64 (add (sub (zext (v2i32 V64:$opA)),
2737 (AArch64vashr v2i64:$src, (i32 63))))),
2739 def : Pat<(xor (v2i64 (AArch64vashr v2i64:$src, (i32 63))),
2740 (v2i64 (add (sub (zext (extract_high_v4i32 V128:$opA)),
2742 (AArch64vashr v2i64:$src, (i32 63))))),
2764 def : Pat<(xor (v2i64 (AArch64vashr V128:$src, (i32 63))),
2765 (v2i64 (add V128:$src, (AArch64vashr V128:$src, (i32 63))))),
2826 def : Pat<(v2i64 (int_aarch64_neon_fcvtzs v2f64:$Rn)), (FCVTZSv2f64 $Rn)>;
2832 def : Pat<(v2i64 (int_aarch64_neon_fcvtzu v2f64:$Rn)), (FCVTZUv2f64 $Rn)>;
2860 def : Pat<(AArch64neg (v2i64 V128:$Rn)), (NEGv2i64 V128:$Rn)>;
2869 def : Pat<(AArch64not (v2i64 V128:$Rn)), (NOTv16i8 V128:$Rn)>;
2875 def : Pat<(vnot (v2i64 V128:$Rn)), (NOTv16i8 V128:$Rn)>;
2921 def : Pat<(AArch64vshl (v2i64 (ext (v2i32 V64:$Rn))), (i32 32)),
2923 def : Pat<(AArch64vshl (v2i64 (ext (extract_high_v4i32 V128:$Rn))), (i32 32)),
3058 def : Pat<(AArch64bsl (v2i64 V128:$Rd), V128:$Rn, V128:$Rm),
3584 def : Pat<(v2i64 (opnode (v2i32 V64:$Rn), (v2i32 V64:$Rm))),
3600 def : Pat<(v2i64 (opnode (v2i64 V128:$Rd), (v2i32 V64:$Rn), (v2i32 V64:$Rm))),
3620 def : Pat<(int_aarch64_neon_pmull64 (extractelt (v2i64 V128:$Rn), (i64 1)),
3621 (extractelt (v2i64 V128:$Rm), (i64 1))),
3633 def : Pat<(v2i32 (trunc (v2i64 (AArch64vlshr (add V128:$Rn, V128:$Rm),
3647 (trunc (v2i64 (AArch64vlshr (add V128:$Rn, V128:$Rm),
3658 def : Pat<(v2i32 (trunc (v2i64 (AArch64vlshr (sub V128:$Rn, V128:$Rm),
3672 (trunc (v2i64 (AArch64vlshr (sub V128:$Rn, V128:$Rm),
3695 def : Pat<(v2i64 (AArch64ext V128:$Rn, V128:$Rm, (i32 imm:$imm))),
3769 def : Pat<(v2i64 (AArch64saddv V128:$Rn)),
3770 (INSERT_SUBREG (v2i64 (IMPLICIT_DEF)), (ADDPv2i64p V128:$Rn), dsub)>;
3771 def : Pat<(v2i64 (AArch64uaddv V128:$Rn)),
3772 (INSERT_SUBREG (v2i64 (IMPLICIT_DEF)), (ADDPv2i64p V128:$Rn), dsub)>;
3806 def DUPv2i64gpr : SIMDDupFromMain<1, {?,1,0,0,0}, ".2d", v2i64, V128, GPR64>;
3884 def : Pat<(ResVT (AArch64dup (i32 (trunc (extractelt (v2i64 V128:$Rn),
3958 def : Pat<(v2i64 (scalar_to_vector (i64 FPR64:$Rn))),
3959 (v2i64 (INSERT_SUBREG (v2i64 (IMPLICIT_DEF)),
4035 def : Pat<(v2i64 (int_aarch64_neon_vcopy_lane
4036 (v2i64 V128:$Vd), VectorIndexD:$idx, (v2i64 V128:$Vs),
4038 (v2i64 (INSvi64lane
4101 def : ConcatPat<v2i64, v1i64>;
4114 def : ConcatUndefPat<v2i64, v1i64>;
4429 [(set (v2i64 V128:$Rd), (AArch64movi_edit imm0_255:$imm8))]>;
4431 def : Pat<(v2i64 immAllZerosV), (MOVIv2d_ns (i32 0))>;
4436 def : Pat<(v2i64 immAllOnesV), (MOVIv2d_ns (i32 255))>;
4813 def : Pat<(v2i32 (trunc (AArch64vlshr (v2i64 V128:$Rn), vecshiftR64Narrow:$imm))),
4827 (trunc (AArch64vlshr (v2i64 V128:$Rn),
4840 def : Pat<(v2i64 (sext (v2i32 V64:$Rn))), (SSHLLv2i32_shift V64:$Rn, (i32 0))>;
4841 def : Pat<(v2i64 (zext (v2i32 V64:$Rn))), (USHLLv2i32_shift V64:$Rn, (i32 0))>;
4842 def : Pat<(v2i64 (anyext (v2i32 V64:$Rn))), (USHLLv2i32_shift V64:$Rn, (i32 0))>;
4856 def : Pat<(v2i64 (anyext (v2i32 (extract_subvector V128:$Rn, (i64 2)) ))),
4858 def : Pat<(v2i64 (zext (v2i32 (extract_subvector V128:$Rn, (i64 2)) ))),
4860 def : Pat<(v2i64 (sext (v2i32 (extract_subvector V128:$Rn, (i64 2)) ))),
5053 def : Ld1Pat<v2i64, LD1Onev2d>;
5066 def : St1Pat<v2i64, ST1Onev2d>;
5111 def : Pat<(v2i64 (AArch64dup (i64 (load GPR64sp:$Rn)))),
5139 def : Ld1Lane128Pat<load, VectorIndexD, v2i64, i64, LD1i64>;
5182 def : St1Lane128Pat<store, VectorIndexD, v2i64, i64, ST1i64>;
5246 defm : St1LanePost128Pat<post_store, VectorIndexD, v2i64, i64, ST1i64_POST, 8>;
5467 def : Pat<(v2i64 (AArch64NvCast (v4i32 FPR128:$src))), (v2i64 FPR128:$src)>;
5474 def : Pat<(v2i64 (AArch64NvCast (v8i16 FPR128:$src))), (v2i64 FPR128:$src)>;
5482 def : Pat<(v2i64 (AArch64NvCast (v16i8 FPR128:$src))), (v2i64 FPR128:$src)>;
5486 def : Pat<(v16i8 (AArch64NvCast (v2i64 FPR128:$src))), (v16i8 FPR128:$src)>;
5487 def : Pat<(v8i16 (AArch64NvCast (v2i64 FPR128:$src))), (v8i16 FPR128:$src)>;
5488 def : Pat<(v8f16 (AArch64NvCast (v2i64 FPR128:$src))), (v8f16 FPR128:$src)>;
5489 def : Pat<(v4i32 (AArch64NvCast (v2i64 FPR128:$src))), (v4i32 FPR128:$src)>;
5490 def : Pat<(v2i64 (AArch64NvCast (v2i64 FPR128:$src))), (v2i64 FPR128:$src)>;
5491 def : Pat<(v4f32 (AArch64NvCast (v2i64 FPR128:$src))), (v4f32 FPR128:$src)>;
5492 def : Pat<(v2f64 (AArch64NvCast (v2i64 FPR128:$src))), (v2f64 FPR128:$src)>;
5498 def : Pat<(v2i64 (AArch64NvCast (v4f32 FPR128:$src))), (v2i64 FPR128:$src)>;
5505 def : Pat<(v2i64 (AArch64NvCast (v2f64 FPR128:$src))), (v2i64 FPR128:$src)>;
5769 def : Pat<(f128 (bitconvert (v2i64 FPR128:$src))), (f128 FPR128:$src)>;
5778 def : Pat<(f128 (bitconvert (v2i64 FPR128:$src))),
5822 def : Pat<(v2f64 (bitconvert (v2i64 FPR128:$src))), (v2f64 FPR128:$src)>;
5829 def : Pat<(v4f32 (bitconvert (v2i64 FPR128:$src))), (v4f32 FPR128:$src)>;
5842 def : Pat<(v4f32 (bitconvert (v2i64 FPR128:$src))),
5850 def : Pat<(v2i64 (bitconvert (f128 FPR128:$src))), (v2i64 FPR128:$src)>;
5851 def : Pat<(v2i64 (bitconvert (v4i32 FPR128:$src))), (v2i64 FPR128:$src)>;
5852 def : Pat<(v2i64 (bitconvert (v8i16 FPR128:$src))), (v2i64 FPR128:$src)>;
5853 def : Pat<(v2i64 (bitconvert (v16i8 FPR128:$src))), (v2i64 FPR128:$src)>;
5854 def : Pat<(v2i64 (bitconvert (v4f32 FPR128:$src))), (v2i64 FPR128:$src)>;
5855 def : Pat<(v2i64 (bitconvert (v8f16 FPR128:$src))), (v2i64 FPR128:$src)>;
5858 def : Pat<(v2i64 (bitconvert (f128 FPR128:$src))),
5859 (v2i64 (EXTv16i8 FPR128:$src,
5861 def : Pat<(v2i64 (bitconvert (v4i32 FPR128:$src))),
5862 (v2i64 (REV64v4i32 FPR128:$src))>;
5863 def : Pat<(v2i64 (bitconvert (v8i16 FPR128:$src))),
5864 (v2i64 (REV64v8i16 FPR128:$src))>;
5865 def : Pat<(v2i64 (bitconvert (v16i8 FPR128:$src))),
5866 (v2i64 (REV64v16i8 FPR128:$src))>;
5867 def : Pat<(v2i64 (bitconvert (v4f32 FPR128:$src))),
5868 (v2i64 (REV64v4i32 FPR128:$src))>;
5869 def : Pat<(v2i64 (bitconvert (v8f16 FPR128:$src))),
5870 (v2i64 (REV64v8i16 FPR128:$src))>;
5872 def : Pat<(v2i64 (bitconvert (v2f64 FPR128:$src))), (v2i64 FPR128:$src)>;
5876 def : Pat<(v4i32 (bitconvert (v2i64 FPR128:$src))), (v4i32 FPR128:$src)>;
5887 def : Pat<(v4i32 (bitconvert (v2i64 FPR128:$src))),
5902 def : Pat<(v8i16 (bitconvert (v2i64 FPR128:$src))), (v8i16 FPR128:$src)>;
5914 def : Pat<(v8i16 (bitconvert (v2i64 FPR128:$src))),
5930 def : Pat<(v8f16 (bitconvert (v2i64 FPR128:$src))), (v8f16 FPR128:$src)>;
5942 def : Pat<(v8f16 (bitconvert (v2i64 FPR128:$src))),
5958 def : Pat<(v16i8 (bitconvert (v2i64 FPR128:$src))), (v16i8 FPR128:$src)>;
5970 def : Pat<(v16i8 (bitconvert (v2i64 FPR128:$src))),
6005 def : Pat<(v1i64 (extract_subvector (v2i64 FPR128:$Rn), (i64 1))),
6011 (INSERT_SUBREG (v2i64 (IMPLICIT_DEF)), FPR64:$src, dsub)>;
6025 // Use pair-wise add instructions when summing up the lanes for v2f64, v2i64
6027 def : Pat<(i64 (add (vector_extract (v2i64 FPR128:$Rn), (i64 0)),
6028 (vector_extract (v2i64 FPR128:$Rn), (i64 1)))),
6029 (i64 (ADDPv2i64p (v2i64 FPR128:$Rn)))>;
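The comment on line 6025 says the same pair-wise trick covers v2f64 as well, but only the v2i64 pattern contains the search token. A sketch of the presumed v2f64 companion, assuming the upstream naming convention in which the scalar pair-wise FADDP record for doubles is called FADDPv2i64p:

    // Presumed floating-point companion of the pattern above: sum the two
    // f64 lanes of a Q register with a single scalar pair-wise FADDP
    // instead of two lane extracts and a scalar FADD.
    def : Pat<(f64 (fadd (vector_extract (v2f64 FPR128:$Rn), (i64 0)),
                         (vector_extract (v2f64 FPR128:$Rn), (i64 1)))),
              (f64 (FADDPv2i64p (v2f64 FPR128:$Rn)))>;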
6062 def : NTStore128Pat<v2i64>;
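A large share of the matches (lines 5769 through 5970) are bitconvert patterns, and they come in pairs: an identity copy such as line 5851 and a REV64 form such as line 5861 for the same type pair. In the upstream file the two forms are guarded by endianness predicates: on a little-endian target a v2i64 <-> v4i32 bitcast merely re-interprets the 128-bit register, while on a big-endian target the elements must be reversed to preserve the in-memory layout. A sketch of how such a pair is typically wrapped, assuming the IsLE/IsBE predicate names used upstream:

    // Little-endian: the bitcast is a no-op re-interpretation of FPR128,
    // so no instruction is emitted.
    let Predicates = [IsLE] in
    def : Pat<(v2i64 (bitconvert (v4i32 FPR128:$src))), (v2i64 FPR128:$src)>;

    // Big-endian: reverse the 32-bit elements within each 64-bit lane so
    // the value seen through the v2i64 view still matches memory order.
    let Predicates = [IsBE] in
    def : Pat<(v2i64 (bitconvert (v4i32 FPR128:$src))),
              (v2i64 (REV64v4i32 FPR128:$src))>;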