Lines matching refs:MVT (all references to MVT in ARMFastISel)
139 unsigned FastEmitInst_extractsubreg(MVT RetVT,
179 bool isTypeLegal(Type *Ty, MVT &VT);
180 bool isLoadTypeLegal(Type *Ty, MVT &VT);
183 bool ARMEmitLoad(MVT VT, unsigned &ResultReg, Address &Addr,
186 bool ARMEmitStore(MVT VT, unsigned SrcReg, Address &Addr,
189 void ARMSimplifyAddress(Address &Addr, MVT VT, bool useAM3);
193 unsigned ARMEmitIntExt(MVT SrcVT, unsigned SrcReg, MVT DestVT, bool isZExt);
194 unsigned ARMMaterializeFP(const ConstantFP *CFP, MVT VT);
195 unsigned ARMMaterializeInt(const Constant *C, MVT VT);
196 unsigned ARMMaterializeGV(const GlobalValue *GV, MVT VT);
197 unsigned ARMMoveToFPReg(MVT VT, unsigned SrcReg);
198 unsigned ARMMoveToIntReg(MVT VT, unsigned SrcReg);
200 unsigned ARMLowerPICELF(const GlobalValue *GV, unsigned Align, MVT VT);
209 SmallVectorImpl<MVT> &ArgVTs,
216 bool FinishCall(MVT RetVT, SmallVectorImpl<unsigned> &UsedRegs,
226 void AddLoadStoreOperands(MVT VT, Address &Addr,
477 unsigned ARMFastISel::FastEmitInst_extractsubreg(MVT RetVT,
492 unsigned ARMFastISel::ARMMoveToFPReg(MVT VT, unsigned SrcReg) {
493 if (VT == MVT::f64) return 0;
502 unsigned ARMFastISel::ARMMoveToIntReg(MVT VT, unsigned SrcReg) {
503 if (VT == MVT::i64) return 0;
515 unsigned ARMFastISel::ARMMaterializeFP(const ConstantFP *CFP, MVT VT) {
517 bool is64bit = VT == MVT::f64;
559 unsigned ARMFastISel::ARMMaterializeInt(const Constant *C, MVT VT) {
561 if (VT != MVT::i32 && VT != MVT::i16 && VT != MVT::i8 && VT != MVT::i1)
579 if (VT == MVT::i32 && Subtarget->hasV6T2Ops() && CI->isNegative()) {
585 unsigned ImmReg = createResultReg(TLI.getRegClassFor(MVT::i32));
594 if (VT != MVT::i32)
621 unsigned ARMFastISel::ARMMaterializeGV(const GlobalValue *GV, MVT VT) {
623 if (VT != MVT::i32) return 0;
733 MVT VT = CEVT.getSimpleVT();
751 MVT VT;
773 bool ARMFastISel::isTypeLegal(Type *Ty, MVT &VT) {
777 if (evt == MVT::Other || !evt.isSimple()) return false;
785 bool ARMFastISel::isLoadTypeLegal(Type *Ty, MVT &VT) {
790 if (VT == MVT::i1 || VT == MVT::i8 || VT == MVT::i16)
906 void ARMFastISel::ARMSimplifyAddress(Address &Addr, MVT VT, bool useAM3) {
910 case MVT::i1:
911 case MVT::i8:
912 case MVT::i16:
913 case MVT::i32:
926 case MVT::f32:
927 case MVT::f64:
953 Addr.Base.Reg = FastEmit_ri_(MVT::i32, ISD::ADD, Addr.Base.Reg,
954 /*Op0IsKill*/false, Addr.Offset, MVT::i32);
959 void ARMFastISel::AddLoadStoreOperands(MVT VT, Address &Addr,
964 if (VT.SimpleTy == MVT::f32 || VT.SimpleTy == MVT::f64)
1007 bool ARMFastISel::ARMEmitLoad(MVT VT, unsigned &ResultReg, Address &Addr,
1016 case MVT::i1:
1017 case MVT::i8:
1033 case MVT::i16:
1048 case MVT::i32:
1062 case MVT::f32:
1067 VT = MVT::i32;
1075 case MVT::f64:
1100 unsigned MoveReg = createResultReg(TLI.getRegClassFor(MVT::f32));
1115 MVT VT;
1130 bool ARMFastISel::ARMEmitStore(MVT VT, unsigned SrcReg, Address &Addr,
1137 case MVT::i1: {
1147 case MVT::i8:
1157 case MVT::i16:
1171 case MVT::i32:
1184 case MVT::f32:
1188 unsigned MoveReg = createResultReg(TLI.getRegClassFor(MVT::i32));
1193 VT = MVT::i32;
1199 case MVT::f64:
1229 MVT VT;
1331 MVT SourceVT;
1410 MVT SrcVT = SrcEVT.getSimpleVT();
1424 if (SrcVT == MVT::i32 || SrcVT == MVT::i16 || SrcVT == MVT::i8 ||
1425 SrcVT == MVT::i1) {
1439 if (SrcVT == MVT::f32 || SrcVT == MVT::f64)
1450 case MVT::f32:
1454 case MVT::f64:
1458 case MVT::i1:
1459 case MVT::i8:
1460 case MVT::i16:
1463 case MVT::i32:
1489 SrcReg1 = ARMEmitIntExt(SrcVT, SrcReg1, MVT::i32, isZExt);
1492 SrcReg2 = ARMEmitIntExt(SrcVT, SrcReg2, MVT::i32, isZExt);
1593 MVT DstVT;
1602 MVT SrcVT = SrcEVT.getSimpleVT();
1603 if (SrcVT != MVT::i32 && SrcVT != MVT::i16 && SrcVT != MVT::i8)
1610 if (SrcVT == MVT::i16 || SrcVT == MVT::i8) {
1611 SrcReg = ARMEmitIntExt(SrcVT, SrcReg, MVT::i32,
1618 unsigned FP = ARMMoveToFPReg(MVT::f32, SrcReg);
1638 MVT DstVT;
1653 unsigned ResultReg = createResultReg(TLI.getRegClassFor(MVT::f32));
1668 MVT VT;
1673 if (VT != MVT::i32) return false;
1685 assert (VT == MVT::i32 && "Expecting an i32.");
1729 MVT VT;
1741 if (VT == MVT::i8)
1743 else if (VT == MVT::i16)
1745 else if (VT == MVT::i32)
1747 else if (VT == MVT::i64)
1749 else if (VT == MVT::i128)
1757 MVT VT;
1763 if (VT == MVT::i8)
1765 else if (VT == MVT::i16)
1767 else if (VT == MVT::i32)
1769 else if (VT == MVT::i64)
1771 else if (VT == MVT::i128)
1783 if (DestVT != MVT::i16 && DestVT != MVT::i8 && DestVT != MVT::i1)
1819 MVT VT = FPVT.getSimpleVT();
1831 bool is64bit = VT == MVT::f64 || VT == MVT::i64;
1905 SmallVectorImpl<MVT> &ArgVTs,
1920 MVT ArgVT = ArgVTs[VA.getValNo()];
1931 if (VA.getLocVT() != MVT::f64 ||
1939 case MVT::i1:
1940 case MVT::i8:
1941 case MVT::i16:
1942 case MVT::i32:
1944 case MVT::f32:
1948 case MVT::f64:
1971 MVT ArgVT = ArgVTs[VA.getValNo()];
1980 MVT DestVT = VA.getLocVT();
1989 MVT DestVT = VA.getLocVT();
2014 assert(VA.getLocVT() == MVT::f64 &&
2044 bool ARMFastISel::FinishCall(MVT RetVT, SmallVectorImpl<unsigned> &UsedRegs,
2054 if (RetVT != MVT::isVoid) {
2060 if (RVLocs.size() == 2 && RetVT == MVT::f64) {
2063 MVT DestVT = RVLocs[0].getValVT();
2078 MVT CopyVT = RVLocs[0].getValVT();
2081 if (RetVT == MVT::i1 || RetVT == MVT::i8 || RetVT == MVT::i16)
2082 CopyVT = MVT::i32;
2141 MVT RVVT = RVEVT.getSimpleVT();
2142 MVT DestVT = VA.getValVT();
2145 if (RVVT != MVT::i1 && RVVT != MVT::i8 && RVVT != MVT::i16)
2148 assert(DestVT == MVT::i32 && "ARM should always ext to i32");
2211 MVT RetVT;
2213 RetVT = MVT::isVoid;
2218 if (RetVT != MVT::isVoid && RetVT != MVT::i32) {
2222 if (RVLocs.size() >= 2 && RetVT != MVT::f64)
2229 SmallVector<MVT, 8> ArgVTs;
2241 MVT ArgVT;
2320 MVT RetVT;
2322 RetVT = MVT::isVoid;
2323 else if (!isTypeLegal(RetTy, RetVT) && RetVT != MVT::i16 &&
2324 RetVT != MVT::i8 && RetVT != MVT::i1)
2328 if (RetVT != MVT::isVoid && RetVT != MVT::i1 && RetVT != MVT::i8 &&
2329 RetVT != MVT::i16 && RetVT != MVT::i32) {
2333 if (RVLocs.size() >= 2 && RetVT != MVT::f64)
2340 SmallVector<MVT, 8> ArgVTs;
2369 MVT ArgVT;
2370 if (!isTypeLegal(ArgTy, ArgVT) && ArgVT != MVT::i16 && ArgVT != MVT::i8 &&
2371 ArgVT != MVT::i1)
2453 MVT VT;
2456 VT = MVT::i32;
2458 VT = MVT::i16;
2461 VT = MVT::i8;
2466 VT = MVT::i16;
2468 VT = MVT::i8;
2594 if (SrcVT != MVT::i32 && SrcVT != MVT::i16 && SrcVT != MVT::i8)
2596 if (DestVT != MVT::i16 && DestVT != MVT::i8 && DestVT != MVT::i1)
2608 unsigned ARMFastISel::ARMEmitIntExt(MVT SrcVT, unsigned SrcReg, MVT DestVT,
2610 if (DestVT != MVT::i32 && DestVT != MVT::i16 && DestVT != MVT::i8)
2612 if (SrcVT != MVT::i16 && SrcVT != MVT::i8 && SrcVT != MVT::i1)
2761 MVT SrcVT = SrcEVT.getSimpleVT();
2762 MVT DestVT = DestEVT.getSimpleVT();
2778 if (DestVT != MVT::i32)
2906 { { ARM::SXTH, ARM::t2SXTH }, 0, 0, MVT::i16 },
2907 { { ARM::UXTH, ARM::t2UXTH }, 0, 1, MVT::i16 },
2908 { { ARM::ANDri, ARM::t2ANDri }, 255, 1, MVT::i8 },
2909 { { ARM::SXTB, ARM::t2SXTB }, 0, 0, MVT::i8 },
2910 { { ARM::UXTB, ARM::t2UXTB }, 0, 1, MVT::i8 }
2921 MVT VT;
2939 MVT((MVT::SimpleValueType)FoldableLoadExtends[i].ExpectedVT) == VT) {
2958 unsigned Align, MVT VT) {
3038 case MVT::i8:
3039 case MVT::i16:
3040 case MVT::i32:
3052 const TargetRegisterClass *RC = TLI.getRegClassFor(MVT::i32);
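Taken together, these hits show MVT threaded through every ARMFastISel helper as the currency for type-legality decisions: a value's IR type is narrowed to a simple MVT once, and everything downstream (loads, stores, extensions, materialization, call lowering) switches on that MVT. A minimal sketch of the two gatekeeping helpers the listing points at (the isTypeLegal / isLoadTypeLegal hits above), reconstructed from the fragments shown; the exact bodies in ARMFastISel.cpp may differ, and the TLI.getValueType overload is assumed from the same LLVM era:

bool ARMFastISel::isTypeLegal(Type *Ty, MVT &VT) {
  EVT evt = TLI.getValueType(Ty, /*AllowUnknown=*/true);
  // Only handle simple value types; anything else is left to SelectionDAG.
  if (evt == MVT::Other || !evt.isSimple()) return false;
  VT = evt.getSimpleVT();
  // Accept only types the target can hold directly in a register.
  return TLI.isTypeLegal(VT);
}

bool ARMFastISel::isLoadTypeLegal(Type *Ty, MVT &VT) {
  if (isTypeLegal(Ty, VT)) return true;
  // Loads of i1/i8/i16 are also accepted: they can be sign- or
  // zero-extended to i32 (see the ARMEmitIntExt hits above).
  return VT == MVT::i1 || VT == MVT::i8 || VT == MVT::i16;
}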