Lines matching refs:AArch64 in AArch64ISelLowering.cpp (each match prefixed with its line number in that file)
1 //===-- AArch64ISelLowering.cpp - AArch64 DAG Lowering Implementation -----===//
10 // This file defines the interfaces that AArch64 uses to lower LLVM code into a
15 #define DEBUG_TYPE "aarch64-isel"
16 #include "AArch64.h"
53 addRegisterClass(MVT::i32, &AArch64::GPR32RegClass);
54 addRegisterClass(MVT::i64, &AArch64::GPR64RegClass);
55 addRegisterClass(MVT::f16, &AArch64::FPR16RegClass);
56 addRegisterClass(MVT::f32, &AArch64::FPR32RegClass);
57 addRegisterClass(MVT::f64, &AArch64::FPR64RegClass);
58 addRegisterClass(MVT::f128, &AArch64::FPR128RegClass);
63 // instructions on AArch64. It's marginally simpler to let LLVM expand
75 // AArch64 does not have i1 loads, or much of anything for i1 really.
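
The two comments above describe small but necessary legalization choices. As a minimal sketch (hedged; the constructor context and exact action list are assumed, using the standard TargetLowering hooks of this LLVM era), marking i1 extending loads for promotion looks like:

    // Assumed sketch: promote every flavour of i1 extending load.
    setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
    setLoadExtAction(ISD::ZEXTLOAD, MVT::i1, Promote);
    setLoadExtAction(ISD::EXTLOAD,  MVT::i1, Promote);
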
80 setStackPointerRegisterToSaveRestore(AArch64::XSP);
266 setExceptionPointerRegister(AArch64::X0);
267 setExceptionSelectorRegister(AArch64::X1);
283 ldrOpc = AArch64::LDXR_byte;
284 strOpc = AArch64::STXR_byte;
287 ldrOpc = AArch64::LDXR_hword;
288 strOpc = AArch64::STXR_hword;
291 ldrOpc = AArch64::LDXR_word;
292 strOpc = AArch64::STXR_word;
295 ldrOpc = AArch64::LDXR_dword;
296 strOpc = AArch64::STXR_dword;
335 = Size == 8 ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
356 if (BinOpcode == AArch64::BICwww_lsl || BinOpcode == AArch64::BICxxx_lsl)
365 unsigned stxr_status = MRI.createVirtualRegister(&AArch64::GPR32RegClass);
366 MRI.constrainRegClass(stxr_status, &AArch64::GPR32wspRegClass);
369 BuildMI(BB, dl, TII->get(AArch64::CBNZw))
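
The fragments above all serve one pattern: lines 283-296 pick the exclusive load/store opcode for the access size, and lines 365-369 allocate the STXR status register and emit the retry branch. A minimal sketch of the load-linked/store-conditional loop that emitAtomicBinary builds for a 32-bit add (the names LoopBB, dest, scratch, ptr and incr are hypothetical, and the operand order is an assumption):

    // loop:
    BuildMI(LoopBB, dl, TII->get(AArch64::LDXR_word), dest)
        .addReg(ptr);                            // dest = load-exclusive [ptr]
    BuildMI(LoopBB, dl, TII->get(AArch64::ADDwww_lsl), scratch)
        .addReg(dest).addReg(incr).addImm(0);    // scratch = dest + incr, lsl #0
    BuildMI(LoopBB, dl, TII->get(AArch64::STXR_word), stxr_status)
        .addReg(scratch).addReg(ptr);            // attempt the store-exclusive
    BuildMI(LoopBB, dl, TII->get(AArch64::CBNZw))
        .addReg(stxr_status).addMBB(LoopBB);     // nonzero status: retry
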
406 TRC = &AArch64::GPR64RegClass;
407 TRCsp = &AArch64::GPR64xspRegClass;
409 TRC = &AArch64::GPR32RegClass;
410 TRCsp = &AArch64::GPR32wspRegClass;
450 BuildMI(BB, dl, TII->get(Size == 8 ? AArch64::CSELxxxc : AArch64::CSELwwwc),
454 unsigned stxr_status = MRI.createVirtualRegister(&AArch64::GPR32RegClass);
455 MRI.constrainRegClass(stxr_status, &AArch64::GPR32wspRegClass);
459 BuildMI(BB, dl, TII->get(AArch64::CBNZw))
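
The min/max variants differ only in how the new value is computed inside that loop: a compare sets NZCV, then a conditional select keeps whichever of the old and incoming values wins. A hedged sketch with the same hypothetical names:

    BuildMI(BB, dl, TII->get(CmpOp))                 // e.g. AArch64::CMPww_sxtb
        .addReg(dest).addReg(incr).addImm(0);        // compare, setting NZCV
    BuildMI(BB, dl, TII->get(AArch64::CSELwwwc), scratch)
        .addReg(dest).addReg(incr).addImm(Cond);     // keep old or new value
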
487 TRCsp = Size == 8 ? &AArch64::GPR64xspRegClass : &AArch64::GPR32wspRegClass;
522 unsigned CmpOp = Size == 8 ? AArch64::CMPxx_lsl : AArch64::CMPww_lsl;
526 BuildMI(BB, dl, TII->get(AArch64::Bcc))
535 unsigned stxr_status = MRI.createVirtualRegister(&AArch64::GPR32RegClass);
536 MRI.constrainRegClass(stxr_status, &AArch64::GPR32wspRegClass);
539 BuildMI(BB, dl, TII->get(AArch64::CBNZw))
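
emitAtomicCmpSwap (the 522-539 fragments) reuses the same retry structure but exits early when the loaded value does not match the expected one, so no store is attempted on a mismatch. A sketch, with oldval, newval, StoreBB and ExitBB as hypothetical names:

    BuildMI(LoopBB, dl, TII->get(ldrOpc), dest).addReg(ptr);
    BuildMI(LoopBB, dl, TII->get(CmpOp))
        .addReg(dest).addReg(oldval).addImm(0);
    BuildMI(LoopBB, dl, TII->get(AArch64::Bcc))
        .addImm(A64CC::NE).addMBB(ExitBB);           // mismatch: leave the loop
    BuildMI(StoreBB, dl, TII->get(strOpc), stxr_status)
        .addReg(newval).addReg(ptr);
    BuildMI(StoreBB, dl, TII->get(AArch64::CBNZw))
        .addReg(stxr_status).addMBB(LoopBB);         // lost reservation: retry
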
610 BuildMI(MBB, DL, TII->get(AArch64::LSFP128_STR))
614 BuildMI(MBB, DL, TII->get(AArch64::Bcc))
617 BuildMI(MBB, DL, TII->get(AArch64::Bimm))
624 BuildMI(TrueBB, DL, TII->get(AArch64::LSFP128_STR))
637 EndBB->addLiveIn(AArch64::NZCV);
639 BuildMI(*EndBB, StartOfEnd, DL, TII->get(AArch64::LSFP128_LDR), DestReg)
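
These fragments expand the F128CSEL pseudo (dispatched at line 652 below). There is no 128-bit FP conditional select instruction, so the lowering spills through a stack slot: one operand is stored unconditionally, a conditional branch decides whether the other arm overwrites it, and the join block reloads the result, which is why NZCV must be marked live-in at line 637. The control flow, as a hedged comment sketch (slot and register names are illustrative):

    // MBB:    str  q<a>, [sp, #slot]   // unconditionally spill one operand
    //         b.cond TrueBB            // the condition picks the other arm
    //         b    EndBB
    // TrueBB: str  q<b>, [sp, #slot]   // overwrite with the other operand
    // EndBB:  ldr  qDest, [sp, #slot]  // NZCV live-in; reload selected value
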
652 case AArch64::F128CSEL:
654 case AArch64::ATOMIC_LOAD_ADD_I8:
655 return emitAtomicBinary(MI, MBB, 1, AArch64::ADDwww_lsl);
656 case AArch64::ATOMIC_LOAD_ADD_I16:
657 return emitAtomicBinary(MI, MBB, 2, AArch64::ADDwww_lsl);
658 case AArch64::ATOMIC_LOAD_ADD_I32:
659 return emitAtomicBinary(MI, MBB, 4, AArch64::ADDwww_lsl);
660 case AArch64::ATOMIC_LOAD_ADD_I64:
661 return emitAtomicBinary(MI, MBB, 8, AArch64::ADDxxx_lsl);
663 case AArch64::ATOMIC_LOAD_SUB_I8:
664 return emitAtomicBinary(MI, MBB, 1, AArch64::SUBwww_lsl);
665 case AArch64::ATOMIC_LOAD_SUB_I16:
666 return emitAtomicBinary(MI, MBB, 2, AArch64::SUBwww_lsl);
667 case AArch64::ATOMIC_LOAD_SUB_I32:
668 return emitAtomicBinary(MI, MBB, 4, AArch64::SUBwww_lsl);
669 case AArch64::ATOMIC_LOAD_SUB_I64:
670 return emitAtomicBinary(MI, MBB, 8, AArch64::SUBxxx_lsl);
672 case AArch64::ATOMIC_LOAD_AND_I8:
673 return emitAtomicBinary(MI, MBB, 1, AArch64::ANDwww_lsl);
674 case AArch64::ATOMIC_LOAD_AND_I16:
675 return emitAtomicBinary(MI, MBB, 2, AArch64::ANDwww_lsl);
676 case AArch64::ATOMIC_LOAD_AND_I32:
677 return emitAtomicBinary(MI, MBB, 4, AArch64::ANDwww_lsl);
678 case AArch64::ATOMIC_LOAD_AND_I64:
679 return emitAtomicBinary(MI, MBB, 8, AArch64::ANDxxx_lsl);
681 case AArch64::ATOMIC_LOAD_OR_I8:
682 return emitAtomicBinary(MI, MBB, 1, AArch64::ORRwww_lsl);
683 case AArch64::ATOMIC_LOAD_OR_I16:
684 return emitAtomicBinary(MI, MBB, 2, AArch64::ORRwww_lsl);
685 case AArch64::ATOMIC_LOAD_OR_I32:
686 return emitAtomicBinary(MI, MBB, 4, AArch64::ORRwww_lsl);
687 case AArch64::ATOMIC_LOAD_OR_I64:
688 return emitAtomicBinary(MI, MBB, 8, AArch64::ORRxxx_lsl);
690 case AArch64::ATOMIC_LOAD_XOR_I8:
691 return emitAtomicBinary(MI, MBB, 1, AArch64::EORwww_lsl);
692 case AArch64::ATOMIC_LOAD_XOR_I16:
693 return emitAtomicBinary(MI, MBB, 2, AArch64::EORwww_lsl);
694 case AArch64::ATOMIC_LOAD_XOR_I32:
695 return emitAtomicBinary(MI, MBB, 4, AArch64::EORwww_lsl);
696 case AArch64::ATOMIC_LOAD_XOR_I64:
697 return emitAtomicBinary(MI, MBB, 8, AArch64::EORxxx_lsl);
699 case AArch64::ATOMIC_LOAD_NAND_I8:
700 return emitAtomicBinary(MI, MBB, 1, AArch64::BICwww_lsl);
701 case AArch64::ATOMIC_LOAD_NAND_I16:
702 return emitAtomicBinary(MI, MBB, 2, AArch64::BICwww_lsl);
703 case AArch64::ATOMIC_LOAD_NAND_I32:
704 return emitAtomicBinary(MI, MBB, 4, AArch64::BICwww_lsl);
705 case AArch64::ATOMIC_LOAD_NAND_I64:
706 return emitAtomicBinary(MI, MBB, 8, AArch64::BICxxx_lsl);
708 case AArch64::ATOMIC_LOAD_MIN_I8:
709 return emitAtomicBinaryMinMax(MI, MBB, 1, AArch64::CMPww_sxtb, A64CC::GT);
710 case AArch64::ATOMIC_LOAD_MIN_I16:
711 return emitAtomicBinaryMinMax(MI, MBB, 2, AArch64::CMPww_sxth, A64CC::GT);
712 case AArch64::ATOMIC_LOAD_MIN_I32:
713 return emitAtomicBinaryMinMax(MI, MBB, 4, AArch64::CMPww_lsl, A64CC::GT);
714 case AArch64::ATOMIC_LOAD_MIN_I64:
715 return emitAtomicBinaryMinMax(MI, MBB, 8, AArch64::CMPxx_lsl, A64CC::GT);
717 case AArch64::ATOMIC_LOAD_MAX_I8:
718 return emitAtomicBinaryMinMax(MI, MBB, 1, AArch64::CMPww_sxtb, A64CC::LT);
719 case AArch64::ATOMIC_LOAD_MAX_I16:
720 return emitAtomicBinaryMinMax(MI, MBB, 2, AArch64::CMPww_sxth, A64CC::LT);
721 case AArch64::ATOMIC_LOAD_MAX_I32:
722 return emitAtomicBinaryMinMax(MI, MBB, 4, AArch64::CMPww_lsl, A64CC::LT);
723 case AArch64::ATOMIC_LOAD_MAX_I64:
724 return emitAtomicBinaryMinMax(MI, MBB, 8, AArch64::CMPxx_lsl, A64CC::LT);
726 case AArch64::ATOMIC_LOAD_UMIN_I8:
727 return emitAtomicBinaryMinMax(MI, MBB, 1, AArch64::CMPww_uxtb, A64CC::HI);
728 case AArch64::ATOMIC_LOAD_UMIN_I16:
729 return emitAtomicBinaryMinMax(MI, MBB, 2, AArch64::CMPww_uxth, A64CC::HI);
730 case AArch64::ATOMIC_LOAD_UMIN_I32:
731 return emitAtomicBinaryMinMax(MI, MBB, 4, AArch64::CMPww_lsl, A64CC::HI);
732 case AArch64::ATOMIC_LOAD_UMIN_I64:
733 return emitAtomicBinaryMinMax(MI, MBB, 8, AArch64::CMPxx_lsl, A64CC::HI);
735 case AArch64::ATOMIC_LOAD_UMAX_I8:
736 return emitAtomicBinaryMinMax(MI, MBB, 1, AArch64::CMPww_uxtb, A64CC::LO);
737 case AArch64::ATOMIC_LOAD_UMAX_I16:
738 return emitAtomicBinaryMinMax(MI, MBB, 2, AArch64::CMPww_uxth, A64CC::LO);
739 case AArch64::ATOMIC_LOAD_UMAX_I32:
740 return emitAtomicBinaryMinMax(MI, MBB, 4, AArch64::CMPww_lsl, A64CC::LO);
741 case AArch64::ATOMIC_LOAD_UMAX_I64:
742 return emitAtomicBinaryMinMax(MI, MBB, 8, AArch64::CMPxx_lsl, A64CC::LO);
744 case AArch64::ATOMIC_SWAP_I8:
746 case AArch64::ATOMIC_SWAP_I16:
748 case AArch64::ATOMIC_SWAP_I32:
750 case AArch64::ATOMIC_SWAP_I64:
753 case AArch64::ATOMIC_CMP_SWAP_I8:
755 case AArch64::ATOMIC_CMP_SWAP_I16:
757 case AArch64::ATOMIC_CMP_SWAP_I32:
759 case AArch64::ATOMIC_CMP_SWAP_I64:
787 AArch64::Q0, AArch64::Q1, AArch64::Q2, AArch64::Q3,
788 AArch64::Q4, AArch64::Q5, AArch64::Q6, AArch64::Q7
793 AArch64::X0, AArch64::X1, AArch64::X2, AArch64::X3,
794 AArch64::X4, AArch64::X5, AArch64::X6, AArch64::X7
845 unsigned VReg = MF.addLiveIn(AArch64ArgRegs[i], &AArch64::GPR64RegClass);
865 AArch64::FPR128RegClass);
962 case 8: DestSubReg = AArch64::sub_8; break;
963 case 16: DestSubReg = AArch64::sub_16; break;
964 case 32: DestSubReg = AArch64::sub_32; break;
965 case 64: DestSubReg = AArch64::sub_64; break;
1116 // On AArch64 (and all other architectures I'm aware of) the most this has to
1151 SDValue StackPtr = DAG.getCopyFromReg(Chain, dl, AArch64::XSP,
1176 case 8: SrcSubReg = AArch64::sub_8; break;
1177 case 16: SrcSubReg = AArch64::sub_16; break;
1178 case 32: SrcSubReg = AArch64::sub_32; break;
1179 case 64: SrcSubReg = AArch64::sub_64; break;
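
Both switches (962-965 for incoming values, 1176-1179 for outgoing ones) map a bit-width to the sub-register index used to move a narrow value in or out of the wider register it travels in. A hedged sketch of the consuming side (names and types are illustrative; getTargetExtractSubreg is the standard SelectionDAG helper):

    // e.g. pull an f32 out of the 128-bit register it arrived in
    SDValue QVal = DAG.getCopyFromReg(Chain, dl, AArch64::Q0, MVT::f128);
    SDValue FVal = DAG.getTargetExtractSubreg(AArch64::sub_32, dl,
                                              MVT::f32, QVal);
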
1687 // AArch64 BooleanContents is the default UndefinedBooleanContent, which means
1906 // AArch64's small model demands the following sequence:
1943 Chain = DAG.getCopyToReg(DAG.getEntryNode(), DL, AArch64::X0, DescAddr, Glue);
1958 Ops.push_back(DAG.getRegister(AArch64::X0, PtrVT));
1969 return DAG.getCopyFromReg(Chain, DL, AArch64::X0, PtrVT, Glue);
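
Lines 1943-1969 implement the TLS-descriptor calling convention: the descriptor address goes to the resolver in x0, x0 is listed among the call node's operands so the register allocator knows it is live across the call, and the thread-pointer offset comes back in x0. In outline (a sketch; only the lines shown above are verbatim):

    Chain = DAG.getCopyToReg(DAG.getEntryNode(), DL, AArch64::X0,
                             DescAddr, Glue);             // argument in x0
    Glue  = Chain.getValue(1);
    // ...Ops collects the callee, DAG.getRegister(AArch64::X0, PtrVT),
    // the chain and the glue; the call node is built from Ops...
    SDValue TPOff = DAG.getCopyFromReg(Chain, DL, AArch64::X0, PtrVT, Glue);
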
2003 TPOff = SDValue(DAG.getMachineNode(AArch64::MOVZxii, DL, PtrVT, HiVar,
2005 TPOff = SDValue(DAG.getMachineNode(AArch64::MOVKxii, DL, PtrVT,
2051 TPOff = SDValue(DAG.getMachineNode(AArch64::MOVZxii, DL, PtrVT, HiVar,
2053 TPOff = SDValue(DAG.getMachineNode(AArch64::MOVKxii, DL, PtrVT,
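
Both call sites (2003-2005 and 2051-2053) materialise a 32-bit thread-pointer offset as a MOVZ of the high half-word followed by a MOVK of the low one. For the local-exec model the emitted pair corresponds to (sketch; var is a placeholder symbol):

    //   movz x0, #:tprel_g1:var      // bits [31:16] of the TP-relative offset
    //   movk x0, #:tprel_g0_nc:var   // bits [15:0], high half preserved
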
2159 // AArch64 BooleanContents is the default UndefinedBooleanContent, which means
2243 // The layout of the va_list struct is specified in the AArch64 Procedure Call
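
The comment refers to the va_list layout from the AArch64 Procedure Call Standard (AAPCS64, section B.3), which the lowering fills in field by field. For reference, the structure being described (C view; field comments paraphrase the PCS):

    typedef struct {
      void *__stack;    // next stacked-argument address
      void *__gr_top;   // one past the end of the saved GP register area
      void *__vr_top;   // one past the end of the saved FP/SIMD register area
      int   __gr_offs;  // negative offset from __gr_top to next GP reg slot
      int   __vr_offs;  // negative offset from __vr_top to next FP reg slot
    } va_list;
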
2951 return std::make_pair(0U, &AArch64::GPR32RegClass);
2953 return std::make_pair(0U, &AArch64::GPR64RegClass);
2957 return std::make_pair(0U, &AArch64::FPR16RegClass);
2959 return std::make_pair(0U, &AArch64::FPR32RegClass);
2961 return std::make_pair(0U, &AArch64::FPR64RegClass);
2963 return std::make_pair(0U, &AArch64::VPR64RegClass);
2965 return std::make_pair(0U, &AArch64::FPR128RegClass);
2967 return std::make_pair(0U, &AArch64::VPR128RegClass);
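
This final table backs the inline-asm constraint lookup: "r" resolves to the GPR classes and "w" to the FP/SIMD classes, with the exact class chosen from the operand's type. From the user's side, a constraint lands in these classes like so (illustrative example):

    // "r" operands land in GPR32/GPR64; "%w0" selects the 32-bit view.
    int add_one(int x) {
      int res;
      __asm__("add %w0, %w1, #1" : "=r"(res) : "r"(x));
      return res;
    }
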