Lines matching refs:MBB
147 static unsigned findDeadCallerSavedReg(MachineBasicBlock &MBB,
151 const MachineFunction *MF = MBB.getParent();
212 flagsNeedToBePreservedBeforeTheTerminators(const MachineBasicBlock &MBB) {
213 for (const MachineInstr &MI : MBB.terminators()) {
239 for (const MachineBasicBlock *Succ : MBB.successors())
248 void X86FrameLowering::emitSPUpdate(MachineBasicBlock &MBB,
255 DebugLoc DL = MBB.findDebugLoc(MBBI);
263 if (isSub && !isEAXLiveIn(*MBB.getParent()))
266 Reg = findDeadCallerSavedReg(MBB, MBBI, TRI, Is64Bit);
270 BuildMI(MBB, MBBI, DL, TII.get(Opc), Reg)
275 MachineInstr *MI = BuildMI(MBB, MBBI, DL, TII.get(Opc), StackPtr)
289 : findDeadCallerSavedReg(MBB, MBBI, TRI, Is64Bit);
294 MachineInstr *MI = BuildMI(MBB, MBBI, DL, TII.get(Opc))
306 MBB, MBBI, DL, isSub ? -ThisVal : ThisVal, InEpilogue);
317 MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, DebugLoc DL,
326 // of MBB would require to use LEA operations.
329 UseLEA = STI.useLeaForSP() || MBB.isLiveIn(X86::EFLAGS);
336 UseLEA = canUseLEAForSPInEpilogue(*MBB.getParent());
338 UseLEA = flagsNeedToBePreservedBeforeTheTerminators(MBB);
341 assert((UseLEA || !flagsNeedToBePreservedBeforeTheTerminators(MBB)) &&
347 MI = addRegOffset(BuildMI(MBB, MBBI, DL,
356 MI = BuildMI(MBB, MBBI, DL, TII.get(Opc), StackPtr)
364 int X86FrameLowering::mergeSPUpdates(MachineBasicBlock &MBB,
367 if ((doMergeWithPrevious && MBBI == MBB.begin()) ||
368 (!doMergeWithPrevious && MBBI == MBB.end()))
382 MBB.erase(PI);
388 MBB.erase(PI);
395 void X86FrameLowering::BuildCFI(MachineBasicBlock &MBB,
398 MachineFunction &MF = *MBB.getParent();
400 BuildMI(MBB, MBBI, DL, TII.get(TargetOpcode::CFI_INSTRUCTION))
405 X86FrameLowering::emitCalleeSavedFrameMoves(MachineBasicBlock &MBB,
408 MachineFunction &MF = *MBB.getParent();
424 BuildCFI(MBB, MBBI, DL,
447 MachineBasicBlock &MBB,
454 return emitStackProbeInlineStub(MF, MBB, MBBI, DL, true);
456 return emitStackProbeInline(MF, MBB, MBBI, DL, false);
459 return emitStackProbeCall(MF, MBB, MBBI, DL, InProlog);
487 MachineFunction &MF, MachineBasicBlock &MBB,
493 const BasicBlock *LLVM_BB = MBB.getBasicBlock();
503 // MBB:
520 // [rest of original MBB]
527 MachineFunction::iterator MBBIter = std::next(MBB.getIterator());
532 // Split MBB and move the tail portion down to ContinueMBB.
534 ContinueMBB->splice(ContinueMBB->begin(), &MBB, MBBI, MBB.end());
535 ContinueMBB->transferSuccessorsAndUpdatePHIs(&MBB);
581 addRegOffset(BuildMI(&MBB, DL, TII.get(X86::MOV64mr)), X86::RSP, false,
584 addRegOffset(BuildMI(&MBB, DL, TII.get(X86::MOV64mr)), X86::RSP, false,
589 BuildMI(&MBB, DL, TII.get(X86::MOV64rr), SizeReg).addReg(X86::RAX);
592 // Add code to MBB to check for overflow and set the new target stack pointer
594 BuildMI(&MBB, DL, TII.get(X86::XOR64rr), ZeroReg)
597 BuildMI(&MBB, DL, TII.get(X86::MOV64rr), CopyReg).addReg(X86::RSP);
598 BuildMI(&MBB, DL, TII.get(X86::SUB64rr), TestReg)
601 BuildMI(&MBB, DL, TII.get(X86::CMOVB64rr), FinalReg)
612 BuildMI(&MBB, DL, TII.get(X86::MOV64rm), LimitReg)
618 BuildMI(&MBB, DL, TII.get(X86::CMP64rr)).addReg(FinalReg).addReg(LimitReg);
620 BuildMI(&MBB, DL, TII.get(X86::JAE_1)).addMBB(ContinueMBB);
674 MBB.addSuccessor(ContinueMBB);
675 MBB.addSuccessor(RoundMBB);
682 for (++BeforeMBBI; BeforeMBBI != MBB.end(); ++BeforeMBBI) {
703 MachineFunction &MF, MachineBasicBlock &MBB,
733 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64ri), X86::R11)
735 CI = BuildMI(MBB, MBBI, DL, TII.get(CallOp)).addReg(X86::R11);
737 CI = BuildMI(MBB, MBBI, DL, TII.get(CallOp)).addExternalSymbol(Symbol);
752 BuildMI(MBB, MBBI, DL, TII.get(X86::SUB64rr), X86::RSP)
767 MachineFunction &MF, MachineBasicBlock &MBB,
772 BuildMI(MBB, MBBI, DL, TII.get(X86::CALLpcrel32))
804 void X86FrameLowering::BuildStackAlignAND(MachineBasicBlock &MBB,
810 MachineInstr *MI = BuildMI(MBB, MBBI, DL, TII.get(AndOp), Reg)
904 MachineBasicBlock &MBB) const {
907 MachineBasicBlock::iterator MBBI = MBB.begin();
914 bool IsFunclet = MBB.isEHFuncletEntry();
979 BuildStackAdjustment(MBB, MBBI, DL, TailCallReturnAddrDelta,
1013 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(MOVmr)), StackPtr, true, 16)
1016 MBB.addLiveIn(Establisher);
1042 BuildMI(MBB, MBBI, DL, TII.get(Is64Bit ? X86::PUSH64r : X86::PUSH32r))
1050 BuildCFI(MBB, MBBI, DL,
1055 BuildCFI(MBB, MBBI, DL, MCCFIInstruction::createOffset(
1060 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_PushReg))
1067 BuildMI(MBB, MBBI, DL,
1077 BuildCFI(MBB, MBBI, DL, MCCFIInstruction::createDefCfaRegister(
1103 while (MBBI != MBB.end() &&
1115 BuildCFI(MBB, MBBI, DL,
1121 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_PushReg)).addImm(Reg).setMIFlag(
1131 BuildStackAlignAND(MBB, MBBI, DL, StackPtr, MaxAlign);
1137 NumBytes -= mergeSPUpdates(MBB, MBBI, true);
1162 BuildMI(MBB, MBBI, DL, TII.get(X86::PUSH32r))
1171 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV32ri), X86::EAX)
1175 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64ri32), X86::RAX)
1179 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64ri), X86::RAX)
1186 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV32ri), X86::EAX)
1192 emitStackProbe(MF, MBB, MBBI, DL, true);
1200 MBB.insert(MBBI, MI);
1203 emitSPUpdate(MBB, MBBI, -(int64_t)NumBytes, /*InEpilogue=*/false);
1207 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_StackAlloc))
1221 MBB.addLiveIn(Establisher);
1222 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64rm), Establisher),
1229 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64mr)), StackPtr,
1248 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(X86::LEA64r), FramePtr),
1251 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64rr), FramePtr)
1256 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_SetFrame))
1265 MBBI = restoreWin32EHStackPointers(MBB, MBBI, DL);
1268 if (!MBB.isCleanupFuncletEntry()) {
1274 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(X86::MOV32mr)), FrameReg,
1280 while (MBBI != MBB.end() && MBBI->getFlag(MachineInstr::FrameSetup)) {
1292 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_SaveXMM))
1302 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_EndPrologue))
1312 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64mr)), StackPtr, false,
1325 BuildStackAlignAND(MBB, MBBI, DL, SPOrEstablisher, MaxAlign);
1339 BuildMI(MBB, MBBI, DL, TII.get(Opc), BasePtr)
1346 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(Opm)),
1362 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(Opm)), UsedReg, true, Offset)
1373 BuildCFI(MBB, MBBI, DL, MCCFIInstruction::createDefCfaOffset(
1379 emitCalleeSavedFrameMoves(MBB, MBBI, DL);
1458 MachineBasicBlock &MBB) const {
1461 MachineBasicBlock::iterator MBBI = MBB.getFirstTerminator();
1463 if (MBBI != MBB.end())
1495 BuildMI(MBB, MBBI, DL, TII.get(Is64Bit ? X86::POP64r : X86::POP32r),
1501 BuildMI(MBB, MBBI, DL, TII.get(Is64Bit ? X86::POP64r : X86::POP32r),
1515 BuildMI(MBB, MBBI, DL,
1524 while (MBBI != MBB.begin()) {
1542 BuildMI(MBB, FirstCSPop, DL, TII.get(X86::LEA64r), ReturnReg)
1550 BuildMI(MBB, FirstCSPop, DL, TII.get(X86::MOV32ri), ReturnReg)
1558 if (MBBI != MBB.end())
1564 NumBytes += mergeSPUpdates(MBB, MBBI, true);
1587 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(Opc), StackPtr),
1592 BuildMI(MBB, MBBI, DL, TII.get(Opc), StackPtr)
1598 emitSPUpdate(MBB, MBBI, NumBytes, /*InEpilogue=*/true);
1609 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_Epilogue));
1615 MBBI = MBB.getFirstTerminator();
1618 Offset += mergeSPUpdates(MBB, MBBI, true);
1619 emitSPUpdate(MBB, MBBI, Offset, /*InEpilogue=*/true);
1862 MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
1865 DebugLoc DL = MBB.findDebugLoc(MI);
1869 if (MBB.isEHFuncletEntry() && STI.is32Bit() && STI.isOSWindows())
1880 MBB.addLiveIn(Reg);
1882 BuildMI(MBB, MI, DL, TII.get(Opc)).addReg(Reg, RegState::Kill)
1893 MBB.addLiveIn(Reg);
1896 TII.storeRegToStackSlot(MBB, MI, Reg, true, CSI[i - 1].getFrameIdx(), RC,
1906 bool X86FrameLowering::restoreCalleeSavedRegisters(MachineBasicBlock &MBB,
1921 const Function *Func = MBB.getParent()->getFunction();
1929 DebugLoc DL = MBB.findDebugLoc(MI);
1939 TII.loadRegFromStackSlot(MBB, MI, Reg, CSI[i].getFrameIdx(), RC, TRI);
1950 BuildMI(MBB, MI, DL, TII.get(Opc), Reg)
2386 // Create new MBB for StackCheck:
2394 // Create new MBB for IncStack:
2413 bool X86FrameLowering::adjustStackWithPops(MachineBasicBlock &MBB,
2429 if (MBBI == MBB.begin())
2475 BuildMI(MBB, MBBI, DL,
2482 eliminateCallFramePseudoInstr(MachineFunction &MF, MachineBasicBlock &MBB,
2490 I = MBB.erase(I);
2521 BuildCFI(MBB, I, DL,
2535 BuildCFI(MBB, I, DL,
2543 adjustStackWithPops(MBB, I, DL, Offset)))
2544 BuildStackAdjustment(MBB, I, DL, Offset, /*InEpilogue=*/false);
2560 BuildCFI(MBB, I, DL,
2575 MachineBasicBlock::iterator B = MBB.begin();
2578 BuildStackAdjustment(MBB, I, DL, -InternalAmt, /*InEpilogue=*/false);
2582 bool X86FrameLowering::canUseAsEpilogue(const MachineBasicBlock &MBB) const {
2583 assert(MBB.getParent() && "Block is not attached to a function!");
2589 if (STI.isTargetWin64() && !MBB.succ_empty() && !MBB.isReturnBlock())
2592 if (canUseLEAForSPInEpilogue(*MBB.getParent()))
2599 return !flagsNeedToBePreservedBeforeTheTerminators(MBB);
2609 MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
2616 MachineFunction &MF = *MBB.getParent();
2630 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(X86::MOV32rm), X86::ESP),
2643 BuildMI(MBB, MBBI, DL, TII.get(ADDri), FramePtr)
2653 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(X86::LEA32r), BasePtr),
2661 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(X86::MOV32rm), FramePtr),
2708 MachineBasicBlock &MBB = MF.front();
2709 auto MBBI = MBB.begin();
2710 while (MBBI != MBB.end() && MBBI->getFlag(MachineInstr::FrameSetup))
2713 DebugLoc DL = MBB.findDebugLoc(MBBI);
2714 addFrameReference(BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64mi32)),