
Lines Matching refs:X86

1 //===-- X86MCInstLower.cpp - Convert X86 MachineInstr to an MCInst --------===//
10 // This file contains code to lower X86 MachineInstrs to their corresponding MCInst records.
250 if (Reg != X86::AL && Reg != X86::AX && Reg != X86::EAX && Reg != X86::RAX)
268 case X86::MOVSX16rr8: // movsbw %al, %ax --> cbtw
269 if (Op0 == X86::AX && Op1 == X86::AL)
270 NewOpcode = X86::CBW;
272 case X86::MOVSX32rr16: // movswl %ax, %eax --> cwtl
273 if (Op0 == X86::EAX && Op1 == X86::AX)
274 NewOpcode = X86::CWDE;
276 case X86::MOVSX64rr32: // movslq %eax, %rax --> cltq
277 if (Op0 == X86::RAX && Op1 == X86::EAX)
278 NewOpcode = X86::CDQE;
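
The three MOVSX cases above fold a sign-extending register move into its one-byte accumulator form (cbtw/cwtl/cltq) when the destination and source are exactly the matching accumulator pair. A minimal standalone sketch of that decision, using hypothetical enum and function names rather than the real MCInst-based LLVM code:

    #include <cstdio>

    // Hypothetical stand-ins for the opcode and register enums; the real code
    // operates on an llvm::MCInst using the generated X86:: opcode constants.
    enum Opcode { MOVSX16rr8, MOVSX32rr16, MOVSX64rr32, CBW, CWDE, CDQE };
    enum Reg { AL, AX, EAX, RAX, CX };

    // Return the one-byte accumulator form if the operands are exactly the
    // accumulator pair the compact encoding implies; otherwise keep the opcode.
    Opcode shrinkSignExtend(Opcode Opc, Reg Dst, Reg Src) {
      switch (Opc) {
      case MOVSX16rr8:  return (Dst == AX  && Src == AL)  ? CBW  : Opc;
      case MOVSX32rr16: return (Dst == EAX && Src == AX)  ? CWDE : Opc;
      case MOVSX64rr32: return (Dst == RAX && Src == EAX) ? CDQE : Opc;
      default:          return Opc;
      }
    }

    int main() {
      // movswl %ax, %eax --> cwtl, but movswl %cx, %eax keeps the long form.
      std::printf("%d %d\n", shrinkSignExtend(MOVSX32rr16, EAX, AX),
                  shrinkSignExtend(MOVSX32rr16, EAX, CX));
    }
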
301 Inst.getOperand(AddrBase + X86::AddrBaseReg).isReg() &&
302 Inst.getOperand(AddrBase + X86::AddrScaleAmt).isImm() &&
303 Inst.getOperand(AddrBase + X86::AddrIndexReg).isReg() &&
304 Inst.getOperand(AddrBase + X86::AddrSegmentReg).isReg() &&
311 if (Reg != X86::AL && Reg != X86::AX && Reg != X86::EAX && Reg != X86::RAX)
326 (Inst.getOperand(AddrBase + X86::AddrBaseReg).getReg() != 0 ||
327 Inst.getOperand(AddrBase + X86::AddrScaleAmt).getImm() != 1 ||
328 Inst.getOperand(AddrBase + X86::AddrIndexReg).getReg() != 0))
333 MCOperand Seg = Inst.getOperand(AddrBase + X86::AddrSegmentReg);
342 return Subtarget.is64Bit() ? X86::RETQ : X86::RETL;
390 case X86::LEA64_32r:
391 case X86::LEA64r:
392 case X86::LEA16r:
393 case X86::LEA32r:
395 assert(OutMI.getNumOperands() == 1+X86::AddrNumOperands &&
397 assert(OutMI.getOperand(1+X86::AddrSegmentReg).getReg() == 0 &&
401 case X86::MOV32ri64:
402 OutMI.setOpcode(X86::MOV32ri);
407 case X86::VMOVAPDrr:
408 case X86::VMOVAPDYrr:
409 case X86::VMOVAPSrr:
410 case X86::VMOVAPSYrr:
411 case X86::VMOVDQArr:
412 case X86::VMOVDQAYrr:
413 case X86::VMOVDQUrr:
414 case X86::VMOVDQUYrr:
415 case X86::VMOVUPDrr:
416 case X86::VMOVUPDYrr:
417 case X86::VMOVUPSrr:
418 case X86::VMOVUPSYrr: {
424 case X86::VMOVAPDrr: NewOpc = X86::VMOVAPDrr_REV; break;
425 case X86::VMOVAPDYrr: NewOpc = X86::VMOVAPDYrr_REV; break;
426 case X86::VMOVAPSrr: NewOpc = X86::VMOVAPSrr_REV; break;
427 case X86::VMOVAPSYrr: NewOpc = X86::VMOVAPSYrr_REV; break;
428 case X86::VMOVDQArr: NewOpc = X86::VMOVDQArr_REV; break;
429 case X86::VMOVDQAYrr: NewOpc = X86::VMOVDQAYrr_REV; break;
430 case X86::VMOVDQUrr: NewOpc = X86::VMOVDQUrr_REV; break;
431 case X86::VMOVDQUYrr: NewOpc = X86::VMOVDQUYrr_REV; break;
432 case X86::VMOVUPDrr: NewOpc = X86::VMOVUPDrr_REV; break;
433 case X86::VMOVUPDYrr: NewOpc = X86::VMOVUPDYrr_REV; break;
434 case X86::VMOVUPSrr: NewOpc = X86::VMOVUPSrr_REV; break;
435 case X86::VMOVUPSYrr: NewOpc = X86::VMOVUPSYrr_REV; break;
441 case X86::VMOVSDrr:
442 case X86::VMOVSSrr: {
448 case X86::VMOVSDrr: NewOpc = X86::VMOVSDrr_REV; break;
449 case X86::VMOVSSrr: NewOpc = X86::VMOVSSrr_REV; break;
459 case X86::TAILJMPr64:
460 case X86::CALL64r:
461 case X86::CALL64pcrel32: {
470 case X86::EH_RETURN:
471 case X86::EH_RETURN64: {
478 case X86::TAILJMPr:
479 case X86::TAILJMPd:
480 case X86::TAILJMPd64: {
484 case X86::TAILJMPr: Opcode = X86::JMP32r; break;
485 case X86::TAILJMPd:
486 case X86::TAILJMPd64: Opcode = X86::JMP_1; break;
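
The TAILJMP pseudo-opcodes above are tail-call markers; at MC lowering they become ordinary jumps (JMP32r for the register form, JMP_1 for the direct forms), since a tail call transfers control without pushing a return address. For illustration only (not from this file), a source-level tail call that a backend performing this optimization will typically emit as a jmp rather than a call/ret pair:

    // With tail-call optimization, `caller` typically compiles to roughly:
    //   addl $1, %edi
    //   jmp  callee        # no call/ret; callee returns directly to our caller
    int callee(int);
    int caller(int x) { return callee(x + 1); }
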
499 case X86::ADD16rr_DB: OutMI.setOpcode(X86::OR16rr); goto ReSimplify;
500 case X86::ADD32rr_DB: OutMI.setOpcode(X86::OR32rr); goto ReSimplify;
501 case X86::ADD64rr_DB: OutMI.setOpcode(X86::OR64rr); goto ReSimplify;
502 case X86::ADD16ri_DB: OutMI.setOpcode(X86::OR16ri); goto ReSimplify;
503 case X86::ADD32ri_DB: OutMI.setOpcode(X86::OR32ri); goto ReSimplify;
504 case X86::ADD64ri32_DB: OutMI.setOpcode(X86::OR64ri32); goto ReSimplify;
505 case X86::ADD16ri8_DB: OutMI.setOpcode(X86::OR16ri8); goto ReSimplify;
506 case X86::ADD32ri8_DB: OutMI.setOpcode(X86::OR32ri8); goto ReSimplify;
507 case X86::ADD64ri8_DB: OutMI.setOpcode(X86::OR64ri8); goto ReSimplify;
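
The _DB pseudos above stand for adds whose two operands are known to share no set bits; under that precondition addition and bitwise OR compute the same value, so the lowering simply rewrites them as the corresponding OR. A quick worked example of the identity these rewrites rely on (illustrative only):

    #include <cassert>
    #include <cstdint>

    int main() {
      // A page-aligned base and a small page offset never share set bits,
      // so base + off == base | off.
      uint64_t base = 0x7f3200001000;   // low 12 bits clear
      uint64_t off  = 0x0000000000a8;   // fits entirely in the low 12 bits
      assert((base & off) == 0);        // the precondition the _DB pseudos encode
      assert((base + off) == (base | off));
    }
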
513 case X86::JMP_4: OutMI.setOpcode(X86::JMP_1); break;
514 case X86::JO_4: OutMI.setOpcode(X86::JO_1); break;
515 case X86::JNO_4: OutMI.setOpcode(X86::JNO_1); break;
516 case X86::JB_4: OutMI.setOpcode(X86::JB_1); break;
517 case X86::JAE_4: OutMI.setOpcode(X86::JAE_1); break;
518 case X86::JE_4: OutMI.setOpcode(X86::JE_1); break;
519 case X86::JNE_4: OutMI.setOpcode(X86::JNE_1); break;
520 case X86::JBE_4: OutMI.setOpcode(X86::JBE_1); break;
521 case X86::JA_4: OutMI.setOpcode(X86::JA_1); break;
522 case X86::JS_4: OutMI.setOpcode(X86::JS_1); break;
523 case X86::JNS_4: OutMI.setOpcode(X86::JNS_1); break;
524 case X86::JP_4: OutMI.setOpcode(X86::JP_1); break;
525 case X86::JNP_4: OutMI.setOpcode(X86::JNP_1); break;
526 case X86::JL_4: OutMI.setOpcode(X86::JL_1); break;
527 case X86::JGE_4: OutMI.setOpcode(X86::JGE_1); break;
528 case X86::JLE_4: OutMI.setOpcode(X86::JLE_1); break;
529 case X86::JG_4: OutMI.setOpcode(X86::JG_1); break;
534 case X86::ACQUIRE_MOV8rm: OutMI.setOpcode(X86::MOV8rm); goto ReSimplify;
535 case X86::ACQUIRE_MOV16rm: OutMI.setOpcode(X86::MOV16rm); goto ReSimplify;
536 case X86::ACQUIRE_MOV32rm: OutMI.setOpcode(X86::MOV32rm); goto ReSimplify;
537 case X86::ACQUIRE_MOV64rm: OutMI.setOpcode(X86::MOV64rm); goto ReSimplify;
538 case X86::RELEASE_MOV8mr: OutMI.setOpcode(X86::MOV8mr); goto ReSimplify;
539 case X86::RELEASE_MOV16mr: OutMI.setOpcode(X86::MOV16mr); goto ReSimplify;
540 case X86::RELEASE_MOV32mr: OutMI.setOpcode(X86::MOV32mr); goto ReSimplify;
541 case X86::RELEASE_MOV64mr: OutMI.setOpcode(X86::MOV64mr); goto ReSimplify;
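
The ACQUIRE_MOV*/RELEASE_MOV* pseudos lower to the plain MOV opcodes: on x86, an ordinary load already has acquire semantics and an ordinary store already has release semantics, so these orderings need no extra fencing. A small illustration with std::atomic (not from this file):

    #include <atomic>

    std::atomic<int> flag{0};

    // On x86-64 these typically compile to plain `mov` instructions; only
    // stronger orderings (e.g. seq_cst stores) need an extra fence or xchg.
    int load_acquire()        { return flag.load(std::memory_order_acquire); }
    void store_release(int v) { flag.store(v, std::memory_order_release); }
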
550 case X86::MOV8mr_NOREX:
551 case X86::MOV8mr: SimplifyShortMoveForm(AsmPrinter, OutMI, X86::MOV8ao8); break;
552 case X86::MOV8rm_NOREX:
553 case X86::MOV8rm: SimplifyShortMoveForm(AsmPrinter, OutMI, X86::MOV8o8a); break;
554 case X86::MOV16mr: SimplifyShortMoveForm(AsmPrinter, OutMI, X86::MOV16ao16); break;
555 case X86::MOV16rm: SimplifyShortMoveForm(AsmPrinter, OutMI, X86::MOV16o16a); break;
556 case X86::MOV32mr: SimplifyShortMoveForm(AsmPrinter, OutMI, X86::MOV32ao32); break;
557 case X86::MOV32rm: SimplifyShortMoveForm(AsmPrinter, OutMI, X86::MOV32o32a); break;
559 case X86::ADC8ri: SimplifyShortImmForm(OutMI, X86::ADC8i8); break;
560 case X86::ADC16ri: SimplifyShortImmForm(OutMI, X86::ADC16i16); break;
561 case X86::ADC32ri: SimplifyShortImmForm(OutMI, X86::ADC32i32); break;
562 case X86::ADC64ri32: SimplifyShortImmForm(OutMI, X86::ADC64i32); break;
563 case X86::ADD8ri: SimplifyShortImmForm(OutMI, X86::ADD8i8); break;
564 case X86::ADD16ri: SimplifyShortImmForm(OutMI, X86::ADD16i16); break;
565 case X86::ADD32ri: SimplifyShortImmForm(OutMI, X86::ADD32i32); break;
566 case X86::ADD64ri32: SimplifyShortImmForm(OutMI, X86::ADD64i32); break;
567 case X86::AND8ri: SimplifyShortImmForm(OutMI, X86::AND8i8); break;
568 case X86::AND16ri: SimplifyShortImmForm(OutMI, X86::AND16i16); break;
569 case X86::AND32ri: SimplifyShortImmForm(OutMI, X86::AND32i32); break;
570 case X86::AND64ri32: SimplifyShortImmForm(OutMI, X86::AND64i32); break;
571 case X86::CMP8ri: SimplifyShortImmForm(OutMI, X86::CMP8i8); break;
572 case X86::CMP16ri: SimplifyShortImmForm(OutMI, X86::CMP16i16); break;
573 case X86::CMP32ri: SimplifyShortImmForm(OutMI, X86::CMP32i32); break;
574 case X86::CMP64ri32: SimplifyShortImmForm(OutMI, X86::CMP64i32); break;
575 case X86::OR8ri: SimplifyShortImmForm(OutMI, X86::OR8i8); break;
576 case X86::OR16ri: SimplifyShortImmForm(OutMI, X86::OR16i16); break;
577 case X86::OR32ri: SimplifyShortImmForm(OutMI, X86::OR32i32); break;
578 case X86::OR64ri32: SimplifyShortImmForm(OutMI, X86::OR64i32); break;
579 case X86::SBB8ri: SimplifyShortImmForm(OutMI, X86::SBB8i8); break;
580 case X86::SBB16ri: SimplifyShortImmForm(OutMI, X86::SBB16i16); break;
581 case X86::SBB32ri: SimplifyShortImmForm(OutMI, X86::SBB32i32); break;
582 case X86::SBB64ri32: SimplifyShortImmForm(OutMI, X86::SBB64i32); break;
583 case X86::SUB8ri: SimplifyShortImmForm(OutMI, X86::SUB8i8); break;
584 case X86::SUB16ri: SimplifyShortImmForm(OutMI, X86::SUB16i16); break;
585 case X86::SUB32ri: SimplifyShortImmForm(OutMI, X86::SUB32i32); break;
586 case X86::SUB64ri32: SimplifyShortImmForm(OutMI, X86::SUB64i32); break;
587 case X86::TEST8ri: SimplifyShortImmForm(OutMI, X86::TEST8i8); break;
588 case X86::TEST16ri: SimplifyShortImmForm(OutMI, X86::TEST16i16); break;
589 case X86::TEST32ri: SimplifyShortImmForm(OutMI, X86::TEST32i32); break;
590 case X86::TEST64ri32: SimplifyShortImmForm(OutMI, X86::TEST64i32); break;
591 case X86::XOR8ri: SimplifyShortImmForm(OutMI, X86::XOR8i8); break;
592 case X86::XOR16ri: SimplifyShortImmForm(OutMI, X86::XOR16i16); break;
593 case X86::XOR32ri: SimplifyShortImmForm(OutMI, X86::XOR32i32); break;
594 case X86::XOR64ri32: SimplifyShortImmForm(OutMI, X86::XOR64i32); break;
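
All of the arithmetic cases above funnel through SimplifyShortImmForm, which, guarded by the accumulator check seen at lines 250 and 311, switches to the shorter encodings that use AL/AX/EAX/RAX implicitly and only encode the immediate. A minimal sketch of that check and rewrite, with hypothetical names in place of the real MCInst-based helper:

    #include <cassert>

    // Hypothetical model of the accumulator guard in SimplifyShortImmForm;
    // the real helper rewrites an llvm::MCInst in place.
    enum Reg { AL, AX, EAX, RAX, OTHER_REG };
    enum { ADD32ri = 1, ADD32i32 = 2 };   // made-up opcode numbers for the demo

    struct Inst { int Opcode; Reg RegOp; };

    // Use the short encoding (implicit accumulator + immediate only) when the
    // register operand is AL/AX/EAX/RAX; otherwise keep the generic reg/imm form.
    bool simplifyShortImmForm(Inst &I, int ShortOpcode) {
      if (I.RegOp != AL && I.RegOp != AX && I.RegOp != EAX && I.RegOp != RAX)
        return false;
      I.Opcode = ShortOpcode;   // e.g. ADD32ri -> ADD32i32
      return true;
    }

    int main() {
      Inst A{ADD32ri, EAX}, B{ADD32ri, OTHER_REG};
      assert(simplifyShortImmForm(A, ADD32i32) && A.Opcode == ADD32i32);
      assert(!simplifyShortImmForm(B, ADD32i32) && B.Opcode == ADD32ri);
      return 0;
    }
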
597 case X86::MOVSX16rr8:
598 case X86::MOVSX32rr16:
599 case X86::MOVSX64rr32:
610 bool is64Bits = MI.getOpcode() == X86::TLS_addr64 ||
611 MI.getOpcode() == X86::TLS_base_addr64;
613 bool needsPadding = MI.getOpcode() == X86::TLS_addr64;
618 OutStreamer.EmitInstruction(MCInstBuilder(X86::DATA16_PREFIX), STI);
622 case X86::TLS_addr32:
623 case X86::TLS_addr64:
626 case X86::TLS_base_addr32:
629 case X86::TLS_base_addr64:
641 LEA.setOpcode(X86::LEA64r);
642 LEA.addOperand(MCOperand::CreateReg(X86::RDI)); // dest
643 LEA.addOperand(MCOperand::CreateReg(X86::RIP)); // base
649 LEA.setOpcode(X86::LEA32r);
650 LEA.addOperand(MCOperand::CreateReg(X86::EAX)); // dest
651 LEA.addOperand(MCOperand::CreateReg(X86::EBX)); // base
657 LEA.setOpcode(X86::LEA32r);
658 LEA.addOperand(MCOperand::CreateReg(X86::EAX)); // dest
661 LEA.addOperand(MCOperand::CreateReg(X86::EBX)); // index
668 OutStreamer.EmitInstruction(MCInstBuilder(X86::DATA16_PREFIX), STI);
669 OutStreamer.EmitInstruction(MCInstBuilder(X86::DATA16_PREFIX), STI);
670 OutStreamer.EmitInstruction(MCInstBuilder(X86::REX64_PREFIX), STI);
680 OutStreamer.EmitInstruction(MCInstBuilder(is64Bits ? X86::CALL64pcrel32
681 : X86::CALLpcrel32)
685 /// \brief Emit the optimal amount of multi-byte nops on X86.
689 assert(Is64Bit && "EmitNops only supports X86-64");
693 BaseReg = X86::RAX; ScaleVal = 1;
696 case 1: NumBytes -= 1; Opc = X86::NOOP; break;
697 case 2: NumBytes -= 2; Opc = X86::XCHG16ar; break;
698 case 3: NumBytes -= 3; Opc = X86::NOOPL; break;
699 case 4: NumBytes -= 4; Opc = X86::NOOPL; Displacement = 8; break;
700 case 5: NumBytes -= 5; Opc = X86::NOOPL; Displacement = 8;
701 IndexReg = X86::RAX; break;
702 case 6: NumBytes -= 6; Opc = X86::NOOPW; Displacement = 8;
703 IndexReg = X86::RAX; break;
704 case 7: NumBytes -= 7; Opc = X86::NOOPL; Displacement = 512; break;
705 case 8: NumBytes -= 8; Opc = X86::NOOPL; Displacement = 512;
706 IndexReg = X86::RAX; break;
707 case 9: NumBytes -= 9; Opc = X86::NOOPW; Displacement = 512;
708 IndexReg = X86::RAX; break;
709 default: NumBytes -= 10; Opc = X86::NOOPW; Displacement = 512;
710 IndexReg = X86::RAX; SegmentReg = X86::CS; break;
720 case X86::NOOP:
723 case X86::XCHG16ar:
724 OS.EmitInstruction(MCInstBuilder(Opc).addReg(X86::AX), STI);
726 case X86::NOOPL:
727 case X86::NOOPW:
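
The EmitNops helper documented above pads a requested byte count with as few instructions as possible by repeatedly emitting the largest nop encoding that fits, from the 1-byte 0x90 up to a 10-byte NOOPW with a large displacement and a segment override. A hedged sketch of that greedy decomposition, returning chunk sizes instead of emitting MCInsts (hypothetical helper, not the LLVM function):

    #include <cstdio>
    #include <vector>

    // Greedily split NumBytes of padding into nop-sized chunks, largest first.
    // The real code picks a concrete encoding for each chunk (0x90, xchg %ax,%ax,
    // nopl/nopw with growing displacement, index, and segment operands); here we
    // only model the sizes, capped at the 10-byte maximum used above.
    std::vector<int> nopChunks(int NumBytes) {
      std::vector<int> Chunks;
      while (NumBytes > 0) {
        int This = NumBytes > 10 ? 10 : NumBytes;
        Chunks.push_back(This);
        NumBytes -= This;
      }
      return Chunks;
    }

    int main() {
      for (int Size : nopChunks(23))   // e.g. 23 bytes -> 10 + 10 + 3
        std::printf("%d ", Size);
      std::printf("\n");
    }
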
754 assert(Is64Bit && "Patchpoint currently only supports X86-64");
769 OS.EmitInstruction(MCInstBuilder(X86::MOV64ri).addReg(ScratchReg)
771 OS.EmitInstruction(MCInstBuilder(X86::CALL64r).addReg(ScratchReg), STI);
791 case X86::Int_MemBarrier:
796 case X86::EH_RETURN:
797 case X86::EH_RETURN64: {
804 case X86::TAILJMPr:
805 case X86::TAILJMPd:
806 case X86::TAILJMPd64:
811 case X86::TLS_addr32:
812 case X86::TLS_addr64:
813 case X86::TLS_base_addr32:
814 case X86::TLS_base_addr64:
817 case X86::MOVPC32r: {
828 EmitToStreamer(OutStreamer, MCInstBuilder(X86::CALLpcrel32)
835 EmitToStreamer(OutStreamer, MCInstBuilder(X86::POP32r)
840 case X86::ADD32ri: {
866 EmitToStreamer(OutStreamer, MCInstBuilder(X86::ADD32ri)
879 case X86::MORESTACK_RET:
883 case X86::MORESTACK_RET_RESTORE_R10:
886 EmitToStreamer(OutStreamer, MCInstBuilder(X86::MOV64rr)
887 .addReg(X86::R10)
888 .addReg(X86::RAX));
891 case X86::SEH_PushReg:
895 case X86::SEH_SaveReg:
900 case X86::SEH_SaveXMM:
905 case X86::SEH_StackAlloc:
909 case X86::SEH_SetFrame:
914 case X86::SEH_PushFrame:
918 case X86::SEH_EndPrologue: