HomeSort by relevance Sort by last modified time
    Searched refs:XZR (Results 1 - 23 of 23) sorted by relevance

  /art/runtime/arch/arm64/
registers_arm64.h 58 SP = 31, // SP and XZR are encoded in instructions using the register
59 XZR = 32, // code `31`, the context deciding which is used. We use a
  /external/llvm/lib/Target/AArch64/
AArch64DeadRegisterDefinitionsPass.cpp 119 NewReg = AArch64::XZR;
AArch64A53Fix835769.cpp 70 // non-accumulating multiplies, i.e. when Ra=XZR='11111'
71 return MI->getOperand(3).getReg() != AArch64::XZR;
AArch64RegisterInfo.cpp 110 Reserved.set(AArch64::XZR);
140 case AArch64::XZR:
416 return 32 - 1 // XZR/SP
AArch64InstrInfo.cpp 325 // not x -> csinv, represented as orn dst, xzr, src.
327 if (ZReg != AArch64::XZR && ZReg != AArch64::WZR)
342 // neg x -> csneg, represented as sub dst, xzr, src.
344 if (ZReg != AArch64::XZR && ZReg != AArch64::WZR)
441 // cmp reg, #0 is actually subs xzr, reg, #0.
443 BuildMI(MBB, I, DL, get(AArch64::SUBSXri), AArch64::XZR)
470 // cmp reg, #foo is actually ands xzr, reg, #1<<foo.
477 BuildMI(MBB, I, DL, get(AArch64::ANDSXri), AArch64::XZR)
751 if (MI->definesRegister(AArch64::WZR) || MI->definesRegister(AArch64::XZR))
835 CmpInstr->definesRegister(AArch64::XZR)) {
    [all...]
AArch64ExpandPseudoInsts.cpp 112 .addReg(AArch64::XZR)
179 .addReg(AArch64::XZR)
362 .addReg(AArch64::XZR)
417 .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
446 // ORR x0, xzr, |A|X|A|X|
AArch64FastISel.cpp 346 unsigned ZeroReg = (VT == MVT::i64) ? AArch64::XZR : AArch64::WZR;
490 unsigned ZReg = Is64Bit ? AArch64::XZR : AArch64::WZR;
    [all...]
AArch64AsmPrinter.cpp 273 unsigned Reg = ExtraCode[0] == 'w' ? AArch64::WZR : AArch64::XZR;
AArch64ConditionalCompares.cpp 260 if (DstReg == AArch64::WZR || DstReg == AArch64::XZR)
AArch64ISelDAGToDAG.cpp     [all...]
AArch64ISelLowering.cpp     [all...]
  /art/compiler/optimizing/
common_arm64.h 32 static_assert((SP == 31) && (WSP == 31) && (XZR == 32) && (WZR == 32),
39 if (code == XZR) {
50 return XZR;
  /art/compiler/utils/arm64/
managed_register_arm64.h 191 return IsXRegister() && (id_ == XZR);
assembler_arm64.h 230 } else if (code == XZR) {
231 return vixl::xzr;
managed_register_arm64_test.cc 298 EXPECT_TRUE(!reg_SP.Equals(Arm64ManagedRegister::FromXRegister(XZR)));
441 reg = Arm64ManagedRegister::FromXRegister(XZR);
449 EXPECT_EQ(XZR, reg_o.AsOverlappingXRegister());
632 EXPECT_TRUE(vixl::xzr.Is(Arm64Assembler::reg_x(XZR)));
    [all...]
assembler_arm64.cc 215 ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), cond);
  /art/compiler/jni/quick/arm64/
calling_convention_arm64.cc 161 DCHECK_EQ(XZR, kNumberOfXRegisters - 1); // Exclude XZR from the loop (avoid 1 << 32).
  /external/llvm/lib/Target/AArch64/InstPrinter/
AArch64InstPrinter.cpp 165 if ((Op2.getReg() == AArch64::WZR || Op2.getReg() == AArch64::XZR) &&
    [all...]
  /external/llvm/test/MC/AArch64/
arm64-aliases.s 27 orr x2, xzr, x9
44 ands xzr, x1, x2, lsl #3
58 ; ADDS to WZR/XZR is a CMN
80 ; SUBS to WZR/XZR is a CMP
110 ; SUB/SUBS from WZR/XZR is a NEG
  /external/llvm/lib/Target/AArch64/Utils/
AArch64BaseInfo.h 64 case AArch64::XZR: return AArch64::WZR;
104 case AArch64::WZR: return AArch64::XZR;
    [all...]
  /external/llvm/lib/Target/AArch64/Disassembler/
AArch64Disassembler.cpp 375 AArch64::LR, AArch64::XZR
395 if (Register == AArch64::XZR)
    [all...]
  /external/llvm/lib/Target/AArch64/AsmParser/
AArch64AsmParser.cpp     [all...]
  /toolchain/binutils/binutils-2.25/gas/config/
tc-aarch64.c 285 BASIC_REG_TYPE(Z_64) /* xzr */ \
293 /* Typecheck: any 64-bit int reg (inc SP exc XZR) */ \
695 case REG_TYPE_R64_SP: /* 64-bit integer reg (inc SP exc XZR). */
    [all...]

Completed in 1791 milliseconds