    Searched refs: CPURegister (Results 1 - 25 of 32)

  /external/vixl/examples/
custom-disassembler.h 49 const CPURegister& reg);
custom-disassembler.cc 38 const CPURegister& reg) {
  /external/v8/src/arm64/
assembler-arm64.h 65 // Some CPURegister methods can return Register and FPRegister types, so we
71 struct CPURegister {
89 static CPURegister Create(int code, int size, RegisterType type) {
90 CPURegister r = {code, size, type};
106 bool Is(const CPURegister& other) const;
107 bool Aliases(const CPURegister& other) const;
120 bool IsSameSizeAndType(const CPURegister& other) const;
123 bool is(const CPURegister& other) const { return Is(other); }
132 struct Register : public CPURegister {
134 return Register(CPURegister::Create(code, size, CPURegister::kRegister))
    [all...]
assembler-arm64-inl.h 44 inline int CPURegister::code() const {
50 inline CPURegister::RegisterType CPURegister::type() const {
56 inline RegList CPURegister::Bit() const {
62 inline int CPURegister::SizeInBits() const {
68 inline int CPURegister::SizeInBytes() const {
75 inline bool CPURegister::Is32Bits() const {
81 inline bool CPURegister::Is64Bits() const {
87 inline bool CPURegister::IsValid() const {
98 inline bool CPURegister::IsValidRegister() const
    [all...]
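
The assembler-arm64.h and assembler-arm64-inl.h hits above show V8's CPURegister as a plain struct built through a static Create(code, size, type) helper, with accessors (code(), type(), SizeInBits(), Is32Bits()) and comparisons (Is(), Aliases(), IsSameSizeAndType()). Below is a minimal, self-contained sketch of that value-type pattern; the field names, enum values and the exact comparison semantics are assumptions based only on the snippets, not V8's actual definitions.

    #include <cassert>
    #include <iostream>

    // Simplified stand-in for the CPURegister struct declared in
    // assembler-arm64.h above. Field names, enum values and comparison
    // semantics are assumptions based only on the visible snippets.
    struct CPURegister {
      enum RegisterType { kRegister, kFPRegister, kNoRegister };

      int reg_code;
      int reg_size;  // width in bits (assumed 32 or 64)
      RegisterType reg_type;

      static CPURegister Create(int code, int size, RegisterType type) {
        CPURegister r = {code, size, type};
        return r;
      }

      int code() const { return reg_code; }
      RegisterType type() const { return reg_type; }
      int SizeInBits() const { return reg_size; }
      int SizeInBytes() const { return reg_size / 8; }
      bool Is32Bits() const { return reg_size == 32; }
      bool Is64Bits() const { return reg_size == 64; }

      // Same bank and same code: the two names refer to the same hardware
      // register (e.g. w0 and x0), even when their widths differ.
      bool Aliases(const CPURegister& other) const {
        return reg_type == other.reg_type && reg_code == other.reg_code;
      }
      bool IsSameSizeAndType(const CPURegister& other) const {
        return reg_size == other.reg_size && reg_type == other.reg_type;
      }
      bool Is(const CPURegister& other) const {
        return Aliases(other) && reg_size == other.reg_size;
      }
    };

    int main() {
      CPURegister x0 = CPURegister::Create(0, 64, CPURegister::kRegister);
      CPURegister w0 = CPURegister::Create(0, 32, CPURegister::kRegister);
      CPURegister d0 = CPURegister::Create(0, 64, CPURegister::kFPRegister);

      assert(x0.Aliases(w0));             // same bank, same code
      assert(!x0.Is(w0));                 // different width, not the same view
      assert(!x0.IsSameSizeAndType(d0));  // FP bank, not the core bank
      std::cout << "x0 is " << x0.SizeInBytes() << " bytes wide\n";
      return 0;
    }

Keeping the struct a plain aggregate with a static Create helper (rather than constructors) is what lets the later results treat registers as cheap values to copy, compare and store in lists.
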
macro-assembler-arm64.h 62 V(Ldr, CPURegister&, rt, LoadOpFor(rt)) \
63 V(Str, CPURegister&, rt, StoreOpFor(rt)) \
67 V(Ldp, CPURegister&, rt, rt2, LoadPairOpFor(rt, rt2)) \
68 V(Stp, CPURegister&, rt, rt2, StorePairOpFor(rt, rt2)) \
69 V(Ldpsw, CPURegister&, rt, rt2, LDPSW_x)
301 void LoadStoreMacro(const CPURegister& rt,
310 void LoadStorePairMacro(const CPURegister& rt, const CPURegister& rt2,
479 inline void Ldnp(const CPURegister& rt,
480 const CPURegister& rt2
    [all...]
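
The macro-assembler-arm64.h entries (V(Ldr, CPURegister&, rt, LoadOpFor(rt)), V(Str, ...), V(Ldp, ...)) form an X-macro table: each row names a wrapper method, its operand and an opcode selector, and the table is expanded to generate methods that forward to LoadStoreMacro / LoadStorePairMacro. Here is a hedged sketch of that expansion technique with a trimmed-down table; the list name, opcode enum and generated bodies are illustrative, not V8's.

    #include <iostream>
    #include <string>

    // Minimal illustration of the X-macro technique behind the load/store
    // macro list above. The table rows mirror the search hits; the opcode
    // enum and the printing "assembler" are stand-ins.
    enum LoadStoreOp { LDR_op, STR_op };

    #define LS_MACRO_LIST(V) \
      V(Ldr, LDR_op)         \
      V(Str, STR_op)

    struct CPURegister { int code; };

    class MacroAssembler {
     public:
      // One expansion of the table defines a wrapper method per row, each
      // forwarding to a shared LoadStoreMacro helper.
    #define DEFINE_FUNCTION(FN, OP) \
      void FN(const CPURegister& rt) { LoadStoreMacro(rt, OP, #FN); }
      LS_MACRO_LIST(DEFINE_FUNCTION)
    #undef DEFINE_FUNCTION

     private:
      void LoadStoreMacro(const CPURegister& rt, LoadStoreOp op,
                          const std::string& name) {
        std::cout << name << " x" << rt.code << "  (op " << op << ")\n";
      }
    };

    int main() {
      MacroAssembler masm;
      CPURegister x0{0};
      masm.Ldr(x0);  // generated by the X-macro expansion
      masm.Str(x0);
      return 0;
    }
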
macro-assembler-arm64.cc 562 void MacroAssembler::LoadStoreMacro(const CPURegister& rt,
593 void MacroAssembler::LoadStorePairMacro(const CPURegister& rt,
594 const CPURegister& rt2,
868 void MacroAssembler::Push(const CPURegister& src0, const CPURegister& src1,
869 const CPURegister& src2, const CPURegister& src3) {
880 void MacroAssembler::Push(const CPURegister& src0, const CPURegister& src1,
881 const CPURegister& src2, const CPURegister& src3
    [all...]
deoptimizer-arm64.cc 99 CPURegister::kFPRegister, kDRegSizeInBits,
104 CPURegList saved_registers(CPURegister::kRegister, kXRegSizeInBits, 0, 27);
162 CPURegister current_reg = copy_to_input.PopLowestIndex();
173 CPURegister reg = copy_fp_to_input.PopLowestIndex();
248 const CPURegister reg = saved_fp_registers.PopLowestIndex();
282 CPURegister current_reg = saved_registers.PopLowestIndex();
assembler-arm64.cc 65 CPURegister CPURegList::PopLowestIndex() {
73 return CPURegister::Create(index, size_, type_);
77 CPURegister CPURegList::PopHighestIndex() {
86 return CPURegister::Create(index, size_, type_);
91 if (type() == CPURegister::kRegister) {
93 } else if (type() == CPURegister::kFPRegister) {
96 DCHECK(type() == CPURegister::kNoRegister);
104 return CPURegList(CPURegister::kRegister, size, 19, 29);
109 return CPURegList(CPURegister::kFPRegister, size, 8, 15);
115 CPURegList list = CPURegList(CPURegister::kRegister, size, 0, 18)
    [all...]
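
deoptimizer-arm64.cc and assembler-arm64.cc use CPURegList as a same-type register set built from a code range (e.g. CPURegList(CPURegister::kRegister, kXRegSizeInBits, 0, 27)) and drained with PopLowestIndex() / PopHighestIndex(). The following is a simplified, self-contained model of that list under an assumed bit-mask representation (using GCC/Clang bit-scan builtins for brevity); the real class also validates sizes and carries range presets such as the 19..29 core-register range returned at line 104 above, which covers x19..x29, the AArch64 callee-saved core registers.

    #include <cstdint>
    #include <iostream>

    // Assumed, simplified model of the CPURegList used above: registers of
    // one type and size, tracked as a bit mask keyed by register code.
    struct CPURegister {
      enum RegisterType { kRegister, kFPRegister, kNoRegister };
      int code;
      int size;
      RegisterType type;
      bool IsValid() const { return type != kNoRegister; }
    };

    class CPURegList {
     public:
      // Include every code in [first, last], matching constructions like
      // CPURegList(CPURegister::kRegister, kXRegSizeInBits, 0, 27).
      CPURegList(CPURegister::RegisterType type, int size, int first, int last)
          : type_(type), size_(size), bits_(0) {
        for (int code = first; code <= last; ++code) bits_ |= 1ULL << code;
      }

      bool IsEmpty() const { return bits_ == 0; }

      CPURegister PopLowestIndex() {
        if (IsEmpty()) return CPURegister{0, 0, CPURegister::kNoRegister};
        int index = __builtin_ctzll(bits_);  // lowest set bit (GCC/Clang builtin)
        bits_ &= bits_ - 1;                  // clear it
        return CPURegister{index, size_, type_};
      }

      CPURegister PopHighestIndex() {
        if (IsEmpty()) return CPURegister{0, 0, CPURegister::kNoRegister};
        int index = 63 - __builtin_clzll(bits_);  // highest set bit
        bits_ &= ~(1ULL << index);
        return CPURegister{index, size_, type_};
      }

     private:
      CPURegister::RegisterType type_;
      int size_;
      uint64_t bits_;  // one bit per register code
    };

    int main() {
      CPURegList callee_saved(CPURegister::kRegister, 64, 19, 29);
      while (!callee_saved.IsEmpty()) {
        CPURegister reg = callee_saved.PopLowestIndex();
        std::cout << "x" << reg.code << " ";
      }
      std::cout << "\n";
      return 0;
    }
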
code-stubs-arm64.h 265 CPURegList list(CPURegister::kRegister, kXRegSizeInBits, 0, 25);
  /external/v8/src/crankshaft/arm64/
delayed-masm-arm64.h 46 bool IsScratchRegister(const CPURegister& reg) {
80 void Load(const CPURegister& rd, const MemOperand& operand);
81 void Store(const CPURegister& rd, const MemOperand& operand);
141 CPURegister pending_register_;
delayed-masm-arm64.cc 98 void DelayedMasm::Load(const CPURegister& rd, const MemOperand& operand) {
133 void DelayedMasm::Store(const CPURegister& rd, const MemOperand& operand) {
  /external/vixl/src/vixl/a64/
assembler-a64.h 45 // Some CPURegister methods can return Register or VRegister types, so we need
50 class CPURegister {
62 CPURegister() : code_(0), size_(0), type_(kNoRegister) {
67 CPURegister(unsigned code, unsigned size, RegisterType type)
167 bool Aliases(const CPURegister& other) const {
172 bool Is(const CPURegister& other) const {
224 bool IsSameSizeAndType(const CPURegister& other) const {
240 class Register : public CPURegister {
242 Register() : CPURegister() {}
243 explicit Register(const CPURegister& other
    [all...]
macro-assembler-a64.cc     [all...]
macro-assembler-a64.h 47 V(Ldr, CPURegister&, rt, LoadOpFor(rt)) \
48 V(Str, CPURegister&, rt, StoreOpFor(rt)) \
53 V(Ldp, CPURegister&, rt, rt2, LoadPairOpFor(rt, rt2)) \
54 V(Stp, CPURegister&, rt, rt2, StorePairOpFor(rt, rt2)) \
55 V(Ldpsw, CPURegister&, rt, rt2, LDPSW_x)
737 void LoadStoreMacro(const CPURegister& rt,
746 void LoadStorePairMacro(const CPURegister& rt,
747 const CPURegister& rt2,
779 void Push(const CPURegister& src0, const CPURegister& src1 = NoReg
    [all...]
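
The VIXL MacroAssembler's Push is declared with trailing parameters defaulted to NoReg (Push(const CPURegister& src0, const CPURegister& src1 = NoReg, ...)), so a single method handles one to four operands and skips the unused slots. Below is a hedged sketch of that defaulted-sentinel pattern; the NoReg sentinel and the printed stp/str lines are stand-ins, not VIXL's emitter.

    #include <cstddef>
    #include <initializer_list>
    #include <iostream>
    #include <vector>

    // Stand-in register type with a "none" sentinel, mirroring how the
    // Push declaration above defaults trailing operands to NoReg.
    struct CPURegister {
      int code;
      bool IsNone() const { return code < 0; }
    };
    const CPURegister NoReg = {-1};  // sentinel used as the default argument

    class MacroAssembler {
     public:
      // One method covers pushing one to four registers; unused slots stay
      // NoReg and are skipped.
      void Push(const CPURegister& src0, const CPURegister& src1 = NoReg,
                const CPURegister& src2 = NoReg,
                const CPURegister& src3 = NoReg) {
        std::vector<CPURegister> srcs;
        for (const CPURegister* r : {&src0, &src1, &src2, &src3}) {
          if (!r->IsNone()) srcs.push_back(*r);
        }
        // Pair registers so a real emitter could use stp where possible;
        // the printed text and offsets here are illustrative only.
        for (std::size_t i = 0; i < srcs.size(); i += 2) {
          if (i + 1 < srcs.size()) {
            std::cout << "stp x" << srcs[i].code << ", x" << srcs[i + 1].code
                      << ", [sp, #-16]!\n";
          } else {
            std::cout << "str x" << srcs[i].code << ", [sp, #-16]!\n";
          }
        }
      }
    };

    int main() {
      MacroAssembler masm;
      masm.Push(CPURegister{0});                                  // one register
      masm.Push(CPURegister{1}, CPURegister{2}, CPURegister{3});  // three
      return 0;
    }
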
assembler-a64.cc 35 CPURegister CPURegList::PopLowestIndex() {
42 return CPURegister(index, size_, type_);
46 CPURegister CPURegList::PopHighestIndex() {
55 return CPURegister(index, size_, type_);
60 if ((type_ == CPURegister::kRegister) ||
61 (type_ == CPURegister::kVRegister)) {
63 // Try to create a CPURegister for each element in the list.
66 is_valid &= CPURegister(i, size_, type_).IsValid();
70 } else if (type_ == CPURegister::kNoRegister) {
80 if (type() == CPURegister::kRegister)
    [all...]
disasm-a64.h 60 const CPURegister& reg);
  /art/compiler/optimizing/
common_arm64.h 103 static inline vixl::CPURegister CPURegisterFrom(Location location, Primitive::Type type) {
104 return Primitive::IsFloatingPointType(type) ? vixl::CPURegister(FPRegisterFrom(location, type))
105 : vixl::CPURegister(RegisterFrom(location, type));
108 static inline vixl::CPURegister OutputCPURegister(HInstruction* instr) {
110 ? static_cast<vixl::CPURegister>(OutputFPRegister(instr))
111 : static_cast<vixl::CPURegister>(OutputRegister(instr));
114 static inline vixl::CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
116 ? static_cast<vixl::CPURegister>(InputFPRegisterAt(instr, index))
117 : static_cast<vixl::CPURegister>(InputRegisterAt(instr, index));
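
ART's common_arm64.h helpers choose between the FP and core views of a location by testing the primitive type (Primitive::IsFloatingPointType(type) ? ...FPRegisterFrom... : ...RegisterFrom...). The sketch below shows that dispatch under assumed, simplified types; Location, the primitive enum and the register struct are placeholders, not ART's classes.

    #include <iostream>

    // Simplified placeholders, only to show the dispatch in CPURegisterFrom /
    // OutputCPURegister / InputCPURegisterAt: a floating-point value maps to
    // an FP register, everything else to a core register.
    enum class PrimitiveType { kInt, kLong, kFloat, kDouble, kReference };

    struct CPURegister {
      enum Kind { kCore, kFP };
      Kind kind;
      int code;
    };

    struct Location {
      int reg_code;  // allocator-assigned register index (assumed encoding)
    };

    static inline bool IsFloatingPointType(PrimitiveType type) {
      return type == PrimitiveType::kFloat || type == PrimitiveType::kDouble;
    }

    static inline CPURegister CPURegisterFrom(Location location,
                                              PrimitiveType type) {
      return IsFloatingPointType(type)
                 ? CPURegister{CPURegister::kFP, location.reg_code}
                 : CPURegister{CPURegister::kCore, location.reg_code};
    }

    int main() {
      Location loc{3};
      CPURegister core = CPURegisterFrom(loc, PrimitiveType::kInt);
      CPURegister fp = CPURegisterFrom(loc, PrimitiveType::kDouble);
      std::cout << (core.kind == CPURegister::kCore ? "x" : "d") << core.code
                << " vs "
                << (fp.kind == CPURegister::kCore ? "x" : "d") << fp.code
                << "\n";
      return 0;
    }
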
code_generator_arm64.h 58 const vixl::CPURegList callee_saved_core_registers(vixl::CPURegister::kRegister,
62 const vixl::CPURegList callee_saved_fp_registers(vixl::CPURegister::kFPRegister,
428 void MoveConstant(vixl::CPURegister destination, HConstant* constant);
433 void Load(Primitive::Type type, vixl::CPURegister dst, const vixl::MemOperand& src);
434 void Store(Primitive::Type type, vixl::CPURegister src, const vixl::MemOperand& dst);
436 vixl::CPURegister dst,
439 void StoreRelease(Primitive::Type type, vixl::CPURegister src, const vixl::MemOperand& dst);
  /art/disassembler/
disassembler_arm64.h 45 const vixl::CPURegister& reg) OVERRIDE;
disassembler_arm64.cc 43 const vixl::CPURegister& reg) {
  /art/compiler/utils/arm64/
assembler_arm64.cc 639 static inline dwarf::Reg DWARFReg(CPURegister reg) {
652 const CPURegister& dst0 = registers.PopLowestIndex();
653 const CPURegister& dst1 = registers.PopLowestIndex();
660 const CPURegister& dst0 = registers.PopLowestIndex();
671 const CPURegister& dst0 = registers.PopLowestIndex();
672 const CPURegister& dst1 = registers.PopLowestIndex();
679 const CPURegister& dst0 = registers.PopLowestIndex();
690 CPURegList core_reg_list(CPURegister::kRegister, kXRegSize, 0);
691 CPURegList fp_reg_list(CPURegister::kFPRegister, kDRegSize, 0);
746 CPURegList core_reg_list(CPURegister::kRegister, kXRegSize, 0)
    [all...]
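
assembler_arm64.cc drains a register list two entries at a time (dst0 = registers.PopLowestIndex(); dst1 = registers.PopLowestIndex();), presumably so full pairs can go out as a single store-pair and a leftover register as a single store. Here is a hedged sketch of that pairing loop with minimal stand-in types; the emitted stp/str text and offsets are illustrative assumptions, not ART's generated code.

    #include <iostream>
    #include <vector>

    // Minimal stand-ins to illustrate the spill-loop shape seen above: pop
    // two registers at a time, emit a pair store for full pairs and a single
    // store for a leftover register.
    struct CPURegister {
      int code;
      bool IsValid() const { return code >= 0; }
    };

    struct CPURegList {
      std::vector<int> codes;  // kept sorted ascending

      CPURegister PopLowestIndex() {
        if (codes.empty()) return CPURegister{-1};
        CPURegister r{codes.front()};
        codes.erase(codes.begin());
        return r;
      }
    };

    void SpillRegisters(CPURegList registers, int offset) {
      while (true) {
        const CPURegister dst0 = registers.PopLowestIndex();
        const CPURegister dst1 = registers.PopLowestIndex();
        if (!dst0.IsValid()) break;
        if (dst1.IsValid()) {
          std::cout << "stp x" << dst0.code << ", x" << dst1.code
                    << ", [sp, #" << offset << "]\n";
          offset += 16;  // two 8-byte slots
        } else {
          std::cout << "str x" << dst0.code << ", [sp, #" << offset << "]\n";
          offset += 8;
        }
      }
    }

    int main() {
      SpillRegisters(CPURegList{{19, 20, 21, 22, 23}}, 0);
      return 0;
    }
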
  /external/v8/test/cctest/
test-utils-arm64.cc 310 if (reg_list.type() == CPURegister::kRegister) {
313 } else if (reg_list.type() == CPURegister::kFPRegister) {
test-assembler-arm64.cc     [all...]
  /external/vixl/test/
test-utils-a64.cc 342 if (reg_list.type() == CPURegister::kRegister) {
345 } else if (reg_list.type() == CPURegister::kVRegister) {
  /external/v8/src/compiler/arm64/
code-generator-arm64.cc 36 CPURegister InputFloat32OrZeroRegister(size_t index) {
45 CPURegister InputFloat64OrZeroRegister(size_t index) {
    [all...]
