/art/compiler/utils/arm/

assembler_thumb2_test.cc
     94  GetAssembler()->sbfx(arm::R0, arm::R1, 0, 1);
     95  GetAssembler()->sbfx(arm::R0, arm::R1, 0, 8);
     96  GetAssembler()->sbfx(arm::R0, arm::R1, 0, 16);
     97  GetAssembler()->sbfx(arm::R0, arm::R1, 0, 32);
     99  GetAssembler()->sbfx(arm::R0, arm::R1, 8, 1);
    100  GetAssembler()->sbfx(arm::R0, arm::R1, 8, 8);
    101  GetAssembler()->sbfx(arm::R0, arm::R1, 8, 16);
    102  GetAssembler()->sbfx(arm::R0, arm::R1, 8, 24);
    104  GetAssembler()->sbfx(arm::R0, arm::R1, 16, 1);
    105  GetAssembler()->sbfx(arm::R0, arm::R1, 16, 8)
    [all...]

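The sbfx cases above sweep the (lsb, width) operands of the Thumb-2 signed bitfield extract. As a hedged sketch in plain C++ (not ART code), the operation these tests encode can be modeled as:

    #include <cstdint>

    // Model of SBFX rd, rn, #lsb, #width: take 'width' bits of 'rn'
    // starting at bit 'lsb' and sign-extend them into a 32-bit result.
    // Assumes lsb + width <= 32 and an arithmetic right shift for signed
    // types, as on mainstream compilers.
    int32_t Sbfx(uint32_t rn, unsigned lsb, unsigned width) {
      uint32_t field_at_top = rn << (32u - lsb - width);          // field now in the high bits
      return static_cast<int32_t>(field_at_top) >> (32 - width);  // shift back, sign-extending
    }
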
assembler_arm32_test.cc
    402  return std::bind(f, GetAssembler(), _1, _2);
    407  return std::bind(f, GetAssembler(), _1, _2, _3);
    413  return std::bind(f, GetAssembler(), _1, _2, _3, _4);
    419  return std::bind(f, GetAssembler(), _1, _2, _3, _4, _5);
    693  GetAssembler()->vmstat();
    701  GetAssembler()->ldrexd(arm::R0, arm::R1, arm::R0);
    702  GetAssembler()->ldrexd(arm::R0, arm::R1, arm::R1);
    703  GetAssembler()->ldrexd(arm::R0, arm::R1, arm::R2);
    713  GetAssembler()->strexd(arm::R9, arm::R0, arm::R1, arm::R0);
    714  GetAssembler()->strexd(arm::R9, arm::R0, arm::R1, arm::R1)
    [all...]

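The std::bind calls at lines 402-419 adapt member-function emitters of two to five arguments into plain callables for a shared test driver. A minimal sketch of the two-argument case, using a hypothetical Asm type rather than the real assembler:

    #include <functional>

    using std::placeholders::_1;
    using std::placeholders::_2;

    struct Asm { void Emit2(int a, int b) { /* encode */ } };  // hypothetical emitter

    // Bind the assembler instance as the implicit 'this' parameter;
    // the two instruction operands stay open as _1 and _2.
    std::function<void(int, int)> Bind2(void (Asm::*f)(int, int), Asm* assembler) {
      return std::bind(f, assembler, _1, _2);
    }
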
assembler_arm_test.h
    147  (Base::GetAssembler()->*f)(*reg1, *reg2, i, j, c);
    239  (Base::GetAssembler()->*f)(*reg1, *reg2, i, j, c);
    302  (Base::GetAssembler()->*f)(*reg1, *reg2, c);
    377  (Base::GetAssembler()->*f)(*reg1, *reg2, *reg3, c);
    432  (Base::GetAssembler()->*f)(*reg, shift, c);
    497  (Base::GetAssembler()->*f)(*reg1, *reg2, shift, c);

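Each helper in assembler_arm_test.h invokes the emitter through C++'s pointer-to-member call operator, (object->*f)(args...). A hedged sketch with a hypothetical two-register emitter:

    struct Asm { void Add(int rd, int rn) { /* encode */ } };  // hypothetical

    void Drive(Asm* assembler, void (Asm::*f)(int, int)) {
      (assembler->*f)(0, 1);  // same call as assembler->Add(0, 1) when f == &Asm::Add
    }
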
/art/compiler/utils/x86/

assembler_x86_test.cc
    103  GetAssembler()->movl(x86::EAX, x86::EBX);
    109  GetAssembler()->psrlq(x86::XMM0, CreateImmediate(32));
    115  GetAssembler()->punpckldq(x86::XMM0, x86::XMM1);
    121  GetAssembler()->LoadLongConstant(x86::XMM0, 51);
    131  GetAssembler()->LockCmpxchgl(x86::Address(
    134  GetAssembler()->LockCmpxchgl(x86::Address(
    137  GetAssembler()->LockCmpxchgl(x86::Address(
    140  GetAssembler()->LockCmpxchgl(x86::Address(
    142  GetAssembler()->LockCmpxchgl(x86::Address(
    156  GetAssembler()->LockCmpxchg8b(x86::Address
    [all...]

/art/compiler/optimizing/

intrinsics_x86_64.cc
     41  X86_64Assembler* IntrinsicCodeGeneratorX86_64::GetAssembler() {
     42  return reinterpret_cast<X86_64Assembler*>(codegen_->GetAssembler());
     55  #define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->
    197  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
    200  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
    211  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
    214  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
    253  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
    261  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
    269  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
    [all...]

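Line 55 is the '__' macro idiom used across the intrinsics and code-generator files: '__ op(...)' expands to an emitter call on the assembler, so code-generation routines read almost like assembly listings. A hedged sketch with a stand-in class rather than the real X86_64Assembler:

    struct MyAssembler { void nop() { /* emit a NOP */ } };  // stand-in, not the ART class

    #define __ assembler->
    void EmitNop(MyAssembler* assembler) {
      __ nop();  // the preprocessor turns this into: assembler->nop();
    }
    #undef __
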
intrinsics_arm.cc
     33  ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
     34  return codegen_->GetAssembler();
     41  #define __ codegen->GetAssembler()->
    188  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
    191  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
    202  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
    205  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
    241  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
    249  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
    298  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
    [all...]

intrinsics_arm.h
     76  ArmAssembler* GetAssembler();

intrinsics_x86.h
     72  X86Assembler* GetAssembler();

intrinsics_x86_64.h
     72  X86_64Assembler* GetAssembler();

intrinsics_x86.cc
     45  X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
     46  return reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
     59  #define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
    232  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
    235  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
    246  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
    249  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
    301  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
    317  X86Assembler* assembler = GetAssembler();
    330  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
    [all...]

code_generator_arm64.h
    151  Arm64Assembler* GetAssembler() const { return assembler_; }
    152  vixl::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->vixl_masm_; }
    211  Arm64Assembler* GetAssembler() const;
    213  return GetAssembler()->vixl_masm_;
    267  Arm64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
    268  vixl::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->vixl_masm_; }
    359  inline Arm64Assembler* ParallelMoveResolverARM64::GetAssembler() const {
    360  return codegen_->GetAssembler();

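The ARM64 backend layers its assembler over VIXL: Arm64Assembler exposes the underlying vixl::MacroAssembler as vixl_masm_, and GetVIXLAssembler() simply unwraps it so callers can emit VIXL instructions directly. A hedged sketch of that layering with stand-in types:

    struct MacroAssembler {};  // stand-in for vixl::MacroAssembler
    struct Arm64Assembler { MacroAssembler* vixl_masm_; };

    struct CodeGeneratorARM64Sketch {
      Arm64Assembler* GetAssembler() const { return assembler_; }
      MacroAssembler* GetVIXLAssembler() { return GetAssembler()->vixl_masm_; }
      Arm64Assembler* assembler_;
    };
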
code_generator_arm.h
    109  ArmAssembler* GetAssembler() const;
    170  ArmAssembler* GetAssembler() const { return assembler_; }
    235  ArmAssembler* GetAssembler() OVERRIDE {

code_generator_mips64.h
    120  Mips64Assembler* GetAssembler() const;
    179  Mips64Assembler* GetAssembler() const { return assembler_; }
    229  Mips64Assembler* GetAssembler() OVERRIDE { return &assembler_; }

code_generator_x86.h
    103  X86Assembler* GetAssembler() const;
    153  X86Assembler* GetAssembler() const { return assembler_; }
    228  X86Assembler* GetAssembler() OVERRIDE {

code_generator_x86_64.h
    112  X86_64Assembler* GetAssembler() const;
    163  X86_64Assembler* GetAssembler() const { return assembler_; }
    228  X86_64Assembler* GetAssembler() OVERRIDE {

optimizing_cfi_test.cc
     82  Assembler* opt_asm = code_gen->GetAssembler();

optimizing_compiler.cc
    436  ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),
    473  ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),
    551  codegen->GetAssembler()->cfi().SetEnabled(

code_generator.cc
    167  DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_));
    203  size_t code_size = GetAssembler()->CodeSize();
    207  GetAssembler()->FinalizeInstructions(code);
    663  pc_info.native_pc = GetAssembler()->CodeSize();
    [all...]

intrinsics_arm64.cc
     59  return codegen_->GetAssembler()->vixl_masm_;
     66  #define __ codegen->GetAssembler()->vixl_masm_->
    666  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
    776  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
    [all...]

code_generator_arm64.cc
    506  GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    507  GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
    509  GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
    516  GetAssembler()->cfi().RememberState();
    519  GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
    521  GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
    524  GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
    527  GetAssembler()->cfi().RestoreState();
    528  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
    [all...]

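The epilogue at lines 516-528 brackets its unwind-info changes with RememberState()/RestoreState(): the CFA adjustment for the popped frame applies only to the return path, and the prologue's state is then reinstated for any code emitted after the return. A hedged sketch of that bookkeeping, with a stand-in CFI class rather than the real cfi() stream:

    struct CfiSketch {  // stand-in for the assembler's cfi() stream
      void AdjustCFAOffset(int delta) { cfa_offset_ += delta; }
      void RememberState() { saved_offset_ = cfa_offset_; }
      void RestoreState() { cfa_offset_ = saved_offset_; }
      void DefCFAOffset(int offset) { cfa_offset_ = offset; }
      int cfa_offset_ = 0;
      int saved_offset_ = 0;
    };

    void GenerateEpilogueSketch(CfiSketch& cfi, int frame_size) {
      cfi.RememberState();               // snapshot the in-frame state
      cfi.AdjustCFAOffset(-frame_size);  // frame is popped on the return path
      // ... emit the return here ...
      cfi.RestoreState();                // later code unwinds like the body again
      cfi.DefCFAOffset(frame_size);
    }
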
code_generator.h
    158  virtual Assembler* GetAssembler() = 0;

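This pure virtual is the base-class interface every backend overrides; the headers above narrow the return type with covariant returns (ArmAssembler*, X86Assembler*, and so on), which is why backend-internal code needs no cast while code that goes through the base interface, like the intrinsics files, does. A minimal sketch:

    struct Assembler { virtual ~Assembler() = default; };
    struct X86Assembler : Assembler { /* x86 emitters */ };

    struct CodeGenerator {
      virtual ~CodeGenerator() = default;
      virtual Assembler* GetAssembler() = 0;
    };

    struct CodeGeneratorX86Sketch : CodeGenerator {
      // Covariant return: same virtual, but callers holding the derived
      // code generator get the derived assembler type back, cast-free.
      X86Assembler* GetAssembler() override { return &assembler_; }
      X86Assembler assembler_;
    };
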
code_generator_arm.cc
     56  #define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
    319  #define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
    493  assembler_(codegen->GetAssembler()),
    [all...]

code_generator_mips64.cc
    106  #define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->
    417  #define __ down_cast<Mips64Assembler*>(GetAssembler())->
    424  Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
    425  return codegen_->GetAssembler();
    [all...]

/art/compiler/utils/x86_64/

assembler_x86_64_test.cc
    593  GetAssembler()->LockCmpxchgl(x86_64::Address(
    596  GetAssembler()->LockCmpxchgl(x86_64::Address(
    599  GetAssembler()->LockCmpxchgl(x86_64::Address(
    602  GetAssembler()->LockCmpxchgl(x86_64::Address(
    604  GetAssembler()->LockCmpxchgl(x86_64::Address(
    618  GetAssembler()->LockCmpxchgq(x86_64::Address(
    621  GetAssembler()->LockCmpxchgq(x86_64::Address(
    624  GetAssembler()->LockCmpxchgq(x86_64::Address(
    627  GetAssembler()->LockCmpxchgq(x86_64::Address(
    629  GetAssembler()->LockCmpxchgq(x86_64::Address
    [all...]

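These cases, like the 32-bit ones in assembler_x86_test.cc above, exercise the encodings of LOCK CMPXCHG across addressing modes. As a hedged, non-ART illustration of the instruction being encoded: std::atomic's compare-exchange lowers to lock cmpxchg on x86:

    #include <atomic>
    #include <cstdint>

    // On x86-64 these typically compile to lock cmpxchgl / lock cmpxchgq.
    bool CasInt32(std::atomic<int32_t>& mem, int32_t expected, int32_t desired) {
      return mem.compare_exchange_strong(expected, desired);
    }
    bool CasInt64(std::atomic<int64_t>& mem, int64_t expected, int64_t desired) {
      return mem.compare_exchange_strong(expected, desired);
    }
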
/art/compiler/utils/

assembler_test.h
     48  Ass* GetAssembler() {

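The shared fixture is templated on the assembler type, so every backend test gets a concrete GetAssembler() with no casting. A hedged sketch of the shape (the real header adds buffers and a test driver around this):

    #include <memory>

    template <typename Ass>  // e.g. the x86 or ARM assembler class
    class AssemblerTestSketch {
     public:
      Ass* GetAssembler() { return assembler_.get(); }

     protected:
      std::unique_ptr<Ass> assembler_ = std::make_unique<Ass>();
    };
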