/art/compiler/utils/x86/ |
assembler_x86_test.cc |
  110 GetAssembler()->movl(x86::EAX, x86::EBX);
  116 GetAssembler()->movntl(x86::Address(x86::EDI, x86::EBX, x86::TIMES_4, 12), x86::EAX);
  117 GetAssembler()->movntl(x86::Address(x86::EDI, 0), x86::EAX);
  126 GetAssembler()->LoadLongConstant(x86::XMM0, 51);
  136 GetAssembler()->LockCmpxchgl(x86::Address(
  139 GetAssembler()->LockCmpxchgl(x86::Address(
  142 GetAssembler()->LockCmpxchgl(x86::Address(
  145 GetAssembler()->LockCmpxchgl(x86::Address(
  147 GetAssembler()->LockCmpxchgl(x86::Address(
  161 GetAssembler()->LockCmpxchg8b(x86::Address [all...] |
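These matches are the x86 assembler unit tests: GetAssembler() returns the X86Assembler under test, and each call appends one encoded instruction to its buffer. A minimal sketch of the pattern, assuming the AssemblerTest fixture's DriverStr() comparison helper (the test name and expected AT&T text are illustrative):

    TEST_F(AssemblerX86Test, Movntl) {
      // Each call encodes one instruction into the fixture's buffer.
      GetAssembler()->movntl(x86::Address(x86::EDI, x86::EBX, x86::TIMES_4, 12), x86::EAX);
      GetAssembler()->movntl(x86::Address(x86::EDI, 0), x86::EAX);
      // The fixture assembles the expected text with a reference toolchain
      // and diffs the result against the bytes emitted above.
      DriverStr("movnti %eax, 12(%edi,%ebx,4)\n"
                "movnti %eax, (%edi)\n",
                "movntl");
    }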
/art/compiler/utils/x86_64/ |
assembler_x86_64_test.cc |
  689 GetAssembler()->LockCmpxchgl(x86_64::Address(
  692 GetAssembler()->LockCmpxchgl(x86_64::Address(
  695 GetAssembler()->LockCmpxchgl(x86_64::Address(
  698 GetAssembler()->LockCmpxchgl(x86_64::Address(
  700 GetAssembler()->LockCmpxchgl(x86_64::Address(
  714 GetAssembler()->LockCmpxchgq(x86_64::Address(
  717 GetAssembler()->LockCmpxchgq(x86_64::Address(
  720 GetAssembler()->LockCmpxchgq(x86_64::Address(
  723 GetAssembler()->LockCmpxchgq(x86_64::Address(
  725 GetAssembler()->LockCmpxchgq(x86_64::Address [all...] |
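The x86-64 tests cover both widths of the locked compare-exchange. cmpxchg implicitly compares memory against RAX (EAX for the 32-bit form) and leaves the observed value there, so a compare-and-swap helper keeps the expected value in that register; a hedged sketch (the function name and register roles are illustrative):

    // Emit a 64-bit CAS on (base + 0): if memory equals RAX, new_value is
    // stored; either way RAX ends up holding what was in memory. The lock
    // prefix makes the read-modify-write atomic; ZF reports success.
    void EmitCompareAndSwap64(x86_64::X86_64Assembler* assembler,
                              x86_64::CpuRegister base,
                              x86_64::CpuRegister new_value) {
      assembler->LockCmpxchgq(x86_64::Address(base, 0), new_value);
    }

The x86 tests above additionally exercise LockCmpxchg8b, the 32-bit-mode way to CAS a 64-bit value through the EDX:EAX and ECX:EBX register pairs.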
/art/compiler/optimizing/ |
intrinsics_arm_vixl.h | 72 ArmVIXLAssembler* GetAssembler();
|
intrinsics_mips.h | 76 MipsAssembler* GetAssembler();
|
intrinsics_mips64.h | 72 Mips64Assembler* GetAssembler();
|
intrinsics_x86.h | 72 X86Assembler* GetAssembler();
|
intrinsics_x86_64.h | 72 X86_64Assembler* GetAssembler();
|
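Each backend header above declares the same accessor with a covariant, architecture-specific return type, so intrinsic bodies never cast at the point of use. The definition narrows the generic pointer exactly once, as the x86 variant below (file lines 53-54) shows:

    X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
      // down_cast is ART's debug-checked static_cast; the generic Assembler
      // owned by the code generator is known to be an X86Assembler here.
      return down_cast<X86Assembler*>(codegen_->GetAssembler());
    }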
intrinsics_arm_vixl.cc |
  62 ArmVIXLAssembler* IntrinsicCodeGeneratorARMVIXL::GetAssembler() {
  63 return codegen_->GetAssembler();
  92 ArmVIXLAssembler* assembler = down_cast<ArmVIXLAssembler*>(codegen->GetAssembler());
  181 ArmVIXLAssembler* assembler = arm_codegen->GetAssembler();
  250 assembler_(codegen->GetAssembler()),
  306 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  309 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  320 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  323 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  345 ArmVIXLAssembler* assembler = codegen->GetAssembler(); [all...] |
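The MoveFPToInt/MoveIntToFP calls above fold the 32- and 64-bit Float/Double raw-bits intrinsics into one helper selected by the is64bit flag. A condensed sketch of the FP-to-int direction, assuming the register-extraction helpers and `__` macro used in intrinsics_arm_vixl.cc (the details are illustrative):

    #define __ assembler->GetVIXLAssembler()->

    static void MoveFPToInt(LocationSummary* locations, bool is64bit,
                            ArmVIXLAssembler* assembler) {
      Location input = locations->InAt(0);
      Location output = locations->Out();
      if (is64bit) {
        // doubleToRawLongBits: copy a D register into a core register pair.
        __ Vmov(LowRegisterFrom(output), HighRegisterFrom(output), DRegisterFrom(input));
      } else {
        // floatToRawIntBits: copy an S register into a core register.
        __ Vmov(RegisterFrom(output), SRegisterFrom(input));
      }
    }

    #undef __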
optimizing_cfi_test.cc |
  63 code_gen_->GetAssembler()->cfi().SetEnabled(true);
  102 Assembler* opt_asm = code_gen_->GetAssembler();
  207 ->GetAssembler())->GetVIXLAssembler()->
  237 #define __ down_cast<mips::MipsAssembler*>(GetCodeGenerator()->GetAssembler())->
  267 #define __ down_cast<mips64::Mips64Assembler*>(GetCodeGenerator()->GetAssembler())->
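The two #define matches are ART's stock emission idiom: `__` expands to a pointer to the backend assembler, so the code that follows reads like an assembly listing. A sketch of the idiom (the instructions are chosen only for illustration):

    #define __ down_cast<x86::X86Assembler*>(GetCodeGenerator()->GetAssembler())->

    void EmitPrologueSketch() {
      __ pushl(x86::EBP);           // expands to down_cast<...>(...)->pushl(...)
      __ movl(x86::EBP, x86::ESP);
    }

    #undef __  // conventionally undefined at the end of the file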
|
code_generator_arm_vixl.cc |
  296 arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);
  330 arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset); [all...] |
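These two matches are the slow-path spill machinery: before calling into the runtime, the live core registers are stored as one block, then reloaded from the same frame offset on the way out. Condensed from the surrounding save/restore logic (the mask and offset computation are simplified away):

    // core_spills is a bitmask of live core registers; orig_offset is where
    // the spill block starts in the current frame.
    arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);
    // ... runtime call that may clobber the spilled registers ...
    arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset);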
intrinsics_utils.h | 51 Assembler* assembler = codegen->GetAssembler();
|
intrinsics_x86.cc |
  53 X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  54 return down_cast<X86Assembler*>(codegen_->GetAssembler());
  78 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT
  239 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  242 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  253 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  256 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  308 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
  324 X86Assembler* assembler = GetAssembler();
  337 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler()); [all...] |
intrinsics_x86_64.cc |
  46 X86_64Assembler* IntrinsicCodeGeneratorX86_64::GetAssembler() {
  47 return down_cast<X86_64Assembler*>(codegen_->GetAssembler());
  71 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
  166 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  169 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  180 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  183 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  222 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
  230 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
  238 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler()); [all...] |
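In both x86 files, GenReverseBytes serves the Integer, Long, and Short reverseBytes intrinsics from one routine, dispatching on the primitive type. A sketch in the style of the x86-64 variant (the in-place register use follows the file's conventions; treat the body as illustrative):

    static void GenReverseBytes(LocationSummary* locations,
                                Primitive::Type type,
                                x86_64::X86_64Assembler* assembler) {
      x86_64::CpuRegister out = locations->Out().AsRegister<x86_64::CpuRegister>();
      switch (type) {
        case Primitive::kPrimShort:
          assembler->bswapl(out);                       // reverse all four bytes,
          assembler->sarl(out, x86_64::Immediate(16));  // then shift down, sign-extending
          break;
        case Primitive::kPrimInt:
          assembler->bswapl(out);
          break;
        case Primitive::kPrimLong:
          assembler->bswapq(out);
          break;
        default:
          LOG(FATAL) << "Unexpected type for reverse-bytes: " << type;
      }
    }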
intrinsics_mips64.cc |
  39 Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
  40 return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
  47 #define __ codegen->GetAssembler()->
  167 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  176 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  204 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  213 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  255 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
  264 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
  273 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler()); [all...] |
intrinsics_mips.cc |
  39 MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() {
  40 return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler());
  59 #define __ codegen->GetAssembler()->
  183 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  192 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  225 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
  234 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
  412 GetAssembler());
  426 GetAssembler());
  440 GetAssembler()); [all...] |
code_generator_arm64.h |
  261 Arm64Assembler* GetAssembler() const { return assembler_; }
  262 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }
  392 Arm64Assembler* GetAssembler() const;
  394 return GetAssembler()->GetVIXLAssembler();
  442 Arm64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
  443 const Arm64Assembler& GetAssembler() const OVERRIDE { return assembler_; }
  444 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } [all...] |
code_generator_mips.h |
  149 MipsAssembler* GetAssembler() const;
  224 MipsAssembler* GetAssembler() const { return assembler_; }
  393 MipsAssembler* GetAssembler() OVERRIDE { return &assembler_; }
  394 const MipsAssembler& GetAssembler() const OVERRIDE { return assembler_; }
|
code_generator_mips64.h |
  146 Mips64Assembler* GetAssembler() const;
  221 Mips64Assembler* GetAssembler() const { return assembler_; }
  356 Mips64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
  357 const Mips64Assembler& GetAssembler() const OVERRIDE { return assembler_; }
|
code_generator_x86.h |
  138 X86Assembler* GetAssembler() const;
  201 X86Assembler* GetAssembler() const { return assembler_; }
  364 X86Assembler* GetAssembler() OVERRIDE {
  368 const X86Assembler& GetAssembler() const OVERRIDE {
|
code_generator_x86_64.h |
  137 X86_64Assembler* GetAssembler() const;
  202 X86_64Assembler* GetAssembler() const { return assembler_; }
  342 X86_64Assembler* GetAssembler() OVERRIDE {
  346 const X86_64Assembler& GetAssembler() const OVERRIDE {
|
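Across the five backend headers above the shape is identical: the code generator owns its assembler and overrides the base-class accessor pair, returning it by address for emission and by const reference for queries; the VIXL-based backends then chain through the same accessor to the wrapped MacroAssembler. Condensed from code_generator_arm64.h:

    Arm64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
    const Arm64Assembler& GetAssembler() const OVERRIDE { return assembler_; }
    // VIXL backends expose the underlying MacroAssembler via the accessor.
    vixl::aarch64::MacroAssembler* GetVIXLAssembler() {
      return GetAssembler()->GetVIXLAssembler();
    }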
code_generator.cc |
  193 start_offset_ = codegen_.GetAssembler().CodeSize();
  201 instruction_, start_offset_, codegen_.GetAssembler().CodeSize());
  218 code_start = GetAssembler()->CodeSize();
  224 disasm_info_->AddSlowPathInterval(slow_path, code_start, GetAssembler()->CodeSize());
  239 size_t frame_start = GetAssembler()->CodeSize();
  241 DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_));
  243 disasm_info_->SetFrameEntryInterval(frame_start, GetAssembler()->CodeSize());
  284 size_t code_size = GetAssembler()->CodeSize();
  288 GetAssembler()->FinalizeInstructions(code);
  811 uint32_t native_pc = GetAssembler()->CodePosition() [all...] |
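code_generator.cc treats GetAssembler()->CodeSize() as a cursor into the instruction buffer: sampling it before and after an emission brackets that region for the disassembly listing. Condensed from the frame-entry case (file lines 239-243):

    size_t frame_start = GetAssembler()->CodeSize();  // offset before emission
    GenerateFrameEntry();
    DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_));
    if (disasm_info_ != nullptr) {
      disasm_info_->SetFrameEntryInterval(frame_start, GetAssembler()->CodeSize());
    }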
code_generator_arm_vixl.h |
  247 ArmVIXLAssembler* GetAssembler() const;
  316 ArmVIXLAssembler* GetAssembler() const { return assembler_; }
  317 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }
  455 ArmVIXLAssembler* GetAssembler() OVERRIDE { return &assembler_; }
  457 const ArmVIXLAssembler& GetAssembler() const OVERRIDE { return assembler_; }
  459 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } [all...] |
emit_swap_mips_test.cc | 96 assembler_ = codegen_->GetAssembler();
|
/art/compiler/utils/ |
jni_macro_assembler_test.h | 36 Ass* GetAssembler() {
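The JNI macro-assembler test base is templated on the assembler type, so each derived fixture inherits a typed GetAssembler() with no casts. A condensed sketch of that shape (the lazy construction and the CreateAssembler() factory are assumptions):

    template <typename Ass>
    class JNIMacroAssemblerTest : public testing::Test {
     public:
      Ass* GetAssembler() {
        if (assembler_ == nullptr) {
          assembler_ = CreateAssembler();  // hypothetical factory hook
        }
        return assembler_.get();
      }

     private:
      std::unique_ptr<Ass> assembler_;
    };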
|
/art/compiler/utils/arm/ |
assembler_arm_test.h |
  153 (Base::GetAssembler()->*f)(*reg1, *reg2, i, j, c);
  245 (Base::GetAssembler()->*f)(*reg1, *reg2, i, j, c);
  308 (Base::GetAssembler()->*f)(*reg1, *reg2, c);
  383 (Base::GetAssembler()->*f)(*reg1, *reg2, *reg3, c);
  438 (Base::GetAssembler()->*f)(*reg, shift, c);
  503 (Base::GetAssembler()->*f)(*reg1, *reg2, shift, c);
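Every match in this file is the same trick: the repeat-driver templates accept a pointer-to-member-function f and bind it to the assembler under test with ->*, so one loop body can exercise a whole family of instructions across all register, immediate, shift, and condition combinations. A simplified sketch (the real helpers also build the expected-output strings):

    template <typename Ass, typename RegType>
    void DriveAllRegPairs(Ass* assembler,
                          void (Ass::*f)(RegType, RegType, arm::Condition),
                          const std::vector<RegType*>& registers,
                          arm::Condition c) {
      for (RegType* reg1 : registers) {
        for (RegType* reg2 : registers) {
          (assembler->*f)(*reg1, *reg2, c);  // bind the member pointer, then call
        }
      }
    }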
|