/art/runtime/
  instruction_set.cc
    30: case kX86_64:
    52: return kX86_64;
    70: case kX86_64:
    107: case kX86_64:
  instruction_set_test.cc
    29: EXPECT_EQ(kX86_64, GetInstructionSetFromString("x86_64"));
    40: EXPECT_STREQ("x86_64", GetInstructionSetString(kX86_64));
  instruction_set.h
    35: kX86_64,
    50: static constexpr InstructionSet kRuntimeISA = kX86_64;
    92: case kX86_64:
    116: case kX86_64:
    142: case kX86_64:
    165: case kX86_64:
  vmap_table.h
    68: bool target64 = (kRuntimeISA == kArm64) || (kRuntimeISA == kX86_64);
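
Taken together, the runtime hits above sketch the basic plumbing: instruction_set.h adds the kX86_64 enumerator and can pin kRuntimeISA to it, and vmap_table.h keys 64-bit behaviour off that constant. A minimal standalone sketch of that pattern, assuming an x86 build (the enumerator list is trimmed, and the preprocessor selection is a simplification of what instruction_set.h really does):

    #include <cstdio>

    // Enumerator names are taken from the hits above; the ordering matches the
    // COMPILE_ASSERT in frontend.cc further down (kX86_64 == 5). Other ISAs
    // such as kMips are omitted from this sketch.
    enum InstructionSet {
      kNone, kArm, kArm64, kThumb2, kX86, kX86_64
    };

    // instruction_set.h derives kRuntimeISA from the compiler's target macros;
    // this sketch only handles the x86 flavours and falls back to kNone.
    #if defined(__x86_64__)
    static constexpr InstructionSet kRuntimeISA = kX86_64;
    #elif defined(__i386__)
    static constexpr InstructionSet kRuntimeISA = kX86;
    #else
    static constexpr InstructionSet kRuntimeISA = kNone;
    #endif

    int main() {
      // Same shape as the vmap_table.h hit: a compile-time "is this a 64-bit
      // target?" check against kRuntimeISA.
      constexpr bool target64 = (kRuntimeISA == kArm64) || (kRuntimeISA == kX86_64);
      std::printf("64-bit runtime ISA: %s\n", target64 ? "yes" : "no");
      return 0;
    }
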
/art/runtime/entrypoints/quick/
  callee_save_frame.h
    54: isa == kX86_64 ? x86_64::X86_64CalleeSaveFrameSize(type) :
    66: isa == kX86_64 ? kX86_64PointerSize :
  quick_trampoline_entrypoints_test.cc
    80: CHECK_FRAME_SIZE(kX86_64);
    90: EXPECT_EQ(GetInstructionSetPointerSize(kX86_64), GetConstExprPointerSize(kX86_64));
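
callee_save_frame.h selects per-ISA frame and pointer sizes with chained conditionals so the result stays usable in constexpr context, and quick_trampoline_entrypoints_test.cc cross-checks the constexpr path against GetInstructionSetPointerSize(). A cut-down sketch of that ternary-chain idiom, covering only the x86 flavours (the real helpers handle every ISA and the callee-save frame sizes as well):

    #include <cstddef>

    enum InstructionSet { kNone, kArm, kArm64, kThumb2, kX86, kX86_64 };

    // Pointer sizes per ISA; kX86_64PointerSize is named in the
    // callee_save_frame.h hit, the values are the obvious ones.
    static constexpr size_t kX86PointerSize = 4;
    static constexpr size_t kX86_64PointerSize = 8;

    // C++11 constexpr functions are limited to a single return statement,
    // which is presumably why the real header chains conditionals instead of
    // using a switch. ISAs this sketch skips fall back to the host size.
    constexpr size_t GetConstExprPointerSize(InstructionSet isa) {
      return isa == kX86_64 ? kX86_64PointerSize :
             isa == kX86    ? kX86PointerSize :
                              sizeof(void*);
    }

    static_assert(GetConstExprPointerSize(kX86_64) == 8U,
                  "x86-64 uses 8-byte pointers");

    int main() { return 0; }
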
/art/disassembler/
  disassembler.cc
    39: } else if (instruction_set == kX86_64) {
/art/compiler/
  compilers.cc
    115: case kX86_64:
    136: if (driver.GetInstructionSet() == kX86_64) {
  compiled_method.cc
    96: case kX86_64:
    115: case kX86_64:
/art/compiler/jni/quick/
  calling_convention.cc
    43: case kX86_64:
    116: case kX86_64:
/art/runtime/arch/
  arch_test.cc
    285: CheckFrameSize(InstructionSet::kX86_64, Runtime::kSaveAll, FRAME_SIZE_SAVE_ALL_CALLEE_SAVE);
    290: CheckFrameSize(InstructionSet::kX86_64, Runtime::kRefsOnly, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE);
    295: CheckFrameSize(InstructionSet::kX86_64, Runtime::kRefsAndArgs, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE);
/art/compiler/optimizing/
  register_allocator.h
    71: || instruction_set == kX86_64
  code_generator_x86_64.h
    176: return InstructionSet::kX86_64;
  optimizing_compiler.cc
    89: if (instruction_set != kX86 && instruction_set != kX86_64 && instruction_set != kThumb2) {
  codegen_test.cc
    92: codegen = CodeGenerator::Create(&arena, graph, kX86_64);
  code_generator.cc
    267: case kX86_64: {
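
In the optimizing backend, optimizing_compiler.cc only accepts kX86, kX86_64 and kThumb2, and code_generator.cc grows a kX86_64 case in its factory switch (codegen_test.cc drives it via CodeGenerator::Create(&arena, graph, kX86_64)). The sketch below keeps only that switch-on-ISA shape; the class names and the slimmed-down signature are stand-ins, not the real API:

    #include <memory>

    enum InstructionSet { kNone, kArm, kArm64, kThumb2, kX86, kX86_64 };

    // Stand-ins for the real code generators; only the dispatch shape matters.
    struct CodeGenerator { virtual ~CodeGenerator() {} };
    struct CodeGeneratorX86 : CodeGenerator {};
    struct CodeGeneratorX86_64 : CodeGenerator {};

    // Hypothetical slimmed-down factory; the real one also takes the arena
    // and the graph being compiled.
    std::unique_ptr<CodeGenerator> CreateCodeGenerator(InstructionSet isa) {
      switch (isa) {
        case kX86:
          return std::unique_ptr<CodeGenerator>(new CodeGeneratorX86());
        case kX86_64:  // the case the code_generator.cc hit adds
          return std::unique_ptr<CodeGenerator>(new CodeGeneratorX86_64());
        default:
          // No optimizing code generator for this ISA; the caller falls back
          // to another compiler, as the optimizing_compiler.cc guard suggests.
          return nullptr;
      }
    }

    int main() {
      return CreateCodeGenerator(kX86_64) != nullptr ? 0 : 1;
    }
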
/art/compiler/trampolines/
  trampoline_compiler.cc
    145: assembler(static_cast<x86_64::X86_64Assembler*>(Assembler::Create(kX86_64)));
    165: case kX86_64:
/art/compiler/dex/quick/
  gen_invoke.cc
    78: if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
    223: if (cu_->instruction_set == kArm64 || cu_->instruction_set == kX86_64) {
    479: if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
    504: if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
    507: } else if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
    535: } else if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
    553: } else if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
    663: if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
    [all...]
  gen_common.cc
    453: case kX86_64:
    482: if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
    [all...]
  mir_to_lir.cc
    81: if (cu_->instruction_set == kX86_64) {
    174: if (cu_->instruction_set == kX86_64) {
    [all...]
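
The Quick backend hits are mostly the same two guards written out inline: "is this an x86-family target?" and "is this a 64-bit target?". The helpers below are hypothetical (gen_invoke.cc spells the comparisons out each time); they only name the predicates these hits repeat:

    enum InstructionSet { kNone, kArm, kArm64, kThumb2, kX86, kX86_64 };

    // Hypothetical helpers; the real code writes the comparisons out inline.
    inline bool IsX86Family(InstructionSet isa) {
      return isa == kX86 || isa == kX86_64;
    }

    inline bool Is64BitIsa(InstructionSet isa) {
      return isa == kArm64 || isa == kX86_64;
    }

    int main() {
      // x86-64 takes both the x86-family fast paths (the line-78 style guard)
      // and the 64-bit-only paths (the line-223 style guard).
      return (IsX86Family(kX86_64) && Is64BitIsa(kX86_64)) ? 0 : 1;
    }
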
/art/compiler/utils/
  assembler.cc
    119: case kX86_64:
/art/runtime/arch/x86/
  fault_handler_x86.cc
    259: reinterpret_cast<uint8_t*>(*out_sp) - GetStackOverflowReservedBytes(kX86_64));
    406: uintptr_t overflow_addr = sp - GetStackOverflowReservedBytes(kX86_64);
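
fault_handler_x86.cc recognises an implicit stack-overflow probe by recomputing the address the probe touches: the stack pointer minus the ISA's reserved region. A toy version of that computation (the reserved-byte values here are placeholders, not the runtime's real constants behind GetStackOverflowReservedBytes()):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    enum InstructionSet { kNone, kArm, kArm64, kThumb2, kX86, kX86_64 };

    // Placeholder sizes; the runtime's real values live behind
    // GetStackOverflowReservedBytes() and differ per ISA.
    constexpr size_t GetStackOverflowReservedBytes(InstructionSet isa) {
      return isa == kX86_64 ? 32 * 1024 : 16 * 1024;
    }

    int main() {
      // Same computation as the line-406 hit: the implicit overflow probe
      // touches sp minus the reserved region, so the fault handler recomputes
      // that address to decide whether a SIGSEGV is really a stack overflow.
      uintptr_t sp = 0x7fff0000u;  // pretend stack pointer of faulting thread
      uintptr_t overflow_addr = sp - GetStackOverflowReservedBytes(kX86_64);
      std::printf("expected probe address: 0x%lx\n",
                  static_cast<unsigned long>(overflow_addr));
      return 0;
    }
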
/art/compiler/dex/quick/x86/
  call_x86.cc
    221: InstructionSet isa = cu_->target64 ? kX86_64 : kX86;
/art/compiler/dex/
  frontend.cc
    89: COMPILE_ASSERT(5U == static_cast<size_t>(kX86_64), kX86_64_not_5);
    106: // 5 = kX86_64.
    148: // 5 = kX86_64.
    444: // 5 = kX86_64.
    465: // 5 = kX86_64.
    662: (cu.instruction_set == kX86_64) ||
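
frontend.cc indexes several per-ISA tables by the raw enum value, which is why it pins the ordering with COMPILE_ASSERT(5U == static_cast<size_t>(kX86_64), kX86_64_not_5) and annotates entries with "// 5 = kX86_64". A sketch of that idiom using static_assert; the table name and contents are made up for illustration:

    #include <cstddef>
    #include <cstdint>

    enum InstructionSet { kNone, kArm, kArm64, kThumb2, kX86, kX86_64 };

    // The same pin the frontend.cc hit applies with COMPILE_ASSERT (the
    // pre-C++11 spelling of static_assert).
    static_assert(static_cast<size_t>(kX86_64) == 5, "kX86_64_not_5");

    // Made-up per-ISA table in the style of the "// 5 = kX86_64" comments:
    // one entry per InstructionSet, indexed by the enum value.
    static constexpr uint32_t kExamplePerIsaFlags[] = {
      0,  // 0 = kNone
      0,  // 1 = kArm
      0,  // 2 = kArm64
      0,  // 3 = kThumb2
      0,  // 4 = kX86
      0,  // 5 = kX86_64
    };

    static_assert(sizeof(kExamplePerIsaFlags) / sizeof(kExamplePerIsaFlags[0]) == 6,
                  "one entry per InstructionSet");

    int main() { return 0; }
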
/art/runtime/base/
  mutex.cc
    849: if (kRuntimeISA == kX86 || kRuntimeISA == kX86_64) {
    [all...]