// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/interface-descriptors.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


// Probe the primary or secondary table of the stub cache.
// If the entry is found in the cache, the generated code jumps to the first
// instruction of the stub in the cache.
// If there is a miss, the generated code falls through.
//
// The 'receiver', 'name' and 'offset' registers are preserved on miss;
// 'scratch', 'scratch2' and 'scratch3' are clobbered.
static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
                       Code::Flags flags, StubCache::Table table,
                       Register receiver, Register name, Register offset,
                       Register scratch, Register scratch2, Register scratch3) {
  // Some code below relies on the fact that the Entry struct contains
  // 3 pointers (name, code, map).
  STATIC_ASSERT(sizeof(StubCache::Entry) == (3 * kPointerSize));

  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address());
  uintptr_t value_off_addr =
      reinterpret_cast<uintptr_t>(value_offset.address());
  uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address());

  Label miss;

  DCHECK(!AreAliased(name, offset, scratch, scratch2, scratch3));

  // Multiply by 3 because there are 3 fields (name, code, map) per entry.
  __ Add(scratch3, offset, Operand(offset, LSL, 1));

  // Calculate the base address of the entry: key table base plus
  // (3 * offset) scaled to pointer size.
  __ Mov(scratch, key_offset);
  __ Add(scratch, scratch, Operand(scratch3, LSL, kPointerSizeLog2));

  // Check that the key in the entry matches the name.
  __ Ldr(scratch2, MemOperand(scratch));
  __ Cmp(name, scratch2);
  __ B(ne, &miss);

  // Check the map matches.  The map field is loaded at a fixed displacement
  // from the key field, computed from the external references above.
  __ Ldr(scratch2, MemOperand(scratch, map_off_addr - key_off_addr));
  __ Ldr(scratch3, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Cmp(scratch2, scratch3);
  __ B(ne, &miss);

  // Get the code entry from the cache, again via a fixed displacement from
  // the key field.
  __ Ldr(scratch, MemOperand(scratch, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.  Flags not used in
  // the lookup are masked out before the comparison.
  __ Ldr(scratch2.W(), FieldMemOperand(scratch, Code::kFlagsOffset));
  __ Bic(scratch2.W(), scratch2.W(), Code::kFlagsNotUsedInLookup);
  __ Cmp(scratch2.W(), flags);
  __ B(ne, &miss);

#ifdef DEBUG
  // In debug builds these flags force a hit in one table to behave like a
  // miss, so that the other table's probe path gets exercised.
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ B(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ B(&miss);
  }
#endif

  // Jump to the first instruction in the code stub (skip the Code header).
  __ Add(scratch, scratch, Code::kHeaderSize - kHeapObjectTag);
  __ Br(scratch);

  // Miss: fall through.
  __ Bind(&miss);
}


// Emit the full stub-cache probe sequence: hash the (name, map, flags)
// triple, probe the primary table, rehash, probe the secondary table, and
// fall through on a total miss so the caller can enter the runtime.
void StubCache::GenerateProbe(MacroAssembler* masm, Code::Kind ic_kind,
                              Code::Flags flags, Register receiver,
                              Register name, Register scratch, Register extra,
                              Register extra2, Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that there are no register conflicts.
  DCHECK(!AreAliased(receiver, name, scratch, extra, extra2, extra3));

  // Make sure the extra registers are valid; all of them are used below.
  DCHECK(!extra.is(no_reg));
  DCHECK(!extra2.is(no_reg));
  DCHECK(!extra3.is(no_reg));

#ifdef DEBUG
  // If vector-based ics are in use, ensure that scratch, extra, extra2 and
  // extra3 don't conflict with the vector and slot registers, which need
  // to be preserved for a handler call or miss.
  if (IC::ICUseVector(ic_kind)) {
    Register vector, slot;
    if (ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC) {
      vector = VectorStoreICDescriptor::VectorRegister();
      slot = VectorStoreICDescriptor::SlotRegister();
    } else {
      vector = LoadWithVectorDescriptor::VectorRegister();
      slot = LoadWithVectorDescriptor::SlotRegister();
    }
    DCHECK(!AreAliased(vector, slot, scratch, extra, extra2, extra3));
  }
#endif

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2,
                      extra3);

  // Check that the receiver isn't a smi (smis have no map to probe with).
  __ JumpIfSmi(receiver, &miss);

  // Compute the hash for the primary table:
  //   hash = (name.hash_field + receiver.map) ^ flags
  __ Ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ Ldr(extra, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Add(scratch, scratch, extra);
  __ Eor(scratch, scratch, flags);
  // We shift out the last two bits because they are not part of the hash,
  // and extract only enough bits to index the primary table.
  __ Ubfx(scratch, scratch, kCacheIndexShift,
          CountTrailingZeros(kPrimaryTableSize, 64));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch, extra,
             extra2, extra3);

  // Primary miss: Compute hash for secondary table:
  //   hash2 = (hash - (name >> shift) + (flags >> shift)) & (size - 1)
  __ Sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift));
  __ Add(scratch, scratch, flags >> kCacheIndexShift);
  __ And(scratch, scratch, kSecondaryTableSize - 1);

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch, extra,
             extra2, extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ Bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2,
                      extra3);
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64