// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/interface-descriptors.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


// Emits code that probes a single stub cache table (primary or secondary)
// for an entry whose key matches |name|, whose map matches |receiver|'s map
// and whose cached code object has |flags|.  On a hit the cached code body
// is jumped to directly (via a write to pc); on a miss control falls through
// to the end of the emitted sequence.
//
// |offset| holds the unscaled entry number.  The emitted code clobbers ip,
// |scratch|, |scratch2| and |offset_scratch|.
static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
                       Code::Kind ic_kind, Code::Flags flags,
                       StubCache::Table table, Register receiver, Register name,
                       // Number of the cache entry, not scaled.
                       Register offset, Register scratch, Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.  The value and map
  // loads below use (field - key) as a MemOperand displacement, so the
  // fields must follow the key and stay within immediate-offset range.
  DCHECK(value_off_addr > key_off_addr);
  DCHECK((value_off_addr - key_off_addr) % 4 == 0);
  DCHECK((value_off_addr - key_off_addr) < (256 * 4));
  DCHECK(map_off_addr > key_off_addr);
  DCHECK((map_off_addr - key_off_addr) % 4 == 0);
  DCHECK((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  // From here on |scratch| serves as the entry base address; retire the old
  // alias so it cannot be reused by mistake.
  Register base_addr = scratch;
  scratch = no_reg;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ add(offset_scratch, offset, Operand(offset, LSL, 1));

  // Calculate the base address of the entry.
  __ mov(base_addr, Operand(key_offset));
  __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));

  // Check that the key in the entry matches the name.
  __ ldr(ip, MemOperand(base_addr, 0));
  __ cmp(name, ip);
  __ b(ne, &miss);

  // Check the map matches.
  __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ cmp(ip, scratch2);
  __ b(ne, &miss);

  // Get the code entry from the cache.
  Register code = scratch2;
  scratch2 = no_reg;
  __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.  |base_addr| is no
  // longer needed, so reuse it for the flags word.
  Register flags_reg = base_addr;
  base_addr = no_reg;
  __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
  // It's a nice optimization if this constant is encodable in the bic insn.

  uint32_t mask = Code::kFlagsNotUsedInLookup;
  DCHECK(__ ImmediateFitsAddrMode1Instruction(mask));
  __ bic(flags_reg, flags_reg, Operand(mask));
  __ cmp(flags_reg, Operand(flags));
  __ b(ne, &miss);

#ifdef DEBUG
  // Test flags that force a miss in one of the two tables so that the other
  // table can be exercised deterministically.
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Miss: fall through.
  __ bind(&miss);
}


// Probes both stub cache tables for a code object matching (|receiver|'s
// map, |name|, |flags|).  On a hit the cached handler is entered directly;
// on a miss control falls through so the caller can enter the runtime.
// |scratch|, |extra|, |extra2| and |extra3| (and ip) are clobbered.
void StubCache::GenerateProbe(MacroAssembler* masm, Code::Kind ic_kind,
                              Code::Flags flags, Register receiver,
                              Register name, Register scratch, Register extra,
                              Register extra2, Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 12.
  DCHECK(sizeof(Entry) == 12);

  // Make sure the flags does not name a specific type.
  DCHECK(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  DCHECK(!AreAliased(receiver, name, scratch, extra, extra2, extra3));

  // Check scratch, extra and extra2 registers are valid.
  DCHECK(!scratch.is(no_reg));
  DCHECK(!extra.is(no_reg));
  DCHECK(!extra2.is(no_reg));
  DCHECK(!extra3.is(no_reg));

#ifdef DEBUG
  // If vector-based ics are in use, ensure that scratch, extra, extra2 and
  // extra3 don't conflict with the vector and slot registers, which need
  // to be preserved for a handler call or miss.
  if (IC::ICUseVector(ic_kind)) {
    Register vector, slot;
    if (ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC) {
      vector = VectorStoreICDescriptor::VectorRegister();
      slot = VectorStoreICDescriptor::SlotRegister();
    } else {
      vector = LoadWithVectorDescriptor::VectorRegister();
      slot = LoadWithVectorDescriptor::SlotRegister();
    }
    DCHECK(!AreAliased(vector, slot, scratch, extra, extra2, extra3));
  }
#endif

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2,
                      extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  uint32_t mask = kPrimaryTableSize - 1;
  // We shift out the last two bits because they are not part of the hash and
  // they are always 01 for maps.
  __ mov(scratch, Operand(scratch, LSR, kCacheIndexShift));
  // Mask down the eor argument to the minimum to keep the immediate
  // ARM-encodable.
  __ eor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
  // Prefer and_ to ubfx here because ubfx takes 2 cycles.
  __ and_(scratch, scratch, Operand(mask));

  // Probe the primary table.
  ProbeTable(isolate, masm, ic_kind, flags, kPrimary, receiver, name, scratch,
             extra, extra2, extra3);

  // Primary miss: Compute hash for secondary probe.
  __ sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift));
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ add(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
  __ and_(scratch, scratch, Operand(mask2));

  // Probe the secondary table.
  ProbeTable(isolate, masm, ic_kind, flags, kSecondary, receiver, name, scratch,
             extra, extra2, extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2,
                      extra3);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM