// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/codegen.h"
#include "src/ic-inl.h"
#include "src/runtime.h"
#include "src/stub-cache.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ ACCESS_MASM(masm)


// Jumps to |global_object| if |type| is one of the three global-ish instance
// types (global object, builtins object, global proxy); falls through
// otherwise.  Does not clobber |type|.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmp(type, JS_GLOBAL_OBJECT_TYPE);
  __ j(equal, global_object);
  __ cmp(type, JS_BUILTINS_OBJECT_TYPE);
  __ j(equal, global_object);
  __ cmp(type, JS_GLOBAL_PROXY_TYPE);
  __ j(equal, global_object);
}


// Generated code falls through if the receiver is a regular non-global
// JS object with slow properties and no interceptors.
static void GenerateNameDictionaryReceiverCheck(MacroAssembler* masm,
                                                Register receiver,
                                                Register r0,
                                                Register r1,
                                                Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   r0: used to hold receiver instance type.
  //       Holds the property dictionary on fall through.
  //   r1: used to hold receivers map.

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the receiver is a valid JS object.
  __ mov(r1, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzx_b(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
  __ cmp(r0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss);

  // If this assert fails, we have to check upper bound too.
  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);

  GenerateGlobalInstanceTypeCheck(masm, r0, miss);

  // Check for non-global object that requires access check.
  __ test_b(FieldOperand(r1, Map::kBitFieldOffset),
            (1 << Map::kIsAccessCheckNeeded) |
            (1 << Map::kHasNamedInterceptor));
  __ j(not_zero, miss);

  // Fall through only if the backing store really is a NameDictionary
  // (its map is the hash-table map); leaves the dictionary in r0.
  __ mov(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ CheckMap(r0, masm->isolate()->factory()->hash_table_map(), miss,
              DONT_DO_SMI_CHECK);
}


// Helper function used to load a property from a dictionary backing
// storage. This function may fail to load a property even though it is
// in the dictionary, so code at miss_label must always call a backup
// property load that is complete. This function is safe to call if
// name is not internalized, and will jump to the miss_label in that
// case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss_label,
                                   Register elements,
                                   Register name,
                                   Register r0,
                                   Register r1,
                                   Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0   - used for the index into the property dictionary
  //
  // r1   - used to hold the capacity of the property dictionary.
  //
  // result - holds the result on exit.

  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   elements,
                                                   name,
                                                   r0,
                                                   r1);

  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // The details word is a smi, hence the extra kSmiTagSize shift on the mask.
  // A non-zero type field means the property is not a NORMAL data property.
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ mov(result, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
}


// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not internalized, and will jump to the miss_label in
// that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss_label,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register r0,
                                    Register r1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // r0 - used for index into the property dictionary and is clobbered.
  //
  // r1 - used to hold the capacity of the property dictionary and is clobbered.
  Label done;


  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   elements,
                                                   name,
                                                   r0,
                                                   r1);

  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject both non-NORMAL properties and READ_ONLY ones in one test;
  // the details word is a smi, hence the kSmiTagSize shift.
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
  __ mov(Operand(r0, 0), value);

  // Update write barrier. Make sure not to clobber the value.
  __ mov(r1, value);
  __ RecordWrite(elements, r0, r1, kDontSaveFPRegs);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           int interceptor_bit,
                                           Label* slow) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  // Scratch registers:
  //   map - used to hold the map of the receiver.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Get the map of the receiver.
  __ mov(map, FieldOperand(receiver, HeapObject::kMapOffset));

  // Check bit field.
  __ test_b(FieldOperand(map, Map::kBitFieldOffset),
            (1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit));
  __ j(not_zero, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing
  // into string objects works as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);

  __ CmpInstanceType(map, JS_OBJECT_TYPE);
  __ j(below, slow);
}


// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register scratch,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  //   key - holds the key and is unchanged (must be a smi).
  // Scratch registers:
  //   scratch - used to hold elements of the receiver and the loaded value.
  //   result - holds the result on exit if the load succeeds and
  //            we fall through.

  __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ CheckMap(scratch,
                masm->isolate()->factory()->fixed_array_map(),
                not_fast_array,
                DONT_DO_SMI_CHECK);
  } else {
    __ AssertFastElements(scratch);
  }
  // Check that the key (index) is within bounds.  Both key and length are
  // smis, so an unsigned compare works directly on the tagged values.
  __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
  __ j(above_equal, out_of_range);
  // Fast case: Do the load.  times_2 on a smi key yields a pointer-size
  // scale because of the smi tag (see assert below).
  STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
  __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
  __ cmp(scratch, Immediate(masm->isolate()->factory()->the_hole_value()));
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ j(equal, out_of_range);
  if (!result.is(scratch)) {
    __ mov(result, scratch);
  }
}


// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name.
static void GenerateKeyNameCheck(MacroAssembler* masm,
                                 Register key,
                                 Register map,
                                 Register hash,
                                 Label* index_string,
                                 Label* not_unique) {
  // Register use:
  //   key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  //   map - used to hold the map of the key.
  //   hash - used to hold the hash of the key.
  Label unique;
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  __ j(equal, &unique);

  // Is the string an array index, with cached numeric value?
  __ mov(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ test(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);

  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ test_b(FieldOperand(map, Map::kInstanceTypeOffset),
            kIsNotInternalizedMask);
  __ j(not_zero, not_unique);

  __ bind(&unique);
}


// Probes the mapped (aliased) portion of a sloppy-arguments object and
// returns an operand addressing the context slot for the parameter.
// Jumps to |unmapped_case| (with the parameter map in scratch1) when the
// key is beyond the mapped range or the slot holds the hole.
static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                             Register object,
                                             Register key,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* unmapped_case,
                                             Label* slow_case) {
  Heap* heap = masm->isolate()->heap();
  Factory* factory = masm->isolate()->factory();

  // Check that the receiver is a JSObject. Because of the elements
  // map check later, we do not need to check for interceptors or
  // whether it requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
  __ j(below, slow_case);

  // Check that the key is a positive smi.  0x80000001 combines the smi tag
  // bit and the sign bit (cf. kSmiTagMask | kSmiSignMask used elsewhere).
  __ test(key, Immediate(0x80000001));
  __ j(not_zero, slow_case);

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
  __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
  __ sub(scratch2, Immediate(Smi::FromInt(2)));
  __ cmp(key, scratch2);
  __ j(above_equal, unmapped_case);

  // Load element index and check whether it is the hole.  The first two
  // elements of the parameter map are the context and the backing store,
  // hence the 2 * kPointerSize skip.
  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
  __ mov(scratch2, FieldOperand(scratch1,
                                key,
                                times_half_pointer_size,
                                kHeaderSize));
  __ cmp(scratch2, factory->the_hole_value());
  __ j(equal, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  const int kContextOffset = FixedArray::kHeaderSize;
  __ mov(scratch1, FieldOperand(scratch1, kContextOffset));
  return FieldOperand(scratch1,
                      scratch2,
                      times_half_pointer_size,
                      Context::kHeaderSize);
}


// Returns an operand addressing the element in the arguments backing
// store (the unmapped part of a sloppy-arguments object).  Clobbers
// |parameter_map| by loading the backing store into it.
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register key,
                                               Register parameter_map,
                                               Register scratch,
                                               Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
  __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
  __ cmp(key, scratch);
  __ j(greater_equal, slow_case);
  return FieldOperand(backing_store,
                      key,
                      times_half_pointer_size,
                      FixedArray::kHeaderSize);
}


// Generic keyed load: dispatches on the key (smi index, unique name, or
// cached-index string) and on the receiver's elements kind (fast elements,
// number dictionary, keyed lookup cache, name dictionary); falls back to
// the runtime on the slow path.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  // Check that the key is a smi.
  __ JumpIfNotSmi(ecx, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, edx, eax, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(eax, &check_number_dictionary);

  GenerateFastArrayLoad(masm, edx, ecx, eax, eax, NULL, &slow);
  Isolate* isolate = masm->isolate();
  Counters* counters = isolate->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ mov(ebx, ecx);
  __ SmiUntag(ebx);
  __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));

  // Check whether the elements is a number dictionary.
  // edx: receiver
  // ebx: untagged index
  // ecx: key
  // eax: elements
  __ CheckMap(eax,
              isolate->factory()->hash_table_map(),
              &slow,
              DONT_DO_SMI_CHECK);
  Label slow_pop_receiver;
  // Push receiver on the stack to free up a register for the dictionary
  // probing.
  __ push(edx);
  __ LoadFromNumberDictionary(&slow_pop_receiver, eax, ecx, ebx, edx, edi, eax);
  // Pop receiver before returning.
  __ pop(edx);
  __ ret(0);

  __ bind(&slow_pop_receiver);
  // Pop the receiver from the stack and jump to runtime.
  __ pop(edx);

  __ bind(&slow);
  // Slow case: jump to runtime.
  // edx: receiver
  // ecx: key
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, ecx, eax, ebx, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, edx, eax, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  __ mov(ebx, FieldOperand(edx, JSObject::kPropertiesOffset));
  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(isolate->factory()->hash_table_map()));
  __ j(equal, &probe_dictionary);

  // The receiver's map is still in eax, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  if (FLAG_debug_code) {
    __ cmp(eax, FieldOperand(edx, HeapObject::kMapOffset));
    __ Check(equal, kMapIsNoLongerInEax);
  }
  __ mov(ebx, eax);  // Keep the map around for later.
  __ shr(eax, KeyedLookupCache::kMapHashShift);
  __ mov(edi, FieldOperand(ecx, String::kHashFieldOffset));
  __ shr(edi, String::kHashShift);
  __ xor_(eax, edi);
  __ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);

  // Load the key (consisting of map and internalized string) from the cache and
  // check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(masm->isolate());

  // Each cache entry is a (map, name) pair, hence the factor-of-2 scaling
  // (kPointerSizeLog2 + 1) of the bucket index.
  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ mov(edi, eax);
    __ shl(edi, kPointerSizeLog2 + 1);
    if (i != 0) {
      __ add(edi, Immediate(kPointerSize * i * 2));
    }
    __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
    __ j(not_equal, &try_next_entry);
    __ add(edi, Immediate(kPointerSize));
    __ cmp(ecx, Operand::StaticArray(edi, times_1, cache_keys));
    __ j(equal, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  // Last entry of the bucket: a mismatch here means a cache miss -> slow.
  __ lea(edi, Operand(eax, 1));
  __ shl(edi, kPointerSizeLog2 + 1);
  __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2));
  __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
  __ j(not_equal, &slow);
  __ add(edi, Immediate(kPointerSize));
  __ cmp(ecx, Operand::StaticArray(edi, times_1, cache_keys));
  __ j(not_equal, &slow);

  // Get field offset.
  // edx     : receiver
  // ebx     : receiver's map
  // ecx     : key
  // eax     : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    if (i != 0) {
      __ add(eax, Immediate(i));
    }
    __ mov(edi,
           Operand::StaticArray(eax, times_pointer_size, cache_field_offsets));
    // If the offset is at least the number of in-object properties, the
    // property lives in the property array (sub sets the flags for the
    // above_equal branch).
    __ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
    __ sub(edi, eax);
    __ j(above_equal, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset));
  __ add(eax, edi);
  __ mov(eax, FieldOperand(edx, eax, times_pointer_size, 0));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Load property array property.
  __ bind(&property_array_property);
  __ mov(eax, FieldOperand(edx, JSObject::kPropertiesOffset));
  __ mov(eax, FieldOperand(eax, edi, times_pointer_size,
                           FixedArray::kHeaderSize));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);

  __ mov(eax, FieldOperand(edx, JSObject::kMapOffset));
  __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, eax, &slow);

  GenerateDictionaryLoad(masm, &slow, ebx, ecx, eax, edi, eax);
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  __ ret(0);

  __ bind(&index_name);
  __ IndexFromHash(ebx, ecx);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}


// Keyed load specialized for string receivers: uses the string-char-at
// code generator and misses when the receiver is not a string, the index
// is not a number, or the index is out of range.
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key (index)
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Register receiver = edx;
  Register index = ecx;
  Register scratch = ebx;
  Register result = eax;

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm);
}


// Keyed load for receivers with an indexed interceptor: validates the
// receiver and key, then tail-calls the interceptor runtime entry.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(edx, &slow);

  // Check that the key is an array index, that is Uint32.
  __ test(ecx, Immediate(kSmiTagMask | kSmiSignMask));
  __ j(not_zero, &slow);

  // Get the map of the receiver.
  __ mov(eax, FieldOperand(edx, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ movzx_b(eax, FieldOperand(eax, Map::kBitFieldOffset));
  __ and_(eax, Immediate(kSlowCaseBitFieldMask));
  __ cmp(eax, Immediate(1 << Map::kHasIndexedInterceptor));
  __ j(not_zero, &slow);

  // Everything is fine, call runtime.  Re-push the return address on top
  // of the two arguments.
  __ pop(eax);
  __ push(edx);  // receiver
  __ push(ecx);  // key
  __ push(eax);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&slow);
  GenerateMiss(masm);
}


// Keyed load for sloppy-mode arguments objects: tries the mapped
// (aliased) parameters first, then the unmapped backing store.
void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Factory* factory = masm->isolate()->factory();
  Operand mapped_location =
      GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, eax, &notin, &slow);
  __ mov(eax, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in ebx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, ecx, ebx, eax, &slow);
  __ cmp(unmapped_location, factory->the_hole_value());
  __ j(equal, &slow);
  __ mov(eax, unmapped_location);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm);
}


// Keyed store for sloppy-mode arguments objects: stores through the
// mapped parameters or the unmapped backing store, emitting the write
// barrier in both cases.
void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location =
      GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, &notin, &slow);
  __ mov(mapped_location, eax);
  __ lea(ecx, mapped_location);
  __ mov(edx, eax);
  __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in ebx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow);
  __ mov(unmapped_location, eax);
  __ lea(edi, unmapped_location);
  __ mov(edx, eax);
  __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm);
}


// Shared fast-path tail for the generic keyed store: stores into fast
// (object or double) elements, handling hole checks, elements-kind
// transitions, optional length increment, and write barriers.
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm,
    Label* fast_object,
    Label* fast_double,
    Label* slow,
    KeyedStoreCheckMap check_map,
    KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  // eax: value
  // ecx: key (a smi)
  // edx: receiver
  // ebx: FixedArray receiver->elements
  // edi: receiver map
  // Fast case: Do the store, could either Object or double.
  __ bind(fast_object);
  if (check_map == kCheckMap) {
    __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
    __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
    __ j(not_equal, fast_double);
  }

  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole because
  // there may be a callback on the element
  Label holecheck_passed1;
  __ cmp(FixedArrayElementOperand(ebx, ecx),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, &holecheck_passed1);
  __ JumpIfDictionaryInPrototypeChain(edx, ebx, edi, slow);
  // Reload elements: the prototype-chain walk clobbered ebx.
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));

  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(eax, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(edx, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ mov(FixedArrayElementOperand(ebx, ecx), eax);
  __ ret(0);

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ CheckFastObjectElements(edi, &transition_smi_elements);

  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(edx, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  __ mov(FixedArrayElementOperand(ebx, ecx), eax);
  // Update write barrier for the elements array address.
  __ mov(edx, eax);  // Preserve the value which is returned.
  __ RecordWriteArray(
      ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ ret(0);

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
    __ j(not_equal, slow);
    // If the value is a number, store it as a double in the FastDoubleElements
    // array.
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so
  // go to the runtime.  Only the upper 32 bits need checking, hence the
  // sizeof(kHoleNanLower32) offset into the element.
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(ebx, ecx, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(not_equal, &fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(edx, ebx, edi, slow);
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(eax, ebx, ecx, edi, xmm0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(edx, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  __ ret(0);

  __ bind(&transition_smi_elements);
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ CheckMap(eax,
              masm->isolate()->factory()->heap_number_map(),
              &non_double_value,
              DONT_DO_SMI_CHECK);

  // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
  // and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS,
                                         ebx,
                                         edi,
                                         slow);
  AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS,
                                                    FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow);
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_ELEMENTS,
                                         ebx,
                                         edi,
                                         slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode,
                                                                   slow);
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         ebx,
                                         edi,
                                         slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow);
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}


// Generic keyed store: validates receiver/key, distinguishes JSArray from
// plain JSObject receivers, handles the grow-by-one case, and dispatches
// into KeyedStoreGenerateGenericHelper for the actual element store.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictMode strict_mode) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;

  // Check that the object isn't a smi.
  __ JumpIfSmi(edx, &slow);
  // Get the map from the receiver.
  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks and is not observed.
  // The generic stub does not perform map checks or handle observed objects.
  __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
            1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved);
  __ j(not_zero, &slow);
  // Check that the key is a smi.
  __ JumpIfNotSmi(ecx, &slow);
  __ CmpInstanceType(edi, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JSObject.
  __ CmpInstanceType(edi, FIRST_JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  // eax: value
  // edx: JSObject
  // ecx: key (a smi)
  // edi: receiver map
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ cmp(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(below, &fast_object);

  // Slow case: call runtime.
  __ bind(&slow);
  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // eax: value
  // edx: receiver, a JSArray
  // ecx: key, a smi.
  // ebx: receiver->elements, a FixedArray
  // edi: receiver map
  // flags: compare (ecx, edx.length())
  // do not leave holes in the array:
  __ j(not_equal, &slow);
  __ cmp(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(above_equal, &slow);
  __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  // eax: value
  // edx: receiver, a JSArray
  // ecx: key, a smi.
  // edi: receiver map
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));

  // Check the key against the length in the array and fall through to the
  // common store code.
  __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset));  // Compare smis.
  __ j(above_equal, &extra);

  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                  &slow, kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength);
}


// Megamorphic named load: probes the stub cache for a handler and misses
// into the runtime if none is found.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, edx, ecx, ebx, eax);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


// Named load for receivers with dictionary (slow) properties: probes the
// name dictionary inline and falls back to the runtime on failure.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss, slow;

  GenerateNameDictionaryReceiverCheck(masm, edx, eax, ebx, &miss);

  // eax: elements
  // Search the dictionary placing the result in eax.
  GenerateDictionaryLoad(masm, &slow, eax, ecx, edi, ebx, eax);
  __ ret(0);

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}


// Load IC miss handler: counts the miss and tail-calls the kLoadIC_Miss
// runtime entry with (receiver, name) as arguments.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  __ IncrementCounter(masm->isolate()->counters()->load_miss(), 1);

  // Re-push the return address on top of the two arguments.
  __ pop(ebx);
  __ push(edx);  // receiver
  __ push(ecx);  // name
  __ push(ebx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


// Tail-calls Runtime::kGetProperty with (receiver, name) as arguments.
void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  __ pop(ebx);
  __ push(edx);  // receiver
  __ push(ecx);  // name
  __ push(ebx);  // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}


// Keyed load IC miss handler: counts the miss and tail-calls the
// kKeyedLoadIC_Miss runtime entry with (receiver, name) as arguments.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  __ IncrementCounter(masm->isolate()->counters()->keyed_load_miss(), 1);

  __ pop(ebx);
  __ push(edx);  // receiver
  __ push(ecx);  // name
  __ push(ebx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  __ pop(ebx);
  __ push(edx);  // receiver
  __ push(ecx);  // name
  __ push(ebx);  // return address

  // Perform tail call to the entry.
1041 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); 1042 } 1043 1044 1045 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) { 1046 // ----------- S t a t e ------------- 1047 // -- eax : value 1048 // -- ecx : name 1049 // -- edx : receiver 1050 // -- esp[0] : return address 1051 // ----------------------------------- 1052 Code::Flags flags = Code::ComputeHandlerFlags(Code::STORE_IC); 1053 masm->isolate()->stub_cache()->GenerateProbe( 1054 masm, flags, edx, ecx, ebx, no_reg); 1055 1056 // Cache miss: Jump to runtime. 1057 GenerateMiss(masm); 1058 } 1059 1060 1061 void StoreIC::GenerateMiss(MacroAssembler* masm) { 1062 // ----------- S t a t e ------------- 1063 // -- eax : value 1064 // -- ecx : name 1065 // -- edx : receiver 1066 // -- esp[0] : return address 1067 // ----------------------------------- 1068 1069 __ pop(ebx); 1070 __ push(edx); 1071 __ push(ecx); 1072 __ push(eax); 1073 __ push(ebx); 1074 1075 // Perform tail call to the entry. 1076 ExternalReference ref = 1077 ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate()); 1078 __ TailCallExternalReference(ref, 3, 1); 1079 } 1080 1081 1082 void StoreIC::GenerateNormal(MacroAssembler* masm) { 1083 // ----------- S t a t e ------------- 1084 // -- eax : value 1085 // -- ecx : name 1086 // -- edx : receiver 1087 // -- esp[0] : return address 1088 // ----------------------------------- 1089 1090 Label miss, restore_miss; 1091 1092 GenerateNameDictionaryReceiverCheck(masm, edx, ebx, edi, &miss); 1093 1094 // A lot of registers are needed for storing to slow case 1095 // objects. Push and restore receiver but rely on 1096 // GenerateDictionaryStore preserving the value and name. 
1097 __ push(edx); 1098 GenerateDictionaryStore(masm, &restore_miss, ebx, ecx, eax, edx, edi); 1099 __ Drop(1); 1100 Counters* counters = masm->isolate()->counters(); 1101 __ IncrementCounter(counters->store_normal_hit(), 1); 1102 __ ret(0); 1103 1104 __ bind(&restore_miss); 1105 __ pop(edx); 1106 1107 __ bind(&miss); 1108 __ IncrementCounter(counters->store_normal_miss(), 1); 1109 GenerateMiss(masm); 1110 } 1111 1112 1113 void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, 1114 StrictMode strict_mode) { 1115 // ----------- S t a t e ------------- 1116 // -- eax : value 1117 // -- ecx : name 1118 // -- edx : receiver 1119 // -- esp[0] : return address 1120 // ----------------------------------- 1121 __ pop(ebx); 1122 __ push(edx); 1123 __ push(ecx); 1124 __ push(eax); 1125 __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes 1126 __ push(Immediate(Smi::FromInt(strict_mode))); 1127 __ push(ebx); // return address 1128 1129 // Do tail-call to runtime routine. 1130 __ TailCallRuntime(Runtime::kSetProperty, 5, 1); 1131 } 1132 1133 1134 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, 1135 StrictMode strict_mode) { 1136 // ----------- S t a t e ------------- 1137 // -- eax : value 1138 // -- ecx : key 1139 // -- edx : receiver 1140 // -- esp[0] : return address 1141 // ----------------------------------- 1142 1143 __ pop(ebx); 1144 __ push(edx); 1145 __ push(ecx); 1146 __ push(eax); 1147 __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes 1148 __ push(Immediate(Smi::FromInt(strict_mode))); // Strict mode. 1149 __ push(ebx); // return address 1150 1151 // Do tail-call to runtime routine. 
1152 __ TailCallRuntime(Runtime::kSetProperty, 5, 1); 1153 } 1154 1155 1156 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { 1157 // ----------- S t a t e ------------- 1158 // -- eax : value 1159 // -- ecx : key 1160 // -- edx : receiver 1161 // -- esp[0] : return address 1162 // ----------------------------------- 1163 1164 __ pop(ebx); 1165 __ push(edx); 1166 __ push(ecx); 1167 __ push(eax); 1168 __ push(ebx); 1169 1170 // Do tail-call to runtime routine. 1171 ExternalReference ref = 1172 ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate()); 1173 __ TailCallExternalReference(ref, 3, 1); 1174 } 1175 1176 1177 void StoreIC::GenerateSlow(MacroAssembler* masm) { 1178 // ----------- S t a t e ------------- 1179 // -- eax : value 1180 // -- ecx : key 1181 // -- edx : receiver 1182 // -- esp[0] : return address 1183 // ----------------------------------- 1184 1185 __ pop(ebx); 1186 __ push(edx); 1187 __ push(ecx); 1188 __ push(eax); 1189 __ push(ebx); // return address 1190 1191 // Do tail-call to runtime routine. 1192 ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate()); 1193 __ TailCallExternalReference(ref, 3, 1); 1194 } 1195 1196 1197 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) { 1198 // ----------- S t a t e ------------- 1199 // -- eax : value 1200 // -- ecx : key 1201 // -- edx : receiver 1202 // -- esp[0] : return address 1203 // ----------------------------------- 1204 1205 __ pop(ebx); 1206 __ push(edx); 1207 __ push(ecx); 1208 __ push(eax); 1209 __ push(ebx); // return address 1210 1211 // Do tail-call to runtime routine. 
1212 ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate()); 1213 __ TailCallExternalReference(ref, 3, 1); 1214 } 1215 1216 1217 #undef __ 1218 1219 1220 Condition CompareIC::ComputeCondition(Token::Value op) { 1221 switch (op) { 1222 case Token::EQ_STRICT: 1223 case Token::EQ: 1224 return equal; 1225 case Token::LT: 1226 return less; 1227 case Token::GT: 1228 return greater; 1229 case Token::LTE: 1230 return less_equal; 1231 case Token::GTE: 1232 return greater_equal; 1233 default: 1234 UNREACHABLE(); 1235 return no_condition; 1236 } 1237 } 1238 1239 1240 bool CompareIC::HasInlinedSmiCode(Address address) { 1241 // The address of the instruction following the call. 1242 Address test_instruction_address = 1243 address + Assembler::kCallTargetAddressOffset; 1244 1245 // If the instruction following the call is not a test al, nothing 1246 // was inlined. 1247 return *test_instruction_address == Assembler::kTestAlByte; 1248 } 1249 1250 1251 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) { 1252 // The address of the instruction following the call. 1253 Address test_instruction_address = 1254 address + Assembler::kCallTargetAddressOffset; 1255 1256 // If the instruction following the call is not a test al, nothing 1257 // was inlined. 1258 if (*test_instruction_address != Assembler::kTestAlByte) { 1259 ASSERT(*test_instruction_address == Assembler::kNopByte); 1260 return; 1261 } 1262 1263 Address delta_address = test_instruction_address + 1; 1264 // The delta to the start of the map check instruction and the 1265 // condition code uses at the patched jump. 1266 uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address); 1267 if (FLAG_trace_ic) { 1268 PrintF("[ patching ic at %p, test=%p, delta=%d\n", 1269 address, test_instruction_address, delta); 1270 } 1271 1272 // Patch with a short conditional jump. 
Enabling means switching from a short 1273 // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the 1274 // reverse operation of that. 1275 Address jmp_address = test_instruction_address - delta; 1276 ASSERT((check == ENABLE_INLINED_SMI_CHECK) 1277 ? (*jmp_address == Assembler::kJncShortOpcode || 1278 *jmp_address == Assembler::kJcShortOpcode) 1279 : (*jmp_address == Assembler::kJnzShortOpcode || 1280 *jmp_address == Assembler::kJzShortOpcode)); 1281 Condition cc = (check == ENABLE_INLINED_SMI_CHECK) 1282 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) 1283 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); 1284 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); 1285 } 1286 1287 1288 } } // namespace v8::internal 1289 1290 #endif // V8_TARGET_ARCH_IA32 1291