// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/ic/call-optimization.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


// Emits code that invokes a JavaScript getter for a named property load.
// When |accessor_index| >= 0 the getter function is loaded from |holder|'s
// AccessorPair and invoked with the (possibly proxy-unwrapped) receiver
// pushed as the implicit argument; otherwise only a deopt continuation
// point is recorded.  Runs inside an internal frame; the getter's result is
// left in the return register by InvokeFunction.
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- r3    : receiver
  //  -- r5    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver: getters on the global object must see
        // the global proxy, not the global object itself.
        __ LoadP(scratch,
                 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(r4, holder, accessor_index, ACCESSOR_GETTER);
      __ InvokeFunction(r4, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


// Emits code that invokes a JavaScript setter for a named property store.
// Mirrors GenerateLoadViaGetter, but additionally preserves the value
// register across the call so the store expression's value (not the
// setter's return value) is returned to the caller.
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ LoadP(scratch,
                 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(r4, holder, accessor_index, ACCESSOR_SETTER);
      __ InvokeFunction(r4, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(r3);

    // Restore context register.
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


// Pushes the type-feedback vector and slot registers (miss handlers expect
// them on the stack).
void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ Push(vector, slot);
}


// Pops the type-feedback vector and slot registers pushed by
// PushVectorAndSlot.
void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(vector, slot);
}


// Drops the saved vector and slot from the stack without restoring them
// (used on the success path when they are no longer needed).
void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ addi(sp, sp, Operand(2 * kPointerSize));
}


// Emits a negative lookup: verifies that |name| is NOT present in
// |receiver|'s slow-mode (dictionary) property backing store, jumping to
// |miss_label| if it might be.  Bails out to the miss label for objects
// with named interceptors or access checks, and for non-JSObjects.
// Clobbers |scratch0| and |scratch1|; |receiver| is preserved.
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbz(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  // andi. sets cr0, which the following branch tests.
  __ andi(r0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ bne(miss_label, cr0);

  // Check that receiver is a JSObject.
  __ lbz(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmpi(scratch0, Operand(FIRST_JS_RECEIVER_TYPE));
  __ blt(miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ LoadP(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ bne(miss_label);

  // Restore the temporarily used register (tmp aliased properties above).
  __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


// Loads the prototype of the native-context global function at |index| into
// |result| via the function's initial map.
void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ LoadP(result,
           FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ LoadP(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


// Loads |receiver|'s "prototype" property (for function receivers) into r3
// and returns; jumps to |miss_label| when the prototype cannot be fetched
// directly.
void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mr(r3, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
// Emits a check that the property cell for |name| on |global| still holds
// the hole (i.e. the property has not been introduced).  The cell is
// referenced through a weak cell so a cleared cell also takes the miss
// path.  Clobbers |scratch|.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  DCHECK(cell->value()->IsTheHole());
  Handle<WeakCell> weak_cell = masm->isolate()->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ bne(miss);
}


// Pushes the interceptor call arguments (name, receiver, holder) in the
// order the interceptor runtime entries expect; the STATIC_ASSERTs pin the
// stack layout to the kInterceptorArgs* indices.
static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ push(name);
  __ push(receiver);
  __ push(holder);
}


// Pushes the interceptor arguments and calls the runtime function |id|
// (e.g. Runtime::kLoadPropertyWithInterceptorOnly).
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate call to api function.
// Emits a tail call to a simple API accessor (getter or setter).  Loads the
// accessor function, resolves the expected holder along the prototype
// chain, materializes the call data, and either tail-calls the accessor's
// fast handler code (if present) or the generic CallApiAccessorStub with
// the C callback address in place.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = r3;
  Register data = r7;
  Register holder = r5;
  Register api_function_address = r4;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      // Walk |holder_depth| links up the prototype chain via map->prototype.
      __ LoadP(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ LoadP(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ LoadP(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ LoadP(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    // Load the call data out of the callee:
    // function -> shared info -> function data -> call code -> data.
    __ LoadP(data,
             FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(data,
             FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
    __ LoadP(data,
             FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    __ LoadP(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ mov(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiAccessorStub stub(isolate, is_store, call_data_undefined);
  __ TailCallStub(&stub);
}


// Pushes the full store-IC argument set (receiver, name, value, slot,
// vector) for the slow-path runtime calls below.
static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


// Slow path for named stores: forwards all IC arguments to the runtime.
void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}


// Slow path for keyed stores: forwards all IC arguments to the runtime.
void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}


#undef __
#define __ ACCESS_MASM(masm())


// Binds |label| (if used) and re-materializes |name| in the name register;
// used as a shared miss entry that must restore clobbered state.
void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Operand(name));
  }
}


// Re-materializes |name| in the name register without binding a label.
void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ mov(this->name(), Operand(name));
}


// Not implemented on this port; reaching it is a bug.
void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}


// Loads the transition target map (via a weak cell) into |map_reg| and, if
// the map can be deprecated, jumps to |miss| when the Deprecated bit is
// set.  Clobbers |scratch| and cr0.
void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ lwz(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    // DecodeField with SetRC updates cr0 for the conditional branch below.
    __ DecodeField<Map::Deprecated>(r0, scratch, SetRC);
    __ bne(miss, cr0);
  }
}


// Checks that |value_reg| equals the constant recorded at |descriptor| in
// the map's descriptor array; jumps to |miss_label| otherwise.
void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ LoadP(scratch, FieldMemOperand(
                        scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ cmp(value_reg, scratch);
  __ bne(miss_label);
}


// Checks that |value_reg|'s map is one of the classes admitted by
// |field_type|; jumps to |miss_label| otherwise.  Smis always miss.  An
// empty class list means any heap object is acceptable.
void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(HeapType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  HeapType::Iterator<Map> it = field_type->Classes();
  if (!it.Done()) {
    __ LoadP(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    Label do_store;
    // Compare against each admissible map; only the last comparison may
    // branch to the miss label.
    while (true) {
      __ CmpWeakValue(map_reg, Map::WeakCellForMap(it.Current()), scratch);
      it.Advance();
      if (it.Done()) {
        __ bne(miss_label);
        break;
      }
      __ beq(&do_store);
    }
    __ bind(&do_store);
  }
}


// Walks the prototype chain from the receiver's map to the handler's
// holder, emitting map checks / negative dictionary lookups / global
// property-cell checks as required.  With
// FLAG_eliminate_prototype_chain_checks the per-link map checks are
// replaced by a single prototype-chain validity-cell check.  Returns the
// register holding the holder (or no_reg if the caller did not ask for it).
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  if (FLAG_eliminate_prototype_chain_checks) {
    Handle<Cell> validity_cell =
        Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
    if (!validity_cell.is_null()) {
      DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid),
                validity_cell->value());
      __ mov(scratch1, Operand(validity_cell));
      __ LoadP(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
      __ CmpSmiLiteral(scratch1, Smi::FromInt(Map::kPrototypeChainValid), r0);
      __ bne(miss);
    }

    // The prototype chain of primitives (and their JSValue wrappers) depends
    // on the native context, which can't be guarded by validity cells.
    // |object_reg| holds the native context specific prototype in this case;
    // we need to check its map.
    if (check == CHECK_ALL_MAPS) {
      __ LoadP(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
      Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
      __ CmpWeakValue(scratch1, cell, scratch2);
      __ b(ne, miss);
    }
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }
  // Check access rights to the global object.  This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs).  See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch2, miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (FLAG_eliminate_prototype_chain_checks && depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ LoadP(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
        __ LoadP(holder_reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      }
    } else {
      Register map_reg = scratch1;
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ LoadP(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      } else if (!FLAG_eliminate_prototype_chain_checks &&
                 (depth != 1 || check == CHECK_ALL_MAPS)) {
        Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
        __ CmpWeakValue(map_reg, cell, scratch2);
        __ bne(miss);
      }
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ LoadP(holder_reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      }
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!FLAG_eliminate_prototype_chain_checks &&
      (depth != 0 || check == CHECK_ALL_MAPS)) {
    // Check the holder map.
    __ LoadP(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
    __ CmpWeakValue(scratch1, cell, scratch2);
    __ bne(miss);
  }

  bool return_holder = return_what == RETURN_HOLDER;
  if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


// Load-handler epilogue: binds the miss label (if used) and tail-calls the
// IC miss builtin, restoring the vector/slot first when the kind uses them.
void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


// Store-handler epilogue: like the load version, but also restores the name
// register (GenerateRestoreName binds the miss label).
void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


// Returns a compile-time-known constant as the load result.
void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(r3, value);
  __ Ret();
}


// Emits a load through a native API getter: builds the
// PropertyCallbackArguments block on the stack (layout pinned by the
// STATIC_ASSERTs) plus the property name, then tail-calls
// CallApiGetterStub with the getter's C address.
void NamedLoadHandlerCompiler::GenerateLoadCallback(
    Register reg, Handle<ExecutableAccessorInfo> callback) {
  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  DCHECK(!scratch2().is(reg));
  DCHECK(!scratch3().is(reg));
  DCHECK(!scratch4().is(reg));
  __ push(receiver());
  // Push data from ExecutableAccessorInfo.
  Handle<Object> data(callback->data(), isolate());
  if (data->IsUndefined() || data->IsSmi()) {
    __ Move(scratch3(), data);
  } else {
    Handle<WeakCell> cell =
        isolate()->factory()->NewWeakCell(Handle<HeapObject>::cast(data));
    // The callback is alive if this instruction is executed,
    // so the weak cell is not cleared and points to data.
    __ GetWeakValue(scratch3(), cell);
  }
  __ push(scratch3());
  // Return-value default and return-value slots are both undefined.
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ mr(scratch4(), scratch3());
  __ Push(scratch3(), scratch4());
  __ mov(scratch4(), Operand(ExternalReference::isolate_address(isolate())));
  __ Push(scratch4(), reg);
  __ push(name());

  // Abi for CallApiGetter
  Register getter_address_reg = ApiGetterDescriptor::function_address();

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ mov(getter_address_reg, Operand(ref));

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}


// Emits an interceptor load followed by an inline fallback: calls the
// interceptor first (LoadPropertyWithInterceptorOnly); if it produces the
// no-result sentinel, falls through to GenerateLoadPostInterceptor to load
// the property from further up the prototype chain.
void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameAndConstantPoolScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r3, scratch1());
    __ beq(&interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    __ pop(this->name());
    __ pop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ pop(receiver());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


// Emits an unconditional interceptor load: delegates the whole lookup to
// the LoadPropertyWithInterceptor runtime entry.
void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}


// Compiles a store handler that calls a native setter through the
// StoreCallbackProperty runtime entry.  Arguments pushed: receiver, holder,
// callback (directly or via weak cell), name, value.
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = Frontend(name);

  __ Push(receiver(), holder_reg);  // receiver

  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
    __ mov(ip, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ mov(ip, Operand(cell));
  }
  __ push(ip);
  __ mov(ip, Operand(name));
  __ Push(ip, value());

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


// Compiles a store handler that defers entirely to the store interceptor
// runtime entry.
Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
    Handle<Name> name) {
  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStorePropertyWithInterceptor);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


// The register holding the value being stored, per the store calling
// convention.
Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


// Compiles a load handler for a global property: checks the prototype
// chain, loads the value out of the (weakly referenced) property cell, and
// misses when the cell was cleared or — for configurable properties — the
// value is the hole (property deleted).
Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }
  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ LoadP(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    __ beq(&miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, r4, r6);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret();

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC