// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


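// Calls a JavaScript getter for a named load. With a valid accessor_index the
// getter is loaded from the holder via LoadAccessor and invoked through the
// CallFunction builtin (ConvertReceiverMode::kNotNullOrUndefined) with the
// receiver on the stack and a0 = 0 arguments. With a negative index this only
// records the deoptimization PC for the getter stub.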
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ push(cp);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ld(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_GETTER);
      __ li(a0, Operand(V8_INT64_C(0)));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ pop(cp);
  }
  __ Ret();
}


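// Calls a JavaScript setter for a named store. The context and value
// registers are saved on entry; the passed value (not the setter's return
// value) is restored into v0 before returning.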
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context and value registers, so we can restore them later.
    __ Push(cp, value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ld(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_SETTER);
      __ li(a0, Operand(1));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    // Restore context register.
    __ Pop(cp, v0);
  }
  __ Ret();
}


void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
                LoadWithVectorDescriptor::kVector);
  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
                StoreWithVectorDescriptor::kVector);
  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
                StoreTransitionDescriptor::kVector);
  __ Push(slot, vector);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(slot, vector);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ Daddu(sp, sp, Operand(2 * kPointerSize));
}

void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
  // No-op. Return address is in ra register.
}

void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
  // No-op. Return address is in ra register.
}

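// Proves that the given name is not present in the receiver's property
// dictionary. Bails out to |miss_label| if the receiver has a named
// interceptor, needs an access check, is not a JS receiver, or if its
// properties array is not a dictionary (hash table).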
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_JS_RECEIVER_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ld(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ ld(result,
        FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ld(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsureEmptyPropertyCell(
      global, name, PropertyCellType::kInvalidated);
  Isolate* isolate = masm->isolate();
  DCHECK(cell->value()->IsTheHole(isolate));
  Handle<WeakCell> weak_cell = isolate->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}


static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ Push(name, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate a call to an API accessor (a simple API getter or setter).
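// Register usage here follows CallApiCallbackStub: a0 holds the callee
// (the accessor's JSFunction or FunctionTemplateInfo), a1 the C++ callback
// address, a2 the API holder and a4 the call data. When the accessor has a
// fast handler code object, this tail-calls it directly instead.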
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiCallbackStub.
  Register callee = a0;
  Register data = a4;
  Register holder = a2;
  Register api_function_address = a1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ ld(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ ld(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ ld(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ ld(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined(isolate)) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ ld(data,
            FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ ld(data,
            FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ ld(data, FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ ld(data,
            FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ ld(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }
  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ li(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}

#undef __
#define __ ACCESS_MASM(masm())


void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ li(this->name(), Operand(name));
}


void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ lwu(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ And(at, scratch, Operand(Map::Deprecated::kMask));
    __ Branch(miss, ne, at, Operand(zero_reg));
  }
}


void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ ld(scratch,
        FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ Branch(miss_label, ne, value_reg, Operand(scratch));
}

void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(FieldType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  if (field_type->IsClass()) {
    __ ld(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    // Compare map directly within the Branch() functions.
    __ GetWeakValue(scratch, Map::WeakCellForMap(field_type->AsClass()));
    __ Branch(miss_label, ne, map_reg, Operand(scratch));
  }
}

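// Compares the current native context against the expected one from
// |native_context_cell|. Unless compare_native_contexts_only is set, contexts
// with matching security tokens are also accepted.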
void PropertyHandlerCompiler::GenerateAccessCheck(
    Handle<WeakCell> native_context_cell, Register scratch1, Register scratch2,
    Label* miss, bool compare_native_contexts_only) {
  Label done;
  // Load current native context.
  __ ld(scratch1, NativeContextMemOperand());
  // Load expected native context.
  __ LoadWeakValue(scratch2, native_context_cell, miss);

  if (!compare_native_contexts_only) {
    __ Branch(&done, eq, scratch1, Operand(scratch2));

    // Compare security tokens of current and expected native contexts.
    __ ld(scratch1, ContextMemOperand(scratch1, Context::SECURITY_TOKEN_INDEX));
    __ ld(scratch2, ContextMemOperand(scratch2, Context::SECURITY_TOKEN_INDEX));
  }
  __ Branch(miss, ne, scratch1, Operand(scratch2));

  __ bind(&done);
}
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  Handle<Cell> validity_cell =
      Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
  if (!validity_cell.is_null()) {
    DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid), validity_cell->value());
    __ li(scratch1, Operand(validity_cell));
    __ ld(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
    __ Branch(miss, ne, scratch1,
              Operand(Smi::FromInt(Map::kPrototypeChainValid)));
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  Handle<Map> current_map(receiver_map->GetPrototypeChainRootMap(isolate()),
                          isolate());
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    if (current_map->IsJSGlobalObjectMap()) {
      GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                name, scratch2, miss);
    } else if (current_map->is_dictionary_map()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      DCHECK(name->IsUniqueName());
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (depth > 1) {
        Handle<WeakCell> weak_cell =
            Map::GetOrCreatePrototypeWeakCell(current, isolate());
        __ LoadWeakValue(reg, weak_cell, miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = handle(JSObject::cast(current_map->prototype()));
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  bool return_holder = return_what == RETURN_HOLDER;
  if (return_holder && depth != 0) {
    Handle<WeakCell> weak_cell =
        Map::GetOrCreatePrototypeWeakCell(current, isolate());
    __ LoadWeakValue(reg, weak_cell, miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ li(v0, value);
  __ Ret();
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke the interceptor.  Note: the map checks from the receiver to the
    // interceptor's holder have been compiled before (see a caller of this
    // method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if the interceptor provided a value for the property.  If it did,
    // return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(receiver(), holder_reg, this->name());
    } else {
      __ Pop(holder_reg, this->name());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}

void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
}

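// Compiles a store handler that defers to an AccessorInfo setter. The
// receiver, holder, callback (wrapped in a weak cell when it could leak),
// name, value and language mode are pushed and the store is completed by
// Runtime::kStoreCallbackProperty.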
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  Register holder_reg = Frontend(name);

  __ Push(receiver(), holder_reg);  // Receiver and holder.
  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined(isolate()) || callback->data()->IsSmi()) {
    __ li(at, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ li(at, Operand(cell));
  }
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


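// Compiles a load handler for a global property backed by |cell|. The cell is
// referenced through a weak cell; for configurable properties a hole value in
// the cell (i.e. a deleted property) falls through to the miss handler.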
Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }

  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ ld(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, result, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, a1, a3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret(USE_DELAY_SLOT);
  __ Move(v0, result);  // Ensure the stub returns the correct value.

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64