      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/v8.h"
      6 
      7 #include "src/code-stubs.h"
      8 #include "src/field-index.h"
      9 #include "src/hydrogen.h"
     10 #include "src/lithium.h"
     11 
     12 namespace v8 {
     13 namespace internal {
     14 
     15 
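         // Runs the Hydrogen optimization passes over the graph and lowers it to a
         // Lithium chunk, aborting with the recorded bailout reason if either step
         // fails.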
     16 static LChunk* OptimizeGraph(HGraph* graph) {
     17   DisallowHeapAllocation no_allocation;
     18   DisallowHandleAllocation no_handles;
     19   DisallowHandleDereference no_deref;
     20 
     21   ASSERT(graph != NULL);
     22   BailoutReason bailout_reason = kNoReason;
     23   if (!graph->Optimize(&bailout_reason)) {
     24     FATAL(GetBailoutReason(bailout_reason));
     25   }
     26   LChunk* chunk = LChunk::NewChunk(graph);
     27   if (chunk == NULL) {
     28     FATAL(GetBailoutReason(graph->info()->bailout_reason()));
     29   }
     30   return chunk;
     31 }
     32 
     33 
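         // Common base class for the graph builders that compile hydrogen code
         // stubs. BuildGraph() binds the stub's register parameters and context and
         // delegates the stub body to BuildCodeStub().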
     34 class CodeStubGraphBuilderBase : public HGraphBuilder {
     35  public:
     36   CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
     37       : HGraphBuilder(&info_),
     38         arguments_length_(NULL),
     39         info_(stub, isolate),
     40         context_(NULL) {
     41     descriptor_ = stub->GetInterfaceDescriptor();
     42     parameters_.Reset(new HParameter*[descriptor_->register_param_count_]);
     43   }
     44   virtual bool BuildGraph();
     45 
     46  protected:
     47   virtual HValue* BuildCodeStub() = 0;
     48   HParameter* GetParameter(int parameter) {
     49     ASSERT(parameter < descriptor_->register_param_count_);
     50     return parameters_[parameter];
     51   }
     52   HValue* GetArgumentsLength() {
     53     // This is initialized in BuildGraph()
     54     ASSERT(arguments_length_ != NULL);
     55     return arguments_length_;
     56   }
     57   CompilationInfo* info() { return &info_; }
     58   HydrogenCodeStub* stub() { return info_.code_stub(); }
     59   HContext* context() { return context_; }
     60   Isolate* isolate() { return info_.isolate(); }
     61 
     62   HLoadNamedField* BuildLoadNamedField(HValue* object,
     63                                        FieldIndex index);
     64 
     65   enum ArgumentClass {
     66     NONE,
     67     SINGLE,
     68     MULTIPLE
     69   };
     70 
     71   HValue* BuildArrayConstructor(ElementsKind kind,
     72                                 AllocationSiteOverrideMode override_mode,
     73                                 ArgumentClass argument_class);
     74   HValue* BuildInternalArrayConstructor(ElementsKind kind,
     75                                         ArgumentClass argument_class);
     76 
     77   // BuildCheckAndInstallOptimizedCode emits code to install the optimized
     78   // function found in the optimized code map at map_index in js_function, if
     79   // the function at map_index matches the given native_context. Builder is
     80   // left in the "Then()" state after the install.
     81   void BuildCheckAndInstallOptimizedCode(HValue* js_function,
     82                                          HValue* native_context,
     83                                          IfBuilder* builder,
     84                                          HValue* optimized_map,
     85                                          HValue* map_index);
     86   void BuildInstallCode(HValue* js_function, HValue* shared_info);
     87 
     88   HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
     89                                          HValue* iterator,
     90                                          int field_offset);
     91   void BuildInstallFromOptimizedCodeMap(HValue* js_function,
     92                                         HValue* shared_info,
     93                                         HValue* native_context);
     94 
     95  private:
     96   HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
     97   HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
     98                                           ElementsKind kind);
     99 
    100   SmartArrayPointer<HParameter*> parameters_;
    101   HValue* arguments_length_;
    102   CompilationInfoWithZone info_;
    103   CodeStubInterfaceDescriptor* descriptor_;
    104   HContext* context_;
    105 };
    106 
    107 
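         // Sets up the stub entry block, binds the register parameters (including
         // the stack parameter count, if present), emits the stub body via
         // BuildCodeStub() and finishes with a return that pops the right number
         // of stack arguments.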
    108 bool CodeStubGraphBuilderBase::BuildGraph() {
    109   // Update the static counter each time a new code stub is generated.
    110   isolate()->counters()->code_stubs()->Increment();
    111 
    112   if (FLAG_trace_hydrogen_stubs) {
    113     const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    114     PrintF("-----------------------------------------------------------\n");
    115     PrintF("Compiling stub %s using hydrogen\n", name);
    116     isolate()->GetHTracer()->TraceCompilation(&info_);
    117   }
    118 
    119   int param_count = descriptor_->register_param_count_;
    120   HEnvironment* start_environment = graph()->start_environment();
    121   HBasicBlock* next_block = CreateBasicBlock(start_environment);
    122   Goto(next_block);
    123   next_block->SetJoinId(BailoutId::StubEntry());
    124   set_current_block(next_block);
    125 
    126   bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
    127   HInstruction* stack_parameter_count = NULL;
    128   for (int i = 0; i < param_count; ++i) {
    129     Representation r = descriptor_->register_param_representations_ == NULL
    130         ? Representation::Tagged()
    131         : descriptor_->register_param_representations_[i];
    132     HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
    133     start_environment->Bind(i, param);
    134     parameters_[i] = param;
    135     if (descriptor_->IsParameterCountRegister(i)) {
    136       param->set_type(HType::Smi());
    137       stack_parameter_count = param;
    138       arguments_length_ = stack_parameter_count;
    139     }
    140   }
    141 
    142   ASSERT(!runtime_stack_params || arguments_length_ != NULL);
    143   if (!runtime_stack_params) {
    144     stack_parameter_count = graph()->GetConstantMinus1();
    145     arguments_length_ = graph()->GetConstant0();
    146   }
    147 
    148   context_ = Add<HContext>();
    149   start_environment->BindContext(context_);
    150 
    151   Add<HSimulate>(BailoutId::StubEntry());
    152 
    153   NoObservableSideEffectsScope no_effects(this);
    154 
    155   HValue* return_value = BuildCodeStub();
    156 
    157   // We might have extra expressions to pop from the stack in addition to the
    158   // arguments above.
    159   HInstruction* stack_pop_count = stack_parameter_count;
    160   if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
    161     if (!stack_parameter_count->IsConstant() &&
    162         descriptor_->hint_stack_parameter_count_ < 0) {
    163       HInstruction* constant_one = graph()->GetConstant1();
    164       stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
    165       stack_pop_count->ClearFlag(HValue::kCanOverflow);
    166       // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
    167       // smi.
    168     } else {
    169       int count = descriptor_->hint_stack_parameter_count_;
    170       stack_pop_count = Add<HConstant>(count);
    171     }
    172   }
    173 
    174   if (current_block() != NULL) {
    175     HReturn* hreturn_instruction = New<HReturn>(return_value,
    176                                                 stack_pop_count);
    177     FinishCurrentBlock(hreturn_instruction);
    178   }
    179   return true;
    180 }
    181 
    182 
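         // Graph builder for a concrete stub type. Initialized stubs implement
         // BuildCodeInitializedStub(); uninitialized stubs fall back to a forced
         // deoptimization that re-enters the runtime.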
    183 template <class Stub>
    184 class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
    185  public:
    186   CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
    187       : CodeStubGraphBuilderBase(isolate, stub) {}
    188 
    189  protected:
    190   virtual HValue* BuildCodeStub() {
    191     if (casted_stub()->IsUninitialized()) {
    192       return BuildCodeUninitializedStub();
    193     } else {
    194       return BuildCodeInitializedStub();
    195     }
    196   }
    197 
    198   virtual HValue* BuildCodeInitializedStub() {
    199     UNIMPLEMENTED();
    200     return NULL;
    201   }
    202 
    203   virtual HValue* BuildCodeUninitializedStub() {
    204     // Force a deopt that falls back to the runtime.
    205     HValue* undefined = graph()->GetConstantUndefined();
    206     IfBuilder builder(this);
    207     builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    208     builder.Then();
    209     builder.ElseDeopt("Forced deopt to runtime");
    210     return undefined;
    211   }
    212 
    213   Stub* casted_stub() { return static_cast<Stub*>(stub()); }
    214 };
    215 
    216 
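         // Assembles a minimal version of the stub that just calls the stub's miss
         // handler; used by DoGenerateCode() while the stub is still uninitialized.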
    217 Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode() {
    218   Factory* factory = isolate()->factory();
    219 
    220   // Generate the new code.
    221   MacroAssembler masm(isolate(), NULL, 256);
    222 
    223   {
    224     // Update the static counter each time a new code stub is generated.
    225     isolate()->counters()->code_stubs()->Increment();
    226 
    227     // Generate the code for the stub.
    228     masm.set_generating_stub(true);
    229     NoCurrentFrameScope scope(&masm);
    230     GenerateLightweightMiss(&masm);
    231   }
    232 
    233   // Create the code object.
    234   CodeDesc desc;
    235   masm.GetCode(&desc);
    236 
    237   // Copy the generated code into a heap object.
    238   Code::Flags flags = Code::ComputeFlags(
    239       GetCodeKind(),
    240       GetICState(),
    241       GetExtraICState(),
    242       GetStubType());
    243   Handle<Code> new_object = factory->NewCode(
    244       desc, flags, masm.CodeObject(), NeedsImmovableCode());
    245   return new_object;
    246 }
    247 
    248 
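         // Compiles the given stub. Uninitialized stubs with a miss handler get a
         // lightweight miss stub; all others are built as a hydrogen graph,
         // optimized and code-generated through lithium.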
    249 template <class Stub>
    250 static Handle<Code> DoGenerateCode(Stub* stub) {
    251   Isolate* isolate = stub->isolate();
     252   CodeStub::Major major_key =
    253       static_cast<HydrogenCodeStub*>(stub)->MajorKey();
    254   CodeStubInterfaceDescriptor* descriptor =
    255       isolate->code_stub_interface_descriptor(major_key);
    256   if (descriptor->register_param_count_ < 0) {
    257     stub->InitializeInterfaceDescriptor(descriptor);
    258   }
    259 
    260   // If we are uninitialized we can use a light-weight stub to enter
    261   // the runtime that is significantly faster than using the standard
    262   // stub-failure deopt mechanism.
    263   if (stub->IsUninitialized() && descriptor->has_miss_handler()) {
    264     ASSERT(!descriptor->stack_parameter_count_.is_valid());
    265     return stub->GenerateLightweightMissCode();
    266   }
    267   ElapsedTimer timer;
    268   if (FLAG_profile_hydrogen_code_stub_compilation) {
    269     timer.Start();
    270   }
    271   CodeStubGraphBuilder<Stub> builder(isolate, stub);
    272   LChunk* chunk = OptimizeGraph(builder.CreateGraph());
    273   Handle<Code> code = chunk->Codegen();
    274   if (FLAG_profile_hydrogen_code_stub_compilation) {
    275     double ms = timer.Elapsed().InMillisecondsF();
    276     PrintF("[Lazy compilation of %s took %0.3f ms]\n",
    277            stub->GetName().get(), ms);
    278   }
    279   return code;
    280 }
    281 
    282 
    283 template <>
    284 HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
    285   HValue* value = GetParameter(0);
    286 
    287   // Check if the parameter is already a SMI or heap number.
    288   IfBuilder if_number(this);
    289   if_number.If<HIsSmiAndBranch>(value);
    290   if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
    291   if_number.Then();
    292 
    293   // Return the number.
    294   Push(value);
    295 
    296   if_number.Else();
    297 
    298   // Convert the parameter to number using the builtin.
    299   HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
    300   Add<HPushArguments>(value);
    301   Push(Add<HInvokeFunction>(function, 1));
    302 
    303   if_number.End();
    304 
    305   return Pop();
    306 }
    307 
    308 
    309 Handle<Code> ToNumberStub::GenerateCode() {
    310   return DoGenerateCode(this);
    311 }
    312 
    313 
    314 template <>
    315 HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
    316   info()->MarkAsSavesCallerDoubles();
    317   HValue* number = GetParameter(NumberToStringStub::kNumber);
    318   return BuildNumberToString(number, Type::Number(zone()));
    319 }
    320 
    321 
    322 Handle<Code> NumberToStringStub::GenerateCode() {
    323   return DoGenerateCode(this);
    324 }
    325 
    326 
    327 template <>
    328 HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
    329   Factory* factory = isolate()->factory();
    330   HValue* undefined = graph()->GetConstantUndefined();
    331   AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
    332 
     333   // This stub is very performance sensitive; the generated code must be tuned
     334   // so that it doesn't build an eager frame.
    335   info()->MarkMustNotHaveEagerFrame();
    336 
    337   HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
    338                                                   GetParameter(1),
    339                                                   static_cast<HValue*>(NULL),
    340                                                   FAST_ELEMENTS);
    341   IfBuilder checker(this);
    342   checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
    343                                                     undefined);
    344   checker.Then();
    345 
    346   HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
    347       AllocationSite::kTransitionInfoOffset);
    348   HInstruction* boilerplate = Add<HLoadNamedField>(
    349       allocation_site, static_cast<HValue*>(NULL), access);
    350   HValue* elements = AddLoadElements(boilerplate);
    351   HValue* capacity = AddLoadFixedArrayLength(elements);
    352   IfBuilder zero_capacity(this);
    353   zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
    354                                            Token::EQ);
    355   zero_capacity.Then();
    356   Push(BuildCloneShallowArrayEmpty(boilerplate,
    357                                    allocation_site,
    358                                    alloc_site_mode));
    359   zero_capacity.Else();
    360   IfBuilder if_fixed_cow(this);
    361   if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
    362   if_fixed_cow.Then();
    363   Push(BuildCloneShallowArrayCow(boilerplate,
    364                                  allocation_site,
    365                                  alloc_site_mode,
    366                                  FAST_ELEMENTS));
    367   if_fixed_cow.Else();
    368   IfBuilder if_fixed(this);
    369   if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
    370   if_fixed.Then();
    371   Push(BuildCloneShallowArrayNonEmpty(boilerplate,
    372                                       allocation_site,
    373                                       alloc_site_mode,
    374                                       FAST_ELEMENTS));
    375 
    376   if_fixed.Else();
    377   Push(BuildCloneShallowArrayNonEmpty(boilerplate,
    378                                       allocation_site,
    379                                       alloc_site_mode,
    380                                       FAST_DOUBLE_ELEMENTS));
    381   if_fixed.End();
    382   if_fixed_cow.End();
    383   zero_capacity.End();
    384 
    385   checker.ElseDeopt("Uninitialized boilerplate literals");
    386   checker.End();
    387 
    388   return environment()->Pop();
    389 }
    390 
    391 
    392 Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
    393   return DoGenerateCode(this);
    394 }
    395 
    396 
    397 template <>
    398 HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
    399   HValue* undefined = graph()->GetConstantUndefined();
    400 
    401   HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
    402                                                   GetParameter(1),
    403                                                   static_cast<HValue*>(NULL),
    404                                                   FAST_ELEMENTS);
    405 
    406   IfBuilder checker(this);
    407   checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
    408                                                     undefined);
    409   checker.And();
    410 
    411   HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
    412       AllocationSite::kTransitionInfoOffset);
    413   HInstruction* boilerplate = Add<HLoadNamedField>(
    414       allocation_site, static_cast<HValue*>(NULL), access);
    415 
    416   int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
    417   int object_size = size;
    418   if (FLAG_allocation_site_pretenuring) {
    419     size += AllocationMemento::kSize;
    420   }
    421 
    422   HValue* boilerplate_map = Add<HLoadNamedField>(
    423       boilerplate, static_cast<HValue*>(NULL),
    424       HObjectAccess::ForMap());
    425   HValue* boilerplate_size = Add<HLoadNamedField>(
    426       boilerplate_map, static_cast<HValue*>(NULL),
    427       HObjectAccess::ForMapInstanceSize());
    428   HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
    429   checker.If<HCompareNumericAndBranch>(boilerplate_size,
    430                                        size_in_words, Token::EQ);
    431   checker.Then();
    432 
    433   HValue* size_in_bytes = Add<HConstant>(size);
    434 
    435   HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
    436       NOT_TENURED, JS_OBJECT_TYPE);
    437 
    438   for (int i = 0; i < object_size; i += kPointerSize) {
    439     HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    440     Add<HStoreNamedField>(
    441         object, access, Add<HLoadNamedField>(
    442             boilerplate, static_cast<HValue*>(NULL), access));
    443   }
    444 
    445   ASSERT(FLAG_allocation_site_pretenuring || (size == object_size));
    446   if (FLAG_allocation_site_pretenuring) {
    447     BuildCreateAllocationMemento(
    448         object, Add<HConstant>(object_size), allocation_site);
    449   }
    450 
    451   environment()->Push(object);
    452   checker.ElseDeopt("Uninitialized boilerplate in fast clone");
    453   checker.End();
    454 
    455   return environment()->Pop();
    456 }
    457 
    458 
    459 Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
    460   return DoGenerateCode(this);
    461 }
    462 
    463 
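         // Allocates a new AllocationSite in old space, initializes its fields,
         // links it into the isolate's allocation site list and stores it into the
         // feedback vector slot passed to the stub.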
    464 template <>
    465 HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
    466   HValue* size = Add<HConstant>(AllocationSite::kSize);
    467   HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
    468       JS_OBJECT_TYPE);
    469 
    470   // Store the map
    471   Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
    472   AddStoreMapConstant(object, allocation_site_map);
    473 
    474   // Store the payload (smi elements kind)
    475   HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
    476   Add<HStoreNamedField>(object,
    477                         HObjectAccess::ForAllocationSiteOffset(
    478                             AllocationSite::kTransitionInfoOffset),
    479                         initial_elements_kind);
    480 
    481   // Unlike literals, constructed arrays don't have nested sites
    482   Add<HStoreNamedField>(object,
    483                         HObjectAccess::ForAllocationSiteOffset(
    484                             AllocationSite::kNestedSiteOffset),
    485                         graph()->GetConstant0());
    486 
    487   // Pretenuring calculation field.
    488   Add<HStoreNamedField>(object,
    489                         HObjectAccess::ForAllocationSiteOffset(
    490                             AllocationSite::kPretenureDataOffset),
    491                         graph()->GetConstant0());
    492 
    493   // Pretenuring memento creation count field.
    494   Add<HStoreNamedField>(object,
    495                         HObjectAccess::ForAllocationSiteOffset(
    496                             AllocationSite::kPretenureCreateCountOffset),
    497                         graph()->GetConstant0());
    498 
    499   // Store an empty fixed array for the code dependency.
    500   HConstant* empty_fixed_array =
    501     Add<HConstant>(isolate()->factory()->empty_fixed_array());
    502   Add<HStoreNamedField>(
    503       object,
    504       HObjectAccess::ForAllocationSiteOffset(
    505           AllocationSite::kDependentCodeOffset),
    506       empty_fixed_array);
    507 
    508   // Link the object to the allocation site list
    509   HValue* site_list = Add<HConstant>(
    510       ExternalReference::allocation_sites_list_address(isolate()));
    511   HValue* site = Add<HLoadNamedField>(
    512       site_list, static_cast<HValue*>(NULL),
    513       HObjectAccess::ForAllocationSiteList());
    514   // TODO(mvstanton): This is a store to a weak pointer, which we may want to
    515   // mark as such in order to skip the write barrier, once we have a unified
    516   // system for weakness. For now we decided to keep it like this because having
    517   // an initial write barrier backed store makes this pointer strong until the
    518   // next GC, and allocation sites are designed to survive several GCs anyway.
    519   Add<HStoreNamedField>(
    520       object,
    521       HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
    522       site);
    523   Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
    524                         object);
    525 
    526   HInstruction* feedback_vector = GetParameter(0);
    527   HInstruction* slot = GetParameter(1);
    528   Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
    529                    INITIALIZING_STORE);
    530   return feedback_vector;
    531 }
    532 
    533 
    534 Handle<Code> CreateAllocationSiteStub::GenerateCode() {
    535   return DoGenerateCode(this);
    536 }
    537 
    538 
    539 template <>
    540 HValue* CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() {
    541   HInstruction* load = BuildUncheckedMonomorphicElementAccess(
    542       GetParameter(0), GetParameter(1), NULL,
    543       casted_stub()->is_js_array(), casted_stub()->elements_kind(),
    544       LOAD, NEVER_RETURN_HOLE, STANDARD_STORE);
    545   return load;
    546 }
    547 
    548 
    549 Handle<Code> KeyedLoadFastElementStub::GenerateCode() {
    550   return DoGenerateCode(this);
    551 }
    552 
    553 
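         // Loads the field described by |index| from |object|. For double fields
         // the heap number box is loaded first and the double value is read out
         // of it.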
    554 HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    555     HValue* object, FieldIndex index) {
    556   Representation representation = index.is_double()
    557       ? Representation::Double()
    558       : Representation::Tagged();
    559   int offset = index.offset();
    560   HObjectAccess access = index.is_inobject()
    561       ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
    562       : HObjectAccess::ForBackingStoreOffset(offset, representation);
    563   if (index.is_double()) {
    564     // Load the heap number.
    565     object = Add<HLoadNamedField>(
    566         object, static_cast<HValue*>(NULL),
    567         access.WithRepresentation(Representation::Tagged()));
    568     // Load the double value from it.
    569     access = HObjectAccess::ForHeapNumberValue();
    570   }
    571   return Add<HLoadNamedField>(object, static_cast<HValue*>(NULL), access);
    572 }
    573 
    574 
    575 template<>
    576 HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
    577   return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
    578 }
    579 
    580 
    581 Handle<Code> LoadFieldStub::GenerateCode() {
    582   return DoGenerateCode(this);
    583 }
    584 
    585 
    586 template<>
    587 HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
    588   HValue* string = BuildLoadNamedField(GetParameter(0),
    589       FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
    590   return BuildLoadNamedField(string,
    591       FieldIndex::ForInObjectOffset(String::kLengthOffset));
    592 }
    593 
    594 
    595 Handle<Code> StringLengthStub::GenerateCode() {
    596   return DoGenerateCode(this);
    597 }
    598 
    599 
    600 template <>
    601 HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
    602   BuildUncheckedMonomorphicElementAccess(
    603       GetParameter(0), GetParameter(1), GetParameter(2),
    604       casted_stub()->is_js_array(), casted_stub()->elements_kind(),
    605       STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode());
    606 
    607   return GetParameter(2);
    608 }
    609 
    610 
    611 Handle<Code> KeyedStoreFastElementStub::GenerateCode() {
    612   return DoGenerateCode(this);
    613 }
    614 
    615 
    616 template <>
    617 HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
    618   info()->MarkAsSavesCallerDoubles();
    619 
    620   BuildTransitionElementsKind(GetParameter(0),
    621                               GetParameter(1),
    622                               casted_stub()->from_kind(),
    623                               casted_stub()->to_kind(),
    624                               casted_stub()->is_js_array());
    625 
    626   return GetParameter(0);
    627 }
    628 
    629 
    630 Handle<Code> TransitionElementsKindStub::GenerateCode() {
    631   return DoGenerateCode(this);
    632 }
    633 
    634 HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    635     ElementsKind kind,
    636     AllocationSiteOverrideMode override_mode,
    637     ArgumentClass argument_class) {
    638   HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
    639   HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
    640   JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
    641                                override_mode);
    642   HValue* result = NULL;
    643   switch (argument_class) {
    644     case NONE:
     645       // This stub is very performance sensitive; the generated code must be
     646       // tuned so that it doesn't build an eager frame.
    647       info()->MarkMustNotHaveEagerFrame();
    648       result = array_builder.AllocateEmptyArray();
    649       break;
    650     case SINGLE:
    651       result = BuildArraySingleArgumentConstructor(&array_builder);
    652       break;
    653     case MULTIPLE:
    654       result = BuildArrayNArgumentsConstructor(&array_builder, kind);
    655       break;
    656   }
    657 
    658   return result;
    659 }
    660 
    661 
    662 HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    663     ElementsKind kind, ArgumentClass argument_class) {
    664   HValue* constructor = GetParameter(
    665       InternalArrayConstructorStubBase::kConstructor);
    666   JSArrayBuilder array_builder(this, kind, constructor);
    667 
    668   HValue* result = NULL;
    669   switch (argument_class) {
    670     case NONE:
     671       // This stub is very performance sensitive; the generated code must be
     672       // tuned so that it doesn't build an eager frame.
    673       info()->MarkMustNotHaveEagerFrame();
    674       result = array_builder.AllocateEmptyArray();
    675       break;
    676     case SINGLE:
    677       result = BuildArraySingleArgumentConstructor(&array_builder);
    678       break;
    679     case MULTIPLE:
    680       result = BuildArrayNArgumentsConstructor(&array_builder, kind);
    681       break;
    682   }
    683   return result;
    684 }
    685 
    686 
    687 HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    688     JSArrayBuilder* array_builder) {
    689   // Smi check and range check on the input arg.
    690   HValue* constant_one = graph()->GetConstant1();
    691   HValue* constant_zero = graph()->GetConstant0();
    692 
    693   HInstruction* elements = Add<HArgumentsElements>(false);
    694   HInstruction* argument = Add<HAccessArgumentsAt>(
    695       elements, constant_one, constant_zero);
    696 
    697   return BuildAllocateArrayFromLength(array_builder, argument);
    698 }
    699 
    700 
    701 HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    702     JSArrayBuilder* array_builder, ElementsKind kind) {
    703   // Insert a bounds check because the number of arguments might exceed
    704   // the kInitialMaxFastElementArray limit. This cannot happen for code
    705   // that was parsed, but calling via Array.apply(thisArg, [...]) might
    706   // trigger it.
    707   HValue* length = GetArgumentsLength();
    708   HConstant* max_alloc_length =
    709       Add<HConstant>(JSObject::kInitialMaxFastElementArray);
    710   HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);
    711 
     712   // We need to fill the array with the hole if it's a smi array in the
     713   // multi-argument case, because we might have to bail out while copying
     714   // arguments into the array if they aren't compatible with a smi array.
     715   // If it's a double array there is no problem, and if it's a fast array
     716   // there is no problem either, because doubles are boxed.
    717   //
    718   // TODO(mvstanton): consider an instruction to memset fill the array
    719   // with zero in this case instead.
    720   JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
    721       ? JSArrayBuilder::FILL_WITH_HOLE
    722       : JSArrayBuilder::DONT_FILL_WITH_HOLE;
    723   HValue* new_object = array_builder->AllocateArray(checked_length,
    724                                                     max_alloc_length,
    725                                                     checked_length,
    726                                                     fill_mode);
    727   HValue* elements = array_builder->GetElementsLocation();
    728   ASSERT(elements != NULL);
    729 
    730   // Now populate the elements correctly.
    731   LoopBuilder builder(this,
    732                       context(),
    733                       LoopBuilder::kPostIncrement);
    734   HValue* start = graph()->GetConstant0();
    735   HValue* key = builder.BeginBody(start, checked_length, Token::LT);
    736   HInstruction* argument_elements = Add<HArgumentsElements>(false);
    737   HInstruction* argument = Add<HAccessArgumentsAt>(
    738       argument_elements, checked_length, key);
    739 
    740   Add<HStoreKeyed>(elements, key, argument, kind);
    741   builder.EndBody();
    742   return new_object;
    743 }
    744 
    745 
    746 template <>
    747 HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
    748   ElementsKind kind = casted_stub()->elements_kind();
    749   AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
    750   return BuildArrayConstructor(kind, override_mode, NONE);
    751 }
    752 
    753 
    754 Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
    755   return DoGenerateCode(this);
    756 }
    757 
    758 
    759 template <>
    760 HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    761     BuildCodeStub() {
    762   ElementsKind kind = casted_stub()->elements_kind();
    763   AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
    764   return BuildArrayConstructor(kind, override_mode, SINGLE);
    765 }
    766 
    767 
    768 Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
    769   return DoGenerateCode(this);
    770 }
    771 
    772 
    773 template <>
    774 HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
    775   ElementsKind kind = casted_stub()->elements_kind();
    776   AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
    777   return BuildArrayConstructor(kind, override_mode, MULTIPLE);
    778 }
    779 
    780 
    781 Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
    782   return DoGenerateCode(this);
    783 }
    784 
    785 
    786 template <>
    787 HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    788     BuildCodeStub() {
    789   ElementsKind kind = casted_stub()->elements_kind();
    790   return BuildInternalArrayConstructor(kind, NONE);
    791 }
    792 
    793 
    794 Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
    795   return DoGenerateCode(this);
    796 }
    797 
    798 
    799 template <>
    800 HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    801     BuildCodeStub() {
    802   ElementsKind kind = casted_stub()->elements_kind();
    803   return BuildInternalArrayConstructor(kind, SINGLE);
    804 }
    805 
    806 
    807 Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
    808   return DoGenerateCode(this);
    809 }
    810 
    811 
    812 template <>
    813 HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    814     BuildCodeStub() {
    815   ElementsKind kind = casted_stub()->elements_kind();
    816   return BuildInternalArrayConstructor(kind, MULTIPLE);
    817 }
    818 
    819 
    820 Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
    821   return DoGenerateCode(this);
    822 }
    823 
    824 
    825 template <>
    826 HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
    827   Isolate* isolate = graph()->isolate();
    828   CompareNilICStub* stub = casted_stub();
    829   HIfContinuation continuation;
    830   Handle<Map> sentinel_map(isolate->heap()->meta_map());
    831   Type* type = stub->GetType(zone(), sentinel_map);
    832   BuildCompareNil(GetParameter(0), type, &continuation);
    833   IfBuilder if_nil(this, &continuation);
    834   if_nil.Then();
    835   if (continuation.IsFalseReachable()) {
    836     if_nil.Else();
    837     if_nil.Return(graph()->GetConstant0());
    838   }
    839   if_nil.End();
    840   return continuation.IsTrueReachable()
    841       ? graph()->GetConstant1()
    842       : graph()->GetConstantUndefined();
    843 }
    844 
    845 
    846 Handle<Code> CompareNilICStub::GenerateCode() {
    847   return DoGenerateCode(this);
    848 }
    849 
    850 
    851 template <>
    852 HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
    853   BinaryOpIC::State state = casted_stub()->state();
    854 
    855   HValue* left = GetParameter(BinaryOpICStub::kLeft);
    856   HValue* right = GetParameter(BinaryOpICStub::kRight);
    857 
    858   Type* left_type = state.GetLeftType(zone());
    859   Type* right_type = state.GetRightType(zone());
    860   Type* result_type = state.GetResultType(zone());
    861 
    862   ASSERT(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
    863          (state.HasSideEffects() || !result_type->Is(Type::None())));
    864 
    865   HValue* result = NULL;
    866   HAllocationMode allocation_mode(NOT_TENURED);
    867   if (state.op() == Token::ADD &&
    868       (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
    869       !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    870     // For the generic add stub a fast case for string addition is performance
    871     // critical.
    872     if (left_type->Maybe(Type::String())) {
    873       IfBuilder if_leftisstring(this);
    874       if_leftisstring.If<HIsStringAndBranch>(left);
    875       if_leftisstring.Then();
    876       {
    877         Push(BuildBinaryOperation(
    878                     state.op(), left, right,
    879                     Type::String(zone()), right_type,
    880                     result_type, state.fixed_right_arg(),
    881                     allocation_mode));
    882       }
    883       if_leftisstring.Else();
    884       {
    885         Push(BuildBinaryOperation(
    886                     state.op(), left, right,
    887                     left_type, right_type, result_type,
    888                     state.fixed_right_arg(), allocation_mode));
    889       }
    890       if_leftisstring.End();
    891       result = Pop();
    892     } else {
    893       IfBuilder if_rightisstring(this);
    894       if_rightisstring.If<HIsStringAndBranch>(right);
    895       if_rightisstring.Then();
    896       {
    897         Push(BuildBinaryOperation(
    898                     state.op(), left, right,
    899                     left_type, Type::String(zone()),
    900                     result_type, state.fixed_right_arg(),
    901                     allocation_mode));
    902       }
    903       if_rightisstring.Else();
    904       {
    905         Push(BuildBinaryOperation(
    906                     state.op(), left, right,
    907                     left_type, right_type, result_type,
    908                     state.fixed_right_arg(), allocation_mode));
    909       }
    910       if_rightisstring.End();
    911       result = Pop();
    912     }
    913   } else {
    914     result = BuildBinaryOperation(
    915             state.op(), left, right,
    916             left_type, right_type, result_type,
    917             state.fixed_right_arg(), allocation_mode);
    918   }
    919 
    920   // If we encounter a generic argument, the number conversion is
    921   // observable, thus we cannot afford to bail out after the fact.
    922   if (!state.HasSideEffects()) {
    923     result = EnforceNumberType(result, result_type);
    924   }
    925 
    926   // Reuse the double box of one of the operands if we are allowed to (i.e.
    927   // chained binops).
    928   if (state.CanReuseDoubleBox()) {
    929     HValue* operand = (state.mode() == OVERWRITE_LEFT) ? left : right;
    930     IfBuilder if_heap_number(this);
    931     if_heap_number.If<HHasInstanceTypeAndBranch>(operand, HEAP_NUMBER_TYPE);
    932     if_heap_number.Then();
    933     Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
    934     Push(operand);
    935     if_heap_number.Else();
    936     Push(result);
    937     if_heap_number.End();
    938     result = Pop();
    939   }
    940 
    941   return result;
    942 }
    943 
    944 
    945 Handle<Code> BinaryOpICStub::GenerateCode() {
    946   return DoGenerateCode(this);
    947 }
    948 
    949 
    950 template <>
    951 HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
    952   BinaryOpIC::State state = casted_stub()->state();
    953 
    954   HValue* allocation_site = GetParameter(
    955       BinaryOpWithAllocationSiteStub::kAllocationSite);
    956   HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
    957   HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
    958 
    959   Type* left_type = state.GetLeftType(zone());
    960   Type* right_type = state.GetRightType(zone());
    961   Type* result_type = state.GetResultType(zone());
    962   HAllocationMode allocation_mode(allocation_site);
    963 
    964   return BuildBinaryOperation(state.op(), left, right,
    965                               left_type, right_type, result_type,
    966                               state.fixed_right_arg(), allocation_mode);
    967 }
    968 
    969 
    970 Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
    971   return DoGenerateCode(this);
    972 }
    973 
    974 
    975 template <>
    976 HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
    977   StringAddStub* stub = casted_stub();
    978   StringAddFlags flags = stub->flags();
    979   PretenureFlag pretenure_flag = stub->pretenure_flag();
    980 
    981   HValue* left = GetParameter(StringAddStub::kLeft);
    982   HValue* right = GetParameter(StringAddStub::kRight);
    983 
    984   // Make sure that both arguments are strings if not known in advance.
    985   if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    986     left = BuildCheckString(left);
    987   }
    988   if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    989     right = BuildCheckString(right);
    990   }
    991 
    992   return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
    993 }
    994 
    995 
    996 Handle<Code> StringAddStub::GenerateCode() {
    997   return DoGenerateCode(this);
    998 }
    999 
   1000 
   1001 template <>
   1002 HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
   1003   ToBooleanStub* stub = casted_stub();
   1004 
   1005   IfBuilder if_true(this);
   1006   if_true.If<HBranch>(GetParameter(0), stub->GetTypes());
   1007   if_true.Then();
   1008   if_true.Return(graph()->GetConstant1());
   1009   if_true.Else();
   1010   if_true.End();
   1011   return graph()->GetConstant0();
   1012 }
   1013 
   1014 
   1015 Handle<Code> ToBooleanStub::GenerateCode() {
   1016   return DoGenerateCode(this);
   1017 }
   1018 
   1019 
   1020 template <>
   1021 HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
   1022   StoreGlobalStub* stub = casted_stub();
   1023   Handle<Object> hole(isolate()->heap()->the_hole_value(), isolate());
    1024   Handle<Object> placeholder_value(Smi::FromInt(0), isolate());
    1025   Handle<PropertyCell> placeholder_cell =
    1026       isolate()->factory()->NewPropertyCell(placeholder_value);
   1027 
   1028   HParameter* value = GetParameter(2);
   1029 
   1030   if (stub->check_global()) {
   1031     // Check that the map of the global has not changed: use a placeholder map
   1032     // that will be replaced later with the global object's map.
   1033     Handle<Map> placeholder_map = isolate()->factory()->meta_map();
   1034     HValue* global = Add<HConstant>(
   1035         StoreGlobalStub::global_placeholder(isolate()));
   1036     Add<HCheckMaps>(global, placeholder_map);
   1037   }
   1038 
   1039   HValue* cell = Add<HConstant>(placeholder_cell);
   1040   HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
   1041   HValue* cell_contents = Add<HLoadNamedField>(
   1042       cell, static_cast<HValue*>(NULL), access);
   1043 
   1044   if (stub->is_constant()) {
   1045     IfBuilder builder(this);
   1046     builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
   1047     builder.Then();
   1048     builder.ElseDeopt("Unexpected cell contents in constant global store");
   1049     builder.End();
   1050   } else {
   1051     // Load the payload of the global parameter cell. A hole indicates that the
   1052     // property has been deleted and that the store must be handled by the
   1053     // runtime.
   1054     IfBuilder builder(this);
   1055     HValue* hole_value = Add<HConstant>(hole);
   1056     builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
   1057     builder.Then();
   1058     builder.Deopt("Unexpected cell contents in global store");
   1059     builder.Else();
   1060     Add<HStoreNamedField>(cell, access, value);
   1061     builder.End();
   1062   }
   1063 
   1064   return value;
   1065 }
   1066 
   1067 
   1068 Handle<Code> StoreGlobalStub::GenerateCode() {
   1069   return DoGenerateCode(this);
   1070 }
   1071 
   1072 
   1073 template<>
   1074 HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
   1075   HValue* value = GetParameter(0);
   1076   HValue* map = GetParameter(1);
   1077   HValue* key = GetParameter(2);
   1078   HValue* object = GetParameter(3);
   1079 
   1080   if (FLAG_trace_elements_transitions) {
   1081     // Tracing elements transitions is the job of the runtime.
   1082     Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);
   1083   } else {
   1084     info()->MarkAsSavesCallerDoubles();
   1085 
   1086     BuildTransitionElementsKind(object, map,
   1087                                 casted_stub()->from_kind(),
   1088                                 casted_stub()->to_kind(),
   1089                                 casted_stub()->is_jsarray());
   1090 
   1091     BuildUncheckedMonomorphicElementAccess(object, key, value,
   1092                                            casted_stub()->is_jsarray(),
   1093                                            casted_stub()->to_kind(),
   1094                                            STORE, ALLOW_RETURN_HOLE,
   1095                                            casted_stub()->store_mode());
   1096   }
   1097 
   1098   return value;
   1099 }
   1100 
   1101 
   1102 Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
   1103   return DoGenerateCode(this);
   1104 }
   1105 
   1106 
   1107 void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
   1108     HValue* js_function,
   1109     HValue* native_context,
   1110     IfBuilder* builder,
   1111     HValue* optimized_map,
   1112     HValue* map_index) {
   1113   HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
   1114   HValue* context_slot = LoadFromOptimizedCodeMap(
   1115       optimized_map, map_index, SharedFunctionInfo::kContextOffset);
   1116   HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
   1117       optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
   1118   builder->If<HCompareObjectEqAndBranch>(native_context,
   1119                                          context_slot);
   1120   builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
   1121   builder->Then();
   1122   HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
   1123       map_index, SharedFunctionInfo::kCachedCodeOffset);
   1124   // and the literals
   1125   HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
   1126       map_index, SharedFunctionInfo::kLiteralsOffset);
   1127 
   1128   Counters* counters = isolate()->counters();
   1129   AddIncrementCounter(counters->fast_new_closure_install_optimized());
   1130 
   1131   // TODO(fschneider): Idea: store proper code pointers in the optimized code
   1132   // map and either unmangle them on marking or do nothing as the whole map is
   1133   // discarded on major GC anyway.
   1134   Add<HStoreCodeEntry>(js_function, code_object);
   1135   Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
   1136                         literals);
   1137 
    1138   // Now link the function into the list of optimized functions.
   1139   HValue* optimized_functions_list = Add<HLoadNamedField>(
   1140       native_context, static_cast<HValue*>(NULL),
   1141       HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
   1142   Add<HStoreNamedField>(js_function,
   1143                         HObjectAccess::ForNextFunctionLinkPointer(),
   1144                         optimized_functions_list);
   1145 
   1146   // This store is the only one that should have a write barrier.
   1147   Add<HStoreNamedField>(native_context,
   1148            HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
   1149            js_function);
   1150 
   1151   // The builder continues in the "then" after this function.
   1152 }
   1153 
   1154 
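         // Installs the unoptimized code from the shared function info into the
         // closure and clears its next-function link.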
   1155 void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
   1156                                                 HValue* shared_info) {
   1157   Add<HStoreNamedField>(js_function,
   1158                         HObjectAccess::ForNextFunctionLinkPointer(),
   1159                         graph()->GetConstantUndefined());
   1160   HValue* code_object = Add<HLoadNamedField>(
   1161       shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCodeOffset());
   1162   Add<HStoreCodeEntry>(js_function, code_object);
   1163 }
   1164 
   1165 
   1166 HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
   1167     HValue* optimized_map,
   1168     HValue* iterator,
   1169     int field_offset) {
    1170   // By making sure to express these loads in the form [<hvalue> + constant],
    1171   // the keyed load can be hoisted.
   1172   ASSERT(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
   1173   HValue* field_slot = iterator;
   1174   if (field_offset > 0) {
   1175     HValue* field_offset_value = Add<HConstant>(field_offset);
   1176     field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
   1177   }
   1178   HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot,
   1179       static_cast<HValue*>(NULL), FAST_ELEMENTS);
   1180   return field_entry;
   1181 }
   1182 
   1183 
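         // Searches the shared function info's optimized code map for an entry
         // matching the given native context; installs that code if found,
         // otherwise installs the unoptimized code.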
   1184 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
   1185     HValue* js_function,
   1186     HValue* shared_info,
   1187     HValue* native_context) {
   1188   Counters* counters = isolate()->counters();
   1189   IfBuilder is_optimized(this);
   1190   HInstruction* optimized_map = Add<HLoadNamedField>(
   1191       shared_info, static_cast<HValue*>(NULL),
   1192       HObjectAccess::ForOptimizedCodeMap());
   1193   HValue* null_constant = Add<HConstant>(0);
   1194   is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
   1195   is_optimized.Then();
   1196   {
   1197     BuildInstallCode(js_function, shared_info);
   1198   }
   1199   is_optimized.Else();
   1200   {
   1201     AddIncrementCounter(counters->fast_new_closure_try_optimized());
    1202     // optimized_map points to a fixed array of entries
    1203     // (native context, optimized code, literals, osr ast id).
    1204     // The map must never be empty, so check the first entry.
   1205     HValue* first_entry_index =
   1206         Add<HConstant>(SharedFunctionInfo::kEntriesStart);
   1207     IfBuilder already_in(this);
   1208     BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
   1209                                       optimized_map, first_entry_index);
   1210     already_in.Else();
   1211     {
    1212       // Iterate through the rest of the map backwards. Do not double-check
    1213       // the first entry. After the loop, if no matching optimized code was
    1214       // found, install the unoptimized code.
   1215       // for(i = map.length() - SharedFunctionInfo::kEntryLength;
   1216       //     i > SharedFunctionInfo::kEntriesStart;
   1217       //     i -= SharedFunctionInfo::kEntryLength) { .. }
   1218       HValue* shared_function_entry_length =
   1219           Add<HConstant>(SharedFunctionInfo::kEntryLength);
   1220       LoopBuilder loop_builder(this,
   1221                                context(),
   1222                                LoopBuilder::kPostDecrement,
   1223                                shared_function_entry_length);
   1224       HValue* array_length = Add<HLoadNamedField>(
   1225           optimized_map, static_cast<HValue*>(NULL),
   1226           HObjectAccess::ForFixedArrayLength());
   1227       HValue* start_pos = AddUncasted<HSub>(array_length,
   1228                                             shared_function_entry_length);
   1229       HValue* slot_iterator = loop_builder.BeginBody(start_pos,
   1230                                                      first_entry_index,
   1231                                                      Token::GT);
   1232       {
   1233         IfBuilder done_check(this);
   1234         BuildCheckAndInstallOptimizedCode(js_function, native_context,
   1235                                           &done_check,
   1236                                           optimized_map,
   1237                                           slot_iterator);
   1238         // Fall out of the loop
   1239         loop_builder.Break();
   1240       }
   1241       loop_builder.EndBody();
   1242 
    1243       // If slot_iterator equals the first entry index, then we failed to
    1244       // find and install optimized code.
   1245       IfBuilder no_optimized_code_check(this);
   1246       no_optimized_code_check.If<HCompareNumericAndBranch>(
   1247           slot_iterator, first_entry_index, Token::EQ);
   1248       no_optimized_code_check.Then();
   1249       {
   1250         // Store the unoptimized code
   1251         BuildInstallCode(js_function, shared_info);
   1252       }
   1253     }
   1254   }
   1255 }
   1256 
   1257 
   1258 template<>
   1259 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
   1260   Counters* counters = isolate()->counters();
   1261   Factory* factory = isolate()->factory();
   1262   HInstruction* empty_fixed_array =
   1263       Add<HConstant>(factory->empty_fixed_array());
   1264   HValue* shared_info = GetParameter(0);
   1265 
   1266   AddIncrementCounter(counters->fast_new_closure_total());
   1267 
   1268   // Create a new closure from the given function info in new space
   1269   HValue* size = Add<HConstant>(JSFunction::kSize);
   1270   HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
   1271                                              NOT_TENURED, JS_FUNCTION_TYPE);
   1272 
   1273   int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
   1274                                             casted_stub()->is_generator());
   1275 
   1276   // Compute the function map in the current native context and set that
   1277   // as the map of the allocated object.
   1278   HInstruction* native_context = BuildGetNativeContext();
   1279   HInstruction* map_slot_value = Add<HLoadNamedField>(
   1280       native_context, static_cast<HValue*>(NULL),
   1281       HObjectAccess::ForContextSlot(map_index));
   1282   Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
   1283 
   1284   // Initialize the rest of the function.
   1285   Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
   1286                         empty_fixed_array);
   1287   Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
   1288                         empty_fixed_array);
   1289   Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
   1290                         empty_fixed_array);
   1291   Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
   1292                         graph()->GetConstantHole());
   1293   Add<HStoreNamedField>(js_function,
   1294                         HObjectAccess::ForSharedFunctionInfoPointer(),
   1295                         shared_info);
   1296   Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
   1297                         context());
   1298 
   1299   // Initialize the code pointer in the function to be the one
   1300   // found in the shared function info object.
   1301   // But first check if there is an optimized version for our context.
   1302   if (FLAG_cache_optimized_code) {
   1303     BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
   1304   } else {
   1305     BuildInstallCode(js_function, shared_info);
   1306   }
   1307 
   1308   return js_function;
   1309 }


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());

  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}
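
// A worked example of the allocation size computed above, assuming
// Context::MIN_CONTEXT_SLOTS == 4 and a 64-bit target where kPointerSize == 8
// and FixedArray::kHeaderSize == 16 (treat the concrete numbers as
// illustrative):
//
//   casted_stub()->slots() == 2  =>  length == 2 + 4 == 6
//   size == 6 * 8 + 16 == 64 bytes
//
// The slots written above are [CLOSURE, PREVIOUS, EXTENSION, GLOBAL_OBJECT,
// <user slots, filled with undefined>].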


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<KeyedLoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(0);
  HValue* key = GetParameter(1);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}
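
// Conceptually (a simplified sketch, not the helper's exact contract): the
// key has been checked to be a Smi, the receiver's elements are assumed to
// be a number dictionary, and the hash derived from the index is used to
// probe it:
//
//   entry = hash & (capacity - 1);
//   while (dictionary.key_at(entry) != key) {
//     entry = next_probe(entry);  // bounded; misses are left to the slow path
//   }
//   return dictionary.value_at(entry);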


Handle<Code> KeyedLoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
  // Determine the parameters.
  HValue* length = GetParameter(RegExpConstructResultStub::kLength);
  HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
  HValue* input = GetParameter(RegExpConstructResultStub::kInput);

  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}
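
// Note (details live in the shared HGraphBuilder helper, not here):
// BuildRegExpConstructResult allocates the JSArray used for RegExp match
// results; conceptually the result carries the match elements plus two extra
// own properties:
//
//   result.length = length;
//   result.index  = index;   // start position of the match
//   result.input  = input;   // the subject string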


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
class CodeStubGraphBuilder<KeyedLoadGenericElementStub>
  : public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate,
                       KeyedLoadGenericElementStub* stub)
    : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
                                HValue* receiver,
                                HValue* key,
                                HValue* instance_type,
                                HValue* bit_field2,
                                ElementsKind kind);

  KeyedLoadGenericElementStub* casted_stub() {
    return static_cast<KeyedLoadGenericElementStub*>(stub());
  }
};
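
// Unlike the simpler stubs above, this stub needs private helper methods, so
// the entire CodeStubGraphBuilder class is specialized for
// KeyedLoadGenericElementStub rather than only its BuildCodeStub() method.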


void CodeStubGraphBuilder<
  KeyedLoadGenericElementStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder,
    HValue* bit_field2,
    ElementsKind kind) {
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}
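
// Why comparing the raw bit_field2 value works (an explanatory note; the bit
// layout itself is defined in Map): the elements-kind bits occupy the most
// significant bits of bit_field2, so regardless of the lower bits
//
//   bit_field2 < ElementsKindBits::encode(kind + 1)
//
// holds exactly when the map's elements kind is <= kind.  BuildCodeStub()
// below chains these checks with increasing kinds, so each Then() branch
// covers one contiguous range of elements kinds.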


void CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder,
    HValue* receiver,
    HValue* key,
    HValue* instance_type,
    HValue* bit_field2,
    ElementsKind kind) {
  ASSERT(!IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}
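
// The two loads above differ only in the is_js_array flag passed to
// BuildUncheckedMonomorphicElementAccess.  Approximately (a simplified
// sketch; the helper also deals with element representation and holes):
//
//   limit = is_js_array ? receiver.length     // JSArray: user-visible length
//                       : elements.length;    // other receivers: store length
//   check key < limit, otherwise bail out;
//   result = elements[key];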


void CodeStubGraphBuilder<
  KeyedLoadGenericElementStub>::BuildExternalElementLoad(
    HGraphBuilder::IfBuilder* if_builder,
    HValue* receiver,
    HValue* key,
    HValue* instance_type,
    HValue* bit_field2,
    ElementsKind kind) {
  ASSERT(IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
}
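
// External (typed-array) backing stores are never JSArrays and cannot contain
// holes, so the load above always takes the non-JSArray path and needs no
// JS_ARRAY_TYPE check.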


HValue* CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(0);
  HValue* key = GetParameter(1);

  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (a number).
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
      (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                       HObjectAccess::ForMap());

    HValue* instance_type =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 = Add<HLoadNamedField>(map,
                                              static_cast<HValue*>(NULL),
                                              HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOPPY_ARGUMENTS_ELEMENTS);
    // Sloppy (non-strict) arguments elements are not handled.
    Add<HDeoptimize>("non-strict elements in KeyedLoadGenericElementStub",
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT64_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_CLAMPED_ELEMENTS);

    kind_if.ElseDeopt("ElementsKind unhandled in KeyedLoadGenericElementStub");

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
        (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is a string and the receiver's properties are in dictionary mode.
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, static_cast<HValue*>(NULL),
          HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
          HObjectAccess::ForNameHashField());

      HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
                                                          properties,
                                                          key,
                                                          hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // Key is a string and the receiver's properties are in fast mode.
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                         HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);

      IfBuilder lookup_if(this);
      for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
           ++probe) {
        int probe_base = probe * KeyedLookupCache::kEntryLength;
        HValue* map_index = AddUncasted<HAdd>(base_index,
            Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
        map_index->ClearFlag(HValue::kCanOverflow);
        HValue* key_index = AddUncasted<HAdd>(base_index,
            Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
        key_index->ClearFlag(HValue::kCanOverflow);
        HValue* map_to_check = Add<HLoadKeyed>(cache_keys,
                                               map_index,
                                               static_cast<HValue*>(NULL),
                                               FAST_ELEMENTS,
                                               NEVER_RETURN_HOLE, 0);
        lookup_if.If<HCompareObjectEqAndBranch>(map_to_check, map);
        lookup_if.And();
        HValue* key_to_check = Add<HLoadKeyed>(cache_keys,
                                               key_index,
                                               static_cast<HValue*>(NULL),
                                               FAST_ELEMENTS,
                                               NEVER_RETURN_HOLE, 0);
        lookup_if.If<HCompareObjectEqAndBranch>(key_to_check, key);
        lookup_if.Then();
        {
          ExternalReference cache_field_offsets_ref =
              ExternalReference::keyed_lookup_cache_field_offsets(isolate());
          HValue* cache_field_offsets = Add<HConstant>(cache_field_offsets_ref);
          HValue* index = AddUncasted<HAdd>(hash,
                                            Add<HConstant>(probe));
          index->ClearFlag(HValue::kCanOverflow);
          HValue* property_index = Add<HLoadKeyed>(cache_field_offsets,
                                                   index,
                                                   static_cast<HValue*>(NULL),
                                                   EXTERNAL_INT32_ELEMENTS,
                                                   NEVER_RETURN_HOLE, 0);
          Push(property_index);
        }
        lookup_if.Else();
      }
      Add<HDeoptimize>("KeyedLoad fall-back", Deoptimizer::EAGER);
      Push(graph()->GetConstant0());
      lookup_if.End();
      Push(Add<HLoadFieldByIndex>(receiver, Pop()));
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}
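
// A simplified sketch of the KeyedLookupCache probing unrolled in the
// fast-properties branch above (bucket and entry sizes come from
// KeyedLookupCache and are not restated here):
//
//   base = hash * 2;                           // two cache_keys words per entry
//   for (probe = 0; probe < kEntriesPerBucket; ++probe) {
//     entry = base + probe * kEntryLength;
//     if (cache_keys[entry + kMapIndex] == map &&
//         cache_keys[entry + kKeyIndex] == key) {
//       field_index = cache_field_offsets[hash + probe];
//       return LoadFieldByIndex(receiver, field_index);
//     }
//   }
//   deoptimize("KeyedLoad fall-back");         // no probe matched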


Handle<Code> KeyedLoadGenericElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


} }  // namespace v8::internal
