// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#include "code-stubs.h"
#include "hydrogen.h"
#include "lithium.h"

namespace v8 {
namespace internal {


// Runs the Hydrogen optimization passes on |graph| and lowers the result to
// a Lithium chunk ready for code generation. Code stubs must always compile,
// so any bailout from either phase is fatal.
static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  ASSERT(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


// Shared base class for building the Hydrogen graph of a code stub.
// Subclasses implement BuildCodeStub(); BuildGraph() sets up the stub's
// register parameters (from the stub's interface descriptor) and the
// trailing return sequence.
class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
      : HGraphBuilder(&info_),
        arguments_length_(NULL),
        info_(stub, isolate),
        context_(NULL) {
    // Allocate one slot per register parameter declared by the descriptor;
    // the slots are filled in by BuildGraph().
    descriptor_ = stub->GetInterfaceDescriptor(isolate);
    parameters_.Reset(new HParameter*[descriptor_->register_param_count_]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  // Returns the HParameter bound for register parameter |parameter|.
  HParameter* GetParameter(int parameter) {
    ASSERT(parameter < descriptor_->register_param_count_);
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    ASSERT(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return &info_; }
  HydrogenCodeStub* stub() { return info_.code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_.isolate(); }

  // RAII helper: opens an IfBuilder that checks |constructor| is the given
  // Array function; the destructor emits the deopt for the mismatch case and
  // closes the branch, so the guarded code is simply written in the scope of
  // this object.
  class ArrayContextChecker {
   public:
    ArrayContextChecker(HGraphBuilder* builder, HValue* constructor,
                        HValue* array_function)
        : checker_(builder) {
      checker_.If<HCompareObjectEqAndBranch, HValue*>(constructor,
                                                      array_function);
      checker_.Then();
    }

    ~ArrayContextChecker() {
      checker_.ElseDeopt("Array constructor called from different context");
      checker_.End();
    }
   private:
    IfBuilder checker_;
  };

  // How many explicit arguments an Array constructor stub receives.
  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* BuildArrayConstructor(ElementsKind kind,
                                ContextCheckMode context_mode,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
                                 HValue* code_object);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;  // NULL until BuildGraph() runs.
  CompilationInfoWithZone info_;
  CodeStubInterfaceDescriptor* descriptor_;
  HContext* context_;  // Bound to the environment in BuildGraph().
};


bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(&info_);
  }

  int param_count = descriptor_->register_param_count_;
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
  HInstruction* stack_parameter_count = NULL;
  // Bind every register parameter. The descriptor may designate one register
  // as the dynamic stack-parameter count; that one is typed Integer32/Smi and
  // doubles as the arguments length for the stub.
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_->IsParameterCountRegister(i)
        ? Representation::Integer32()
        : Representation::Tagged();
    HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_->IsParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  ASSERT(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    // No dynamic count register: use sentinel -1 for the pop count and a
    // constant zero arguments length.
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_->hint_stack_parameter_count_ < 0) {
      // Dynamic count: also pop the function itself (+1).
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      // Use the descriptor's precomputed hint for the pop count.
      int count = descriptor_->hint_stack_parameter_count_;
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


// Per-stub graph builder. Dispatches to the initialized or uninitialized
// variant based on the stub's state; the default uninitialized variant
// simply forces a deoptimization into the runtime.
template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    // The condition (undefined == undefined) is always true, so the Else
    // (deopt) branch is statically taken-never; the deopt is reached via
    // ElseDeopt on the never-true path arrangement below.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt("Forced deopt to runtime");
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


// Builds a minimal stub that tail-calls the stub's miss handler instead of
// going through the full Hydrogen pipeline. Used for uninitialized stubs.
Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) {
  Factory* factory = isolate->factory();

  // Generate the new code.
  MacroAssembler masm(isolate, NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType(),
      GetStubFlags());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


// Common driver for all GenerateCode() entry points below: lazily
// initializes the stub's interface descriptor, picks the lightweight miss
// path for uninitialized stubs, otherwise builds the Hydrogen graph,
// optimizes it and generates code.
template <class Stub>
static Handle<Code> DoGenerateCode(Isolate* isolate, Stub* stub) {
  CodeStub::Major major_key =
      static_cast<HydrogenCodeStub*>(stub)->MajorKey();
  CodeStubInterfaceDescriptor* descriptor =
      isolate->code_stub_interface_descriptor(major_key);
  // register_param_count_ < 0 marks a descriptor that has not been
  // initialized yet.
  if (descriptor->register_param_count_ < 0) {
    stub->InitializeInterfaceDescriptor(isolate, descriptor);
  }

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor->has_miss_handler()) {
    ASSERT(!descriptor->stack_parameter_count_.is_valid());
    return stub->GenerateLightweightMissCode(isolate);
  }
  ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  CodeStubGraphBuilder<Stub> builder(isolate, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    double ms = timer.Elapsed().InMillisecondsF();
    PrintF("[Lazy compilation of %s took %0.3f ms]\n", *stub->GetName(), ms);
  }
  return code;
}


// ToNumber: returns the input unchanged if it is already a Smi or heap
// number, otherwise calls the TO_NUMBER builtin.
template <>
HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);

  // Check if the parameter is already a SMI or heap number.
  IfBuilder if_number(this);
  if_number.If<HIsSmiAndBranch>(value);
  if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
  if_number.Then();

  // Return the number.
  Push(value);

  if_number.Else();

  // Convert the parameter to number using the builtin.
  HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
  Add<HPushArgument>(value);
  Push(Add<HInvokeFunction>(function, 1));

  if_number.End();

  return Pop();
}


Handle<Code> ToNumberStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// NumberToString: delegates to the shared BuildNumberToString helper.
template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, handle(Type::Number(), isolate()));
}


Handle<Code> NumberToStringStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// FastCloneShallowArray: clones a boilerplate array found via the allocation
// site loaded from the literals array (parameters 0/1). Deopts if the
// allocation site slot is still undefined (uninitialized literal).
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
  int length = casted_stub()->length();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(allocation_site, access);
  HValue* push_value;
  if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
    // Elements kind not known statically: dispatch on the boilerplate's
    // elements map (COW / fixed / double).
    HValue* elements = AddLoadElements(boilerplate);

    IfBuilder if_fixed_cow(this);
    if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
    if_fixed_cow.Then();
    // COW elements are shared, so the clone copies zero elements.
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_ELEMENTS,
                                        0/*copy-on-write*/);
    environment()->Push(push_value);
    if_fixed_cow.Else();

    IfBuilder if_fixed(this);
    if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
    if_fixed.Then();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_ELEMENTS,
                                        length);
    environment()->Push(push_value);
    if_fixed.Else();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        FAST_DOUBLE_ELEMENTS,
                                        length);
    environment()->Push(push_value);
  } else {
    // Elements kind is encoded in the stub: single clone path.
    ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
    push_value = BuildCloneShallowArray(boilerplate,
                                        allocation_site,
                                        alloc_site_mode,
                                        elements_kind,
                                        length);
    environment()->Push(push_value);
  }

  checker.ElseDeopt("Uninitialized boilerplate literals");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// FastCloneShallowObject: word-by-word copy of a boilerplate object, guarded
// by checks that the allocation site is initialized and the boilerplate's
// instance size matches the size baked into the stub.
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(allocation_site, access);

  // |object_size| is the payload copied from the boilerplate; |size| may
  // additionally include room for an AllocationMemento.
  int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map = Add<HLoadNamedField>(
      boilerplate,
      HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, HObjectAccess::ForMapInstanceSize());
  // Guard: the boilerplate's instance size (in words) must equal the size
  // this stub was specialized for, otherwise deopt below.
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
      isolate()->heap()->GetPretenureMode(), JS_OBJECT_TYPE);

  // Shallow copy: transfer the boilerplate's fields word by word.
  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, access));
  }

  ASSERT(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    // Append the memento right after the object payload.
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt("Uninitialized boilerplate in fast clone");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// CreateAllocationSite: allocates and initializes a new AllocationSite in
// tenured space, links it into the isolate's allocation-site list, and
// stores it into the cell passed as parameter 0.
template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
      JS_OBJECT_TYPE);

  // Store the map
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation fields.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kMementoFoundCountOffset),
                        graph()->GetConstant0());

  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kMementoCreateCountOffset),
                        graph()->GetConstant0());

  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDecisionOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  HStoreNamedField* store = Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list,
                                      HObjectAccess::ForAllocationSiteList());
  // weak_next points at the old list head; the new site is freshly allocated
  // so the write barrier can be skipped.
  store = Add<HStoreNamedField>(object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  store->SkipWriteBarrier();
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  // We use a hammer (SkipWriteBarrier()) to indicate that we know the input
  // cell is really a Cell, and so no write barrier is needed.
  // TODO(mvstanton): Add a debug_code check to verify the input cell is really
  // a cell. (perhaps with a new instruction, HAssert).
  HInstruction* cell = GetParameter(0);
  HObjectAccess access = HObjectAccess::ForCellValue();
  store = Add<HStoreNamedField>(cell, access, object);
  store->SkipWriteBarrier();
  return cell;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// KeyedLoadFastElement: monomorphic keyed load specialized on the stub's
// elements kind; never returns the hole.
template <>
HValue* CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
  return load;
}


Handle<Code> KeyedLoadFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// LoadField: loads a field at the stub's baked-in offset, either in-object
// or from the properties backing store.
template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  Representation rep = casted_stub()->representation();
  HObjectAccess access = casted_stub()->is_inobject() ?
      HObjectAccess::ForJSObjectOffset(casted_stub()->offset(), rep) :
      HObjectAccess::ForBackingStoreOffset(casted_stub()->offset(), rep);
  return AddLoadNamedField(GetParameter(0), access);
}


Handle<Code> LoadFieldStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// KeyedLoadField: identical structure to LoadFieldStub above; the receiver
// is parameter 0 and the offset is baked into the stub.
template<>
HValue* CodeStubGraphBuilder<KeyedLoadFieldStub>::BuildCodeStub() {
  Representation rep = casted_stub()->representation();
  HObjectAccess access = casted_stub()->is_inobject() ?
      HObjectAccess::ForJSObjectOffset(casted_stub()->offset(), rep) :
      HObjectAccess::ForBackingStoreOffset(casted_stub()->offset(), rep);
  return AddLoadNamedField(GetParameter(0), access);
}


Handle<Code> KeyedLoadFieldStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// KeyedArrayCall: loads a function out of the receiver's elements (checked
// against the expected initial array map) and tail-calls it.
template<>
HValue* CodeStubGraphBuilder<KeyedArrayCallStub>::BuildCodeStub() {
  int argc = casted_stub()->argc() + 1;
  info()->set_parameter_count(argc);

  HValue* receiver = Add<HParameter>(1);
  BuildCheckHeapObject(receiver);

  // Load the expected initial array map from the context.
  JSArrayBuilder array_builder(this, casted_stub()->elements_kind());
  HValue* map = array_builder.EmitMapCode();

  HValue* checked_receiver = Add<HCheckMapValue>(receiver, map);

  HValue* function = BuildUncheckedMonomorphicElementAccess(
      checked_receiver, GetParameter(0),
      NULL, true, casted_stub()->elements_kind(),
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
  return Add<HCallFunction>(function, argc, TAIL_CALL);
}


Handle<Code> KeyedArrayCallStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// KeyedStoreFastElement: monomorphic keyed store; returns the stored value
// (parameter 2), matching store-IC calling convention.
template <>
HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), GetParameter(2),
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      true, NEVER_RETURN_HOLE, casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> KeyedStoreFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// TransitionElementsKind: transitions the object's elements from the stub's
// from_kind to to_kind and returns the object.
template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              true);

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}

// Shared body of the three Array constructor stubs: optionally verifies the
// constructor is the current context's Array function, resolves the
// AllocationSite through the property cell, and allocates according to the
// argument class.
HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    ContextCheckMode context_mode,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  if (context_mode == CONTEXT_CHECK_REQUIRED) {
    // The checker's destructor emits the deopt for a mismatched context at
    // the end of this scope.
    HInstruction* array_function = BuildGetArrayFunction();
    ArrayContextChecker checker(this, constructor, array_function);
  }

  HValue* property_cell = GetParameter(ArrayConstructorStubBase::kPropertyCell);
  // Walk through the property cell to the AllocationSite
  HValue* alloc_site = Add<HLoadNamedField>(property_cell,
                                            HObjectAccess::ForCellValue());
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


// Shared body of the InternalArray constructor stubs; no context check and
// no allocation site, otherwise parallel to BuildArrayConstructor above.
HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


// new Array(len): reads the single argument from the caller's frame and
// allocates an array of that length.
HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


// new Array(a, b, ...): allocates an array sized by the dynamic argument
// count and copies the arguments in.
HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  ASSERT(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}


// The six specializations below just forward the stub's parameters to the
// shared BuildArrayConstructor / BuildInternalArrayConstructor helpers.

template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  ContextCheckMode context_mode = casted_stub()->context_mode();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, context_mode, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  ContextCheckMode context_mode = casted_stub()->context_mode();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, context_mode, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  ContextCheckMode context_mode = casted_stub()->context_mode();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, context_mode, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode(
    Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// CompareNilIC: compares parameter 0 against null/undefined according to the
// stub's recorded type; returns 1 for true, 0 for false (via the early
// Return in the else-branch), or undefined if the true branch is
// statically unreachable.
template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Handle<Type> type = stub->GetType(isolate, sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ?
      graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


// BinaryOpIC: builds the typed binary operation recorded in the IC state,
// with a dedicated fast path for string addition in the generic ADD case.
template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpIC::State state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Handle<Type> left_type = state.GetLeftType(isolate());
  Handle<Type> right_type = state.GetRightType(isolate());
  Handle<Type> result_type = state.GetResultType(isolate());

  ASSERT(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  // ADD where one side *may* be a string but neither is *known* to be one:
  // branch at runtime on the actual string-ness of the ambiguous operand and
  // build a specialized operation on each arm.
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        // Left is a string: narrow its type accordingly.
        Push(BuildBinaryOperation(
            state.op(), left, right,
            handle(Type::String(), isolate()), right_type,
            result_type, state.fixed_right_arg()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        // Right is a string: narrow its type accordingly.
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, handle(Type::String(), isolate()),
            result_type, state.fixed_right_arg()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right,
        left_type, right_type, result_type,
        state.fixed_right_arg());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    if (result_type->Is(Type::Smi())) {
      if (state.op() == Token::SHR) {
        // TODO(olivf) Replace this by a SmiTagU Instruction.
        // 0x40000000: this number would convert to negative when interpreting
        // the register as signed value;
        IfBuilder if_of(this);
        if_of.IfNot<HCompareNumericAndBranch>(result,
            Add<HConstant>(static_cast<int>(SmiValuesAre32Bits()
                ?
0x80000000 : 0x40000000)), Token::EQ_STRICT); 976 if_of.Then(); 977 if_of.ElseDeopt("UInt->Smi oveflow"); 978 if_of.End(); 979 } 980 } 981 result = EnforceNumberType(result, result_type); 982 } 983 984 // Reuse the double box of one of the operands if we are allowed to (i.e. 985 // chained binops). 986 if (state.CanReuseDoubleBox()) { 987 HValue* operand = (state.mode() == OVERWRITE_LEFT) ? left : right; 988 IfBuilder if_heap_number(this); 989 if_heap_number.IfNot<HIsSmiAndBranch>(operand); 990 if_heap_number.Then(); 991 Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result); 992 Push(operand); 993 if_heap_number.Else(); 994 Push(result); 995 if_heap_number.End(); 996 result = Pop(); 997 } 998 999 return result; 1000 } 1001 1002 1003 Handle<Code> BinaryOpICStub::GenerateCode(Isolate* isolate) { 1004 return DoGenerateCode(isolate, this); 1005 } 1006 1007 1008 template <> 1009 HValue* CodeStubGraphBuilder<NewStringAddStub>::BuildCodeInitializedStub() { 1010 NewStringAddStub* stub = casted_stub(); 1011 StringAddFlags flags = stub->flags(); 1012 PretenureFlag pretenure_flag = stub->pretenure_flag(); 1013 1014 HValue* left = GetParameter(NewStringAddStub::kLeft); 1015 HValue* right = GetParameter(NewStringAddStub::kRight); 1016 1017 // Make sure that both arguments are strings if not known in advance. 
1018 if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { 1019 left = BuildCheckString(left); 1020 } 1021 if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { 1022 right = BuildCheckString(right); 1023 } 1024 1025 return BuildStringAdd(left, right, pretenure_flag); 1026 } 1027 1028 1029 Handle<Code> NewStringAddStub::GenerateCode(Isolate* isolate) { 1030 return DoGenerateCode(isolate, this); 1031 } 1032 1033 1034 template <> 1035 HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() { 1036 ToBooleanStub* stub = casted_stub(); 1037 1038 IfBuilder if_true(this); 1039 if_true.If<HBranch>(GetParameter(0), stub->GetTypes()); 1040 if_true.Then(); 1041 if_true.Return(graph()->GetConstant1()); 1042 if_true.Else(); 1043 if_true.End(); 1044 return graph()->GetConstant0(); 1045 } 1046 1047 1048 Handle<Code> ToBooleanStub::GenerateCode(Isolate* isolate) { 1049 return DoGenerateCode(isolate, this); 1050 } 1051 1052 1053 template <> 1054 HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() { 1055 StoreGlobalStub* stub = casted_stub(); 1056 Handle<Object> hole(isolate()->heap()->the_hole_value(), isolate()); 1057 Handle<Object> placeholer_value(Smi::FromInt(0), isolate()); 1058 Handle<PropertyCell> placeholder_cell = 1059 isolate()->factory()->NewPropertyCell(placeholer_value); 1060 1061 HParameter* receiver = GetParameter(0); 1062 HParameter* value = GetParameter(2); 1063 1064 // Check that the map of the global has not changed: use a placeholder map 1065 // that will be replaced later with the global object's map. 
1066 Handle<Map> placeholder_map = isolate()->factory()->meta_map(); 1067 Add<HCheckMaps>(receiver, placeholder_map, top_info()); 1068 1069 HValue* cell = Add<HConstant>(placeholder_cell); 1070 HObjectAccess access(HObjectAccess::ForCellPayload(isolate())); 1071 HValue* cell_contents = Add<HLoadNamedField>(cell, access); 1072 1073 if (stub->is_constant()) { 1074 IfBuilder builder(this); 1075 builder.If<HCompareObjectEqAndBranch>(cell_contents, value); 1076 builder.Then(); 1077 builder.ElseDeopt("Unexpected cell contents in constant global store"); 1078 builder.End(); 1079 } else { 1080 // Load the payload of the global parameter cell. A hole indicates that the 1081 // property has been deleted and that the store must be handled by the 1082 // runtime. 1083 IfBuilder builder(this); 1084 HValue* hole_value = Add<HConstant>(hole); 1085 builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value); 1086 builder.Then(); 1087 builder.Deopt("Unexpected cell contents in global store"); 1088 builder.Else(); 1089 Add<HStoreNamedField>(cell, access, value); 1090 builder.End(); 1091 } 1092 1093 return value; 1094 } 1095 1096 1097 Handle<Code> StoreGlobalStub::GenerateCode(Isolate* isolate) { 1098 return DoGenerateCode(isolate, this); 1099 } 1100 1101 1102 template<> 1103 HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() { 1104 HValue* value = GetParameter(0); 1105 HValue* map = GetParameter(1); 1106 HValue* key = GetParameter(2); 1107 HValue* object = GetParameter(3); 1108 1109 if (FLAG_trace_elements_transitions) { 1110 // Tracing elements transitions is the job of the runtime. 
1111 Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER); 1112 } else { 1113 info()->MarkAsSavesCallerDoubles(); 1114 1115 BuildTransitionElementsKind(object, map, 1116 casted_stub()->from_kind(), 1117 casted_stub()->to_kind(), 1118 casted_stub()->is_jsarray()); 1119 1120 BuildUncheckedMonomorphicElementAccess(object, key, value, 1121 casted_stub()->is_jsarray(), 1122 casted_stub()->to_kind(), 1123 true, ALLOW_RETURN_HOLE, 1124 casted_stub()->store_mode()); 1125 } 1126 1127 return value; 1128 } 1129 1130 1131 Handle<Code> ElementsTransitionAndStoreStub::GenerateCode(Isolate* isolate) { 1132 return DoGenerateCode(isolate, this); 1133 } 1134 1135 1136 void CodeStubGraphBuilderBase::BuildInstallOptimizedCode( 1137 HValue* js_function, 1138 HValue* native_context, 1139 HValue* code_object) { 1140 Counters* counters = isolate()->counters(); 1141 AddIncrementCounter(counters->fast_new_closure_install_optimized()); 1142 1143 // TODO(fschneider): Idea: store proper code pointers in the optimized code 1144 // map and either unmangle them on marking or do nothing as the whole map is 1145 // discarded on major GC anyway. 1146 Add<HStoreCodeEntry>(js_function, code_object); 1147 1148 // Now link a function into a list of optimized functions. 1149 HValue* optimized_functions_list = Add<HLoadNamedField>(native_context, 1150 HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST)); 1151 Add<HStoreNamedField>(js_function, 1152 HObjectAccess::ForNextFunctionLinkPointer(), 1153 optimized_functions_list); 1154 1155 // This store is the only one that should have a write barrier. 
1156 Add<HStoreNamedField>(native_context, 1157 HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST), 1158 js_function); 1159 } 1160 1161 1162 void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function, 1163 HValue* shared_info) { 1164 Add<HStoreNamedField>(js_function, 1165 HObjectAccess::ForNextFunctionLinkPointer(), 1166 graph()->GetConstantUndefined()); 1167 HValue* code_object = Add<HLoadNamedField>(shared_info, 1168 HObjectAccess::ForCodeOffset()); 1169 Add<HStoreCodeEntry>(js_function, code_object); 1170 } 1171 1172 1173 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap( 1174 HValue* js_function, 1175 HValue* shared_info, 1176 HValue* native_context) { 1177 Counters* counters = isolate()->counters(); 1178 IfBuilder is_optimized(this); 1179 HInstruction* optimized_map = Add<HLoadNamedField>(shared_info, 1180 HObjectAccess::ForOptimizedCodeMap()); 1181 HValue* null_constant = Add<HConstant>(0); 1182 is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant); 1183 is_optimized.Then(); 1184 { 1185 BuildInstallCode(js_function, shared_info); 1186 } 1187 is_optimized.Else(); 1188 { 1189 AddIncrementCounter(counters->fast_new_closure_try_optimized()); 1190 // optimized_map points to fixed array of 3-element entries 1191 // (native context, optimized code, literals). 1192 // Map must never be empty, so check the first elements. 
1193 Label install_optimized; 1194 HValue* first_context_slot = Add<HLoadNamedField>(optimized_map, 1195 HObjectAccess::ForFirstContextSlot()); 1196 IfBuilder already_in(this); 1197 already_in.If<HCompareObjectEqAndBranch>(native_context, 1198 first_context_slot); 1199 already_in.Then(); 1200 { 1201 HValue* code_object = Add<HLoadNamedField>(optimized_map, 1202 HObjectAccess::ForFirstCodeSlot()); 1203 BuildInstallOptimizedCode(js_function, native_context, code_object); 1204 } 1205 already_in.Else(); 1206 { 1207 HValue* shared_function_entry_length = 1208 Add<HConstant>(SharedFunctionInfo::kEntryLength); 1209 LoopBuilder loop_builder(this, 1210 context(), 1211 LoopBuilder::kPostDecrement, 1212 shared_function_entry_length); 1213 HValue* array_length = Add<HLoadNamedField>(optimized_map, 1214 HObjectAccess::ForFixedArrayLength()); 1215 HValue* key = loop_builder.BeginBody(array_length, 1216 graph()->GetConstant0(), 1217 Token::GT); 1218 { 1219 // Iterate through the rest of map backwards. 1220 // Do not double check first entry. 1221 HValue* second_entry_index = 1222 Add<HConstant>(SharedFunctionInfo::kSecondEntryIndex); 1223 IfBuilder restore_check(this); 1224 restore_check.If<HCompareNumericAndBranch>(key, second_entry_index, 1225 Token::EQ); 1226 restore_check.Then(); 1227 { 1228 // Store the unoptimized code 1229 BuildInstallCode(js_function, shared_info); 1230 loop_builder.Break(); 1231 } 1232 restore_check.Else(); 1233 { 1234 HValue* keyed_minus = AddUncasted<HSub>( 1235 key, shared_function_entry_length); 1236 HInstruction* keyed_lookup = Add<HLoadKeyed>(optimized_map, 1237 keyed_minus, static_cast<HValue*>(NULL), FAST_ELEMENTS); 1238 IfBuilder done_check(this); 1239 done_check.If<HCompareObjectEqAndBranch>(native_context, 1240 keyed_lookup); 1241 done_check.Then(); 1242 { 1243 // Hit: fetch the optimized code. 
1244 HValue* keyed_plus = AddUncasted<HAdd>( 1245 keyed_minus, graph()->GetConstant1()); 1246 HValue* code_object = Add<HLoadKeyed>(optimized_map, 1247 keyed_plus, static_cast<HValue*>(NULL), FAST_ELEMENTS); 1248 BuildInstallOptimizedCode(js_function, native_context, code_object); 1249 1250 // Fall out of the loop 1251 loop_builder.Break(); 1252 } 1253 done_check.Else(); 1254 done_check.End(); 1255 } 1256 restore_check.End(); 1257 } 1258 loop_builder.EndBody(); 1259 } 1260 already_in.End(); 1261 } 1262 is_optimized.End(); 1263 } 1264 1265 1266 template<> 1267 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() { 1268 Counters* counters = isolate()->counters(); 1269 Factory* factory = isolate()->factory(); 1270 HInstruction* empty_fixed_array = 1271 Add<HConstant>(factory->empty_fixed_array()); 1272 HValue* shared_info = GetParameter(0); 1273 1274 AddIncrementCounter(counters->fast_new_closure_total()); 1275 1276 // Create a new closure from the given function info in new space 1277 HValue* size = Add<HConstant>(JSFunction::kSize); 1278 HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(), 1279 NOT_TENURED, JS_FUNCTION_TYPE); 1280 1281 int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(), 1282 casted_stub()->is_generator()); 1283 1284 // Compute the function map in the current native context and set that 1285 // as the map of the allocated object. 1286 HInstruction* native_context = BuildGetNativeContext(); 1287 HInstruction* map_slot_value = Add<HLoadNamedField>(native_context, 1288 HObjectAccess::ForContextSlot(map_index)); 1289 Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value); 1290 1291 // Initialize the rest of the function. 
1292 Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(), 1293 empty_fixed_array); 1294 Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(), 1295 empty_fixed_array); 1296 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(), 1297 empty_fixed_array); 1298 Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(), 1299 graph()->GetConstantHole()); 1300 Add<HStoreNamedField>(js_function, 1301 HObjectAccess::ForSharedFunctionInfoPointer(), 1302 shared_info); 1303 Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(), 1304 context()); 1305 1306 // Initialize the code pointer in the function to be the one 1307 // found in the shared function info object. 1308 // But first check if there is an optimized version for our context. 1309 if (FLAG_cache_optimized_code) { 1310 BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context); 1311 } else { 1312 BuildInstallCode(js_function, shared_info); 1313 } 1314 1315 return js_function; 1316 } 1317 1318 1319 Handle<Code> FastNewClosureStub::GenerateCode(Isolate* isolate) { 1320 return DoGenerateCode(isolate, this); 1321 } 1322 1323 1324 template<> 1325 HValue* CodeStubGraphBuilder<KeyedLoadDictionaryElementStub>::BuildCodeStub() { 1326 HValue* receiver = GetParameter(0); 1327 HValue* key = GetParameter(1); 1328 1329 Add<HCheckSmi>(key); 1330 1331 return BuildUncheckedDictionaryElementLoad(receiver, key); 1332 } 1333 1334 1335 Handle<Code> KeyedLoadDictionaryElementStub::GenerateCode(Isolate* isolate) { 1336 return DoGenerateCode(isolate, this); 1337 } 1338 1339 1340 } } // namespace v8::internal 1341