// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_CODE_STUBS_H_
#define V8_CODE_STUBS_H_

#include "globals.h"

namespace v8 {
namespace internal {

// List of code stubs used on all platforms. The order in this list is
// important as only the stubs up to and including Instanceof allow nested
// stub calls.
#define CODE_STUB_LIST_ALL_PLATFORMS(V)  \
  V(CallFunction)                        \
  V(TypeRecordingBinaryOp)               \
  V(StringAdd)                           \
  V(SubString)                           \
  V(StringCompare)                       \
  V(SmiOp)                               \
  V(Compare)                             \
  V(CompareIC)                           \
  V(MathPow)                             \
  V(TranscendentalCache)                 \
  V(Instanceof)                          \
  V(ConvertToDouble)                     \
  V(WriteInt32ToHeapNumber)              \
  V(StackCheck)                          \
  V(FastNewClosure)                      \
  V(FastNewContext)                      \
  V(FastCloneShallowArray)               \
  V(GenericUnaryOp)                      \
  V(RevertToNumber)                      \
  V(ToBoolean)                           \
  V(ToNumber)                            \
  V(CounterOp)                           \
  V(ArgumentsAccess)                     \
  V(RegExpExec)                          \
  V(RegExpConstructResult)               \
  V(NumberToString)                      \
  V(CEntry)                              \
  V(JSEntry)                             \
  V(DebuggerStatement)

// List of code stubs only used on ARM platforms.
#ifdef V8_TARGET_ARCH_ARM
#define CODE_STUB_LIST_ARM(V)  \
  V(GetProperty)               \
  V(SetProperty)               \
  V(InvokeBuiltin)             \
  V(RegExpCEntry)              \
  V(DirectCEntry)
#else
#define CODE_STUB_LIST_ARM(V)
#endif

// List of code stubs only used on MIPS platforms.
#ifdef V8_TARGET_ARCH_MIPS
#define CODE_STUB_LIST_MIPS(V)  \
  V(RegExpCEntry)
#else
#define CODE_STUB_LIST_MIPS(V)
#endif

// Combined list of code stubs.
#define CODE_STUB_LIST(V)          \
  CODE_STUB_LIST_ALL_PLATFORMS(V)  \
  CODE_STUB_LIST_ARM(V)            \
  CODE_STUB_LIST_MIPS(V)

// Mode to overwrite BinaryExpression values.
enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };
enum UnaryOverwriteMode { UNARY_OVERWRITE, UNARY_NO_OVERWRITE };
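

// Note for illustration: CODE_STUB_LIST is an X-macro. Instantiating it with
// a one-argument macro stamps out one entry per stub name; for example, with
//
//   #define DEF_ENUM(name) name,
//
// CODE_STUB_LIST(DEF_ENUM) expands to "CallFunction, TypeRecordingBinaryOp,
// ..., DebuggerStatement," which is how the Major enum of CodeStub below is
// populated.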


// CodeStub is the base class of all stubs.
class CodeStub BASE_EMBEDDED {
 public:
  enum Major {
#define DEF_ENUM(name) name,
    CODE_STUB_LIST(DEF_ENUM)
#undef DEF_ENUM
    NoCache,  // marker for stubs that do custom caching
    NUMBER_OF_IDS
  };

  // Retrieve the code for the stub. Generate the code if needed.
  Handle<Code> GetCode();

  // Retrieve the code for the stub if already generated. Do not
  // generate the code if not already generated and instead return a
  // retry after GC Failure object.
  MUST_USE_RESULT MaybeObject* TryGetCode();

  static Major MajorKeyFromKey(uint32_t key) {
    return static_cast<Major>(MajorKeyBits::decode(key));
  }
  static int MinorKeyFromKey(uint32_t key) {
    return MinorKeyBits::decode(key);
  }

  // Gets the major key from a code object that is a code stub or binary op IC.
  static Major GetMajorKey(Code* code_stub) {
    return static_cast<Major>(code_stub->major_key());
  }

  static const char* MajorName(Major major_key, bool allow_unknown_keys);

  virtual ~CodeStub() {}

 protected:
  static const int kMajorBits = 6;
  static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits;

 private:
  // Lookup the code in the (possibly custom) cache.
  bool FindCodeInCache(Code** code_out);

  // Nonvirtual wrapper around the stub-specific Generate function. Call
  // this function to set up the macro assembler and generate the code.
  void GenerateCode(MacroAssembler* masm);

  // Generates the assembler code for the stub.
  virtual void Generate(MacroAssembler* masm) = 0;

  // Perform bookkeeping required after code generation when stub code is
  // initially generated.
  void RecordCodeGeneration(Code* code, MacroAssembler* masm);

  // Finish the code object after it has been generated.
  virtual void FinishCode(Code* code) { }

  // Returns information for computing the stub key.
  virtual Major MajorKey() = 0;
  virtual int MinorKey() = 0;

  // The CallFunctionStub needs to override this so it can encode whether a
  // lazily generated function should be fully optimized or not.
  virtual InLoopFlag InLoop() { return NOT_IN_LOOP; }

  // TypeRecordingBinaryOpStub needs to override this.
  virtual int GetCodeKind();

  // TypeRecordingBinaryOpStub needs to override this.
  virtual InlineCacheState GetICState() {
    return UNINITIALIZED;
  }

  // Returns a name for logging/debugging purposes.
  virtual const char* GetName() { return MajorName(MajorKey(), false); }

  // Returns whether the code generated for this stub needs to be allocated as
  // a fixed (non-moveable) code object.
  virtual bool NeedsImmovableCode() { return false; }

#ifdef DEBUG
  virtual void Print() { PrintF("%s\n", GetName()); }
#endif

  // Computes the key based on major and minor.
  uint32_t GetKey() {
    ASSERT(static_cast<int>(MajorKey()) < NUMBER_OF_IDS);
    return MinorKeyBits::encode(MinorKey()) |
           MajorKeyBits::encode(MajorKey());
  }

  bool AllowsStubCalls() { return MajorKey() <= Instanceof; }

  class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {};
  class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {};

  friend class BreakPointIterator;
};
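

// Illustrative usage sketch (not taken from a particular call site): a stub
// is typically constructed on the stack, parameterized through its
// constructor, and its (possibly cached) code is requested via GetCode():
//
//   ToNumberStub stub;
//   Handle<Code> code = stub.GetCode();
//
// The generated code is cached under the key computed by GetKey(), i.e. the
// MajorKey()/MinorKey() pair packed with MajorKeyBits and MinorKeyBits.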


// Helper interface used to prepare for and restore after runtime calls.
class RuntimeCallHelper {
 public:
  virtual ~RuntimeCallHelper() {}

  virtual void BeforeCall(MacroAssembler* masm) const = 0;

  virtual void AfterCall(MacroAssembler* masm) const = 0;

 protected:
  RuntimeCallHelper() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(RuntimeCallHelper);
};

} }  // namespace v8::internal

#if V8_TARGET_ARCH_IA32
#include "ia32/code-stubs-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/code-stubs-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/code-stubs-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/code-stubs-mips.h"
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {


// RuntimeCallHelper implementation used in stubs: enters/leaves a
// newly created internal frame before/after the runtime call.
class StubRuntimeCallHelper : public RuntimeCallHelper {
 public:
  StubRuntimeCallHelper() {}

  virtual void BeforeCall(MacroAssembler* masm) const;

  virtual void AfterCall(MacroAssembler* masm) const;
};


// Trivial RuntimeCallHelper implementation.
class NopRuntimeCallHelper : public RuntimeCallHelper {
 public:
  NopRuntimeCallHelper() {}

  virtual void BeforeCall(MacroAssembler* masm) const {}

  virtual void AfterCall(MacroAssembler* masm) const {}
};


class StackCheckStub : public CodeStub {
 public:
  StackCheckStub() { }

  void Generate(MacroAssembler* masm);

 private:
  const char* GetName() { return "StackCheckStub"; }

  Major MajorKey() { return StackCheck; }
  int MinorKey() { return 0; }
};


class ToNumberStub: public CodeStub {
 public:
  ToNumberStub() { }

  void Generate(MacroAssembler* masm);

 private:
  Major MajorKey() { return ToNumber; }
  int MinorKey() { return 0; }
  const char* GetName() { return "ToNumberStub"; }
};


class FastNewClosureStub : public CodeStub {
 public:
  explicit FastNewClosureStub(StrictModeFlag strict_mode)
      : strict_mode_(strict_mode) { }

  void Generate(MacroAssembler* masm);

 private:
  const char* GetName() { return "FastNewClosureStub"; }
  Major MajorKey() { return FastNewClosure; }
  int MinorKey() { return strict_mode_; }

  StrictModeFlag strict_mode_;
};


class FastNewContextStub : public CodeStub {
 public:
  static const int kMaximumSlots = 64;

  explicit FastNewContextStub(int slots) : slots_(slots) {
    ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
  }

  void Generate(MacroAssembler* masm);

 private:
  int slots_;

  const char* GetName() { return "FastNewContextStub"; }
  Major MajorKey() { return FastNewContext; }
  int MinorKey() { return slots_; }
};


class FastCloneShallowArrayStub : public CodeStub {
 public:
  // Maximum length of copied elements array.
  static const int kMaximumClonedLength = 8;

  enum Mode {
    CLONE_ELEMENTS,
    COPY_ON_WRITE_ELEMENTS
  };

  FastCloneShallowArrayStub(Mode mode, int length)
      : mode_(mode),
        length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
    ASSERT(length_ >= 0);
    ASSERT(length_ <= kMaximumClonedLength);
  }

  void Generate(MacroAssembler* masm);

 private:
  Mode mode_;
  int length_;

  const char* GetName() { return "FastCloneShallowArrayStub"; }
  Major MajorKey() { return FastCloneShallowArray; }
  int MinorKey() {
    ASSERT(mode_ == 0 || mode_ == 1);
    return (length_ << 1) | mode_;
  }
};


class InstanceofStub: public CodeStub {
 public:
  enum Flags {
    kNoFlags = 0,
    kArgsInRegisters = 1 << 0,
    kCallSiteInlineCheck = 1 << 1,
    kReturnTrueFalseObject = 1 << 2
  };

  explicit InstanceofStub(Flags flags) : flags_(flags), name_(NULL) { }

  static Register left();
  static Register right();

  void Generate(MacroAssembler* masm);

 private:
  Major MajorKey() { return Instanceof; }
  int MinorKey() { return static_cast<int>(flags_); }

  bool HasArgsInRegisters() const {
    return (flags_ & kArgsInRegisters) != 0;
  }

  bool HasCallSiteInlineCheck() const {
    return (flags_ & kCallSiteInlineCheck) != 0;
  }

  bool ReturnTrueFalseObject() const {
    return (flags_ & kReturnTrueFalseObject) != 0;
  }

  const char* GetName();

  Flags flags_;
  char* name_;
};


enum NegativeZeroHandling {
  kStrictNegativeZero,
  kIgnoreNegativeZero
};


enum UnaryOpFlags {
  NO_UNARY_FLAGS = 0,
  NO_UNARY_SMI_CODE_IN_STUB = 1 << 0
};


class GenericUnaryOpStub : public CodeStub {
 public:
  GenericUnaryOpStub(Token::Value op,
                     UnaryOverwriteMode overwrite,
                     UnaryOpFlags flags,
                     NegativeZeroHandling negative_zero = kStrictNegativeZero)
      : op_(op),
        overwrite_(overwrite),
        include_smi_code_((flags & NO_UNARY_SMI_CODE_IN_STUB) == 0),
        negative_zero_(negative_zero) { }

 private:
  Token::Value op_;
  UnaryOverwriteMode overwrite_;
  bool include_smi_code_;
  NegativeZeroHandling negative_zero_;

  class OverwriteField: public BitField<UnaryOverwriteMode, 0, 1> {};
  class IncludeSmiCodeField: public BitField<bool, 1, 1> {};
  class NegativeZeroField: public BitField<NegativeZeroHandling, 2, 1> {};
  class OpField: public BitField<Token::Value, 3, kMinorBits - 3> {};

  Major MajorKey() { return GenericUnaryOp; }
  int MinorKey() {
    return OpField::encode(op_) |
           OverwriteField::encode(overwrite_) |
           IncludeSmiCodeField::encode(include_smi_code_) |
           NegativeZeroField::encode(negative_zero_);
  }

  void Generate(MacroAssembler* masm);

  const char* GetName();
};
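

// Illustrative example (a sketch; the variable name is hypothetical): the
// flags passed to GenericUnaryOpStub round-trip through the BitField minor
// key encoding above, so a stub built as
//
//   GenericUnaryOpStub stub(Token::SUB, UNARY_OVERWRITE, NO_UNARY_FLAGS);
//
// produces a MinorKey() from which OpField::decode() recovers Token::SUB,
// OverwriteField::decode() recovers UNARY_OVERWRITE, and
// IncludeSmiCodeField::decode() recovers true (smi code is included because
// NO_UNARY_SMI_CODE_IN_STUB was not passed).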


class MathPowStub: public CodeStub {
 public:
  MathPowStub() {}
  virtual void Generate(MacroAssembler* masm);

 private:
  virtual CodeStub::Major MajorKey() { return MathPow; }
  virtual int MinorKey() { return 0; }

  const char* GetName() { return "MathPowStub"; }
};


class ICCompareStub: public CodeStub {
 public:
  ICCompareStub(Token::Value op, CompareIC::State state)
      : op_(op), state_(state) {
    ASSERT(Token::IsCompareOp(op));
  }

  virtual void Generate(MacroAssembler* masm);

 private:
  class OpField: public BitField<int, 0, 3> { };
  class StateField: public BitField<int, 3, 5> { };

  virtual void FinishCode(Code* code) { code->set_compare_state(state_); }

  virtual CodeStub::Major MajorKey() { return CompareIC; }
  virtual int MinorKey();

  virtual int GetCodeKind() { return Code::COMPARE_IC; }

  void GenerateSmis(MacroAssembler* masm);
  void GenerateHeapNumbers(MacroAssembler* masm);
  void GenerateObjects(MacroAssembler* masm);
  void GenerateMiss(MacroAssembler* masm);

  bool strict() const { return op_ == Token::EQ_STRICT; }
  Condition GetCondition() const { return CompareIC::ComputeCondition(op_); }

  Token::Value op_;
  CompareIC::State state_;
};


// Flags that control the compare stub code generation.
enum CompareFlags {
  NO_COMPARE_FLAGS = 0,
  NO_SMI_COMPARE_IN_STUB = 1 << 0,
  NO_NUMBER_COMPARE_IN_STUB = 1 << 1,
  CANT_BOTH_BE_NAN = 1 << 2
};


enum NaNInformation {
  kBothCouldBeNaN,
  kCantBothBeNaN
};


class CompareStub: public CodeStub {
 public:
  CompareStub(Condition cc,
              bool strict,
              CompareFlags flags,
              Register lhs,
              Register rhs) :
      cc_(cc),
      strict_(strict),
      never_nan_nan_((flags & CANT_BOTH_BE_NAN) != 0),
      include_number_compare_((flags & NO_NUMBER_COMPARE_IN_STUB) == 0),
      include_smi_compare_((flags & NO_SMI_COMPARE_IN_STUB) == 0),
      lhs_(lhs),
      rhs_(rhs),
      name_(NULL) { }

  CompareStub(Condition cc,
              bool strict,
              CompareFlags flags) :
      cc_(cc),
      strict_(strict),
      never_nan_nan_((flags & CANT_BOTH_BE_NAN) != 0),
      include_number_compare_((flags & NO_NUMBER_COMPARE_IN_STUB) == 0),
      include_smi_compare_((flags & NO_SMI_COMPARE_IN_STUB) == 0),
      lhs_(no_reg),
      rhs_(no_reg),
      name_(NULL) { }

  void Generate(MacroAssembler* masm);

 private:
  Condition cc_;
  bool strict_;
  // Only used for 'equal' comparisons. Tells the stub that we already know
  // that at least one side of the comparison is not NaN. This allows the
  // stub to use object identity in the positive case. We ignore it when
  // generating the minor key for other comparisons to avoid creating more
  // stubs.
  bool never_nan_nan_;
  // Do generate the number comparison code in the stub. Stubs without number
  // comparison code are used when the number comparison has been inlined; the
  // stub will be called if one of the operands is not a number.
  bool include_number_compare_;

  // Generate the comparison code for two smi operands in the stub.
  bool include_smi_compare_;

  // Register holding the left hand side of the comparison if the stub gives
  // a choice, no_reg otherwise.
  Register lhs_;
  // Register holding the right hand side of the comparison if the stub gives
  // a choice, no_reg otherwise.
  Register rhs_;

  // Encoding of the minor key in 16 bits.
  class StrictField: public BitField<bool, 0, 1> {};
  class NeverNanNanField: public BitField<bool, 1, 1> {};
  class IncludeNumberCompareField: public BitField<bool, 2, 1> {};
  class IncludeSmiCompareField: public BitField<bool, 3, 1> {};
  class RegisterField: public BitField<bool, 4, 1> {};
  class ConditionField: public BitField<int, 5, 11> {};

  Major MajorKey() { return Compare; }

  int MinorKey();

  virtual int GetCodeKind() { return Code::COMPARE_IC; }
  virtual void FinishCode(Code* code) {
    code->set_compare_state(CompareIC::GENERIC);
  }

  // Branch to the label if the given object isn't a symbol.
  void BranchIfNonSymbol(MacroAssembler* masm,
                         Label* label,
                         Register object,
                         Register scratch);

  // Unfortunately you have to run without snapshots to see most of these
  // names in the profile since most compare stubs end up in the snapshot.
  char* name_;
  const char* GetName();
#ifdef DEBUG
  void Print() {
    PrintF("CompareStub (minor %d) (cc %d), (strict %s), "
           "(never_nan_nan %s), (smi_compare %s) (number_compare %s) ",
           MinorKey(),
           static_cast<int>(cc_),
           strict_ ? "true" : "false",
           never_nan_nan_ ? "true" : "false",
           include_smi_compare_ ? "included" : "not included",
           include_number_compare_ ? "included" : "not included");

    if (!lhs_.is(no_reg) && !rhs_.is(no_reg)) {
      PrintF("(lhs r%d), (rhs r%d)\n", lhs_.code(), rhs_.code());
    } else {
      PrintF("\n");
    }
  }
#endif
};


class CEntryStub : public CodeStub {
 public:
  explicit CEntryStub(int result_size)
      : result_size_(result_size), save_doubles_(false) { }

  void Generate(MacroAssembler* masm);
  void SaveDoubles() { save_doubles_ = true; }

 private:
  void GenerateCore(MacroAssembler* masm,
                    Label* throw_normal_exception,
                    Label* throw_termination_exception,
                    Label* throw_out_of_memory_exception,
                    bool do_gc,
                    bool always_allocate_scope);
  void GenerateThrowTOS(MacroAssembler* masm);
  void GenerateThrowUncatchable(MacroAssembler* masm,
                                UncatchableExceptionType type);

  // Number of pointers/values returned.
  const int result_size_;
  bool save_doubles_;

  Major MajorKey() { return CEntry; }
  int MinorKey();

  bool NeedsImmovableCode();

  const char* GetName() { return "CEntryStub"; }
};
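

// Illustrative use of CEntryStub (a sketch, not lifted from an actual call
// site): the stub is parameterized by the number of values the called C
// function returns and can additionally be asked to preserve double
// registers across the call.
//
//   CEntryStub stub(1);
//   stub.SaveDoubles();
//   Handle<Code> code = stub.GetCode();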


class JSEntryStub : public CodeStub {
 public:
  JSEntryStub() { }

  void Generate(MacroAssembler* masm) { GenerateBody(masm, false); }

 protected:
  void GenerateBody(MacroAssembler* masm, bool is_construct);

 private:
  Major MajorKey() { return JSEntry; }
  int MinorKey() { return 0; }

  const char* GetName() { return "JSEntryStub"; }
};


class JSConstructEntryStub : public JSEntryStub {
 public:
  JSConstructEntryStub() { }

  void Generate(MacroAssembler* masm) { GenerateBody(masm, true); }

 private:
  int MinorKey() { return 1; }

  const char* GetName() { return "JSConstructEntryStub"; }
};


class ArgumentsAccessStub: public CodeStub {
 public:
  enum Type {
    READ_ELEMENT,
    NEW_NON_STRICT,
    NEW_STRICT
  };

  explicit ArgumentsAccessStub(Type type) : type_(type) { }

 private:
  Type type_;

  Major MajorKey() { return ArgumentsAccess; }
  int MinorKey() { return type_; }

  void Generate(MacroAssembler* masm);
  void GenerateReadElement(MacroAssembler* masm);
  void GenerateNewObject(MacroAssembler* masm);

  int GetArgumentsBoilerplateIndex() const {
    return (type_ == NEW_STRICT)
        ? Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX
        : Context::ARGUMENTS_BOILERPLATE_INDEX;
  }

  int GetArgumentsObjectSize() const {
    if (type_ == NEW_STRICT)
      return Heap::kArgumentsObjectSizeStrict;
    else
      return Heap::kArgumentsObjectSize;
  }

  const char* GetName() { return "ArgumentsAccessStub"; }

#ifdef DEBUG
  void Print() {
    PrintF("ArgumentsAccessStub (type %d)\n", type_);
  }
#endif
};


class RegExpExecStub: public CodeStub {
 public:
  RegExpExecStub() { }

 private:
  Major MajorKey() { return RegExpExec; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);

  const char* GetName() { return "RegExpExecStub"; }

#ifdef DEBUG
  void Print() {
    PrintF("RegExpExecStub\n");
  }
#endif
};


class RegExpConstructResultStub: public CodeStub {
 public:
  RegExpConstructResultStub() { }

 private:
  Major MajorKey() { return RegExpConstructResult; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);

  const char* GetName() { return "RegExpConstructResultStub"; }

#ifdef DEBUG
  void Print() {
    PrintF("RegExpConstructResultStub\n");
  }
#endif
};


class CallFunctionStub: public CodeStub {
 public:
  CallFunctionStub(int argc, InLoopFlag in_loop, CallFunctionFlags flags)
      : argc_(argc), in_loop_(in_loop), flags_(flags) { }

  void Generate(MacroAssembler* masm);

  static int ExtractArgcFromMinorKey(int minor_key) {
    return ArgcBits::decode(minor_key);
  }

 private:
  int argc_;
  InLoopFlag in_loop_;
  CallFunctionFlags flags_;

#ifdef DEBUG
  void Print() {
    PrintF("CallFunctionStub (args %d, in_loop %d, flags %d)\n",
           argc_,
           static_cast<int>(in_loop_),
           static_cast<int>(flags_));
  }
#endif

  // Minor key encoding in 32 bits with BitField<Type, shift, size>.
  class InLoopBits: public BitField<InLoopFlag, 0, 1> {};
  class FlagBits: public BitField<CallFunctionFlags, 1, 1> {};
  class ArgcBits: public BitField<int, 2, 32 - 2> {};

  Major MajorKey() { return CallFunction; }
  int MinorKey() {
    // Encode the parameters in a unique 32 bit value.
    return InLoopBits::encode(in_loop_)
           | FlagBits::encode(flags_)
           | ArgcBits::encode(argc_);
  }

  InLoopFlag InLoop() { return in_loop_; }
  bool ReceiverMightBeValue() {
    return (flags_ & RECEIVER_MIGHT_BE_VALUE) != 0;
  }
};


enum StringIndexFlags {
  // Accepts smis or heap numbers.
  STRING_INDEX_IS_NUMBER,

  // Accepts smis or heap numbers that are valid array indices
  // (ECMA-262 15.4). Invalid indices are reported as being out of
  // range.
  STRING_INDEX_IS_ARRAY_INDEX
};


// Generates code implementing String.prototype.charCodeAt.
//
// Only supports the case when the receiver is a string and the index
// is a number (smi or heap number) that is a valid index into the
// string. Additional index constraints are specified by the
// flags. Otherwise, bails out to the provided labels.
//
// Register usage: |object| may be changed to another string in a way
// that doesn't affect charCodeAt/charAt semantics, |index| is
// preserved, |scratch| and |result| are clobbered.
class StringCharCodeAtGenerator {
 public:
  StringCharCodeAtGenerator(Register object,
                            Register index,
                            Register scratch,
                            Register result,
                            Label* receiver_not_string,
                            Label* index_not_number,
                            Label* index_out_of_range,
                            StringIndexFlags index_flags)
      : object_(object),
        index_(index),
        scratch_(scratch),
        result_(result),
        receiver_not_string_(receiver_not_string),
        index_not_number_(index_not_number),
        index_out_of_range_(index_out_of_range),
        index_flags_(index_flags) {
    ASSERT(!scratch_.is(object_));
    ASSERT(!scratch_.is(index_));
    ASSERT(!scratch_.is(result_));
    ASSERT(!result_.is(object_));
    ASSERT(!result_.is(index_));
  }

  // Generates the fast case code. On the fallthrough path |result|
  // register contains the result.
  void GenerateFast(MacroAssembler* masm);

  // Generates the slow case code. Must not be naturally
  // reachable. Expected to be put after a ret instruction (e.g., in
  // deferred code). Always jumps back to the fast case.
  void GenerateSlow(MacroAssembler* masm,
                    const RuntimeCallHelper& call_helper);

 private:
  Register object_;
  Register index_;
  Register scratch_;
  Register result_;

  Label* receiver_not_string_;
  Label* index_not_number_;
  Label* index_out_of_range_;

  StringIndexFlags index_flags_;

  Label call_runtime_;
  Label index_not_smi_;
  Label got_smi_index_;
  Label exit_;

  DISALLOW_COPY_AND_ASSIGN(StringCharCodeAtGenerator);
};


// Generates code for creating a one-char string from a char code.
class StringCharFromCodeGenerator {
 public:
  StringCharFromCodeGenerator(Register code,
                              Register result)
      : code_(code),
        result_(result) {
    ASSERT(!code_.is(result_));
  }

  // Generates the fast case code. On the fallthrough path |result|
  // register contains the result.
  void GenerateFast(MacroAssembler* masm);

  // Generates the slow case code. Must not be naturally
  // reachable. Expected to be put after a ret instruction (e.g., in
  // deferred code). Always jumps back to the fast case.
  void GenerateSlow(MacroAssembler* masm,
                    const RuntimeCallHelper& call_helper);

 private:
  Register code_;
  Register result_;

  Label slow_case_;
  Label exit_;

  DISALLOW_COPY_AND_ASSIGN(StringCharFromCodeGenerator);
};
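

// Illustrative sketch of the intended usage pattern for the generators above
// (register and label names are hypothetical): emit the fast case inline,
// return, and place the slow case in deferred code that jumps back to the
// fast path when done.
//
//   StringCharFromCodeGenerator char_gen(code_reg, result_reg);
//   char_gen.GenerateFast(masm);
//   // ... emit the fast-path return here ...
//   StubRuntimeCallHelper call_helper;
//   char_gen.GenerateSlow(masm, call_helper);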


// Generates code implementing String.prototype.charAt.
//
// Only supports the case when the receiver is a string and the index
// is a number (smi or heap number) that is a valid index into the
// string. Additional index constraints are specified by the
// flags. Otherwise, bails out to the provided labels.
//
// Register usage: |object| may be changed to another string in a way
// that doesn't affect charCodeAt/charAt semantics, |index| is
// preserved, |scratch1|, |scratch2|, and |result| are clobbered.
class StringCharAtGenerator {
 public:
  StringCharAtGenerator(Register object,
                        Register index,
                        Register scratch1,
                        Register scratch2,
                        Register result,
                        Label* receiver_not_string,
                        Label* index_not_number,
                        Label* index_out_of_range,
                        StringIndexFlags index_flags)
      : char_code_at_generator_(object,
                                index,
                                scratch1,
                                scratch2,
                                receiver_not_string,
                                index_not_number,
                                index_out_of_range,
                                index_flags),
        char_from_code_generator_(scratch2, result) {}

  // Generates the fast case code. On the fallthrough path |result|
  // register contains the result.
  void GenerateFast(MacroAssembler* masm);

  // Generates the slow case code. Must not be naturally
  // reachable. Expected to be put after a ret instruction (e.g., in
  // deferred code). Always jumps back to the fast case.
  void GenerateSlow(MacroAssembler* masm,
                    const RuntimeCallHelper& call_helper);

 private:
  StringCharCodeAtGenerator char_code_at_generator_;
  StringCharFromCodeGenerator char_from_code_generator_;

  DISALLOW_COPY_AND_ASSIGN(StringCharAtGenerator);
};


class AllowStubCallsScope {
 public:
  AllowStubCallsScope(MacroAssembler* masm, bool allow)
      : masm_(masm), previous_allow_(masm->allow_stub_calls()) {
    masm_->set_allow_stub_calls(allow);
  }
  ~AllowStubCallsScope() {
    masm_->set_allow_stub_calls(previous_allow_);
  }

 private:
  MacroAssembler* masm_;
  bool previous_allow_;

  DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope);
};

} }  // namespace v8::internal

#endif  // V8_CODE_STUBS_H_