1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#ifndef V8_FULL_CODEGEN_H_
#define V8_FULL_CODEGEN_H_

#include "v8.h"

#include "allocation.h"
#include "assert-scope.h"
#include "ast.h"
#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "data-flow.h"
#include "globals.h"
#include "objects.h"

namespace v8 {
namespace internal {

// Forward declarations.
class JumpPatchSite;

// AST node visitor which can tell whether a given statement will be breakable
// when the code is compiled by the full compiler in the debugger. This means
// that there will be an IC (load/store/call) in the code generated for the
// debugger to piggyback on.
class BreakableStatementChecker: public AstVisitor {
 public:
  BreakableStatementChecker() : is_breakable_(false) {
    InitializeAstVisitor();
  }

  // Visit the given node and record whether a breakable IC site was found.
  void Check(Statement* stmt);
  void Check(Expression* stmt);

  // Result of the last Check() call.
  bool is_breakable() { return is_breakable_; }

 private:
  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  bool is_breakable_;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
};


// -----------------------------------------------------------------------------
// Full code generator.

class FullCodeGenerator: public AstVisitor {
 public:
  // Register state at a bailout point: either no registers are live, or the
  // top-of-stack value is held in the result register.
  enum State {
    NO_REGISTERS,
    TOS_REG
  };

  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
      : masm_(masm),
        info_(info),
        scope_(info->scope()),
        nesting_stack_(NULL),
        loop_depth_(0),
        globals_(NULL),
        context_(NULL),
        // Pre-size the bailout and type-feedback lists to one slot per AST
        // node when deoptimization support is on; otherwise keep them empty.
        bailout_entries_(info->HasDeoptimizationSupport()
                         ? info->function()->ast_node_count() : 0,
                         info->zone()),
        back_edges_(2, info->zone()),
        type_feedback_cells_(info->HasDeoptimizationSupport()
                             ? info->function()->ast_node_count() : 0,
                             info->zone()),
        ic_total_count_(0),
        zone_(info->zone()) {
    Initialize();
  }

  void Initialize();

  // Main entry point: compile info's function with the full compiler and
  // attach the resulting code object. Returns false on failure.
  static bool MakeCode(CompilationInfo* info);

  // Encode state and pc-offset as a BitField<type, start, size>.
  // Only use 30 bits because we encode the result as a smi.
  class StateField : public BitField<State, 0, 1> { };
  class PcField : public BitField<unsigned, 1, 30-1> { };

  static const char* State2String(State state) {
    switch (state) {
      case NO_REGISTERS: return "NO_REGISTERS";
      case TOS_REG: return "TOS_REG";
    }
    UNREACHABLE();
    return NULL;
  }

  Zone* zone() const { return zone_; }

  static const int kMaxBackEdgeWeight = 127;

  // Platform-specific code size multiplier.
#if V8_TARGET_ARCH_IA32
  static const int kCodeSizeMultiplier = 100;
#elif V8_TARGET_ARCH_X64
  static const int kCodeSizeMultiplier = 162;
#elif V8_TARGET_ARCH_ARM
  static const int kCodeSizeMultiplier = 142;
#elif V8_TARGET_ARCH_MIPS
  static const int kCodeSizeMultiplier = 142;
#else
#error Unsupported target architecture.
#endif

  // Iterates over the back edge table emitted after the instructions of an
  // unoptimized (full-codegen) code object. Each entry holds the AST id,
  // pc offset and loop depth of one back edge. Heap allocation is disallowed
  // for the iterator's lifetime because it caches raw instruction addresses.
  class BackEdgeTableIterator {
   public:
    explicit BackEdgeTableIterator(Code* unoptimized) {
      ASSERT(unoptimized->kind() == Code::FUNCTION);
      instruction_start_ = unoptimized->instruction_start();
      cursor_ = instruction_start_ + unoptimized->back_edge_table_offset();
      ASSERT(cursor_ < instruction_start_ + unoptimized->instruction_size());
      // The table starts with its length, followed by table_length_ entries.
      table_length_ = Memory::uint32_at(cursor_);
      cursor_ += kTableLengthSize;
      end_ = cursor_ + table_length_ * kEntrySize;
    }

    bool Done() { return cursor_ >= end_; }

    void Next() {
      ASSERT(!Done());
      cursor_ += kEntrySize;
    }

    BailoutId ast_id() {
      ASSERT(!Done());
      return BailoutId(static_cast<int>(
          Memory::uint32_at(cursor_ + kAstIdOffset)));
    }

    uint32_t loop_depth() {
      ASSERT(!Done());
      return Memory::uint32_at(cursor_ + kLoopDepthOffset);
    }

    uint32_t pc_offset() {
      ASSERT(!Done());
      return Memory::uint32_at(cursor_ + kPcOffsetOffset);
    }

    Address pc() {
      ASSERT(!Done());
      return instruction_start_ + pc_offset();
    }

    uint32_t table_length() { return table_length_; }

   private:
    // Table layout: uint32 length, then per-entry {ast id, pc offset,
    // loop depth}, each a uint32.
    static const int kTableLengthSize = kIntSize;
    static const int kAstIdOffset = 0 * kIntSize;
    static const int kPcOffsetOffset = 1 * kIntSize;
    static const int kLoopDepthOffset = 2 * kIntSize;
    static const int kEntrySize = 3 * kIntSize;

    Address cursor_;
    Address end_;
    Address instruction_start_;
    uint32_t table_length_;
    DisallowHeapAllocation no_gc_while_iterating_over_raw_addresses_;

    DISALLOW_COPY_AND_ASSIGN(BackEdgeTableIterator);
  };


 private:
  class Breakable;
  class Iteration;

  class TestContext;

  // One element of the nesting stack that mirrors the statement nesting
  // (blocks, loops, try/catch/finally, with) during code generation.
  class NestedStatement BASE_EMBEDDED {
   public:
    explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
      // Link into codegen's nesting stack.
      previous_ = codegen->nesting_stack_;
      codegen->nesting_stack_ = this;
    }
    virtual ~NestedStatement() {
      // Unlink from codegen's nesting stack.
      ASSERT_EQ(this, codegen_->nesting_stack_);
      codegen_->nesting_stack_ = previous_;
    }

    virtual Breakable* AsBreakable() { return NULL; }
    virtual Iteration* AsIteration() { return NULL; }

    virtual bool IsContinueTarget(Statement* target) { return false; }
    virtual bool IsBreakTarget(Statement* target) { return false; }

    // Notify the statement that we are exiting it via break, continue, or
    // return and give it a chance to generate cleanup code. Return the
    // next outer statement in the nesting stack. We accumulate in
    // *stack_depth the amount to drop the stack and in *context_length the
    // number of context chain links to unwind as we traverse the nesting
    // stack from an exit to its target.
    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      return previous_;
    }

   protected:
    MacroAssembler* masm() { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    NestedStatement* previous_;

   private:
    DISALLOW_COPY_AND_ASSIGN(NestedStatement);
  };

  // A breakable statement such as a block.
  class Breakable : public NestedStatement {
   public:
    Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
        : NestedStatement(codegen), statement_(statement) {
    }
    virtual ~Breakable() {}

    virtual Breakable* AsBreakable() { return this; }
    virtual bool IsBreakTarget(Statement* target) {
      return statement() == target;
    }

    BreakableStatement* statement() { return statement_; }
    Label* break_label() { return &break_label_; }

   private:
    BreakableStatement* statement_;
    Label break_label_;
  };

  // An iteration statement such as a while, for, or do loop.
  class Iteration : public Breakable {
   public:
    Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
        : Breakable(codegen, statement) {
    }
    virtual ~Iteration() {}

    virtual Iteration* AsIteration() { return this; }
    virtual bool IsContinueTarget(Statement* target) {
      return statement() == target;
    }

    Label* continue_label() { return &continue_label_; }

   private:
    Label continue_label_;
  };

  // A nested block statement.
  class NestedBlock : public Breakable {
   public:
    NestedBlock(FullCodeGenerator* codegen, Block* block)
        : Breakable(codegen, block) {
    }
    virtual ~NestedBlock() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      // A block with its own scope pushed a context that must be unwound.
      if (statement()->AsBlock()->scope() != NULL) {
        ++(*context_length);
      }
      return previous_;
    };
  };

  // The try block of a try/catch statement.
  class TryCatch : public NestedStatement {
   public:
    explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
    }
    virtual ~TryCatch() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length);
  };

  // The try block of a try/finally statement.
  class TryFinally : public NestedStatement {
   public:
    TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
        : NestedStatement(codegen), finally_entry_(finally_entry) {
    }
    virtual ~TryFinally() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length);

   private:
    Label* finally_entry_;
  };

  // The finally block of a try/finally statement.
  class Finally : public NestedStatement {
   public:
    // Number of stack slots occupied by a finally block's bookkeeping state.
    static const int kElementCount = 5;

    explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
    virtual ~Finally() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      *stack_depth += kElementCount;
      return previous_;
    }
  };

  // The body of a for/in loop.
  class ForIn : public Iteration {
   public:
    // Number of stack slots occupied by the for/in iteration state.
    static const int kElementCount = 5;

    ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
        : Iteration(codegen, statement) {
    }
    virtual ~ForIn() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      *stack_depth += kElementCount;
      return previous_;
    }
  };


  // The body of a with or catch.
  class WithOrCatch : public NestedStatement {
   public:
    explicit WithOrCatch(FullCodeGenerator* codegen)
        : NestedStatement(codegen) {
    }
    virtual ~WithOrCatch() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      // Both with and catch push one context that must be unwound on exit.
      ++(*context_length);
      return previous_;
    }
  };

  // Type of a member function that generates inline code for a native function.
  typedef void (FullCodeGenerator::*InlineFunctionGenerator)(CallRuntime* expr);

  static const InlineFunctionGenerator kInlineFunctionGenerators[];

  // A platform-specific utility to overwrite the accumulator register
  // with a GC-safe value.
  void ClearAccumulator();

  // Determine whether or not to inline the smi case for the given
  // operation.
  bool ShouldInlineSmiCase(Token::Value op);

  // Helper function to convert a pure value into a test context. The value
  // is expected on the stack or the accumulator, depending on the platform.
  // See the platform-specific implementation for details.
  void DoTest(Expression* condition,
              Label* if_true,
              Label* if_false,
              Label* fall_through);
  void DoTest(const TestContext* context);

  // Helper function to split control flow and avoid a branch to the
  // fall-through label if it is set up.
#if V8_TARGET_ARCH_MIPS
  void Split(Condition cc,
             Register lhs,
             const Operand& rhs,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#else  // All non-mips arch.
  void Split(Condition cc,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#endif  // V8_TARGET_ARCH_MIPS

  // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
  // a register. Emits a context chain walk if necessary (so does
  // SetVar) so avoid calling both on the same variable.
  void GetVar(Register destination, Variable* var);

  // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
  // the context, the write barrier will be emitted and source, scratch0,
  // scratch1 will be clobbered. Emits a context chain walk if necessary
  // (so does GetVar) so avoid calling both on the same variable.
  void SetVar(Variable* var,
              Register source,
              Register scratch0,
              Register scratch1);

  // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
  // variable. Writing does not need the write barrier.
  MemOperand StackOperand(Variable* var);

  // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
  // variable. May emit code to traverse the context chain, loading the
  // found context into the scratch register. Writing to this operand will
  // need the write barrier if location is CONTEXT.
  MemOperand VarOperand(Variable* var, Register scratch);

  // Visit an expression purely for its side effects; its value is discarded.
  void VisitForEffect(Expression* expr) {
    EffectContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }

  // Visit an expression, leaving its value in the accumulator register.
  void VisitForAccumulatorValue(Expression* expr) {
    AccumulatorValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, TOS_REG);
  }

  // Visit an expression, pushing its value onto the stack.
  void VisitForStackValue(Expression* expr) {
    StackValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }

  // Visit an expression as pure control flow, branching to one of the
  // given labels.
  void VisitForControl(Expression* expr,
                       Label* if_true,
                       Label* if_false,
                       Label* fall_through) {
    TestContext context(this, expr, if_true, if_false, fall_through);
    Visit(expr);
    // For test contexts, we prepare for bailout before branching, not at
    // the end of the entire expression. This happens as part of visiting
    // the expression.
  }

  void VisitInDuplicateContext(Expression* expr);

  void VisitDeclarations(ZoneList<Declaration*>* declarations);
  void DeclareModules(Handle<FixedArray> descriptions);
  void DeclareGlobals(Handle<FixedArray> pairs);
  int DeclareGlobalsFlags();

  // Generate code to allocate all (including nested) modules and contexts.
  // Because of recursive linking and the presence of module alias declarations,
  // this has to be a separate pass _before_ populating or executing any module.
  void AllocateModules(ZoneList<Declaration*>* declarations);

  // Generate code to create an iterator result object. The "value" property is
  // set to a value popped from the stack, and "done" is set according to the
  // argument. The result object is left in the result register.
  void EmitCreateIteratorResult(bool done);

  // Try to perform a comparison as a fast inlined literal compare if
  // the operands allow it. Returns true if the compare operations
  // has been matched and all code generated; false otherwise.
  bool TryLiteralCompare(CompareOperation* compare);

  // Platform-specific code for comparing the type of a value with
  // a given literal string.
  void EmitLiteralCompareTypeof(Expression* expr,
                                Expression* sub_expr,
                                Handle<String> check);

  // Platform-specific code for equality comparison with a nil-like value.
  void EmitLiteralCompareNil(CompareOperation* expr,
                             Expression* sub_expr,
                             NilValue nil);

  // Bailout support.
  void PrepareForBailout(Expression* node, State state);
  void PrepareForBailoutForId(BailoutId id, State state);

  // Cache cell support. This associates AST ids with global property cells
  // that will be cleared during GC and collected by the type-feedback oracle.
  void RecordTypeFeedbackCell(TypeFeedbackId id, Handle<Cell> cell);

  // Record a call's return site offset, used to rebuild the frame if the
  // called function was inlined at the site.
  void RecordJSReturnSite(Call* call);

  // Prepare for bailout before a test (or compare) and branch. If
  // should_normalize, then the following comparison will not handle the
  // canonical JS true value so we will insert a (dead) test against true at
  // the actual bailout target from the optimized code. If not
  // should_normalize, the true and false labels are ignored.
  void PrepareForBailoutBeforeSplit(Expression* expr,
                                    bool should_normalize,
                                    Label* if_true,
                                    Label* if_false);

  // If enabled, emit debug code for checking that the current context is
  // neither a with nor a catch context.
  void EmitDebugCheckDeclarationContext(Variable* variable);

  // This is meant to be called at loop back edges, |back_edge_target| is
  // the jump target of the back edge and is used to approximate the amount
  // of code inside the loop.
  void EmitBackEdgeBookkeeping(IterationStatement* stmt,
                               Label* back_edge_target);
  // Record the OSR AST id corresponding to a back edge in the code.
  void RecordBackEdge(BailoutId osr_ast_id);
  // Emit a table of back edge ids, pcs and loop depths into the code stream.
  // Return the offset of the start of the table.
  unsigned EmitBackEdgeTable();

  void EmitProfilingCounterDecrement(int delta);
  void EmitProfilingCounterReset();

  // Emit code to pop values from the stack associated with nested statements
  // like try/catch, try/finally, etc, running the finallies and unwinding the
  // handlers as needed.
  void EmitUnwindBeforeReturn();

  // Platform-specific return sequence
  void EmitReturnSequence();

  // Platform-specific code sequences for calls
  void EmitCallWithStub(Call* expr, CallFunctionFlags flags);
  void EmitCallWithIC(Call* expr, Handle<Object> name, RelocInfo::Mode mode);
  void EmitKeyedCallWithIC(Call* expr, Expression* key);

  // Platform-specific code for inline runtime calls.
  InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);

  void EmitInlineRuntimeCall(CallRuntime* expr);

  // Declare one Emit<Name> member per inlineable runtime function.
#define EMIT_INLINE_RUNTIME_CALL(name, x, y) \
  void Emit##name(CallRuntime* expr);
  INLINE_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
  INLINE_RUNTIME_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
#undef EMIT_INLINE_RUNTIME_CALL

  void EmitSeqStringSetCharCheck(Register string,
                                 Register index,
                                 Register value,
                                 uint32_t encoding_mask);

  // Platform-specific code for resuming generators.
  void EmitGeneratorResume(Expression *generator,
                           Expression *value,
                           JSGeneratorObject::ResumeMode resume_mode);

  // Platform-specific code for loading variables.
  void EmitLoadGlobalCheckExtensions(Variable* var,
                                     TypeofState typeof_state,
                                     Label* slow);
  MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
  void EmitDynamicLookupFastCase(Variable* var,
                                 TypeofState typeof_state,
                                 Label* slow,
                                 Label* done);
  void EmitVariableLoad(VariableProxy* proxy);

  void EmitAccessor(Expression* expression);

  // Expects the arguments and the function already pushed.
  void EmitResolvePossiblyDirectEval(int arg_count);

  // Platform-specific support for allocating a new closure based on
  // the given function info.
  void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);

  // Platform-specific support for compiling assignments.

  // Load a value from a named property.
  // The receiver is left on the stack by the IC.
  void EmitNamedPropertyLoad(Property* expr);

  // Load a value from a keyed property.
  // The receiver and the key is left on the stack by the IC.
  void EmitKeyedPropertyLoad(Property* expr);

  // Apply the compound assignment operator. Expects the left operand on top
  // of the stack and the right one in the accumulator.
  void EmitBinaryOp(BinaryOperation* expr,
                    Token::Value op,
                    OverwriteMode mode);

  // Helper functions for generating inlined smi code for certain
  // binary operations.
  void EmitInlineSmiBinaryOp(BinaryOperation* expr,
                             Token::Value op,
                             OverwriteMode mode,
                             Expression* left,
                             Expression* right);

  // Assign to the given expression as if via '='. The right-hand-side value
  // is expected in the accumulator.
  void EmitAssignment(Expression* expr);

  // Complete a variable assignment. The right-hand-side value is expected
  // in the accumulator.
  void EmitVariableAssignment(Variable* var,
                              Token::Value op);

  // Complete a named property assignment. The receiver is expected on top
  // of the stack and the right-hand-side value in the accumulator.
  void EmitNamedPropertyAssignment(Assignment* expr);

  // Complete a keyed property assignment. The receiver and key are
  // expected on top of the stack and the right-hand-side value in the
  // accumulator.
  void EmitKeyedPropertyAssignment(Assignment* expr);

  void CallIC(Handle<Code> code,
              RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
              TypeFeedbackId id = TypeFeedbackId::None());

  // Source position bookkeeping for the generated code.
  void SetFunctionPosition(FunctionLiteral* fun);
  void SetReturnPosition(FunctionLiteral* fun);
  void SetStatementPosition(Statement* stmt);
  void SetExpressionPosition(Expression* expr, int pos);
  void SetStatementPosition(int pos);
  void SetSourcePosition(int pos);

  // Non-local control flow support.
  void EnterFinallyBlock();
  void ExitFinallyBlock();

  // Loop nesting counter.
  int loop_depth() { return loop_depth_; }
  void increment_loop_depth() { loop_depth_++; }
  void decrement_loop_depth() {
    ASSERT(loop_depth_ > 0);
    loop_depth_--;
  }

  MacroAssembler* masm() { return masm_; }

  class ExpressionContext;
  const ExpressionContext* context() { return context_; }
  void set_new_context(const ExpressionContext* context) { context_ = context; }

  Handle<Script> script() { return info_->script(); }
  bool is_eval() { return info_->is_eval(); }
  bool is_native() { return info_->is_native(); }
  bool is_classic_mode() { return language_mode() == CLASSIC_MODE; }
  LanguageMode language_mode() { return function()->language_mode(); }
  FunctionLiteral* function() { return info_->function(); }
  Scope* scope() { return scope_; }

  static Register result_register();
  static Register context_register();

  // Set fields in the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void StoreToFrameField(int frame_offset, Register value);

  // Load a value from the current context. Indices are defined as an enum
  // in v8::internal::Context.
  void LoadContextField(Register dst, int context_index);

  // Push the function argument for the runtime functions PushWithContext
  // and PushCatchContext.
  void PushFunctionArgumentForContextAllocation();

  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  // Helpers for VisitBinaryOperation, split by operator kind.
  void VisitComma(BinaryOperation* expr);
  void VisitLogicalExpression(BinaryOperation* expr);
  void VisitArithmeticExpression(BinaryOperation* expr);

  void VisitForTypeofValue(Expression* expr);

  void Generate();
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateTypeFeedbackInfo(Handle<Code> code);
  void PopulateTypeFeedbackCells(Handle<Code> code);

  Handle<FixedArray> handler_table() { return handler_table_; }

  // One deoptimization bailout point: AST id plus encoded pc/state
  // (see StateField/PcField above).
  struct BailoutEntry {
    BailoutId id;
    unsigned pc_and_state;
  };

  // One loop back edge recorded for on-stack replacement.
  struct BackEdgeEntry {
    BailoutId id;
    unsigned pc;
    uint32_t loop_depth;
  };

  // Association of an AST id with its type-feedback cell.
  struct TypeFeedbackCellEntry {
    TypeFeedbackId ast_id;
    Handle<Cell> cell;
  };


  // Abstract base for the expression result contexts (effect, accumulator
  // value, stack value, test). Installs itself as the codegen's current
  // context on construction and restores the previous one on destruction.
  class ExpressionContext BASE_EMBEDDED {
   public:
    explicit ExpressionContext(FullCodeGenerator* codegen)
        : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
      codegen->set_new_context(this);
    }

    virtual ~ExpressionContext() {
      codegen_->set_new_context(old_);
    }

    Isolate* isolate() const { return codegen_->isolate(); }

    // Convert constant control flow (true or false) to the result expected for
    // this expression context.
    virtual void Plug(bool flag) const = 0;

    // Emit code to convert a pure value (in a register, known variable
    // location, as a literal, or on top of the stack) into the result
    // expected according to this expression context.
    virtual void Plug(Register reg) const = 0;
    virtual void Plug(Variable* var) const = 0;
    virtual void Plug(Handle<Object> lit) const = 0;
    virtual void Plug(Heap::RootListIndex index) const = 0;
    virtual void PlugTOS() const = 0;

    // Emit code to convert pure control flow to a pair of unbound labels into
    // the result expected according to this expression context. The
    // implementation will bind both labels unless it's a TestContext, which
    // won't bind them at this point.
    virtual void Plug(Label* materialize_true,
                      Label* materialize_false) const = 0;

    // Emit code to discard count elements from the top of stack, then convert
    // a pure value into the result expected according to this expression
    // context.
    virtual void DropAndPlug(int count, Register reg) const = 0;

    // Set up branch labels for a test expression. The three Label** parameters
    // are output parameters.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const = 0;

    // Returns true if we are evaluating only for side effects (i.e. if the
    // result will be discarded).
    virtual bool IsEffect() const { return false; }

    // Returns true if we are evaluating for the value (in accu/on stack).
    virtual bool IsAccumulatorValue() const { return false; }
    virtual bool IsStackValue() const { return false; }

    // Returns true if we are branching on the value rather than materializing
    // it. Only used for asserts.
    virtual bool IsTest() const { return false; }

   protected:
    FullCodeGenerator* codegen() const { return codegen_; }
    MacroAssembler* masm() const { return masm_; }
    MacroAssembler* masm_;

   private:
    const ExpressionContext* old_;
    FullCodeGenerator* codegen_;
  };

  // Context for expressions whose value is left in the accumulator register.
  class AccumulatorValueContext : public ExpressionContext {
   public:
    explicit AccumulatorValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsAccumulatorValue() const { return true; }
  };

  // Context for expressions whose value is pushed onto the stack.
  class StackValueContext : public ExpressionContext {
   public:
    explicit StackValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsStackValue() const { return true; }
  };

  // Context for expressions evaluated as pure control flow: the value is
  // never materialized; instead we branch to true/false labels.
  class TestContext : public ExpressionContext {
   public:
    TestContext(FullCodeGenerator* codegen,
                Expression* condition,
                Label* true_label,
                Label* false_label,
                Label* fall_through)
        : ExpressionContext(codegen),
          condition_(condition),
          true_label_(true_label),
          false_label_(false_label),
          fall_through_(fall_through) { }

    static const TestContext* cast(const ExpressionContext* context) {
      ASSERT(context->IsTest());
      return reinterpret_cast<const TestContext*>(context);
    }

    Expression* condition() const { return condition_; }
    Label* true_label() const { return true_label_; }
    Label* false_label() const { return false_label_; }
    Label* fall_through() const { return fall_through_; }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsTest() const { return true; }

   private:
    Expression* condition_;
    Label* true_label_;
    Label* false_label_;
    Label* fall_through_;
  };

  // Context for expressions evaluated only for their side effects; the
  // value is discarded.
  class EffectContext : public ExpressionContext {
   public:
    explicit EffectContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsEffect() const { return true; }
  };

  MacroAssembler* masm_;
  CompilationInfo* info_;
  Scope* scope_;
  Label return_label_;
  NestedStatement* nesting_stack_;
  int loop_depth_;
  ZoneList<Handle<Object> >* globals_;
  Handle<FixedArray> modules_;
  int module_index_;
  const ExpressionContext* context_;
  ZoneList<BailoutEntry> bailout_entries_;
  GrowableBitVector prepared_bailout_ids_;
  ZoneList<BackEdgeEntry> back_edges_;
  ZoneList<TypeFeedbackCellEntry> type_feedback_cells_;
  int ic_total_count_;
  Handle<FixedArray> handler_table_;
  Handle<Cell> profiling_counter_;
  bool generate_debug_code_;
  Zone* zone_;

  friend class NestedStatement;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
};


// A map from property names to getter/setter pairs allocated in the zone.
class AccessorTable: public TemplateHashMap<Literal,
                                            ObjectLiteral::Accessors,
                                            ZoneAllocationPolicy> {
 public:
  explicit AccessorTable(Zone* zone) :
      TemplateHashMap<Literal, ObjectLiteral::Accessors,
                      ZoneAllocationPolicy>(Literal::Match,
                                            ZoneAllocationPolicy(zone)),
      zone_(zone) { }

  // Find-or-insert: returns the entry for literal, creating an empty
  // Accessors pair in the zone if the name was not yet present.
  Iterator lookup(Literal* literal) {
    Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
    if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
    return it;
  }

 private:
  Zone* zone_;
};


} }  // namespace v8::internal

#endif  // V8_FULL_CODEGEN_H_