      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #ifndef V8_FULL_CODEGEN_H_
      6 #define V8_FULL_CODEGEN_H_
      7 
      8 #include "src/v8.h"
      9 
     10 #include "src/allocation.h"
     11 #include "src/assert-scope.h"
     12 #include "src/ast.h"
     13 #include "src/code-stubs.h"
     14 #include "src/codegen.h"
     15 #include "src/compiler.h"
     16 #include "src/data-flow.h"
     17 #include "src/globals.h"
     18 #include "src/objects.h"
     19 
     20 namespace v8 {
     21 namespace internal {
     22 
     23 // Forward declarations.
     24 class JumpPatchSite;
     25 
     26 // AST node visitor which can tell whether a given statement will be breakable
     27 // when the code is compiled by the full compiler in the debugger. This means
     28 // that there will be an IC (load/store/call) in the code generated for the
     29 // debugger to piggyback on.
     30 class BreakableStatementChecker: public AstVisitor {
     31  public:
     32   explicit BreakableStatementChecker(Zone* zone) : is_breakable_(false) {
     33     InitializeAstVisitor(zone);
     34   }
     35 
     36   void Check(Statement* stmt);
     37   void Check(Expression* expr);
     38 
     39   bool is_breakable() { return is_breakable_; }
     40 
     41  private:
     42   // AST node visit functions.
     43 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
     44   AST_NODE_LIST(DECLARE_VISIT)
     45 #undef DECLARE_VISIT
     46 
     47   bool is_breakable_;
     48 
     49   DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
     50   DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
     51 };
     52 
     53 
     54 // -----------------------------------------------------------------------------
     55 // Full code generator.
     56 
     57 class FullCodeGenerator: public AstVisitor {
     58  public:
     59   enum State {
     60     NO_REGISTERS,
     61     TOS_REG
     62   };
     63 
     64   FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
     65       : masm_(masm),
     66         info_(info),
     67         scope_(info->scope()),
     68         nesting_stack_(NULL),
     69         loop_depth_(0),
     70         globals_(NULL),
     71         context_(NULL),
     72         bailout_entries_(info->HasDeoptimizationSupport()
     73                          ? info->function()->ast_node_count() : 0,
     74                          info->zone()),
     75         back_edges_(2, info->zone()),
     76         ic_total_count_(0) {
     77     DCHECK(!info->IsStub());
     78     Initialize();
     79   }
     80 
     81   void Initialize();
     82 
     83   static bool MakeCode(CompilationInfo* info);
     84 
     85   // Encode state and pc-offset as a BitField<type, start, size>.
     86   // Only use 30 bits because we encode the result as a smi.
     87   class StateField : public BitField<State, 0, 1> { };
     88   class PcField    : public BitField<unsigned, 1, 30-1> { };
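          //
          // A minimal sketch (hypothetical values, not the actual call sites in
          // full-codegen.cc) of how a State and a pc offset would be packed into
          // one smi-sized value with these BitFields and unpacked again:
          //
          //   unsigned packed = StateField::encode(TOS_REG) | PcField::encode(pc);
          //   State state     = StateField::decode(packed);  // TOS_REG
          //   unsigned pc_out = PcField::decode(packed);     // pc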
     89 
     90   static const char* State2String(State state) {
     91     switch (state) {
     92       case NO_REGISTERS: return "NO_REGISTERS";
     93       case TOS_REG: return "TOS_REG";
     94     }
     95     UNREACHABLE();
     96     return NULL;
     97   }
     98 
     99   static const int kMaxBackEdgeWeight = 127;
    100 
    101   // Platform-specific code size multiplier.
    102 #if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
    103   static const int kCodeSizeMultiplier = 105;
    104   static const int kBootCodeSizeMultiplier = 100;
    105 #elif V8_TARGET_ARCH_X64
    106   static const int kCodeSizeMultiplier = 170;
    107   static const int kBootCodeSizeMultiplier = 140;
    108 #elif V8_TARGET_ARCH_ARM
    109   static const int kCodeSizeMultiplier = 149;
    110   static const int kBootCodeSizeMultiplier = 110;
    111 #elif V8_TARGET_ARCH_ARM64
    112 // TODO(all): Copied ARM value. Check this is sensible for ARM64.
    113   static const int kCodeSizeMultiplier = 149;
    114   static const int kBootCodeSizeMultiplier = 110;
    115 #elif V8_TARGET_ARCH_MIPS
    116   static const int kCodeSizeMultiplier = 149;
    117   static const int kBootCodeSizeMultiplier = 120;
    118 #elif V8_TARGET_ARCH_MIPS64
    119   static const int kCodeSizeMultiplier = 149;
    120   static const int kBootCodeSizeMultiplier = 120;
    121 #else
    122 #error Unsupported target architecture.
    123 #endif
    124 
    125  private:
    126   class Breakable;
    127   class Iteration;
    128 
    129   class TestContext;
    130 
    131   class NestedStatement BASE_EMBEDDED {
    132    public:
    133     explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
    134       // Link into codegen's nesting stack.
    135       previous_ = codegen->nesting_stack_;
    136       codegen->nesting_stack_ = this;
    137     }
    138     virtual ~NestedStatement() {
    139       // Unlink from codegen's nesting stack.
    140       DCHECK_EQ(this, codegen_->nesting_stack_);
    141       codegen_->nesting_stack_ = previous_;
    142     }
    143 
    144     virtual Breakable* AsBreakable() { return NULL; }
    145     virtual Iteration* AsIteration() { return NULL; }
    146 
    147     virtual bool IsContinueTarget(Statement* target) { return false; }
    148     virtual bool IsBreakTarget(Statement* target) { return false; }
    149 
    150     // Notify the statement that we are exiting it via break, continue, or
    151     // return and give it a chance to generate cleanup code.  Return the
    152     // next outer statement in the nesting stack.  We accumulate in
    153     // *stack_depth the amount to drop the stack and in *context_length the
    154     // number of context chain links to unwind as we traverse the nesting
    155     // stack from an exit to its target.
    156     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
    157       return previous_;
    158     }
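            //
            // Hedged sketch of how an exit path is expected to use Exit() (modeled
            // on the unwind helpers; the exact code lives in the platform files):
            //
            //   int stack_depth = 0;
            //   int context_length = 0;
            //   NestedStatement* current = nesting_stack_;
            //   while (current != NULL) {
            //     current = current->Exit(&stack_depth, &context_length);
            //   }
            //   // Then drop |stack_depth| stack slots and unwind |context_length|
            //   // context chain links before branching to the target.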
    159 
    160    protected:
    161     MacroAssembler* masm() { return codegen_->masm(); }
    162 
    163     FullCodeGenerator* codegen_;
    164     NestedStatement* previous_;
    165 
    166    private:
    167     DISALLOW_COPY_AND_ASSIGN(NestedStatement);
    168   };
    169 
    170   // A breakable statement such as a block.
    171   class Breakable : public NestedStatement {
    172    public:
    173     Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
    174         : NestedStatement(codegen), statement_(statement) {
    175     }
    176     virtual ~Breakable() {}
    177 
    178     virtual Breakable* AsBreakable() { return this; }
    179     virtual bool IsBreakTarget(Statement* target) {
    180       return statement() == target;
    181     }
    182 
    183     BreakableStatement* statement() { return statement_; }
    184     Label* break_label() { return &break_label_; }
    185 
    186    private:
    187     BreakableStatement* statement_;
    188     Label break_label_;
    189   };
    190 
    191   // An iteration statement such as a while, for, or do loop.
    192   class Iteration : public Breakable {
    193    public:
    194     Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
    195         : Breakable(codegen, statement) {
    196     }
    197     virtual ~Iteration() {}
    198 
    199     virtual Iteration* AsIteration() { return this; }
    200     virtual bool IsContinueTarget(Statement* target) {
    201       return statement() == target;
    202     }
    203 
    204     Label* continue_label() { return &continue_label_; }
    205 
    206    private:
    207     Label continue_label_;
    208   };
    209 
    210   // A nested block statement.
    211   class NestedBlock : public Breakable {
    212    public:
    213     NestedBlock(FullCodeGenerator* codegen, Block* block)
    214         : Breakable(codegen, block) {
    215     }
    216     virtual ~NestedBlock() {}
    217 
    218     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
    219       if (statement()->AsBlock()->scope() != NULL) {
    220         ++(*context_length);
    221       }
    222       return previous_;
    223     }
    224   };
    225 
    226   // The try block of a try/catch statement.
    227   class TryCatch : public NestedStatement {
    228    public:
    229     explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
    230     }
    231     virtual ~TryCatch() {}
    232 
    233     virtual NestedStatement* Exit(int* stack_depth, int* context_length);
    234   };
    235 
    236   // The try block of a try/finally statement.
    237   class TryFinally : public NestedStatement {
    238    public:
    239     TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
    240         : NestedStatement(codegen), finally_entry_(finally_entry) {
    241     }
    242     virtual ~TryFinally() {}
    243 
    244     virtual NestedStatement* Exit(int* stack_depth, int* context_length);
    245 
    246    private:
    247     Label* finally_entry_;
    248   };
    249 
    250   // The finally block of a try/finally statement.
    251   class Finally : public NestedStatement {
    252    public:
    253     static const int kElementCount = 5;
    254 
    255     explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
    256     virtual ~Finally() {}
    257 
    258     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
    259       *stack_depth += kElementCount;
    260       return previous_;
    261     }
    262   };
    263 
    264   // The body of a for/in loop.
    265   class ForIn : public Iteration {
    266    public:
    267     static const int kElementCount = 5;
    268 
    269     ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
    270         : Iteration(codegen, statement) {
    271     }
    272     virtual ~ForIn() {}
    273 
    274     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
    275       *stack_depth += kElementCount;
    276       return previous_;
    277     }
    278   };
    279 
    280 
    281   // The body of a with or catch.
    282   class WithOrCatch : public NestedStatement {
    283    public:
    284     explicit WithOrCatch(FullCodeGenerator* codegen)
    285         : NestedStatement(codegen) {
    286     }
    287     virtual ~WithOrCatch() {}
    288 
    289     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
    290       ++(*context_length);
    291       return previous_;
    292     }
    293   };
    294 
    295   // Type of a member function that generates inline code for a native function.
    296   typedef void (FullCodeGenerator::*InlineFunctionGenerator)(CallRuntime* expr);
    297 
    298   static const InlineFunctionGenerator kInlineFunctionGenerators[];
    299 
    300   // A platform-specific utility to overwrite the accumulator register
    301   // with a GC-safe value.
    302   void ClearAccumulator();
    303 
    304   // Determine whether or not to inline the smi case for the given
    305   // operation.
    306   bool ShouldInlineSmiCase(Token::Value op);
    307 
    308   // Helper function to convert a pure value into a test context.  The value
    309   // is expected on the stack or the accumulator, depending on the platform.
    310   // See the platform-specific implementation for details.
    311   void DoTest(Expression* condition,
    312               Label* if_true,
    313               Label* if_false,
    314               Label* fall_through);
    315   void DoTest(const TestContext* context);
    316 
    317   // Helper function to split control flow and avoid a branch to the
    318   // fall-through label if it is set up.
    319 #if V8_TARGET_ARCH_MIPS
    320   void Split(Condition cc,
    321              Register lhs,
    322              const Operand&  rhs,
    323              Label* if_true,
    324              Label* if_false,
    325              Label* fall_through);
    326 #elif V8_TARGET_ARCH_MIPS64
    327   void Split(Condition cc,
    328              Register lhs,
    329              const Operand&  rhs,
    330              Label* if_true,
    331              Label* if_false,
    332              Label* fall_through);
    333 #else  // All non-MIPS architectures.
    334   void Split(Condition cc,
    335              Label* if_true,
    336              Label* if_false,
    337              Label* fall_through);
    338 #endif  // V8_TARGET_ARCH_MIPS
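          //
          // Hedged example of the non-MIPS form on an ARM-like target (the exact
          // instructions differ per platform): compare the value in the result
          // register against true, then split on the outcome.
          //
          //   __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
          //   Split(eq, if_true, if_false, fall_through);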
    339 
    340   // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
    341   // a register.  Emits a context chain walk if necessary (so does
    342   // SetVar) so avoid calling both on the same variable.
    343   void GetVar(Register destination, Variable* var);
    344 
    345   // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable.  If it's in
    346   // the context, the write barrier will be emitted and source, scratch0,
    347   // scratch1 will be clobbered.  Emits a context chain walk if necessary
    348   // (so does GetVar) so avoid calling both on the same variable.
    349   void SetVar(Variable* var,
    350               Register source,
    351               Register scratch0,
    352               Register scratch1);
    353 
    354   // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
    355   // variable.  Writing does not need the write barrier.
    356   MemOperand StackOperand(Variable* var);
    357 
    358   // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
    359   // variable.  May emit code to traverse the context chain, loading the
    360   // found context into the scratch register.  Writing to this operand will
    361   // need the write barrier if location is CONTEXT.
    362   MemOperand VarOperand(Variable* var, Register scratch);
    363 
    364   void VisitForEffect(Expression* expr) {
    365     EffectContext context(this);
    366     Visit(expr);
    367     PrepareForBailout(expr, NO_REGISTERS);
    368   }
    369 
    370   void VisitForAccumulatorValue(Expression* expr) {
    371     AccumulatorValueContext context(this);
    372     Visit(expr);
    373     PrepareForBailout(expr, TOS_REG);
    374   }
    375 
    376   void VisitForStackValue(Expression* expr) {
    377     StackValueContext context(this);
    378     Visit(expr);
    379     PrepareForBailout(expr, NO_REGISTERS);
    380   }
    381 
    382   void VisitForControl(Expression* expr,
    383                        Label* if_true,
    384                        Label* if_false,
    385                        Label* fall_through) {
    386     TestContext context(this, expr, if_true, if_false, fall_through);
    387     Visit(expr);
    388     // For test contexts, we prepare for bailout before branching, not at
    389     // the end of the entire expression.  This happens as part of visiting
    390     // the expression.
    391   }
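          //
          // Illustrative (hypothetical) caller, roughly what an if-statement visit
          // does: evaluate the condition purely for control flow, then bind labels.
          //
          //   Label then_part, else_part, done;
          //   VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
          //   __ bind(&then_part);
          //   Visit(stmt->then_statement());
          //   __ jmp(&done);  // platform-specific unconditional branch
          //   __ bind(&else_part);
          //   Visit(stmt->else_statement());
          //   __ bind(&done);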
    392 
    393   void VisitInDuplicateContext(Expression* expr);
    394 
    395   void VisitDeclarations(ZoneList<Declaration*>* declarations);
    396   void DeclareModules(Handle<FixedArray> descriptions);
    397   void DeclareGlobals(Handle<FixedArray> pairs);
    398   int DeclareGlobalsFlags();
    399 
    400   // Generate code to allocate all (including nested) modules and contexts.
    401   // Because of recursive linking and the presence of module alias declarations,
    402   // this has to be a separate pass _before_ populating or executing any module.
    403   void AllocateModules(ZoneList<Declaration*>* declarations);
    404 
    405   // Generate code to create an iterator result object.  The "value" property is
    406   // set to a value popped from the stack, and "done" is set according to the
    407   // argument.  The result object is left in the result register.
    408   void EmitCreateIteratorResult(bool done);
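          //
          // For example (informally): with 42 on top of the stack,
          // EmitCreateIteratorResult(true) leaves an object equivalent to the JS
          // literal { value: 42, done: true } in the result register.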
    409 
    410   // Try to perform a comparison as a fast inlined literal compare if
    411   // the operands allow it.  Returns true if the compare operation
    412   // has been matched and all code generated; false otherwise.
    413   bool TryLiteralCompare(CompareOperation* compare);
    414 
    415   // Platform-specific code for comparing the type of a value with
    416   // a given literal string.
    417   void EmitLiteralCompareTypeof(Expression* expr,
    418                                 Expression* sub_expr,
    419                                 Handle<String> check);
    420 
    421   // Platform-specific code for equality comparison with a nil-like value.
    422   void EmitLiteralCompareNil(CompareOperation* expr,
    423                              Expression* sub_expr,
    424                              NilValue nil);
    425 
    426   // Bailout support.
    427   void PrepareForBailout(Expression* node, State state);
    428   void PrepareForBailoutForId(BailoutId id, State state);
    429 
    430   // Feedback slot support. The feedback vector will be cleared during gc and
    431   // collected by the type-feedback oracle.
    432   Handle<FixedArray> FeedbackVector() {
    433     return info_->feedback_vector();
    434   }
    435   void EnsureSlotContainsAllocationSite(int slot);
    436 
    437   // Record a call's return site offset, used to rebuild the frame if the
    438   // called function was inlined at the site.
    439   void RecordJSReturnSite(Call* call);
    440 
    441   // Prepare for bailout before a test (or compare) and branch.  If
    442   // should_normalize, then the following comparison will not handle the
    443   // canonical JS true value so we will insert a (dead) test against true at
    444   // the actual bailout target from the optimized code. If not
    445   // should_normalize, the true and false labels are ignored.
    446   void PrepareForBailoutBeforeSplit(Expression* expr,
    447                                     bool should_normalize,
    448                                     Label* if_true,
    449                                     Label* if_false);
    450 
    451   // If enabled, emit debug code for checking that the current context is
    452   // neither a with nor a catch context.
    453   void EmitDebugCheckDeclarationContext(Variable* variable);
    454 
    455   // This is meant to be called at loop back edges; |back_edge_target| is
    456   // the jump target of the back edge and is used to approximate the amount
    457   // of code inside the loop.
    458   void EmitBackEdgeBookkeeping(IterationStatement* stmt,
    459                                Label* back_edge_target);
    460   // Record the OSR AST id corresponding to a back edge in the code.
    461   void RecordBackEdge(BailoutId osr_ast_id);
    462   // Emit a table of back edge ids, pcs and loop depths into the code stream.
    463   // Return the offset of the start of the table.
    464   unsigned EmitBackEdgeTable();
    465 
    466   void EmitProfilingCounterDecrement(int delta);
    467   void EmitProfilingCounterReset();
    468 
    469   // Emit code to pop values from the stack associated with nested statements
    470   // like try/catch, try/finally, etc., running the finally blocks and
    471   // unwinding the handlers as needed.
    472   void EmitUnwindBeforeReturn();
    473 
    474   // Platform-specific return sequence.
    475   void EmitReturnSequence();
    476 
    477   // Platform-specific code sequences for calls.
    478   void EmitCall(Call* expr, CallICState::CallType = CallICState::FUNCTION);
    479   void EmitCallWithLoadIC(Call* expr);
    480   void EmitSuperCallWithLoadIC(Call* expr);
    481   void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
    482 
    483   // Platform-specific code for inline runtime calls.
    484   InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);
    485 
    486   void EmitInlineRuntimeCall(CallRuntime* expr);
    487 
    488 #define EMIT_INLINE_RUNTIME_CALL(name, x, y) \
    489   void Emit##name(CallRuntime* expr);
    490   INLINE_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
    491 #undef EMIT_INLINE_RUNTIME_CALL
    492 
    493   // Platform-specific code for resuming generators.
    494   void EmitGeneratorResume(Expression *generator,
    495                            Expression *value,
    496                            JSGeneratorObject::ResumeMode resume_mode);
    497 
    498   // Platform-specific code for loading variables.
    499   void EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
    500                                      TypeofState typeof_state,
    501                                      Label* slow);
    502   MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
    503   void EmitDynamicLookupFastCase(VariableProxy* proxy,
    504                                  TypeofState typeof_state,
    505                                  Label* slow,
    506                                  Label* done);
    507   void EmitVariableLoad(VariableProxy* proxy);
    508 
    509   void EmitAccessor(Expression* expression);
    510 
    511   // Expects the arguments and the function already pushed.
    512   void EmitResolvePossiblyDirectEval(int arg_count);
    513 
    514   // Platform-specific support for allocating a new closure based on
    515   // the given function info.
    516   void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
    517 
    518   // Platform-specific support for compiling assignments.
    519 
    520   // Load a value from a named property.
    521   // The receiver is left on the stack by the IC.
    522   void EmitNamedPropertyLoad(Property* expr);
    523 
    524   void EmitNamedSuperPropertyLoad(Property* expr);
    525 
    526   // Load a value from a keyed property.
    527   // The receiver and the key are left on the stack by the IC.
    528   void EmitKeyedPropertyLoad(Property* expr);
    529 
    530   // Apply the compound assignment operator. Expects the left operand on top
    531   // of the stack and the right one in the accumulator.
    532   void EmitBinaryOp(BinaryOperation* expr,
    533                     Token::Value op,
    534                     OverwriteMode mode);
    535 
    536   // Helper functions for generating inlined smi code for certain
    537   // binary operations.
    538   void EmitInlineSmiBinaryOp(BinaryOperation* expr,
    539                              Token::Value op,
    540                              OverwriteMode mode,
    541                              Expression* left,
    542                              Expression* right);
    543 
    544   // Assign to the given expression as if via '='. The right-hand-side value
    545   // is expected in the accumulator.
    546   void EmitAssignment(Expression* expr);
    547 
    548   // Complete a variable assignment.  The right-hand-side value is expected
    549   // in the accumulator.
    550   void EmitVariableAssignment(Variable* var,
    551                               Token::Value op);
    552 
    553   // Helper function for EmitVariableAssignment.
    554   void EmitStoreToStackLocalOrContextSlot(Variable* var,
    555                                           MemOperand location);
    556 
    557   // Complete a named property assignment.  The receiver is expected on top
    558   // of the stack and the right-hand-side value in the accumulator.
    559   void EmitNamedPropertyAssignment(Assignment* expr);
    560 
    561   // Complete a keyed property assignment.  The receiver and key are
    562   // expected on top of the stack and the right-hand-side value in the
    563   // accumulator.
    564   void EmitKeyedPropertyAssignment(Assignment* expr);
    565 
    566   void EmitLoadHomeObject(SuperReference* expr);
    567 
    568   void CallIC(Handle<Code> code,
    569               TypeFeedbackId id = TypeFeedbackId::None());
    570 
    571   void CallLoadIC(ContextualMode mode,
    572                   TypeFeedbackId id = TypeFeedbackId::None());
    573   void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None());
    574 
    575   void SetFunctionPosition(FunctionLiteral* fun);
    576   void SetReturnPosition(FunctionLiteral* fun);
    577   void SetStatementPosition(Statement* stmt);
    578   void SetExpressionPosition(Expression* expr);
    579   void SetSourcePosition(int pos);
    580 
    581   // Non-local control flow support.
    582   void EnterFinallyBlock();
    583   void ExitFinallyBlock();
    584 
    585   // Loop nesting counter.
    586   int loop_depth() { return loop_depth_; }
    587   void increment_loop_depth() { loop_depth_++; }
    588   void decrement_loop_depth() {
    589     DCHECK(loop_depth_ > 0);
    590     loop_depth_--;
    591   }
    592 
    593   MacroAssembler* masm() { return masm_; }
    594 
    595   class ExpressionContext;
    596   const ExpressionContext* context() { return context_; }
    597   void set_new_context(const ExpressionContext* context) { context_ = context; }
    598 
    599   Handle<Script> script() { return info_->script(); }
    600   bool is_eval() { return info_->is_eval(); }
    601   bool is_native() { return info_->is_native(); }
    602   StrictMode strict_mode() { return function()->strict_mode(); }
    603   FunctionLiteral* function() { return info_->function(); }
    604   Scope* scope() { return scope_; }
    605 
    606   static Register result_register();
    607   static Register context_register();
    608 
    609   // Set fields in the stack frame. Offsets are the frame pointer relative
    610   // offsets defined in, e.g., StandardFrameConstants.
    611   void StoreToFrameField(int frame_offset, Register value);
    612 
    613   // Load a value from the current context. Indices are defined as an enum
    614   // in v8::internal::Context.
    615   void LoadContextField(Register dst, int context_index);
    616 
    617   // Push the function argument for the runtime functions PushWithContext
    618   // and PushCatchContext.
    619   void PushFunctionArgumentForContextAllocation();
    620 
    621   // AST node visit functions.
    622 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
    623   AST_NODE_LIST(DECLARE_VISIT)
    624 #undef DECLARE_VISIT
    625 
    626   void VisitComma(BinaryOperation* expr);
    627   void VisitLogicalExpression(BinaryOperation* expr);
    628   void VisitArithmeticExpression(BinaryOperation* expr);
    629 
    630   void VisitForTypeofValue(Expression* expr);
    631 
    632   void Generate();
    633   void PopulateDeoptimizationData(Handle<Code> code);
    634   void PopulateTypeFeedbackInfo(Handle<Code> code);
    635 
    636   Handle<FixedArray> handler_table() { return handler_table_; }
    637 
    638   struct BailoutEntry {
    639     BailoutId id;
    640     unsigned pc_and_state;
    641   };
    642 
    643   struct BackEdgeEntry {
    644     BailoutId id;
    645     unsigned pc;
    646     uint32_t loop_depth;
    647   };
    648 
    649   class ExpressionContext BASE_EMBEDDED {
    650    public:
    651     explicit ExpressionContext(FullCodeGenerator* codegen)
    652         : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
    653       codegen->set_new_context(this);
    654     }
    655 
    656     virtual ~ExpressionContext() {
    657       codegen_->set_new_context(old_);
    658     }
    659 
    660     Isolate* isolate() const { return codegen_->isolate(); }
    661 
    662     // Convert constant control flow (true or false) to the result expected for
    663     // this expression context.
    664     virtual void Plug(bool flag) const = 0;
    665 
    666     // Emit code to convert a pure value (in a register, known variable
    667     // location, as a literal, or on top of the stack) into the result
    668     // expected according to this expression context.
    669     virtual void Plug(Register reg) const = 0;
    670     virtual void Plug(Variable* var) const = 0;
    671     virtual void Plug(Handle<Object> lit) const = 0;
    672     virtual void Plug(Heap::RootListIndex index) const = 0;
    673     virtual void PlugTOS() const = 0;
    674 
    675     // Emit code to convert pure control flow, given as a pair of unbound
    676     // labels, into the result expected according to this expression context.
    677     // The implementation will bind both labels unless it's a TestContext,
    678     // which won't bind them at this point.
    679     virtual void Plug(Label* materialize_true,
    680                       Label* materialize_false) const = 0;
    681 
    682     // Emit code to discard count elements from the top of stack, then convert
    683     // a pure value into the result expected according to this expression
    684     // context.
    685     virtual void DropAndPlug(int count, Register reg) const = 0;
    686 
    687     // Set up branch labels for a test expression.  The three Label** parameters
    688     // are output parameters.
    689     virtual void PrepareTest(Label* materialize_true,
    690                              Label* materialize_false,
    691                              Label** if_true,
    692                              Label** if_false,
    693                              Label** fall_through) const = 0;
    694 
    695     // Returns true if we are evaluating only for side effects (i.e. if the
    696     // result will be discarded).
    697     virtual bool IsEffect() const { return false; }
    698 
    699     // Returns true if we are evaluating for the value (accumulator or stack).
    700     virtual bool IsAccumulatorValue() const { return false; }
    701     virtual bool IsStackValue() const { return false; }
    702 
    703     // Returns true if we are branching on the value rather than materializing
    704     // it.  Only used for asserts.
    705     virtual bool IsTest() const { return false; }
    706 
    707    protected:
    708     FullCodeGenerator* codegen() const { return codegen_; }
    709     MacroAssembler* masm() const { return masm_; }
    710     MacroAssembler* masm_;
    711 
    712    private:
    713     const ExpressionContext* old_;
    714     FullCodeGenerator* codegen_;
    715   };
    716 
    717   class AccumulatorValueContext : public ExpressionContext {
    718    public:
    719     explicit AccumulatorValueContext(FullCodeGenerator* codegen)
    720         : ExpressionContext(codegen) { }
    721 
    722     virtual void Plug(bool flag) const;
    723     virtual void Plug(Register reg) const;
    724     virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    725     virtual void Plug(Variable* var) const;
    726     virtual void Plug(Handle<Object> lit) const;
    727     virtual void Plug(Heap::RootListIndex) const;
    728     virtual void PlugTOS() const;
    729     virtual void DropAndPlug(int count, Register reg) const;
    730     virtual void PrepareTest(Label* materialize_true,
    731                              Label* materialize_false,
    732                              Label** if_true,
    733                              Label** if_false,
    734                              Label** fall_through) const;
    735     virtual bool IsAccumulatorValue() const { return true; }
    736   };
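          //
          // Rough sketch of the Plug(Register) contract (hypothetical; the actual
          // per-platform code differs): an AccumulatorValueContext moves the
          // register into result_register(), while a StackValueContext pushes it.
          //
          //   __ mov(result_register(), reg);  // AccumulatorValueContext (sketch)
          //   __ push(reg);                    // StackValueContext (sketch)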
    737 
    738   class StackValueContext : public ExpressionContext {
    739    public:
    740     explicit StackValueContext(FullCodeGenerator* codegen)
    741         : ExpressionContext(codegen) { }
    742 
    743     virtual void Plug(bool flag) const;
    744     virtual void Plug(Register reg) const;
    745     virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    746     virtual void Plug(Variable* var) const;
    747     virtual void Plug(Handle<Object> lit) const;
    748     virtual void Plug(Heap::RootListIndex) const;
    749     virtual void PlugTOS() const;
    750     virtual void DropAndPlug(int count, Register reg) const;
    751     virtual void PrepareTest(Label* materialize_true,
    752                              Label* materialize_false,
    753                              Label** if_true,
    754                              Label** if_false,
    755                              Label** fall_through) const;
    756     virtual bool IsStackValue() const { return true; }
    757   };
    758 
    759   class TestContext : public ExpressionContext {
    760    public:
    761     TestContext(FullCodeGenerator* codegen,
    762                 Expression* condition,
    763                 Label* true_label,
    764                 Label* false_label,
    765                 Label* fall_through)
    766         : ExpressionContext(codegen),
    767           condition_(condition),
    768           true_label_(true_label),
    769           false_label_(false_label),
    770           fall_through_(fall_through) { }
    771 
    772     static const TestContext* cast(const ExpressionContext* context) {
    773       DCHECK(context->IsTest());
    774       return reinterpret_cast<const TestContext*>(context);
    775     }
    776 
    777     Expression* condition() const { return condition_; }
    778     Label* true_label() const { return true_label_; }
    779     Label* false_label() const { return false_label_; }
    780     Label* fall_through() const { return fall_through_; }
    781 
    782     virtual void Plug(bool flag) const;
    783     virtual void Plug(Register reg) const;
    784     virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    785     virtual void Plug(Variable* var) const;
    786     virtual void Plug(Handle<Object> lit) const;
    787     virtual void Plug(Heap::RootListIndex) const;
    788     virtual void PlugTOS() const;
    789     virtual void DropAndPlug(int count, Register reg) const;
    790     virtual void PrepareTest(Label* materialize_true,
    791                              Label* materialize_false,
    792                              Label** if_true,
    793                              Label** if_false,
    794                              Label** fall_through) const;
    795     virtual bool IsTest() const { return true; }
    796 
    797    private:
    798     Expression* condition_;
    799     Label* true_label_;
    800     Label* false_label_;
    801     Label* fall_through_;
    802   };
    803 
    804   class EffectContext : public ExpressionContext {
    805    public:
    806     explicit EffectContext(FullCodeGenerator* codegen)
    807         : ExpressionContext(codegen) { }
    808 
    809     virtual void Plug(bool flag) const;
    810     virtual void Plug(Register reg) const;
    811     virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    812     virtual void Plug(Variable* var) const;
    813     virtual void Plug(Handle<Object> lit) const;
    814     virtual void Plug(Heap::RootListIndex) const;
    815     virtual void PlugTOS() const;
    816     virtual void DropAndPlug(int count, Register reg) const;
    817     virtual void PrepareTest(Label* materialize_true,
    818                              Label* materialize_false,
    819                              Label** if_true,
    820                              Label** if_false,
    821                              Label** fall_through) const;
    822     virtual bool IsEffect() const { return true; }
    823   };
    824 
    825   MacroAssembler* masm_;
    826   CompilationInfo* info_;
    827   Scope* scope_;
    828   Label return_label_;
    829   NestedStatement* nesting_stack_;
    830   int loop_depth_;
    831   ZoneList<Handle<Object> >* globals_;
    832   Handle<FixedArray> modules_;
    833   int module_index_;
    834   const ExpressionContext* context_;
    835   ZoneList<BailoutEntry> bailout_entries_;
    836   ZoneList<BackEdgeEntry> back_edges_;
    837   int ic_total_count_;
    838   Handle<FixedArray> handler_table_;
    839   Handle<Cell> profiling_counter_;
    840   bool generate_debug_code_;
    841 
    842   friend class NestedStatement;
    843 
    844   DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
    845   DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
    846 };
    847 
    848 
    849 // A map from property names to getter/setter pairs allocated in the zone.
    850 class AccessorTable: public TemplateHashMap<Literal,
    851                                             ObjectLiteral::Accessors,
    852                                             ZoneAllocationPolicy> {
    853  public:
    854   explicit AccessorTable(Zone* zone) :
    855       TemplateHashMap<Literal, ObjectLiteral::Accessors,
    856                       ZoneAllocationPolicy>(Literal::Match,
    857                                             ZoneAllocationPolicy(zone)),
    858       zone_(zone) { }
    859 
    860   Iterator lookup(Literal* literal) {
    861     Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
    862     if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
    863     return it;
    864   }
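          //
          // Hypothetical usage while collecting accessors from an object literal:
          //
          //   AccessorTable accessor_table(zone());
          //   accessor_table.lookup(key)->second->getter = getter_value;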
    865 
    866  private:
    867   Zone* zone_;
    868 };
    869 
    870 
    871 class BackEdgeTable {
    872  public:
    873   BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
    874     DCHECK(code->kind() == Code::FUNCTION);
    875     instruction_start_ = code->instruction_start();
    876     Address table_address = instruction_start_ + code->back_edge_table_offset();
    877     length_ = Memory::uint32_at(table_address);
    878     start_ = table_address + kTableLengthSize;
    879   }
    880 
    881   uint32_t length() { return length_; }
    882 
    883   BailoutId ast_id(uint32_t index) {
    884     return BailoutId(static_cast<int>(
    885         Memory::uint32_at(entry_at(index) + kAstIdOffset)));
    886   }
    887 
    888   uint32_t loop_depth(uint32_t index) {
    889     return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
    890   }
    891 
    892   uint32_t pc_offset(uint32_t index) {
    893     return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
    894   }
    895 
    896   Address pc(uint32_t index) {
    897     return instruction_start_ + pc_offset(index);
    898   }
    899 
    900   enum BackEdgeState {
    901     INTERRUPT,
    902     ON_STACK_REPLACEMENT,
    903     OSR_AFTER_STACK_CHECK
    904   };
    905 
    906   // Increase allowed loop nesting level by one and patch those matching loops.
    907   static void Patch(Isolate* isolate, Code* unoptimized_code);
    908 
    909   // Patch the back edge to the target state, given the correct replacement code.
    910   static void PatchAt(Code* unoptimized_code,
    911                       Address pc,
    912                       BackEdgeState target_state,
    913                       Code* replacement_code);
    914 
    915   // Change all patched back edges back to normal interrupts.
    916   static void Revert(Isolate* isolate,
    917                      Code* unoptimized_code);
    918 
    919   // Change a back edge patched for on-stack replacement to perform a
    920   // stack check first.
    921   static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);
    922 
    923   // Revert the patch by AddStackCheck.
    924   static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);
    925 
    926   // Return the current patch state of the back edge.
    927   static BackEdgeState GetBackEdgeState(Isolate* isolate,
    928                                         Code* unoptimized_code,
    929                                         Address pc_after);
    930 
    931 #ifdef DEBUG
    932   // Verify that all back edges of a certain loop depth are patched.
    933   static bool Verify(Isolate* isolate, Code* unoptimized_code);
    934 #endif  // DEBUG
    935 
    936  private:
    937   Address entry_at(uint32_t index) {
    938     DCHECK(index < length_);
    939     return start_ + index * kEntrySize;
    940   }
    941 
    942   static const int kTableLengthSize = kIntSize;
    943   static const int kAstIdOffset = 0 * kIntSize;
    944   static const int kPcOffsetOffset = 1 * kIntSize;
    945   static const int kLoopDepthOffset = 2 * kIntSize;
    946   static const int kEntrySize = 3 * kIntSize;
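          //
          // Layout implied by the constants above (informal sketch):
          //
          //   [ length  : uint32                          ]
          //   [ entry 0 : ast id | pc offset | loop depth ]  // 3 * kIntSize per entry
          //   [ entry 1 : ast id | pc offset | loop depth ]
          //   ...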
    947 
    948   Address start_;
    949   Address instruction_start_;
    950   uint32_t length_;
    951 };
    952 
    953 
    954 } }  // namespace v8::internal
    955 
    956 #endif  // V8_FULL_CODEGEN_H_
    957