      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/v8.h"
      6 
      7 #include "src/codegen.h"
      8 #include "src/compiler.h"
      9 #include "src/debug.h"
     10 #include "src/full-codegen.h"
     11 #include "src/liveedit.h"
     12 #include "src/macro-assembler.h"
     13 #include "src/prettyprinter.h"
     14 #include "src/scopes.h"
     15 #include "src/scopeinfo.h"
     16 #include "src/snapshot.h"
     17 #include "src/stub-cache.h"
     18 
     19 namespace v8 {
     20 namespace internal {
     21 
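         // The checker visits a single statement or expression and sets
         // is_breakable_ when it reaches a construct the debugger can break at;
         // the empty visitor methods below cover constructs that are not
         // breakable by themselves.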
     22 void BreakableStatementChecker::Check(Statement* stmt) {
     23   Visit(stmt);
     24 }
     25 
     26 
     27 void BreakableStatementChecker::Check(Expression* expr) {
     28   Visit(expr);
     29 }
     30 
     31 
     32 void BreakableStatementChecker::VisitVariableDeclaration(
     33     VariableDeclaration* decl) {
     34 }
     35 
     36 void BreakableStatementChecker::VisitFunctionDeclaration(
     37     FunctionDeclaration* decl) {
     38 }
     39 
     40 void BreakableStatementChecker::VisitModuleDeclaration(
     41     ModuleDeclaration* decl) {
     42 }
     43 
     44 void BreakableStatementChecker::VisitImportDeclaration(
     45     ImportDeclaration* decl) {
     46 }
     47 
     48 void BreakableStatementChecker::VisitExportDeclaration(
     49     ExportDeclaration* decl) {
     50 }
     51 
     52 
     53 void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
     54 }
     55 
     56 
     57 void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
     58 }
     59 
     60 
     61 void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
     62 }
     63 
     64 
     65 void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
     66 }
     67 
     68 
     69 void BreakableStatementChecker::VisitModuleStatement(ModuleStatement* stmt) {
     70 }
     71 
     72 
     73 void BreakableStatementChecker::VisitBlock(Block* stmt) {
     74 }
     75 
     76 
     77 void BreakableStatementChecker::VisitExpressionStatement(
     78     ExpressionStatement* stmt) {
     79   // Check if expression is breakable.
     80   Visit(stmt->expression());
     81 }
     82 
     83 
     84 void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
     85 }
     86 
     87 
     88 void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
     89   // If the condition is breakable the if statement is breakable.
     90   Visit(stmt->condition());
     91 }
     92 
     93 
     94 void BreakableStatementChecker::VisitContinueStatement(
     95     ContinueStatement* stmt) {
     96 }
     97 
     98 
     99 void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
    100 }
    101 
    102 
    103 void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
    104   // Return is breakable if the expression is.
    105   Visit(stmt->expression());
    106 }
    107 
    108 
    109 void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
    110   Visit(stmt->expression());
    111 }
    112 
    113 
    114 void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
     115   // Switch statements are breakable if the tag expression is.
    116   Visit(stmt->tag());
    117 }
    118 
    119 
    120 void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
    121   // Mark do while as breakable to avoid adding a break slot in front of it.
    122   is_breakable_ = true;
    123 }
    124 
    125 
    126 void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
    127   // Mark while statements breakable if the condition expression is.
    128   Visit(stmt->cond());
    129 }
    130 
    131 
    132 void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
    133   // Mark for statements breakable if the condition expression is.
    134   if (stmt->cond() != NULL) {
    135     Visit(stmt->cond());
    136   }
    137 }
    138 
    139 
    140 void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
    141   // Mark for in statements breakable if the enumerable expression is.
    142   Visit(stmt->enumerable());
    143 }
    144 
    145 
    146 void BreakableStatementChecker::VisitForOfStatement(ForOfStatement* stmt) {
    147   // For-of is breakable because of the next() call.
    148   is_breakable_ = true;
    149 }
    150 
    151 
    152 void BreakableStatementChecker::VisitTryCatchStatement(
    153     TryCatchStatement* stmt) {
    154   // Mark try catch as breakable to avoid adding a break slot in front of it.
    155   is_breakable_ = true;
    156 }
    157 
    158 
    159 void BreakableStatementChecker::VisitTryFinallyStatement(
    160     TryFinallyStatement* stmt) {
    161   // Mark try finally as breakable to avoid adding a break slot in front of it.
    162   is_breakable_ = true;
    163 }
    164 
    165 
    166 void BreakableStatementChecker::VisitDebuggerStatement(
    167     DebuggerStatement* stmt) {
    168   // The debugger statement is breakable.
    169   is_breakable_ = true;
    170 }
    171 
    172 
    173 void BreakableStatementChecker::VisitCaseClause(CaseClause* clause) {
    174 }
    175 
    176 
    177 void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
    178 }
    179 
    180 
    181 void BreakableStatementChecker::VisitNativeFunctionLiteral(
    182     NativeFunctionLiteral* expr) {
    183 }
    184 
    185 
    186 void BreakableStatementChecker::VisitConditional(Conditional* expr) {
    187 }
    188 
    189 
    190 void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
    191 }
    192 
    193 
    194 void BreakableStatementChecker::VisitLiteral(Literal* expr) {
    195 }
    196 
    197 
    198 void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
    199 }
    200 
    201 
    202 void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
    203 }
    204 
    205 
    206 void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
    207 }
    208 
    209 
    210 void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
    211   // If assigning to a property (including a global property) the assignment is
    212   // breakable.
    213   VariableProxy* proxy = expr->target()->AsVariableProxy();
    214   Property* prop = expr->target()->AsProperty();
    215   if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
    216     is_breakable_ = true;
    217     return;
    218   }
    219 
    220   // Otherwise the assignment is breakable if the assigned value is.
    221   Visit(expr->value());
    222 }
    223 
    224 
    225 void BreakableStatementChecker::VisitYield(Yield* expr) {
    226   // Yield is breakable if the expression is.
    227   Visit(expr->expression());
    228 }
    229 
    230 
    231 void BreakableStatementChecker::VisitThrow(Throw* expr) {
    232   // Throw is breakable if the expression is.
    233   Visit(expr->exception());
    234 }
    235 
    236 
    237 void BreakableStatementChecker::VisitProperty(Property* expr) {
    238   // Property load is breakable.
    239   is_breakable_ = true;
    240 }
    241 
    242 
    243 void BreakableStatementChecker::VisitCall(Call* expr) {
    244   // Function calls both through IC and call stub are breakable.
    245   is_breakable_ = true;
    246 }
    247 
    248 
    249 void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
    250   // Function calls through new are breakable.
    251   is_breakable_ = true;
    252 }
    253 
    254 
    255 void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
    256 }
    257 
    258 
    259 void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
    260   Visit(expr->expression());
    261 }
    262 
    263 
    264 void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
    265   Visit(expr->expression());
    266 }
    267 
    268 
    269 void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
    270   Visit(expr->left());
    271   if (expr->op() != Token::AND &&
    272       expr->op() != Token::OR) {
    273     Visit(expr->right());
    274   }
    275 }
    276 
    277 
    278 void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
    279   Visit(expr->left());
    280   Visit(expr->right());
    281 }
    282 
    283 
    284 void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
    285 }
    286 
    287 
    288 #define __ ACCESS_MASM(masm())
    289 
    290 bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
    291   Isolate* isolate = info->isolate();
    292 
    293   Logger::TimerEventScope timer(
    294       isolate, Logger::TimerEventScope::v8_compile_full_code);
    295 
    296   Handle<Script> script = info->script();
    297   if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    298     int len = String::cast(script->source())->length();
    299     isolate->counters()->total_full_codegen_source_size()->Increment(len);
    300   }
    301   CodeGenerator::MakeCodePrologue(info, "full");
    302   const int kInitialBufferSize = 4 * KB;
    303   MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
    304 #ifdef ENABLE_GDB_JIT_INTERFACE
    305   masm.positions_recorder()->StartGDBJITLineInfoRecording();
    306 #endif
    307   LOG_CODE_EVENT(isolate,
    308                  CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));
    309 
    310   FullCodeGenerator cgen(&masm, info);
    311   cgen.Generate();
    312   if (cgen.HasStackOverflow()) {
    313     ASSERT(!isolate->has_pending_exception());
    314     return false;
    315   }
    316   unsigned table_offset = cgen.EmitBackEdgeTable();
    317 
    318   Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
    319   Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
    320   code->set_optimizable(info->IsOptimizable() &&
    321                         !info->function()->dont_optimize() &&
    322                         info->function()->scope()->AllowsLazyCompilation());
    323   cgen.PopulateDeoptimizationData(code);
    324   cgen.PopulateTypeFeedbackInfo(code);
    325   code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
    326   code->set_handler_table(*cgen.handler_table());
    327   code->set_compiled_optimizable(info->IsOptimizable());
    328   code->set_allow_osr_at_loop_nesting_level(0);
    329   code->set_profiler_ticks(0);
    330   code->set_back_edge_table_offset(table_offset);
    331   code->set_back_edges_patched_for_osr(false);
    332   CodeGenerator::PrintCode(code, info);
    333   info->SetCode(code);
    334 #ifdef ENABLE_GDB_JIT_INTERFACE
    335   if (FLAG_gdbjit) {
    336     GDBJITLineInfo* lineinfo =
    337         masm.positions_recorder()->DetachGDBJITLineInfo();
    338     GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
    339   }
    340 #endif
    341   void* line_info = masm.positions_recorder()->DetachJITHandlerData();
    342   LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
    343   return true;
    344 }
    345 
    346 
    347 unsigned FullCodeGenerator::EmitBackEdgeTable() {
     348   // The back edge table consists of a length (in number of entries)
     349   // field, and then a sequence of entries.  Each entry holds the AST id,
     350   // the code-relative pc offset, and the loop depth.
    351   masm()->Align(kIntSize);
    352   unsigned offset = masm()->pc_offset();
    353   unsigned length = back_edges_.length();
    354   __ dd(length);
    355   for (unsigned i = 0; i < length; ++i) {
    356     __ dd(back_edges_[i].id.ToInt());
    357     __ dd(back_edges_[i].pc);
    358     __ dd(back_edges_[i].loop_depth);
    359   }
    360   return offset;
    361 }
    362 
    363 
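         // Ensure the given feedback vector slot holds an AllocationSite,
         // allocating a fresh one if the slot does not contain one already.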
    364 void FullCodeGenerator::EnsureSlotContainsAllocationSite(int slot) {
    365   Handle<FixedArray> vector = FeedbackVector();
    366   if (!vector->get(slot)->IsAllocationSite()) {
    367     Handle<AllocationSite> allocation_site =
    368         isolate()->factory()->NewAllocationSite();
    369     vector->set(slot, *allocation_site);
    370   }
    371 }
    372 
    373 
    374 void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
    375   // Fill in the deoptimization information.
    376   ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
    377   if (!info_->HasDeoptimizationSupport()) return;
    378   int length = bailout_entries_.length();
    379   Handle<DeoptimizationOutputData> data =
    380       DeoptimizationOutputData::New(isolate(), length, TENURED);
    381   for (int i = 0; i < length; i++) {
    382     data->SetAstId(i, bailout_entries_[i].id);
    383     data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
    384   }
    385   code->set_deoptimization_data(*data);
    386 }
    387 
    388 
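         // Attach a new TypeFeedbackInfo object recording the total number of
         // ICs emitted for this code object.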
    389 void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
    390   Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
    391   info->set_ic_total_count(ic_total_count_);
    392   ASSERT(!isolate()->heap()->InNewSpace(*info));
    393   code->set_type_feedback_info(*info);
    394 }
    395 
    396 
    397 void FullCodeGenerator::Initialize() {
    398   InitializeAstVisitor(info_->zone());
    399   // The generation of debug code must match between the snapshot code and the
    400   // code that is generated later.  This is assumed by the debugger when it is
    401   // calculating PC offsets after generating a debug version of code.  Therefore
     402   // we disable the production of debug code in the full compiler if we are
     403   // either generating a snapshot or have booted from a snapshot.
    404   generate_debug_code_ = FLAG_debug_code &&
    405                          !masm_->serializer_enabled() &&
    406                          !Snapshot::HaveASnapshotToStartFrom();
    407   masm_->set_emit_debug_code(generate_debug_code_);
    408   masm_->set_predictable_code_size(true);
    409 }
    410 
    411 
    412 void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
    413   PrepareForBailoutForId(node->id(), state);
    414 }
    415 
    416 
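         // Emit a call to the LoadIC stub for the given contextual mode, tagging
         // the call site with the given type feedback id.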
    417 void FullCodeGenerator::CallLoadIC(ContextualMode contextual_mode,
    418                                    TypeFeedbackId id) {
    419   ExtraICState extra_state = LoadIC::ComputeExtraICState(contextual_mode);
    420   Handle<Code> ic = LoadIC::initialize_stub(isolate(), extra_state);
    421   CallIC(ic, id);
    422 }
    423 
    424 
    425 void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) {
    426   Handle<Code> ic = StoreIC::initialize_stub(isolate(), strict_mode());
    427   CallIC(ic, id);
    428 }
    429 
    430 
    431 void FullCodeGenerator::RecordJSReturnSite(Call* call) {
    432   // We record the offset of the function return so we can rebuild the frame
    433   // if the function was inlined, i.e., this is the return address in the
    434   // inlined function's frame.
    435   //
    436   // The state is ignored.  We defensively set it to TOS_REG, which is the
    437   // real state of the unoptimized code at the return site.
    438   PrepareForBailoutForId(call->ReturnId(), TOS_REG);
    439 #ifdef DEBUG
    440   // In debug builds, mark the return so we can verify that this function
    441   // was called.
    442   ASSERT(!call->return_is_recorded_);
    443   call->return_is_recorded_ = true;
    444 #endif
    445 }
    446 
    447 
    448 void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
    449   // There's no need to prepare this code for bailouts from already optimized
    450   // code or code that can't be optimized.
    451   if (!info_->HasDeoptimizationSupport()) return;
    452   unsigned pc_and_state =
    453       StateField::encode(state) | PcField::encode(masm_->pc_offset());
    454   ASSERT(Smi::IsValid(pc_and_state));
    455 #ifdef DEBUG
    456   for (int i = 0; i < bailout_entries_.length(); ++i) {
    457     ASSERT(bailout_entries_[i].id != id);
    458   }
    459 #endif
    460   BailoutEntry entry = { id, pc_and_state };
    461   bailout_entries_.Add(entry, zone());
    462 }
    463 
    464 
    465 void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
    466   // The pc offset does not need to be encoded and packed together with a state.
    467   ASSERT(masm_->pc_offset() > 0);
    468   ASSERT(loop_depth() > 0);
    469   uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
    470   BackEdgeEntry entry =
    471       { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
    472   back_edges_.Add(entry, zone());
    473 }
    474 
    475 
    476 bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
     477   // Inline the smi case inside loops, but not for division and modulo,
     478   // which are too complicated and take up too much space.
     479   if (op == Token::DIV || op == Token::MOD) return false;
    480   if (FLAG_always_inline_smi_code) return true;
    481   return loop_depth_ > 0;
    482 }
    483 
    484 
    485 void FullCodeGenerator::EffectContext::Plug(Register reg) const {
    486 }
    487 
    488 
    489 void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
    490   __ Move(result_register(), reg);
    491 }
    492 
    493 
    494 void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
    495   __ Push(reg);
    496 }
    497 
    498 
    499 void FullCodeGenerator::TestContext::Plug(Register reg) const {
    500   // For simplicity we always test the accumulator register.
    501   __ Move(result_register(), reg);
    502   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
    503   codegen()->DoTest(this);
    504 }
    505 
    506 
    507 void FullCodeGenerator::EffectContext::PlugTOS() const {
    508   __ Drop(1);
    509 }
    510 
    511 
    512 void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
    513   __ Pop(result_register());
    514 }
    515 
    516 
    517 void FullCodeGenerator::StackValueContext::PlugTOS() const {
    518 }
    519 
    520 
    521 void FullCodeGenerator::TestContext::PlugTOS() const {
    522   // For simplicity we always test the accumulator register.
    523   __ Pop(result_register());
    524   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
    525   codegen()->DoTest(this);
    526 }
    527 
    528 
    529 void FullCodeGenerator::EffectContext::PrepareTest(
    530     Label* materialize_true,
    531     Label* materialize_false,
    532     Label** if_true,
    533     Label** if_false,
    534     Label** fall_through) const {
    535   // In an effect context, the true and the false case branch to the
    536   // same label.
    537   *if_true = *if_false = *fall_through = materialize_true;
    538 }
    539 
    540 
    541 void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    542     Label* materialize_true,
    543     Label* materialize_false,
    544     Label** if_true,
    545     Label** if_false,
    546     Label** fall_through) const {
    547   *if_true = *fall_through = materialize_true;
    548   *if_false = materialize_false;
    549 }
    550 
    551 
    552 void FullCodeGenerator::StackValueContext::PrepareTest(
    553     Label* materialize_true,
    554     Label* materialize_false,
    555     Label** if_true,
    556     Label** if_false,
    557     Label** fall_through) const {
    558   *if_true = *fall_through = materialize_true;
    559   *if_false = materialize_false;
    560 }
    561 
    562 
    563 void FullCodeGenerator::TestContext::PrepareTest(
    564     Label* materialize_true,
    565     Label* materialize_false,
    566     Label** if_true,
    567     Label** if_false,
    568     Label** fall_through) const {
    569   *if_true = true_label_;
    570   *if_false = false_label_;
    571   *fall_through = fall_through_;
    572 }
    573 
    574 
    575 void FullCodeGenerator::DoTest(const TestContext* context) {
    576   DoTest(context->condition(),
    577          context->true_label(),
    578          context->false_label(),
    579          context->fall_through());
    580 }
    581 
    582 
    583 void FullCodeGenerator::AllocateModules(ZoneList<Declaration*>* declarations) {
    584   ASSERT(scope_->is_global_scope());
    585 
    586   for (int i = 0; i < declarations->length(); i++) {
    587     ModuleDeclaration* declaration = declarations->at(i)->AsModuleDeclaration();
    588     if (declaration != NULL) {
    589       ModuleLiteral* module = declaration->module()->AsModuleLiteral();
    590       if (module != NULL) {
    591         Comment cmnt(masm_, "[ Link nested modules");
    592         Scope* scope = module->body()->scope();
    593         Interface* interface = scope->interface();
    594         ASSERT(interface->IsModule() && interface->IsFrozen());
    595 
    596         interface->Allocate(scope->module_var()->index());
    597 
    598         // Set up module context.
    599         ASSERT(scope->interface()->Index() >= 0);
    600         __ Push(Smi::FromInt(scope->interface()->Index()));
    601         __ Push(scope->GetScopeInfo());
    602         __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
    603         StoreToFrameField(StandardFrameConstants::kContextOffset,
    604                           context_register());
    605 
    606         AllocateModules(scope->declarations());
    607 
    608         // Pop module context.
    609         LoadContextField(context_register(), Context::PREVIOUS_INDEX);
    610         // Update local stack frame context field.
    611         StoreToFrameField(StandardFrameConstants::kContextOffset,
    612                           context_register());
    613       }
    614     }
    615   }
    616 }
    617 
    618 
    619 // Modules have their own local scope, represented by their own context.
    620 // Module instance objects have an accessor for every export that forwards
    621 // access to the respective slot from the module's context. (Exports that are
    622 // modules themselves, however, are simple data properties.)
    623 //
    624 // All modules have a _hosting_ scope/context, which (currently) is the
    625 // (innermost) enclosing global scope. To deal with recursion, nested modules
    626 // are hosted by the same scope as global ones.
    627 //
    628 // For every (global or nested) module literal, the hosting context has an
    629 // internal slot that points directly to the respective module context. This
    630 // enables quick access to (statically resolved) module members by 2-dimensional
    631 // access through the hosting context. For example,
    632 //
    633 //   module A {
    634 //     let x;
    635 //     module B { let y; }
    636 //   }
    637 //   module C { let z; }
    638 //
    639 // allocates contexts as follows:
    640 //
    641 // [header| .A | .B | .C | A | C ]  (global)
    642 //           |    |    |
    643 //           |    |    +-- [header| z ]  (module)
    644 //           |    |
    645 //           |    +------- [header| y ]  (module)
    646 //           |
    647 //           +------------ [header| x | B ]  (module)
    648 //
    649 // Here, .A, .B, .C are the internal slots pointing to the hosted module
    650 // contexts, whereas A, B, C hold the actual instance objects (note that every
    651 // module context also points to the respective instance object through its
    652 // extension slot in the header).
    653 //
    654 // To deal with arbitrary recursion and aliases between modules,
    655 // they are created and initialized in several stages. Each stage applies to
    656 // all modules in the hosting global scope, including nested ones.
    657 //
    658 // 1. Allocate: for each module _literal_, allocate the module contexts and
    659 //    respective instance object and wire them up. This happens in the
    660 //    PushModuleContext runtime function, as generated by AllocateModules
    661 //    (invoked by VisitDeclarations in the hosting scope).
    662 //
    663 // 2. Bind: for each module _declaration_ (i.e. literals as well as aliases),
    664 //    assign the respective instance object to respective local variables. This
    665 //    happens in VisitModuleDeclaration, and uses the instance objects created
    666 //    in the previous stage.
    667 //    For each module _literal_, this phase also constructs a module descriptor
    668 //    for the next stage. This happens in VisitModuleLiteral.
    669 //
    670 // 3. Populate: invoke the DeclareModules runtime function to populate each
     671 //    _instance_ object with accessors for its exports. This is generated by
    672 //    DeclareModules (invoked by VisitDeclarations in the hosting scope again),
    673 //    and uses the descriptors generated in the previous stage.
    674 //
    675 // 4. Initialize: execute the module bodies (and other code) in sequence. This
    676 //    happens by the separate statements generated for module bodies. To reenter
    677 //    the module scopes properly, the parser inserted ModuleStatements.
    678 
    679 void FullCodeGenerator::VisitDeclarations(
    680     ZoneList<Declaration*>* declarations) {
    681   Handle<FixedArray> saved_modules = modules_;
    682   int saved_module_index = module_index_;
    683   ZoneList<Handle<Object> >* saved_globals = globals_;
    684   ZoneList<Handle<Object> > inner_globals(10, zone());
    685   globals_ = &inner_globals;
    686 
    687   if (scope_->num_modules() != 0) {
    688     // This is a scope hosting modules. Allocate a descriptor array to pass
    689     // to the runtime for initialization.
    690     Comment cmnt(masm_, "[ Allocate modules");
    691     ASSERT(scope_->is_global_scope());
    692     modules_ =
    693         isolate()->factory()->NewFixedArray(scope_->num_modules(), TENURED);
    694     module_index_ = 0;
    695 
    696     // Generate code for allocating all modules, including nested ones.
    697     // The allocated contexts are stored in internal variables in this scope.
    698     AllocateModules(declarations);
    699   }
    700 
    701   AstVisitor::VisitDeclarations(declarations);
    702 
    703   if (scope_->num_modules() != 0) {
    704     // Initialize modules from descriptor array.
    705     ASSERT(module_index_ == modules_->length());
    706     DeclareModules(modules_);
    707     modules_ = saved_modules;
    708     module_index_ = saved_module_index;
    709   }
    710 
    711   if (!globals_->is_empty()) {
    712     // Invoke the platform-dependent code generator to do the actual
    713     // declaration of the global functions and variables.
    714     Handle<FixedArray> array =
    715        isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    716     for (int i = 0; i < globals_->length(); ++i)
    717       array->set(i, *globals_->at(i));
    718     DeclareGlobals(array);
    719   }
    720 
    721   globals_ = saved_globals;
    722 }
    723 
    724 
    725 void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
    726   Block* block = module->body();
    727   Scope* saved_scope = scope();
    728   scope_ = block->scope();
    729   Interface* interface = scope_->interface();
    730 
    731   Comment cmnt(masm_, "[ ModuleLiteral");
    732   SetStatementPosition(block);
    733 
    734   ASSERT(!modules_.is_null());
    735   ASSERT(module_index_ < modules_->length());
    736   int index = module_index_++;
    737 
    738   // Set up module context.
    739   ASSERT(interface->Index() >= 0);
    740   __ Push(Smi::FromInt(interface->Index()));
    741   __ Push(Smi::FromInt(0));
    742   __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
    743   StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
    744 
    745   {
    746     Comment cmnt(masm_, "[ Declarations");
    747     VisitDeclarations(scope_->declarations());
    748   }
    749 
    750   // Populate the module description.
    751   Handle<ModuleInfo> description =
    752       ModuleInfo::Create(isolate(), interface, scope_);
    753   modules_->set(index, *description);
    754 
    755   scope_ = saved_scope;
    756   // Pop module context.
    757   LoadContextField(context_register(), Context::PREVIOUS_INDEX);
    758   // Update local stack frame context field.
    759   StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
    760 }
    761 
    762 
    763 void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
    764   // Nothing to do.
    765   // The instance object is resolved statically through the module's interface.
    766 }
    767 
    768 
    769 void FullCodeGenerator::VisitModulePath(ModulePath* module) {
    770   // Nothing to do.
    771   // The instance object is resolved statically through the module's interface.
    772 }
    773 
    774 
    775 void FullCodeGenerator::VisitModuleUrl(ModuleUrl* module) {
    776   // TODO(rossberg): dummy allocation for now.
    777   Scope* scope = module->body()->scope();
    778   Interface* interface = scope_->interface();
    779 
    780   ASSERT(interface->IsModule() && interface->IsFrozen());
    781   ASSERT(!modules_.is_null());
    782   ASSERT(module_index_ < modules_->length());
    783   interface->Allocate(scope->module_var()->index());
    784   int index = module_index_++;
    785 
    786   Handle<ModuleInfo> description =
    787       ModuleInfo::Create(isolate(), interface, scope_);
    788   modules_->set(index, *description);
    789 }
    790 
    791 
    792 int FullCodeGenerator::DeclareGlobalsFlags() {
    793   ASSERT(DeclareGlobalsStrictMode::is_valid(strict_mode()));
    794   return DeclareGlobalsEvalFlag::encode(is_eval()) |
    795       DeclareGlobalsNativeFlag::encode(is_native()) |
    796       DeclareGlobalsStrictMode::encode(strict_mode());
    797 }
    798 
    799 
    800 void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
    801   CodeGenerator::RecordPositions(masm_, fun->start_position());
    802 }
    803 
    804 
    805 void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
    806   CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
    807 }
    808 
    809 
    810 void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
    811   if (!info_->is_debug()) {
    812     CodeGenerator::RecordPositions(masm_, stmt->position());
    813   } else {
    814     // Check if the statement will be breakable without adding a debug break
    815     // slot.
    816     BreakableStatementChecker checker(zone());
    817     checker.Check(stmt);
    818     // Record the statement position right here if the statement is not
    819     // breakable. For breakable statements the actual recording of the
    820     // position will be postponed to the breakable code (typically an IC).
    821     bool position_recorded = CodeGenerator::RecordPositions(
    822         masm_, stmt->position(), !checker.is_breakable());
     823     // If the position recording did record a new position, generate a debug
     824     // break slot to make the statement breakable.
    825     if (position_recorded) {
    826       DebugCodegen::GenerateSlot(masm_);
    827     }
    828   }
    829 }
    830 
    831 
    832 void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
    833   if (!info_->is_debug()) {
    834     CodeGenerator::RecordPositions(masm_, expr->position());
    835   } else {
    836     // Check if the expression will be breakable without adding a debug break
    837     // slot.
    838     BreakableStatementChecker checker(zone());
    839     checker.Check(expr);
    840     // Record a statement position right here if the expression is not
    841     // breakable. For breakable expressions the actual recording of the
    842     // position will be postponed to the breakable code (typically an IC).
     843     // NOTE: this will record a statement position for something which might
     844     // not be a statement. Since stepping in the debugger only stops at
     845     // statement positions, this is used for, e.g., the condition expression
     846     // of a do-while loop.
    847     bool position_recorded = CodeGenerator::RecordPositions(
    848         masm_, expr->position(), !checker.is_breakable());
     849     // If the position recording did record a new position, generate a debug
     850     // break slot to make the statement breakable.
    851     if (position_recorded) {
    852       DebugCodegen::GenerateSlot(masm_);
    853     }
    854   }
    855 }
    856 
    857 
    858 void FullCodeGenerator::SetStatementPosition(int pos) {
    859   CodeGenerator::RecordPositions(masm_, pos);
    860 }
    861 
    862 
    863 void FullCodeGenerator::SetSourcePosition(int pos) {
    864   if (pos != RelocInfo::kNoPosition) {
    865     masm_->positions_recorder()->RecordPosition(pos);
    866   }
    867 }
    868 
    869 
     870 // Lookup table of code generators for special runtime calls which are
     871 // generated inline.
    872 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)          \
    873     &FullCodeGenerator::Emit##Name,
    874 
    875 const FullCodeGenerator::InlineFunctionGenerator
    876   FullCodeGenerator::kInlineFunctionGenerators[] = {
    877     INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
    878   };
    879 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
    880 
    881 
    882 FullCodeGenerator::InlineFunctionGenerator
    883   FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
    884     int lookup_index =
    885         static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
    886     ASSERT(lookup_index >= 0);
    887     ASSERT(static_cast<size_t>(lookup_index) <
    888            ARRAY_SIZE(kInlineFunctionGenerators));
    889     return kInlineFunctionGenerators[lookup_index];
    890 }
    891 
    892 
    893 void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
    894   const Runtime::Function* function = expr->function();
    895   ASSERT(function != NULL);
    896   ASSERT(function->intrinsic_type == Runtime::INLINE);
    897   InlineFunctionGenerator generator =
    898       FindInlineFunctionGenerator(function->function_id);
    899   ((*this).*(generator))(expr);
    900 }
    901 
    902 
    903 void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
    904   ZoneList<Expression*>* args = expr->arguments();
    905   ASSERT(args->length() == 2);
    906   EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
    907 }
    908 
    909 
    910 void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
    911   ZoneList<Expression*>* args = expr->arguments();
    912   ASSERT(args->length() == 2);
    913   EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
    914 }
    915 
    916 
    917 void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
    918   context()->Plug(handle(Smi::FromInt(0), isolate()));
    919 }
    920 
    921 
    922 void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
    923   switch (expr->op()) {
    924     case Token::COMMA:
    925       return VisitComma(expr);
    926     case Token::OR:
    927     case Token::AND:
    928       return VisitLogicalExpression(expr);
    929     default:
    930       return VisitArithmeticExpression(expr);
    931   }
    932 }
    933 
    934 
    935 void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
    936   if (context()->IsEffect()) {
    937     VisitForEffect(expr);
    938   } else if (context()->IsAccumulatorValue()) {
    939     VisitForAccumulatorValue(expr);
    940   } else if (context()->IsStackValue()) {
    941     VisitForStackValue(expr);
    942   } else if (context()->IsTest()) {
    943     const TestContext* test = TestContext::cast(context());
    944     VisitForControl(expr, test->true_label(), test->false_label(),
    945                     test->fall_through());
    946   }
    947 }
    948 
    949 
    950 void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
    951   Comment cmnt(masm_, "[ Comma");
    952   VisitForEffect(expr->left());
    953   VisitInDuplicateContext(expr->right());
    954 }
    955 
    956 
    957 void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
    958   bool is_logical_and = expr->op() == Token::AND;
     959   Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
    960   Expression* left = expr->left();
    961   Expression* right = expr->right();
    962   BailoutId right_id = expr->RightId();
    963   Label done;
    964 
    965   if (context()->IsTest()) {
    966     Label eval_right;
    967     const TestContext* test = TestContext::cast(context());
    968     if (is_logical_and) {
    969       VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    970     } else {
    971       VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    972     }
    973     PrepareForBailoutForId(right_id, NO_REGISTERS);
    974     __ bind(&eval_right);
    975 
    976   } else if (context()->IsAccumulatorValue()) {
    977     VisitForAccumulatorValue(left);
    978     // We want the value in the accumulator for the test, and on the stack in
    979     // case we need it.
    980     __ Push(result_register());
    981     Label discard, restore;
    982     if (is_logical_and) {
    983       DoTest(left, &discard, &restore, &restore);
    984     } else {
    985       DoTest(left, &restore, &discard, &restore);
    986     }
    987     __ bind(&restore);
    988     __ Pop(result_register());
    989     __ jmp(&done);
    990     __ bind(&discard);
    991     __ Drop(1);
    992     PrepareForBailoutForId(right_id, NO_REGISTERS);
    993 
    994   } else if (context()->IsStackValue()) {
    995     VisitForAccumulatorValue(left);
    996     // We want the value in the accumulator for the test, and on the stack in
    997     // case we need it.
    998     __ Push(result_register());
    999     Label discard;
   1000     if (is_logical_and) {
   1001       DoTest(left, &discard, &done, &discard);
   1002     } else {
   1003       DoTest(left, &done, &discard, &discard);
   1004     }
   1005     __ bind(&discard);
   1006     __ Drop(1);
   1007     PrepareForBailoutForId(right_id, NO_REGISTERS);
   1008 
   1009   } else {
   1010     ASSERT(context()->IsEffect());
   1011     Label eval_right;
   1012     if (is_logical_and) {
   1013       VisitForControl(left, &eval_right, &done, &eval_right);
   1014     } else {
   1015       VisitForControl(left, &done, &eval_right, &eval_right);
   1016     }
   1017     PrepareForBailoutForId(right_id, NO_REGISTERS);
   1018     __ bind(&eval_right);
   1019   }
   1020 
   1021   VisitInDuplicateContext(right);
   1022   __ bind(&done);
   1023 }
   1024 
   1025 
   1026 void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
   1027   Token::Value op = expr->op();
   1028   Comment cmnt(masm_, "[ ArithmeticExpression");
   1029   Expression* left = expr->left();
   1030   Expression* right = expr->right();
   1031   OverwriteMode mode =
   1032       left->ResultOverwriteAllowed()
   1033       ? OVERWRITE_LEFT
   1034       : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);
   1035 
   1036   VisitForStackValue(left);
   1037   VisitForAccumulatorValue(right);
   1038 
   1039   SetSourcePosition(expr->position());
   1040   if (ShouldInlineSmiCase(op)) {
   1041     EmitInlineSmiBinaryOp(expr, op, mode, left, right);
   1042   } else {
   1043     EmitBinaryOp(expr, op, mode);
   1044   }
   1045 }
   1046 
   1047 
   1048 void FullCodeGenerator::VisitBlock(Block* stmt) {
   1049   Comment cmnt(masm_, "[ Block");
   1050   NestedBlock nested_block(this, stmt);
   1051   SetStatementPosition(stmt);
   1052 
   1053   Scope* saved_scope = scope();
   1054   // Push a block context when entering a block with block scoped variables.
   1055   if (stmt->scope() == NULL) {
   1056     PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
   1057   } else {
   1058     scope_ = stmt->scope();
   1059     ASSERT(!scope_->is_module_scope());
   1060     { Comment cmnt(masm_, "[ Extend block context");
   1061       __ Push(scope_->GetScopeInfo());
   1062       PushFunctionArgumentForContextAllocation();
   1063       __ CallRuntime(Runtime::kHiddenPushBlockContext, 2);
   1064 
   1065       // Replace the context stored in the frame.
   1066       StoreToFrameField(StandardFrameConstants::kContextOffset,
   1067                         context_register());
   1068       PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
   1069     }
   1070     { Comment cmnt(masm_, "[ Declarations");
   1071       VisitDeclarations(scope_->declarations());
   1072       PrepareForBailoutForId(stmt->DeclsId(), NO_REGISTERS);
   1073     }
   1074   }
   1075 
   1076   VisitStatements(stmt->statements());
   1077   scope_ = saved_scope;
   1078   __ bind(nested_block.break_label());
   1079 
   1080   // Pop block context if necessary.
   1081   if (stmt->scope() != NULL) {
   1082     LoadContextField(context_register(), Context::PREVIOUS_INDEX);
   1083     // Update local stack frame context field.
   1084     StoreToFrameField(StandardFrameConstants::kContextOffset,
   1085                       context_register());
   1086   }
   1087   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1088 }
   1089 
   1090 
   1091 void FullCodeGenerator::VisitModuleStatement(ModuleStatement* stmt) {
   1092   Comment cmnt(masm_, "[ Module context");
   1093 
   1094   __ Push(Smi::FromInt(stmt->proxy()->interface()->Index()));
   1095   __ Push(Smi::FromInt(0));
   1096   __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
   1097   StoreToFrameField(
   1098       StandardFrameConstants::kContextOffset, context_register());
   1099 
   1100   Scope* saved_scope = scope_;
   1101   scope_ = stmt->body()->scope();
   1102   VisitStatements(stmt->body()->statements());
   1103   scope_ = saved_scope;
   1104   LoadContextField(context_register(), Context::PREVIOUS_INDEX);
   1105   // Update local stack frame context field.
   1106   StoreToFrameField(StandardFrameConstants::kContextOffset,
   1107                     context_register());
   1108 }
   1109 
   1110 
   1111 void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
   1112   Comment cmnt(masm_, "[ ExpressionStatement");
   1113   SetStatementPosition(stmt);
   1114   VisitForEffect(stmt->expression());
   1115 }
   1116 
   1117 
   1118 void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
   1119   Comment cmnt(masm_, "[ EmptyStatement");
   1120   SetStatementPosition(stmt);
   1121 }
   1122 
   1123 
   1124 void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
   1125   Comment cmnt(masm_, "[ IfStatement");
   1126   SetStatementPosition(stmt);
   1127   Label then_part, else_part, done;
   1128 
   1129   if (stmt->HasElseStatement()) {
   1130     VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
   1131     PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
   1132     __ bind(&then_part);
   1133     Visit(stmt->then_statement());
   1134     __ jmp(&done);
   1135 
   1136     PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
   1137     __ bind(&else_part);
   1138     Visit(stmt->else_statement());
   1139   } else {
   1140     VisitForControl(stmt->condition(), &then_part, &done, &then_part);
   1141     PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
   1142     __ bind(&then_part);
   1143     Visit(stmt->then_statement());
   1144 
   1145     PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
   1146   }
   1147   __ bind(&done);
   1148   PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
   1149 }
   1150 
   1151 
   1152 void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
    1153   Comment cmnt(masm_, "[ ContinueStatement");
   1154   SetStatementPosition(stmt);
   1155   NestedStatement* current = nesting_stack_;
   1156   int stack_depth = 0;
   1157   int context_length = 0;
   1158   // When continuing, we clobber the unpredictable value in the accumulator
   1159   // with one that's safe for GC.  If we hit an exit from the try block of
   1160   // try...finally on our way out, we will unconditionally preserve the
   1161   // accumulator on the stack.
   1162   ClearAccumulator();
   1163   while (!current->IsContinueTarget(stmt->target())) {
   1164     current = current->Exit(&stack_depth, &context_length);
   1165   }
   1166   __ Drop(stack_depth);
   1167   if (context_length > 0) {
   1168     while (context_length > 0) {
   1169       LoadContextField(context_register(), Context::PREVIOUS_INDEX);
   1170       --context_length;
   1171     }
   1172     StoreToFrameField(StandardFrameConstants::kContextOffset,
   1173                       context_register());
   1174   }
   1175 
   1176   __ jmp(current->AsIteration()->continue_label());
   1177 }
   1178 
   1179 
   1180 void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
    1181   Comment cmnt(masm_, "[ BreakStatement");
   1182   SetStatementPosition(stmt);
   1183   NestedStatement* current = nesting_stack_;
   1184   int stack_depth = 0;
   1185   int context_length = 0;
   1186   // When breaking, we clobber the unpredictable value in the accumulator
   1187   // with one that's safe for GC.  If we hit an exit from the try block of
   1188   // try...finally on our way out, we will unconditionally preserve the
   1189   // accumulator on the stack.
   1190   ClearAccumulator();
   1191   while (!current->IsBreakTarget(stmt->target())) {
   1192     current = current->Exit(&stack_depth, &context_length);
   1193   }
   1194   __ Drop(stack_depth);
   1195   if (context_length > 0) {
   1196     while (context_length > 0) {
   1197       LoadContextField(context_register(), Context::PREVIOUS_INDEX);
   1198       --context_length;
   1199     }
   1200     StoreToFrameField(StandardFrameConstants::kContextOffset,
   1201                       context_register());
   1202   }
   1203 
   1204   __ jmp(current->AsBreakable()->break_label());
   1205 }
   1206 
   1207 
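         // Unwind the nesting stack and drop any stack-allocated state of pending
         // constructs (try/with/block) before emitting the return sequence.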
   1208 void FullCodeGenerator::EmitUnwindBeforeReturn() {
   1209   NestedStatement* current = nesting_stack_;
   1210   int stack_depth = 0;
   1211   int context_length = 0;
   1212   while (current != NULL) {
   1213     current = current->Exit(&stack_depth, &context_length);
   1214   }
   1215   __ Drop(stack_depth);
   1216 }
   1217 
   1218 
   1219 void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
   1220   Comment cmnt(masm_, "[ ReturnStatement");
   1221   SetStatementPosition(stmt);
   1222   Expression* expr = stmt->expression();
   1223   VisitForAccumulatorValue(expr);
   1224   EmitUnwindBeforeReturn();
   1225   EmitReturnSequence();
   1226 }
   1227 
   1228 
   1229 void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
   1230   Comment cmnt(masm_, "[ WithStatement");
   1231   SetStatementPosition(stmt);
   1232 
   1233   VisitForStackValue(stmt->expression());
   1234   PushFunctionArgumentForContextAllocation();
   1235   __ CallRuntime(Runtime::kHiddenPushWithContext, 2);
   1236   StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
   1237 
   1238   Scope* saved_scope = scope();
   1239   scope_ = stmt->scope();
   1240   { WithOrCatch body(this);
   1241     Visit(stmt->statement());
   1242   }
   1243   scope_ = saved_scope;
   1244 
   1245   // Pop context.
   1246   LoadContextField(context_register(), Context::PREVIOUS_INDEX);
   1247   // Update local stack frame context field.
   1248   StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
   1249 }
   1250 
   1251 
   1252 void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
   1253   Comment cmnt(masm_, "[ DoWhileStatement");
   1254   SetStatementPosition(stmt);
   1255   Label body, book_keeping;
   1256 
   1257   Iteration loop_statement(this, stmt);
   1258   increment_loop_depth();
   1259 
   1260   __ bind(&body);
   1261   Visit(stmt->body());
   1262 
   1263   // Record the position of the do while condition and make sure it is
   1264   // possible to break on the condition.
   1265   __ bind(loop_statement.continue_label());
   1266   PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
   1267   SetExpressionPosition(stmt->cond());
   1268   VisitForControl(stmt->cond(),
   1269                   &book_keeping,
   1270                   loop_statement.break_label(),
   1271                   &book_keeping);
   1272 
   1273   // Check stack before looping.
   1274   PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
   1275   __ bind(&book_keeping);
   1276   EmitBackEdgeBookkeeping(stmt, &body);
   1277   __ jmp(&body);
   1278 
   1279   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1280   __ bind(loop_statement.break_label());
   1281   decrement_loop_depth();
   1282 }
   1283 
   1284 
   1285 void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
   1286   Comment cmnt(masm_, "[ WhileStatement");
   1287   Label test, body;
   1288 
   1289   Iteration loop_statement(this, stmt);
   1290   increment_loop_depth();
   1291 
   1292   // Emit the test at the bottom of the loop.
   1293   __ jmp(&test);
   1294 
   1295   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
   1296   __ bind(&body);
   1297   Visit(stmt->body());
   1298 
   1299   // Emit the statement position here as this is where the while
   1300   // statement code starts.
   1301   __ bind(loop_statement.continue_label());
   1302   SetStatementPosition(stmt);
   1303 
   1304   // Check stack before looping.
   1305   EmitBackEdgeBookkeeping(stmt, &body);
   1306 
   1307   __ bind(&test);
   1308   VisitForControl(stmt->cond(),
   1309                   &body,
   1310                   loop_statement.break_label(),
   1311                   loop_statement.break_label());
   1312 
   1313   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1314   __ bind(loop_statement.break_label());
   1315   decrement_loop_depth();
   1316 }
   1317 
   1318 
   1319 void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
   1320   Comment cmnt(masm_, "[ ForStatement");
   1321   Label test, body;
   1322 
   1323   Iteration loop_statement(this, stmt);
   1324 
   1325   // Set statement position for a break slot before entering the for-body.
   1326   SetStatementPosition(stmt);
   1327 
   1328   if (stmt->init() != NULL) {
   1329     Visit(stmt->init());
   1330   }
   1331 
   1332   increment_loop_depth();
   1333   // Emit the test at the bottom of the loop (even if empty).
   1334   __ jmp(&test);
   1335 
   1336   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
   1337   __ bind(&body);
   1338   Visit(stmt->body());
   1339 
   1340   PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
   1341   __ bind(loop_statement.continue_label());
   1342   if (stmt->next() != NULL) {
   1343     Visit(stmt->next());
   1344   }
   1345 
   1346   // Emit the statement position here as this is where the for
   1347   // statement code starts.
   1348   SetStatementPosition(stmt);
   1349 
   1350   // Check stack before looping.
   1351   EmitBackEdgeBookkeeping(stmt, &body);
   1352 
   1353   __ bind(&test);
   1354   if (stmt->cond() != NULL) {
   1355     VisitForControl(stmt->cond(),
   1356                     &body,
   1357                     loop_statement.break_label(),
   1358                     loop_statement.break_label());
   1359   } else {
   1360     __ jmp(&body);
   1361   }
   1362 
   1363   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
   1364   __ bind(loop_statement.break_label());
   1365   decrement_loop_depth();
   1366 }
   1367 
   1368 
   1369 void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
   1370   Comment cmnt(masm_, "[ TryCatchStatement");
   1371   SetStatementPosition(stmt);
   1372   // The try block adds a handler to the exception handler chain before
   1373   // entering, and removes it again when exiting normally.  If an exception
   1374   // is thrown during execution of the try block, the handler is consumed
   1375   // and control is passed to the catch block with the exception in the
   1376   // result register.
   1377 
   1378   Label try_entry, handler_entry, exit;
   1379   __ jmp(&try_entry);
   1380   __ bind(&handler_entry);
   1381   handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
   1382   // Exception handler code, the exception is in the result register.
   1383   // Extend the context before executing the catch block.
   1384   { Comment cmnt(masm_, "[ Extend catch context");
   1385     __ Push(stmt->variable()->name());
   1386     __ Push(result_register());
   1387     PushFunctionArgumentForContextAllocation();
   1388     __ CallRuntime(Runtime::kHiddenPushCatchContext, 3);
   1389     StoreToFrameField(StandardFrameConstants::kContextOffset,
   1390                       context_register());
   1391   }
   1392 
   1393   Scope* saved_scope = scope();
   1394   scope_ = stmt->scope();
   1395   ASSERT(scope_->declarations()->is_empty());
   1396   { WithOrCatch catch_body(this);
   1397     Visit(stmt->catch_block());
   1398   }
   1399   // Restore the context.
   1400   LoadContextField(context_register(), Context::PREVIOUS_INDEX);
   1401   StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
   1402   scope_ = saved_scope;
   1403   __ jmp(&exit);
   1404 
   1405   // Try block code. Sets up the exception handler chain.
   1406   __ bind(&try_entry);
   1407   __ PushTryHandler(StackHandler::CATCH, stmt->index());
   1408   { TryCatch try_body(this);
   1409     Visit(stmt->try_block());
   1410   }
   1411   __ PopTryHandler();
   1412   __ bind(&exit);
   1413 }
   1414 
   1415 
   1416 void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
   1417   Comment cmnt(masm_, "[ TryFinallyStatement");
   1418   SetStatementPosition(stmt);
   1419   // Try finally is compiled by setting up a try-handler on the stack while
   1420   // executing the try body, and removing it again afterwards.
   1421   //
   1422   // The try-finally construct can enter the finally block in three ways:
   1423   // 1. By exiting the try-block normally. This removes the try-handler and
   1424   //    calls the finally block code before continuing.
   1425   // 2. By exiting the try-block with a function-local control flow transfer
    1426   //    (break/continue/return). The site of the break, for example, removes
    1427   //    the try handler and calls the finally block code before continuing
   1428   //    its outward control transfer.
   1429   // 3. By exiting the try-block with a thrown exception.
   1430   //    This can happen in nested function calls. It traverses the try-handler
   1431   //    chain and consumes the try-handler entry before jumping to the
   1432   //    handler code. The handler code then calls the finally-block before
   1433   //    rethrowing the exception.
   1434   //
   1435   // The finally block must assume a return address on top of the stack
   1436   // (or in the link register on ARM chips) and a value (return value or
   1437   // exception) in the result register (rax/eax/r0), both of which must
   1438   // be preserved. The return address isn't GC-safe, so it should be
   1439   // cooked before GC.
   1440   Label try_entry, handler_entry, finally_entry;
   1441 
   1442   // Jump to try-handler setup and try-block code.
   1443   __ jmp(&try_entry);
   1444   __ bind(&handler_entry);
   1445   handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
   1446   // Exception handler code.  This code is only executed when an exception
   1447   // is thrown.  The exception is in the result register, and must be
   1448   // preserved by the finally block.  Call the finally block and then
   1449   // rethrow the exception if it returns.
   1450   __ Call(&finally_entry);
   1451   __ Push(result_register());
   1452   __ CallRuntime(Runtime::kHiddenReThrow, 1);
   1453 
   1454   // Finally block implementation.
   1455   __ bind(&finally_entry);
   1456   EnterFinallyBlock();
   1457   { Finally finally_body(this);
   1458     Visit(stmt->finally_block());
   1459   }
   1460   ExitFinallyBlock();  // Return to the calling code.
   1461 
   1462   // Set up try handler.
   1463   __ bind(&try_entry);
   1464   __ PushTryHandler(StackHandler::FINALLY, stmt->index());
   1465   { TryFinally try_body(this, &finally_entry);
   1466     Visit(stmt->try_block());
   1467   }
   1468   __ PopTryHandler();
   1469   // Execute the finally block on the way out.  Clobber the unpredictable
   1470   // value in the result register with one that's safe for GC because the
   1471   // finally block will unconditionally preserve the result register on the
   1472   // stack.
   1473   ClearAccumulator();
   1474   __ Call(&finally_entry);
   1475 }
   1476 
   1477 
   1478 void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
   1479   Comment cmnt(masm_, "[ DebuggerStatement");
   1480   SetStatementPosition(stmt);
   1481 
   1482   __ DebugBreak();
   1483   // Ignore the return value.
   1484 }
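        // For illustration: a JS `debugger;` statement lowers to the
        // DebugBreak call above, which traps into the debugger's break
        // handling when a debugger is attached; whatever it returns is
        // deliberately ignored.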
   1485 
   1486 
   1487 void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
   1488   UNREACHABLE();
   1489 }
   1490 
   1491 
   1492 void FullCodeGenerator::VisitConditional(Conditional* expr) {
   1493   Comment cmnt(masm_, "[ Conditional");
   1494   Label true_case, false_case, done;
   1495   VisitForControl(expr->condition(), &true_case, &false_case, &true_case);
   1496 
   1497   PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
   1498   __ bind(&true_case);
   1499   SetExpressionPosition(expr->then_expression());
   1500   if (context()->IsTest()) {
   1501     const TestContext* for_test = TestContext::cast(context());
   1502     VisitForControl(expr->then_expression(),
   1503                     for_test->true_label(),
   1504                     for_test->false_label(),
   1505                     NULL);
   1506   } else {
   1507     VisitInDuplicateContext(expr->then_expression());
   1508     __ jmp(&done);
   1509   }
   1510 
   1511   PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
   1512   __ bind(&false_case);
   1513   SetExpressionPosition(expr->else_expression());
   1514   VisitInDuplicateContext(expr->else_expression());
   1515   // If control flow falls through Visit, merge it with the true case here.
   1516   if (!context()->IsTest()) {
   1517     __ bind(&done);
   1518   }
   1519 }
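        // For illustration (x, a and b are placeholders): for an expression
        // like
        //   x ? a : b
        // in a test context each arm ends by jumping straight to the enclosing
        // test's true/false labels, so no merge point is bound; in any other
        // context both arms are visited in a duplicate of the surrounding
        // context and control meets at `done`.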
   1520 
   1521 
   1522 void FullCodeGenerator::VisitLiteral(Literal* expr) {
   1523   Comment cmnt(masm_, "[ Literal");
   1524   context()->Plug(expr->value());
   1525 }
   1526 
   1527 
   1528 void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
   1529   Comment cmnt(masm_, "[ FunctionLiteral");
   1530 
   1531   // Build the function boilerplate and instantiate it.
   1532   Handle<SharedFunctionInfo> function_info =
   1533       Compiler::BuildFunctionInfo(expr, script());
   1534   if (function_info.is_null()) {
   1535     SetStackOverflow();
   1536     return;
   1537   }
   1538   EmitNewClosure(function_info, expr->pretenure());
   1539 }
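        // For illustration: a function literal such as
        //   var f = function () { return 1; };
        // is compiled by building a SharedFunctionInfo "boilerplate" for the
        // literal and emitting code (EmitNewClosure) that instantiates a
        // closure over the current context at runtime; expr->pretenure() is a
        // pretenuring hint for closures expected to be long-lived.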
   1540 
   1541 
   1542 void FullCodeGenerator::VisitNativeFunctionLiteral(
   1543     NativeFunctionLiteral* expr) {
   1544   Comment cmnt(masm_, "[ NativeFunctionLiteral");
   1545 
   1546   // Compute the function template for the native function.
   1547   Handle<String> name = expr->name();
   1548   v8::Handle<v8::FunctionTemplate> fun_template =
   1549       expr->extension()->GetNativeFunctionTemplate(
   1550           reinterpret_cast<v8::Isolate*>(isolate()), v8::Utils::ToLocal(name));
   1551   ASSERT(!fun_template.IsEmpty());
   1552 
   1553   // Instantiate the function and create a shared function info from it.
   1554   Handle<JSFunction> fun = Utils::OpenHandle(*fun_template->GetFunction());
   1555   const int literals = fun->NumberOfLiterals();
   1556   Handle<Code> code = Handle<Code>(fun->shared()->code());
   1557   Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
   1558   bool is_generator = false;
   1559   Handle<SharedFunctionInfo> shared =
   1560       isolate()->factory()->NewSharedFunctionInfo(
   1561           name, literals, is_generator,
   1562           code, Handle<ScopeInfo>(fun->shared()->scope_info()),
   1563           Handle<FixedArray>(fun->shared()->feedback_vector()));
   1564   shared->set_construct_stub(*construct_stub);
   1565 
   1566   // Copy the function data to the shared function info.
   1567   shared->set_function_data(fun->shared()->function_data());
   1568   int parameters = fun->shared()->formal_parameter_count();
   1569   shared->set_formal_parameter_count(parameters);
   1570 
   1571   EmitNewClosure(shared, false);
   1572 }
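        // For illustration: native function literals come from a v8::Extension
        // whose source declares them (e.g. `native function foo();`, with foo
        // a placeholder name). The extension supplies a FunctionTemplate, and
        // the instantiated function's code, construct stub, scope info,
        // feedback vector and parameter count are copied into a fresh
        // SharedFunctionInfo so a closure can be emitted as for ordinary
        // literals.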
   1573 
   1574 
   1575 void FullCodeGenerator::VisitThrow(Throw* expr) {
   1576   Comment cmnt(masm_, "[ Throw");
   1577   VisitForStackValue(expr->exception());
   1578   __ CallRuntime(Runtime::kHiddenThrow, 1);
   1579   // Never returns here.
   1580 }
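        // For illustration: `throw expr` evaluates the exception onto the
        // stack and calls the Throw runtime entry, which unwinds to the
        // nearest enclosing handler, so the call above never returns normally.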
   1581 
   1582 
   1583 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
   1584     int* stack_depth,
   1585     int* context_length) {
   1586   // The macros used here must preserve the result register.
   1587   __ Drop(*stack_depth);
   1588   __ PopTryHandler();
   1589   *stack_depth = 0;
   1590   return previous_;
   1591 }
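        // For illustration: this Exit hook runs when a function-local control
        // transfer (break, continue or return) leaves a try-catch. It drops
        // the operands that inner statements left on the stack (*stack_depth
        // of them), pops the CATCH handler, and resets the depth; the result
        // register is preserved so a transferred value survives.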
   1592 
   1593 
   1594 bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
   1595   Expression* sub_expr;
   1596   Handle<String> check;
   1597   if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
   1598     EmitLiteralCompareTypeof(expr, sub_expr, check);
   1599     return true;
   1600   }
   1601 
   1602   if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
   1603     EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
   1604     return true;
   1605   }
   1606 
   1607   if (expr->IsLiteralCompareNull(&sub_expr)) {
   1608     EmitLiteralCompareNil(expr, sub_expr, kNullValue);
   1609     return true;
   1610   }
   1611 
   1612   return false;
   1613 }
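        // For illustration, the special cases above correspond to JS patterns
        // such as (x is a placeholder):
        //   typeof x == 'string'   // EmitLiteralCompareTypeof
        //   x === undefined        // EmitLiteralCompareNil(kUndefinedValue)
        //   x === null             // EmitLiteralCompareNil(kNullValue)
        // Everything else returns false here and goes through the generic
        // comparison code instead.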
   1614 
   1615 
   1616 void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
   1617   DisallowHeapAllocation no_gc;
   1618   Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
   1619 
   1620   // Iterate over the back edge table and patch every interrupt
   1621   // call to an unconditional call to the replacement code.
   1622   int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
   1623 
   1624   BackEdgeTable back_edges(unoptimized, &no_gc);
   1625   for (uint32_t i = 0; i < back_edges.length(); i++) {
   1626     if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
   1627       ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate,
   1628                                             unoptimized,
   1629                                             back_edges.pc(i)));
   1630       PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
   1631     }
   1632   }
   1633 
   1634   unoptimized->set_back_edges_patched_for_osr(true);
   1635   ASSERT(Verify(isolate, unoptimized, loop_nesting_level));
   1636 }
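        // For illustration: after Patch(), every back edge whose loop depth
        // equals allow_osr_at_loop_nesting_level calls the OnStackReplacement
        // builtin instead of the interrupt check, so the next time one of
        // those loops takes its back edge the unoptimized frame can be
        // replaced by optimized code.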
   1637 
   1638 
   1639 void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
   1640   DisallowHeapAllocation no_gc;
   1641   Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);
   1642 
   1643   // Iterate over the back edge table and revert the patched interrupt calls.
   1644   ASSERT(unoptimized->back_edges_patched_for_osr());
   1645   int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
   1646 
   1647   BackEdgeTable back_edges(unoptimized, &no_gc);
   1648   for (uint32_t i = 0; i < back_edges.length(); i++) {
   1649     if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
   1650       ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate,
   1651                                             unoptimized,
   1652                                             back_edges.pc(i)));
   1653       PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
   1654     }
   1655   }
   1656 
   1657   unoptimized->set_back_edges_patched_for_osr(false);
   1658   unoptimized->set_allow_osr_at_loop_nesting_level(0);
   1659   // Assert that none of the back edges are patched anymore.
   1660   ASSERT(Verify(isolate, unoptimized, -1));
   1661 }
   1662 
   1663 
   1664 void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
   1665   DisallowHeapAllocation no_gc;
   1666   Isolate* isolate = code->GetIsolate();
   1667   Address pc = code->instruction_start() + pc_offset;
   1668   Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
   1669   PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
   1670 }
   1671 
   1672 
   1673 void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
   1674   DisallowHeapAllocation no_gc;
   1675   Isolate* isolate = code->GetIsolate();
   1676   Address pc = code->instruction_start() + pc_offset;
   1677 
   1678   if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
   1679     Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
   1680     PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
   1681   }
   1682 }
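        // For illustration: together with Patch() and Revert() above, these
        // two helpers give each back edge site three possible targets. Patch()
        // and Revert() switch whole loop levels between the InterruptCheck and
        // OnStackReplacement builtins, while AddStackCheck() and
        // RemoveStackCheck() switch a single pc between OnStackReplacement and
        // OsrAfterStackCheck.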
   1683 
   1684 
   1685 #ifdef DEBUG
   1686 bool BackEdgeTable::Verify(Isolate* isolate,
   1687                            Code* unoptimized,
   1688                            int loop_nesting_level) {
   1689   DisallowHeapAllocation no_gc;
   1690   BackEdgeTable back_edges(unoptimized, &no_gc);
   1691   for (uint32_t i = 0; i < back_edges.length(); i++) {
   1692     uint32_t loop_depth = back_edges.loop_depth(i);
   1693     CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
   1694     // Assert that all back edges for shallower loops (and only those)
   1695     // have already been patched.
   1696     CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
   1697              GetBackEdgeState(isolate,
   1698                               unoptimized,
   1699                               back_edges.pc(i)) != INTERRUPT);
   1700   }
   1701   return true;
   1702 }
   1703 #endif  // DEBUG
   1704 
   1705 
   1706 #undef __
   1707 
   1708 
   1709 } }  // namespace v8::internal
   1710