// (code-viewer navigation header removed: "Home | History | Annotate | Download | only in compiler")
      1 // Copyright 2014 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/compiler/pipeline.h"
      6 
      7 #include <fstream>  // NOLINT(readability/streams)
      8 #include <sstream>
      9 
     10 #include "src/base/adapters.h"
     11 #include "src/base/platform/elapsed-timer.h"
     12 #include "src/compiler/ast-graph-builder.h"
     13 #include "src/compiler/ast-loop-assignment-analyzer.h"
     14 #include "src/compiler/basic-block-instrumentor.h"
     15 #include "src/compiler/branch-elimination.h"
     16 #include "src/compiler/bytecode-graph-builder.h"
     17 #include "src/compiler/checkpoint-elimination.h"
     18 #include "src/compiler/code-generator.h"
     19 #include "src/compiler/common-operator-reducer.h"
     20 #include "src/compiler/control-flow-optimizer.h"
     21 #include "src/compiler/dead-code-elimination.h"
     22 #include "src/compiler/effect-control-linearizer.h"
     23 #include "src/compiler/escape-analysis-reducer.h"
     24 #include "src/compiler/escape-analysis.h"
     25 #include "src/compiler/frame-elider.h"
     26 #include "src/compiler/graph-replay.h"
     27 #include "src/compiler/graph-trimmer.h"
     28 #include "src/compiler/graph-visualizer.h"
     29 #include "src/compiler/instruction-selector.h"
     30 #include "src/compiler/instruction.h"
     31 #include "src/compiler/js-builtin-reducer.h"
     32 #include "src/compiler/js-call-reducer.h"
     33 #include "src/compiler/js-context-specialization.h"
     34 #include "src/compiler/js-create-lowering.h"
     35 #include "src/compiler/js-frame-specialization.h"
     36 #include "src/compiler/js-generic-lowering.h"
     37 #include "src/compiler/js-global-object-specialization.h"
     38 #include "src/compiler/js-inlining-heuristic.h"
     39 #include "src/compiler/js-intrinsic-lowering.h"
     40 #include "src/compiler/js-native-context-specialization.h"
     41 #include "src/compiler/js-typed-lowering.h"
     42 #include "src/compiler/jump-threading.h"
     43 #include "src/compiler/live-range-separator.h"
     44 #include "src/compiler/load-elimination.h"
     45 #include "src/compiler/loop-analysis.h"
     46 #include "src/compiler/loop-peeling.h"
     47 #include "src/compiler/machine-operator-reducer.h"
     48 #include "src/compiler/memory-optimizer.h"
     49 #include "src/compiler/move-optimizer.h"
     50 #include "src/compiler/osr.h"
     51 #include "src/compiler/pipeline-statistics.h"
     52 #include "src/compiler/redundancy-elimination.h"
     53 #include "src/compiler/register-allocator-verifier.h"
     54 #include "src/compiler/register-allocator.h"
     55 #include "src/compiler/schedule.h"
     56 #include "src/compiler/scheduler.h"
     57 #include "src/compiler/select-lowering.h"
     58 #include "src/compiler/simplified-lowering.h"
     59 #include "src/compiler/simplified-operator-reducer.h"
     60 #include "src/compiler/simplified-operator.h"
     61 #include "src/compiler/store-store-elimination.h"
     62 #include "src/compiler/tail-call-optimization.h"
     63 #include "src/compiler/type-hint-analyzer.h"
     64 #include "src/compiler/typer.h"
     65 #include "src/compiler/value-numbering-reducer.h"
     66 #include "src/compiler/verifier.h"
     67 #include "src/compiler/zone-pool.h"
     68 #include "src/isolate-inl.h"
     69 #include "src/ostreams.h"
     70 #include "src/parsing/parser.h"
     71 #include "src/register-configuration.h"
     72 #include "src/type-info.h"
     73 #include "src/utils.h"
     74 
     75 namespace v8 {
     76 namespace internal {
     77 namespace compiler {
     78 
     79 class PipelineData {
     80  public:
     81   // For main entry point.
     82   PipelineData(ZonePool* zone_pool, CompilationInfo* info,
     83                PipelineStatistics* pipeline_statistics)
     84       : isolate_(info->isolate()),
     85         info_(info),
     86         debug_name_(info_->GetDebugName()),
     87         outer_zone_(info_->zone()),
     88         zone_pool_(zone_pool),
     89         pipeline_statistics_(pipeline_statistics),
     90         graph_zone_scope_(zone_pool_),
     91         graph_zone_(graph_zone_scope_.zone()),
     92         instruction_zone_scope_(zone_pool_),
     93         instruction_zone_(instruction_zone_scope_.zone()),
     94         register_allocation_zone_scope_(zone_pool_),
     95         register_allocation_zone_(register_allocation_zone_scope_.zone()) {
     96     PhaseScope scope(pipeline_statistics, "init pipeline data");
     97     graph_ = new (graph_zone_) Graph(graph_zone_);
     98     source_positions_ = new (graph_zone_) SourcePositionTable(graph_);
     99     simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
    100     machine_ = new (graph_zone_) MachineOperatorBuilder(
    101         graph_zone_, MachineType::PointerRepresentation(),
    102         InstructionSelector::SupportedMachineOperatorFlags());
    103     common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
    104     javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
    105     jsgraph_ = new (graph_zone_)
    106         JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
    107   }
    108 
    109   // For WASM compile entry point.
    110   PipelineData(ZonePool* zone_pool, CompilationInfo* info, Graph* graph,
    111                SourcePositionTable* source_positions)
    112       : isolate_(info->isolate()),
    113         info_(info),
    114         debug_name_(info_->GetDebugName()),
    115         zone_pool_(zone_pool),
    116         graph_zone_scope_(zone_pool_),
    117         graph_(graph),
    118         source_positions_(source_positions),
    119         instruction_zone_scope_(zone_pool_),
    120         instruction_zone_(instruction_zone_scope_.zone()),
    121         register_allocation_zone_scope_(zone_pool_),
    122         register_allocation_zone_(register_allocation_zone_scope_.zone()) {}
    123 
    124   // For machine graph testing entry point.
    125   PipelineData(ZonePool* zone_pool, CompilationInfo* info, Graph* graph,
    126                Schedule* schedule)
    127       : isolate_(info->isolate()),
    128         info_(info),
    129         debug_name_(info_->GetDebugName()),
    130         zone_pool_(zone_pool),
    131         graph_zone_scope_(zone_pool_),
    132         graph_(graph),
    133         source_positions_(new (info->zone()) SourcePositionTable(graph_)),
    134         schedule_(schedule),
    135         instruction_zone_scope_(zone_pool_),
    136         instruction_zone_(instruction_zone_scope_.zone()),
    137         register_allocation_zone_scope_(zone_pool_),
    138         register_allocation_zone_(register_allocation_zone_scope_.zone()) {}
    139 
    140   // For register allocation testing entry point.
    141   PipelineData(ZonePool* zone_pool, CompilationInfo* info,
    142                InstructionSequence* sequence)
    143       : isolate_(info->isolate()),
    144         info_(info),
    145         debug_name_(info_->GetDebugName()),
    146         zone_pool_(zone_pool),
    147         graph_zone_scope_(zone_pool_),
    148         instruction_zone_scope_(zone_pool_),
    149         instruction_zone_(sequence->zone()),
    150         sequence_(sequence),
    151         register_allocation_zone_scope_(zone_pool_),
    152         register_allocation_zone_(register_allocation_zone_scope_.zone()) {}
    153 
    154   ~PipelineData() {
    155     DeleteRegisterAllocationZone();
    156     DeleteInstructionZone();
    157     DeleteGraphZone();
    158   }
    159 
    160   Isolate* isolate() const { return isolate_; }
    161   CompilationInfo* info() const { return info_; }
    162   ZonePool* zone_pool() const { return zone_pool_; }
    163   PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
    164   bool compilation_failed() const { return compilation_failed_; }
    165   void set_compilation_failed() { compilation_failed_ = true; }
    166   Handle<Code> code() { return code_; }
    167   void set_code(Handle<Code> code) {
    168     DCHECK(code_.is_null());
    169     code_ = code;
    170   }
    171 
    172   // RawMachineAssembler generally produces graphs which cannot be verified.
    173   bool MayHaveUnverifiableGraph() const { return outer_zone_ == nullptr; }
    174 
    175   Zone* graph_zone() const { return graph_zone_; }
    176   Graph* graph() const { return graph_; }
    177   SourcePositionTable* source_positions() const { return source_positions_; }
    178   MachineOperatorBuilder* machine() const { return machine_; }
    179   CommonOperatorBuilder* common() const { return common_; }
    180   JSOperatorBuilder* javascript() const { return javascript_; }
    181   JSGraph* jsgraph() const { return jsgraph_; }
    182   MaybeHandle<Context> native_context() const {
    183     if (info()->is_native_context_specializing()) {
    184       return handle(info()->native_context(), isolate());
    185     }
    186     return MaybeHandle<Context>();
    187   }
    188 
    189   LoopAssignmentAnalysis* loop_assignment() const { return loop_assignment_; }
    190   void set_loop_assignment(LoopAssignmentAnalysis* loop_assignment) {
    191     DCHECK(!loop_assignment_);
    192     loop_assignment_ = loop_assignment;
    193   }
    194 
    195   TypeHintAnalysis* type_hint_analysis() const { return type_hint_analysis_; }
    196   void set_type_hint_analysis(TypeHintAnalysis* type_hint_analysis) {
    197     DCHECK_NULL(type_hint_analysis_);
    198     type_hint_analysis_ = type_hint_analysis;
    199   }
    200 
    201   Schedule* schedule() const { return schedule_; }
    202   void set_schedule(Schedule* schedule) {
    203     DCHECK(!schedule_);
    204     schedule_ = schedule;
    205   }
    206   void reset_schedule() { schedule_ = nullptr; }
    207 
    208   Zone* instruction_zone() const { return instruction_zone_; }
    209   InstructionSequence* sequence() const { return sequence_; }
    210   Frame* frame() const { return frame_; }
    211 
    212   Zone* register_allocation_zone() const { return register_allocation_zone_; }
    213   RegisterAllocationData* register_allocation_data() const {
    214     return register_allocation_data_;
    215   }
    216 
    217   BasicBlockProfiler::Data* profiler_data() const { return profiler_data_; }
    218   void set_profiler_data(BasicBlockProfiler::Data* profiler_data) {
    219     profiler_data_ = profiler_data;
    220   }
    221 
    222   std::string const& source_position_output() const {
    223     return source_position_output_;
    224   }
    225   void set_source_position_output(std::string const& source_position_output) {
    226     source_position_output_ = source_position_output;
    227   }
    228 
    229   void DeleteGraphZone() {
    230     if (graph_zone_ == nullptr) return;
    231     graph_zone_scope_.Destroy();
    232     graph_zone_ = nullptr;
    233     graph_ = nullptr;
    234     source_positions_ = nullptr;
    235     loop_assignment_ = nullptr;
    236     type_hint_analysis_ = nullptr;
    237     simplified_ = nullptr;
    238     machine_ = nullptr;
    239     common_ = nullptr;
    240     javascript_ = nullptr;
    241     jsgraph_ = nullptr;
    242     schedule_ = nullptr;
    243   }
    244 
    245   void DeleteInstructionZone() {
    246     if (instruction_zone_ == nullptr) return;
    247     instruction_zone_scope_.Destroy();
    248     instruction_zone_ = nullptr;
    249     sequence_ = nullptr;
    250     frame_ = nullptr;
    251   }
    252 
    253   void DeleteRegisterAllocationZone() {
    254     if (register_allocation_zone_ == nullptr) return;
    255     register_allocation_zone_scope_.Destroy();
    256     register_allocation_zone_ = nullptr;
    257     register_allocation_data_ = nullptr;
    258   }
    259 
    260   void InitializeInstructionSequence(const CallDescriptor* descriptor) {
    261     DCHECK(sequence_ == nullptr);
    262     InstructionBlocks* instruction_blocks =
    263         InstructionSequence::InstructionBlocksFor(instruction_zone(),
    264                                                   schedule());
    265     sequence_ = new (instruction_zone()) InstructionSequence(
    266         info()->isolate(), instruction_zone(), instruction_blocks);
    267     if (descriptor && descriptor->RequiresFrameAsIncoming()) {
    268       sequence_->instruction_blocks()[0]->mark_needs_frame();
    269     } else {
    270       DCHECK_EQ(0, descriptor->CalleeSavedFPRegisters());
    271       DCHECK_EQ(0, descriptor->CalleeSavedRegisters());
    272     }
    273   }
    274 
    275   void InitializeFrameData(CallDescriptor* descriptor) {
    276     DCHECK(frame_ == nullptr);
    277     int fixed_frame_size = 0;
    278     if (descriptor != nullptr) {
    279       fixed_frame_size = CalculateFixedFrameSize(descriptor);
    280     }
    281     frame_ = new (instruction_zone()) Frame(fixed_frame_size);
    282   }
    283 
    284   void InitializeRegisterAllocationData(const RegisterConfiguration* config,
    285                                         CallDescriptor* descriptor) {
    286     DCHECK(register_allocation_data_ == nullptr);
    287     register_allocation_data_ = new (register_allocation_zone())
    288         RegisterAllocationData(config, register_allocation_zone(), frame(),
    289                                sequence(), debug_name_.get());
    290   }
    291 
    292   void BeginPhaseKind(const char* phase_kind_name) {
    293     if (pipeline_statistics() != nullptr) {
    294       pipeline_statistics()->BeginPhaseKind(phase_kind_name);
    295     }
    296   }
    297 
    298   void EndPhaseKind() {
    299     if (pipeline_statistics() != nullptr) {
    300       pipeline_statistics()->EndPhaseKind();
    301     }
    302   }
    303 
    304  private:
    305   Isolate* const isolate_;
    306   CompilationInfo* const info_;
    307   base::SmartArrayPointer<char> debug_name_;
    308   Zone* outer_zone_ = nullptr;
    309   ZonePool* const zone_pool_;
    310   PipelineStatistics* pipeline_statistics_ = nullptr;
    311   bool compilation_failed_ = false;
    312   Handle<Code> code_ = Handle<Code>::null();
    313 
    314   // All objects in the following group of fields are allocated in graph_zone_.
    315   // They are all set to nullptr when the graph_zone_ is destroyed.
    316   ZonePool::Scope graph_zone_scope_;
    317   Zone* graph_zone_ = nullptr;
    318   Graph* graph_ = nullptr;
    319   SourcePositionTable* source_positions_ = nullptr;
    320   LoopAssignmentAnalysis* loop_assignment_ = nullptr;
    321   TypeHintAnalysis* type_hint_analysis_ = nullptr;
    322   SimplifiedOperatorBuilder* simplified_ = nullptr;
    323   MachineOperatorBuilder* machine_ = nullptr;
    324   CommonOperatorBuilder* common_ = nullptr;
    325   JSOperatorBuilder* javascript_ = nullptr;
    326   JSGraph* jsgraph_ = nullptr;
    327   Schedule* schedule_ = nullptr;
    328 
    329   // All objects in the following group of fields are allocated in
    330   // instruction_zone_.  They are all set to nullptr when the instruction_zone_
    331   // is
    332   // destroyed.
    333   ZonePool::Scope instruction_zone_scope_;
    334   Zone* instruction_zone_;
    335   InstructionSequence* sequence_ = nullptr;
    336   Frame* frame_ = nullptr;
    337 
    338   // All objects in the following group of fields are allocated in
    339   // register_allocation_zone_.  They are all set to nullptr when the zone is
    340   // destroyed.
    341   ZonePool::Scope register_allocation_zone_scope_;
    342   Zone* register_allocation_zone_;
    343   RegisterAllocationData* register_allocation_data_ = nullptr;
    344 
    345   // Basic block profiling support.
    346   BasicBlockProfiler::Data* profiler_data_ = nullptr;
    347 
    348   // Source position output for --trace-turbo.
    349   std::string source_position_output_;
    350 
    351   int CalculateFixedFrameSize(CallDescriptor* descriptor) {
    352     if (descriptor->IsJSFunctionCall()) {
    353       return StandardFrameConstants::kFixedSlotCount;
    354     }
    355     return descriptor->IsCFunctionCall()
    356                ? (CommonFrameConstants::kFixedSlotCountAboveFp +
    357                   CommonFrameConstants::kCPSlotCount)
    358                : TypedFrameConstants::kFixedSlotCount;
    359   }
    360 
    361   DISALLOW_COPY_AND_ASSIGN(PipelineData);
    362 };
    363 
// Drives the actual compilation: graph construction, optimization,
// instruction selection, register allocation and code generation, all
// operating on a shared PipelineData.
class PipelineImpl final {
 public:
  explicit PipelineImpl(PipelineData* data) : data_(data) {}

  // Helpers for executing pipeline phases.
  template <typename Phase>
  void Run();
  template <typename Phase, typename Arg0>
  void Run(Arg0 arg_0);
  template <typename Phase, typename Arg0, typename Arg1>
  void Run(Arg0 arg_0, Arg1 arg_1);

  // Run the graph creation and initial optimization passes.
  bool CreateGraph();

  // Run the concurrent optimization passes.
  bool OptimizeGraph(Linkage* linkage);

  // Perform the actual code generation and return handle to a code object.
  Handle<Code> GenerateCode(Linkage* linkage);

  // Schedule the graph and lower to machine instructions.
  bool ScheduleAndSelectInstructions(Linkage* linkage);
  // Dump the graph after |phase| (--trace-turbo) and verify it unless
  // |untyped| graphs are expected.
  void RunPrintAndVerify(const char* phase, bool untyped = false);
  Handle<Code> ScheduleAndGenerateCode(CallDescriptor* call_descriptor);
  void AllocateRegisters(const RegisterConfiguration* config,
                         CallDescriptor* descriptor, bool run_verifier);

  CompilationInfo* info() const;
  Isolate* isolate() const;

  PipelineData* const data_;
};
    396 
    397 namespace {
    398 
// Output stream appending to the per-isolate turbo CFG trace file.
struct TurboCfgFile : public std::ofstream {
  explicit TurboCfgFile(Isolate* isolate)
      : std::ofstream(isolate->GetTurboCfgFileName().c_str(),
                      std::ios_base::app) {}
};
    404 
// Output stream for the JSON visualizer log of this compilation; |mode|
// selects append vs. truncate.
struct TurboJsonFile : public std::ofstream {
  TurboJsonFile(CompilationInfo* info, std::ios_base::openmode mode)
      : std::ofstream(GetVisualizerLogFileName(info, nullptr, "json").get(),
                      mode) {}
};
    410 
    411 void TraceSchedule(CompilationInfo* info, Schedule* schedule) {
    412   if (FLAG_trace_turbo) {
    413     AllowHandleDereference allow_deref;
    414     TurboJsonFile json_of(info, std::ios_base::app);
    415     json_of << "{\"name\":\"Schedule\",\"type\":\"schedule\",\"data\":\"";
    416     std::stringstream schedule_stream;
    417     schedule_stream << *schedule;
    418     std::string schedule_string(schedule_stream.str());
    419     for (const auto& c : schedule_string) {
    420       json_of << AsEscapedUC16ForJSON(c);
    421     }
    422     json_of << "\"},\n";
    423   }
    424   if (FLAG_trace_turbo_graph || FLAG_trace_turbo_scheduler) {
    425     AllowHandleDereference allow_deref;
    426     OFStream os(stdout);
    427     os << "-- Schedule --------------------------------------\n" << *schedule;
    428   }
    429 }
    430 
    431 
// AstGraphBuilder that additionally records a source position for every
// graph node it creates, by wrapping each Visit* method in a
// SourcePositionTable scope for the AST node's position.
class AstGraphBuilderWithPositions final : public AstGraphBuilder {
 public:
  AstGraphBuilderWithPositions(Zone* local_zone, CompilationInfo* info,
                               JSGraph* jsgraph,
                               LoopAssignmentAnalysis* loop_assignment,
                               TypeHintAnalysis* type_hint_analysis,
                               SourcePositionTable* source_positions)
      : AstGraphBuilder(local_zone, info, jsgraph, loop_assignment,
                        type_hint_analysis),
        source_positions_(source_positions),
        start_position_(info->shared_info()->start_position()) {}

  // Builds the graph with the function's start position as the default
  // source position for nodes created outside any Visit* override.
  bool CreateGraph(bool stack_check) {
    SourcePositionTable::Scope pos_scope(source_positions_, start_position_);
    return AstGraphBuilder::CreateGraph(stack_check);
  }

  // Generates one overriding visitor per AST node type that opens a position
  // scope before delegating to the base class visitor.
#define DEF_VISIT(type)                                               \
  void Visit##type(type* node) override {                             \
    SourcePositionTable::Scope pos(source_positions_,                 \
                                   SourcePosition(node->position())); \
    AstGraphBuilder::Visit##type(node);                               \
  }
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

 private:
  SourcePositionTable* const source_positions_;
  SourcePosition const start_position_;
};
    462 
    463 
// Decorates another Reducer so that each reduction runs inside a
// SourcePositionTable scope at the position of the node being reduced;
// nodes created by the wrapped reducer thus inherit that position.
class SourcePositionWrapper final : public Reducer {
 public:
  SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
      : reducer_(reducer), table_(table) {}
  ~SourcePositionWrapper() final {}

  Reduction Reduce(Node* node) final {
    // The scope must be established before delegating to the reducer.
    SourcePosition const pos = table_->GetSourcePosition(node);
    SourcePositionTable::Scope position(table_, pos);
    return reducer_->Reduce(node);
  }

  void Finalize() final { reducer_->Finalize(); }

 private:
  Reducer* const reducer_;
  SourcePositionTable* const table_;

  DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
};
    484 
    485 
// GraphReducer bound to a JSGraph: reduces the JSGraph's graph, using its
// canonical Dead node as the dead-value sentinel.
class JSGraphReducer final : public GraphReducer {
 public:
  JSGraphReducer(JSGraph* jsgraph, Zone* zone)
      : GraphReducer(zone, jsgraph->graph(), jsgraph->Dead()) {}
  ~JSGraphReducer() final {}
};
    492 
    493 
    494 void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
    495                 Reducer* reducer) {
    496   if (data->info()->is_source_positions_enabled()) {
    497     void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
    498     SourcePositionWrapper* const wrapper =
    499         new (buffer) SourcePositionWrapper(reducer, data->source_positions());
    500     graph_reducer->AddReducer(wrapper);
    501   } else {
    502     graph_reducer->AddReducer(reducer);
    503   }
    504 }
    505 
    506 
// RAII helper for one pipeline phase: opens a statistics PhaseScope (skipped
// for unnamed phases) and a temporary zone that lives for the phase only.
class PipelineRunScope {
 public:
  PipelineRunScope(PipelineData* data, const char* phase_name)
      : phase_scope_(
            phase_name == nullptr ? nullptr : data->pipeline_statistics(),
            phase_name),
        zone_scope_(data->zone_pool()) {}

  // Scratch zone for the phase, destroyed when this scope ends.
  Zone* zone() { return zone_scope_.zone(); }

 private:
  PhaseScope phase_scope_;
  ZonePool::Scope zone_scope_;
};
    521 
    522 PipelineStatistics* CreatePipelineStatistics(CompilationInfo* info,
    523                                              ZonePool* zone_pool) {
    524   PipelineStatistics* pipeline_statistics = nullptr;
    525 
    526   if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    527     pipeline_statistics = new PipelineStatistics(info, zone_pool);
    528     pipeline_statistics->BeginPhaseKind("initializing");
    529   }
    530 
    531   if (FLAG_trace_turbo) {
    532     TurboJsonFile json_of(info, std::ios_base::trunc);
    533     Handle<Script> script = info->script();
    534     base::SmartArrayPointer<char> function_name = info->GetDebugName();
    535     int pos = info->shared_info()->start_position();
    536     json_of << "{\"function\":\"" << function_name.get()
    537             << "\", \"sourcePosition\":" << pos << ", \"source\":\"";
    538     Isolate* isolate = info->isolate();
    539     if (!script->IsUndefined(isolate) &&
    540         !script->source()->IsUndefined(isolate)) {
    541       DisallowHeapAllocation no_allocation;
    542       int start = info->shared_info()->start_position();
    543       int len = info->shared_info()->end_position() - start;
    544       String::SubStringRange source(String::cast(script->source()), start, len);
    545       for (const auto& c : source) {
    546         json_of << AsEscapedUC16ForJSON(c);
    547       }
    548     }
    549     json_of << "\",\n\"phases\":[";
    550   }
    551 
    552   return pipeline_statistics;
    553 }
    554 
    555 }  // namespace
    556 
// CompilationJob running the full TurboFan pipeline for a JSFunction.
// Member declaration order below is load-bearing: members are initialized
// in declaration order, and data_ consumes info_ and pipeline_statistics_.
class PipelineCompilationJob final : public CompilationJob {
 public:
  PipelineCompilationJob(Isolate* isolate, Handle<JSFunction> function)
      // Note that the CompilationInfo is not initialized at the time we pass it
      // to the CompilationJob constructor, but it is not dereferenced there.
      : CompilationJob(&info_, "TurboFan"),
        zone_(isolate->allocator()),
        zone_pool_(isolate->allocator()),
        parse_info_(&zone_, function),
        info_(&parse_info_, function),
        pipeline_statistics_(CreatePipelineStatistics(info(), &zone_pool_)),
        data_(&zone_pool_, info(), pipeline_statistics_.get()),
        pipeline_(&data_),
        linkage_(nullptr) {}

 protected:
  Status CreateGraphImpl() final;
  Status OptimizeGraphImpl() final;
  Status GenerateCodeImpl() final;

 private:
  Zone zone_;
  ZonePool zone_pool_;
  ParseInfo parse_info_;
  CompilationInfo info_;
  base::SmartPointer<PipelineStatistics> pipeline_statistics_;
  PipelineData data_;
  PipelineImpl pipeline_;
  // Computed in CreateGraphImpl(), zone-allocated in zone_.
  Linkage* linkage_;
};
    587 
    588 PipelineCompilationJob::Status PipelineCompilationJob::CreateGraphImpl() {
    589   if (info()->shared_info()->asm_function()) {
    590     if (info()->osr_frame()) info()->MarkAsFrameSpecializing();
    591     info()->MarkAsFunctionContextSpecializing();
    592   } else {
    593     if (!FLAG_always_opt) {
    594       info()->MarkAsBailoutOnUninitialized();
    595     }
    596     if (FLAG_native_context_specialization) {
    597       info()->MarkAsNativeContextSpecializing();
    598     }
    599   }
    600   if (!info()->shared_info()->asm_function() || FLAG_turbo_asm_deoptimization) {
    601     info()->MarkAsDeoptimizationEnabled();
    602   }
    603   if (!info()->is_optimizing_from_bytecode()) {
    604     if (info()->is_deoptimization_enabled() && FLAG_turbo_type_feedback) {
    605       info()->MarkAsTypeFeedbackEnabled();
    606     }
    607     if (!Compiler::EnsureDeoptimizationSupport(info())) return FAILED;
    608   }
    609 
    610   linkage_ = new (&zone_) Linkage(Linkage::ComputeIncoming(&zone_, info()));
    611 
    612   if (!pipeline_.CreateGraph()) {
    613     if (isolate()->has_pending_exception()) return FAILED;  // Stack overflowed.
    614     return AbortOptimization(kGraphBuildingFailed);
    615   }
    616 
    617   return SUCCEEDED;
    618 }
    619 
    620 PipelineCompilationJob::Status PipelineCompilationJob::OptimizeGraphImpl() {
    621   if (!pipeline_.OptimizeGraph(linkage_)) return FAILED;
    622   return SUCCEEDED;
    623 }
    624 
    625 PipelineCompilationJob::Status PipelineCompilationJob::GenerateCodeImpl() {
    626   Handle<Code> code = pipeline_.GenerateCode(linkage_);
    627   if (code.is_null()) {
    628     if (info()->bailout_reason() == kNoReason) {
    629       return AbortOptimization(kCodeGenerationFailed);
    630     }
    631     return FAILED;
    632   }
    633   info()->dependencies()->Commit(code);
    634   info()->SetCode(code);
    635   if (info()->is_deoptimization_enabled()) {
    636     info()->context()->native_context()->AddOptimizedCode(*code);
    637     RegisterWeakObjectsInOptimizedCode(code);
    638   }
    639   return SUCCEEDED;
    640 }
    641 
// CompilationJob for WASM: the graph and call descriptor are produced by the
// caller, so this job only runs scheduling, instruction selection, register
// allocation and code generation.
class PipelineWasmCompilationJob final : public CompilationJob {
 public:
  explicit PipelineWasmCompilationJob(CompilationInfo* info, Graph* graph,
                                      CallDescriptor* descriptor,
                                      SourcePositionTable* source_positions)
      : CompilationJob(info, "TurboFan"),
        zone_pool_(info->isolate()->allocator()),
        data_(&zone_pool_, info, graph, source_positions),
        pipeline_(&data_),
        linkage_(descriptor) {}

 protected:
  Status CreateGraphImpl() final;
  Status OptimizeGraphImpl() final;
  Status GenerateCodeImpl() final;

 private:
  ZonePool zone_pool_;
  PipelineData data_;
  PipelineImpl pipeline_;
  Linkage linkage_;
};
    664 
PipelineWasmCompilationJob::Status
PipelineWasmCompilationJob::CreateGraphImpl() {
  // The WASM graph is built by the caller and passed in via the constructor,
  // so there is nothing to do here.
  return SUCCEEDED;
}
    669 
    670 PipelineWasmCompilationJob::Status
    671 PipelineWasmCompilationJob::OptimizeGraphImpl() {
    672   if (FLAG_trace_turbo) {
    673     TurboJsonFile json_of(info(), std::ios_base::trunc);
    674     json_of << "{\"function\":\"" << info()->GetDebugName().get()
    675             << "\", \"source\":\"\",\n\"phases\":[";
    676   }
    677 
    678   pipeline_.RunPrintAndVerify("Machine", true);
    679 
    680   if (!pipeline_.ScheduleAndSelectInstructions(&linkage_)) return FAILED;
    681   return SUCCEEDED;
    682 }
    683 
PipelineWasmCompilationJob::Status
PipelineWasmCompilationJob::GenerateCodeImpl() {
  // NOTE(review): the returned code handle is discarded here — presumably
  // the caller retrieves the result via the CompilationInfo; confirm.
  pipeline_.GenerateCode(&linkage_);
  return SUCCEEDED;
}
    689 
    690 template <typename Phase>
    691 void PipelineImpl::Run() {
    692   PipelineRunScope scope(this->data_, Phase::phase_name());
    693   Phase phase;
    694   phase.Run(this->data_, scope.zone());
    695 }
    696 
    697 template <typename Phase, typename Arg0>
    698 void PipelineImpl::Run(Arg0 arg_0) {
    699   PipelineRunScope scope(this->data_, Phase::phase_name());
    700   Phase phase;
    701   phase.Run(this->data_, scope.zone(), arg_0);
    702 }
    703 
    704 template <typename Phase, typename Arg0, typename Arg1>
    705 void PipelineImpl::Run(Arg0 arg_0, Arg1 arg_1) {
    706   PipelineRunScope scope(this->data_, Phase::phase_name());
    707   Phase phase;
    708   phase.Run(this->data_, scope.zone(), arg_0, arg_1);
    709 }
    710 
    711 struct LoopAssignmentAnalysisPhase {
    712   static const char* phase_name() { return "loop assignment analysis"; }
    713 
    714   void Run(PipelineData* data, Zone* temp_zone) {
    715     if (!data->info()->is_optimizing_from_bytecode()) {
    716       AstLoopAssignmentAnalyzer analyzer(data->graph_zone(), data->info());
    717       LoopAssignmentAnalysis* loop_assignment = analyzer.Analyze();
    718       data->set_loop_assignment(loop_assignment);
    719     }
    720   }
    721 };
    722 
    723 
    724 struct TypeHintAnalysisPhase {
    725   static const char* phase_name() { return "type hint analysis"; }
    726 
    727   void Run(PipelineData* data, Zone* temp_zone) {
    728     if (data->info()->is_type_feedback_enabled()) {
    729       TypeHintAnalyzer analyzer(data->graph_zone());
    730       Handle<Code> code(data->info()->shared_info()->code(), data->isolate());
    731       TypeHintAnalysis* type_hint_analysis = analyzer.Analyze(code);
    732       data->set_type_hint_analysis(type_hint_analysis);
    733     }
    734   }
    735 };
    736 
    737 
    738 struct GraphBuilderPhase {
    739   static const char* phase_name() { return "graph builder"; }
    740 
    741   void Run(PipelineData* data, Zone* temp_zone) {
    742     bool stack_check = !data->info()->IsStub();
    743     bool succeeded = false;
    744 
    745     if (data->info()->is_optimizing_from_bytecode()) {
    746       BytecodeGraphBuilder graph_builder(temp_zone, data->info(),
    747                                          data->jsgraph());
    748       succeeded = graph_builder.CreateGraph();
    749     } else {
    750       AstGraphBuilderWithPositions graph_builder(
    751           temp_zone, data->info(), data->jsgraph(), data->loop_assignment(),
    752           data->type_hint_analysis(), data->source_positions());
    753       succeeded = graph_builder.CreateGraph(stack_check);
    754     }
    755 
    756     if (!succeeded) {
    757       data->set_compilation_failed();
    758     }
    759   }
    760 };
    761 
    762 
struct InliningPhase {
  static const char* phase_name() { return "inlining"; }

  // Performs context specialization and (when enabled) inlining, together
  // with a batch of reducers that clean up the graph as new opportunities
  // are exposed.  All reducers run to a fixpoint via the GraphReducer.
  // NOTE(review): the AddReducer registration order below is presumably
  // significant; preserve it when modifying this phase.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    JSCallReducer call_reducer(data->jsgraph(),
                               data->info()->is_deoptimization_enabled()
                                   ? JSCallReducer::kDeoptimizationEnabled
                                   : JSCallReducer::kNoFlags,
                               data->native_context());
    // Only specialize to the concrete function context when the compilation
    // is flagged for function-context specialization.
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(),
        data->info()->is_function_context_specializing()
            ? data->info()->context()
            : MaybeHandle<Context>());
    JSFrameSpecialization frame_specialization(data->info()->osr_frame(),
                                               data->jsgraph());
    JSGlobalObjectSpecialization global_object_specialization(
        &graph_reducer, data->jsgraph(), data->native_context(),
        data->info()->dependencies());
    // Fold the compilation flags into the native-context specialization
    // configuration.
    JSNativeContextSpecialization::Flags flags =
        JSNativeContextSpecialization::kNoFlags;
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSNativeContextSpecialization::kBailoutOnUninitialized;
    }
    if (data->info()->is_deoptimization_enabled()) {
      flags |= JSNativeContextSpecialization::kDeoptimizationEnabled;
    }
    JSNativeContextSpecialization native_context_specialization(
        &graph_reducer, data->jsgraph(), flags, data->native_context(),
        data->info()->dependencies(), temp_zone);
    JSInliningHeuristic inlining(&graph_reducer,
                                 data->info()->is_inlining_enabled()
                                     ? JSInliningHeuristic::kGeneralInlining
                                     : JSInliningHeuristic::kRestrictedInlining,
                                 temp_zone, data->info(), data->jsgraph());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    if (data->info()->is_frame_specializing()) {
      AddReducer(data, &graph_reducer, &frame_specialization);
    }
    if (data->info()->is_deoptimization_enabled()) {
      AddReducer(data, &graph_reducer, &global_object_specialization);
    }
    AddReducer(data, &graph_reducer, &native_context_specialization);
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &call_reducer);
    // Inlining is only wired up on the AST path here.
    if (!data->info()->is_optimizing_from_bytecode()) {
      AddReducer(data, &graph_reducer, &inlining);
    }
    graph_reducer.ReduceGraph();
  }
};
    820 
    821 
    822 struct TyperPhase {
    823   static const char* phase_name() { return "typer"; }
    824 
    825   void Run(PipelineData* data, Zone* temp_zone, Typer* typer) {
    826     NodeVector roots(temp_zone);
    827     data->jsgraph()->GetCachedNodes(&roots);
    828     typer->Run(roots);
    829   }
    830 };
    831 
    832 #ifdef DEBUG
    833 
struct UntyperPhase {
  static const char* phase_name() { return "untyper"; }

  // Debug-only pass: strips the recorded type from every typed node in the
  // graph, so later passes that illegally inspect types fail fast (see the
  // comment at the call site in CreateGraph).
  void Run(PipelineData* data, Zone* temp_zone) {
    // Local reducer that removes the type of any node that has one.
    class RemoveTypeReducer final : public Reducer {
     public:
      Reduction Reduce(Node* node) final {
        if (NodeProperties::IsTyped(node)) {
          NodeProperties::RemoveType(node);
          return Changed(node);
        }
        return NoChange();
      }
    };

    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    RemoveTypeReducer remove_type_reducer;
    AddReducer(data, &graph_reducer, &remove_type_reducer);
    graph_reducer.ReduceGraph();
  }
};
    855 
    856 #endif  // DEBUG
    857 
    858 struct OsrDeconstructionPhase {
    859   static const char* phase_name() { return "OSR deconstruction"; }
    860 
    861   void Run(PipelineData* data, Zone* temp_zone) {
    862     OsrHelper osr_helper(data->info());
    863     osr_helper.Deconstruct(data->jsgraph(), data->common(), temp_zone);
    864   }
    865 };
    866 
    867 
struct TypedLoweringPhase {
  static const char* phase_name() { return "typed lowering"; }

  // Lowers JS operators whose types are known, running builtin reduction,
  // create lowering, intrinsic lowering and a set of cleanup reducers to a
  // fixpoint.  NOTE(review): the AddReducer registration order below is
  // presumably significant; preserve it when modifying this phase.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    LoadElimination load_elimination(&graph_reducer, data->graph(),
                                     data->jsgraph()->simplified());
    JSBuiltinReducer builtin_reducer(&graph_reducer, data->jsgraph());
    // The closure's literals array is only available (and only used by
    // create lowering) when specializing to the native context.
    MaybeHandle<LiteralsArray> literals_array =
        data->info()->is_native_context_specializing()
            ? handle(data->info()->closure()->literals(), data->isolate())
            : MaybeHandle<LiteralsArray>();
    JSCreateLowering create_lowering(
        &graph_reducer, data->info()->dependencies(), data->jsgraph(),
        literals_array, temp_zone);
    // Fold the compilation flags into the typed-lowering configuration.
    JSTypedLowering::Flags typed_lowering_flags = JSTypedLowering::kNoFlags;
    if (data->info()->is_deoptimization_enabled()) {
      typed_lowering_flags |= JSTypedLowering::kDeoptimizationEnabled;
    }
    if (data->info()->shared_info()->HasBytecodeArray()) {
      typed_lowering_flags |= JSTypedLowering::kDisableBinaryOpReduction;
    }
    if (data->info()->is_type_feedback_enabled()) {
      typed_lowering_flags |= JSTypedLowering::kTypeFeedbackEnabled;
    }
    JSTypedLowering typed_lowering(&graph_reducer, data->info()->dependencies(),
                                   typed_lowering_flags, data->jsgraph(),
                                   temp_zone);
    JSIntrinsicLowering intrinsic_lowering(
        &graph_reducer, data->jsgraph(),
        data->info()->is_deoptimization_enabled()
            ? JSIntrinsicLowering::kDeoptimizationEnabled
            : JSIntrinsicLowering::kDeoptimizationDisabled);
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph());
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &builtin_reducer);
    if (data->info()->is_deoptimization_enabled()) {
      AddReducer(data, &graph_reducer, &create_lowering);
    }
    AddReducer(data, &graph_reducer, &typed_lowering);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    AddReducer(data, &graph_reducer, &load_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};
    921 
    922 
    923 struct BranchEliminationPhase {
    924   static const char* phase_name() { return "branch condition elimination"; }
    925 
    926   void Run(PipelineData* data, Zone* temp_zone) {
    927     JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    928     BranchElimination branch_condition_elimination(&graph_reducer,
    929                                                    data->jsgraph(), temp_zone);
    930     DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
    931                                               data->common());
    932     AddReducer(data, &graph_reducer, &branch_condition_elimination);
    933     AddReducer(data, &graph_reducer, &dead_code_elimination);
    934     graph_reducer.ReduceGraph();
    935   }
    936 };
    937 
    938 
    939 struct EscapeAnalysisPhase {
    940   static const char* phase_name() { return "escape analysis"; }
    941 
    942   void Run(PipelineData* data, Zone* temp_zone) {
    943     EscapeAnalysis escape_analysis(data->graph(), data->jsgraph()->common(),
    944                                    temp_zone);
    945     escape_analysis.Run();
    946     JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    947     EscapeAnalysisReducer escape_reducer(&graph_reducer, data->jsgraph(),
    948                                          &escape_analysis, temp_zone);
    949     AddReducer(data, &graph_reducer, &escape_reducer);
    950     graph_reducer.ReduceGraph();
    951     escape_reducer.VerifyReplacement();
    952   }
    953 };
    954 
    955 struct RepresentationSelectionPhase {
    956   static const char* phase_name() { return "representation selection"; }
    957 
    958   void Run(PipelineData* data, Zone* temp_zone) {
    959     SimplifiedLowering::Flags flags =
    960         data->info()->is_type_feedback_enabled()
    961             ? SimplifiedLowering::kTypeFeedbackEnabled
    962             : SimplifiedLowering::kNoFlag;
    963     SimplifiedLowering lowering(data->jsgraph(), temp_zone,
    964                                 data->source_positions(), flags);
    965     lowering.LowerAllNodes();
    966   }
    967 };
    968 
struct EarlyOptimizationPhase {
  static const char* phase_name() { return "early optimization"; }

  // First optimization pass after representation selection: generic
  // lowering plus a fixpoint over dead-code elimination, redundancy
  // elimination, value numbering and operator reduction.  NOTE(review):
  // the AddReducer registration order below is presumably significant;
  // preserve it when modifying this phase.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    JSGenericLowering generic_lowering(data->jsgraph());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph());
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    ValueNumberingReducer value_numbering(temp_zone);
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &generic_lowering);
    AddReducer(data, &graph_reducer, &value_numbering);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};
    993 
    994 struct ControlFlowOptimizationPhase {
    995   static const char* phase_name() { return "control flow optimization"; }
    996 
    997   void Run(PipelineData* data, Zone* temp_zone) {
    998     ControlFlowOptimizer optimizer(data->graph(), data->common(),
    999                                    data->machine(), temp_zone);
   1000     optimizer.Optimize();
   1001   }
   1002 };
   1003 
struct EffectControlLinearizationPhase {
  static const char* phase_name() { return "effect linearization"; }

  // Computes a node-splitting-free schedule and then wires effect/control
  // chains for nodes with low-level side effects via the
  // EffectControlLinearizer.
  void Run(PipelineData* data, Zone* temp_zone) {
    // The scheduler requires the graphs to be trimmed, so trim now.
    // TODO(jarin) Remove the trimming once the scheduler can handle untrimmed
    // graphs.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    // Schedule the graph without node splitting so that we can
    // fix the effect and control flow for nodes with low-level side
    // effects (such as changing representation to tagged or
    // 'floating' allocation regions.)
    Schedule* schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
                                                    Scheduler::kNoFlags);
    if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
    TraceSchedule(data->info(), schedule);

    // Post-pass for wiring the control/effects
    // - connect allocating representation changes into the control&effect
    //   chains and lower them,
    // - get rid of the region markers,
    // - introduce effect phis and rewire effects to get SSA again.
    EffectControlLinearizer linearizer(data->jsgraph(), schedule, temp_zone);
    linearizer.Run();
  }
};
   1034 
   1035 struct StoreStoreEliminationPhase {
   1036   static const char* phase_name() { return "Store-store elimination"; }
   1037 
   1038   void Run(PipelineData* data, Zone* temp_zone) {
   1039     StoreStoreElimination store_store_elimination(data->jsgraph(), temp_zone);
   1040     store_store_elimination.Run();
   1041   }
   1042 };
   1043 
   1044 struct MemoryOptimizationPhase {
   1045   static const char* phase_name() { return "memory optimization"; }
   1046 
   1047   void Run(PipelineData* data, Zone* temp_zone) {
   1048     // The memory optimizer requires the graphs to be trimmed, so trim now.
   1049     GraphTrimmer trimmer(temp_zone, data->graph());
   1050     NodeVector roots(temp_zone);
   1051     data->jsgraph()->GetCachedNodes(&roots);
   1052     trimmer.TrimGraph(roots.begin(), roots.end());
   1053 
   1054     // Optimize allocations and load/store operations.
   1055     MemoryOptimizer optimizer(data->jsgraph(), temp_zone);
   1056     optimizer.Optimize();
   1057   }
   1058 };
   1059 
struct LateOptimizationPhase {
  static const char* phase_name() { return "late optimization"; }

  // Final reducer fixpoint before scheduling: lowers changes inserted by
  // earlier phases, lowers selects, and optimizes tail calls.
  // NOTE(review): the AddReducer registration order below is presumably
  // significant; preserve it when modifying this phase.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    ValueNumberingReducer value_numbering(temp_zone);
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    SelectLowering select_lowering(data->jsgraph()->graph(),
                                   data->jsgraph()->common());
    TailCallOptimization tco(data->common(), data->graph());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &value_numbering);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &select_lowering);
    AddReducer(data, &graph_reducer, &tco);
    graph_reducer.ReduceGraph();
  }
};
   1083 
   1084 struct EarlyGraphTrimmingPhase {
   1085   static const char* phase_name() { return "early graph trimming"; }
   1086   void Run(PipelineData* data, Zone* temp_zone) {
   1087     GraphTrimmer trimmer(temp_zone, data->graph());
   1088     NodeVector roots(temp_zone);
   1089     data->jsgraph()->GetCachedNodes(&roots);
   1090     trimmer.TrimGraph(roots.begin(), roots.end());
   1091   }
   1092 };
   1093 
   1094 
   1095 struct LateGraphTrimmingPhase {
   1096   static const char* phase_name() { return "late graph trimming"; }
   1097   void Run(PipelineData* data, Zone* temp_zone) {
   1098     GraphTrimmer trimmer(temp_zone, data->graph());
   1099     NodeVector roots(temp_zone);
   1100     data->jsgraph()->GetCachedNodes(&roots);
   1101     trimmer.TrimGraph(roots.begin(), roots.end());
   1102   }
   1103 };
   1104 
   1105 
   1106 struct StressLoopPeelingPhase {
   1107   static const char* phase_name() { return "stress loop peeling"; }
   1108 
   1109   void Run(PipelineData* data, Zone* temp_zone) {
   1110     // Peel the first outer loop for testing.
   1111     // TODO(titzer): peel all loops? the N'th loop? Innermost loops?
   1112     LoopTree* loop_tree = LoopFinder::BuildLoopTree(data->graph(), temp_zone);
   1113     if (loop_tree != nullptr && loop_tree->outer_loops().size() > 0) {
   1114       LoopPeeler::Peel(data->graph(), data->common(), loop_tree,
   1115                        loop_tree->outer_loops()[0], temp_zone);
   1116     }
   1117   }
   1118 };
   1119 
   1120 
   1121 struct ComputeSchedulePhase {
   1122   static const char* phase_name() { return "scheduling"; }
   1123 
   1124   void Run(PipelineData* data, Zone* temp_zone) {
   1125     Schedule* schedule = Scheduler::ComputeSchedule(
   1126         temp_zone, data->graph(), data->info()->is_splitting_enabled()
   1127                                       ? Scheduler::kSplitNodes
   1128                                       : Scheduler::kNoFlags);
   1129     if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
   1130     data->set_schedule(schedule);
   1131   }
   1132 };
   1133 
   1134 
   1135 struct InstructionSelectionPhase {
   1136   static const char* phase_name() { return "select instructions"; }
   1137 
   1138   void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
   1139     InstructionSelector selector(
   1140         temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
   1141         data->schedule(), data->source_positions(), data->frame(),
   1142         data->info()->is_source_positions_enabled()
   1143             ? InstructionSelector::kAllSourcePositions
   1144             : InstructionSelector::kCallSourcePositions);
   1145     selector.SelectInstructions();
   1146   }
   1147 };
   1148 
   1149 
   1150 struct MeetRegisterConstraintsPhase {
   1151   static const char* phase_name() { return "meet register constraints"; }
   1152 
   1153   void Run(PipelineData* data, Zone* temp_zone) {
   1154     ConstraintBuilder builder(data->register_allocation_data());
   1155     builder.MeetRegisterConstraints();
   1156   }
   1157 };
   1158 
   1159 
   1160 struct ResolvePhisPhase {
   1161   static const char* phase_name() { return "resolve phis"; }
   1162 
   1163   void Run(PipelineData* data, Zone* temp_zone) {
   1164     ConstraintBuilder builder(data->register_allocation_data());
   1165     builder.ResolvePhis();
   1166   }
   1167 };
   1168 
   1169 
   1170 struct BuildLiveRangesPhase {
   1171   static const char* phase_name() { return "build live ranges"; }
   1172 
   1173   void Run(PipelineData* data, Zone* temp_zone) {
   1174     LiveRangeBuilder builder(data->register_allocation_data(), temp_zone);
   1175     builder.BuildLiveRanges();
   1176   }
   1177 };
   1178 
   1179 
   1180 struct SplinterLiveRangesPhase {
   1181   static const char* phase_name() { return "splinter live ranges"; }
   1182 
   1183   void Run(PipelineData* data, Zone* temp_zone) {
   1184     LiveRangeSeparator live_range_splinterer(data->register_allocation_data(),
   1185                                              temp_zone);
   1186     live_range_splinterer.Splinter();
   1187   }
   1188 };
   1189 
   1190 
   1191 template <typename RegAllocator>
   1192 struct AllocateGeneralRegistersPhase {
   1193   static const char* phase_name() { return "allocate general registers"; }
   1194 
   1195   void Run(PipelineData* data, Zone* temp_zone) {
   1196     RegAllocator allocator(data->register_allocation_data(), GENERAL_REGISTERS,
   1197                            temp_zone);
   1198     allocator.AllocateRegisters();
   1199   }
   1200 };
   1201 
   1202 template <typename RegAllocator>
   1203 struct AllocateFPRegistersPhase {
   1204   static const char* phase_name() {
   1205     return "allocate floating point registers";
   1206   }
   1207 
   1208   void Run(PipelineData* data, Zone* temp_zone) {
   1209     RegAllocator allocator(data->register_allocation_data(), FP_REGISTERS,
   1210                            temp_zone);
   1211     allocator.AllocateRegisters();
   1212   }
   1213 };
   1214 
   1215 
   1216 struct MergeSplintersPhase {
   1217   static const char* phase_name() { return "merge splintered ranges"; }
   1218   void Run(PipelineData* pipeline_data, Zone* temp_zone) {
   1219     RegisterAllocationData* data = pipeline_data->register_allocation_data();
   1220     LiveRangeMerger live_range_merger(data, temp_zone);
   1221     live_range_merger.Merge();
   1222   }
   1223 };
   1224 
   1225 
   1226 struct LocateSpillSlotsPhase {
   1227   static const char* phase_name() { return "locate spill slots"; }
   1228 
   1229   void Run(PipelineData* data, Zone* temp_zone) {
   1230     SpillSlotLocator locator(data->register_allocation_data());
   1231     locator.LocateSpillSlots();
   1232   }
   1233 };
   1234 
   1235 
   1236 struct AssignSpillSlotsPhase {
   1237   static const char* phase_name() { return "assign spill slots"; }
   1238 
   1239   void Run(PipelineData* data, Zone* temp_zone) {
   1240     OperandAssigner assigner(data->register_allocation_data());
   1241     assigner.AssignSpillSlots();
   1242   }
   1243 };
   1244 
   1245 
   1246 struct CommitAssignmentPhase {
   1247   static const char* phase_name() { return "commit assignment"; }
   1248 
   1249   void Run(PipelineData* data, Zone* temp_zone) {
   1250     OperandAssigner assigner(data->register_allocation_data());
   1251     assigner.CommitAssignment();
   1252   }
   1253 };
   1254 
   1255 
   1256 struct PopulateReferenceMapsPhase {
   1257   static const char* phase_name() { return "populate pointer maps"; }
   1258 
   1259   void Run(PipelineData* data, Zone* temp_zone) {
   1260     ReferenceMapPopulator populator(data->register_allocation_data());
   1261     populator.PopulateReferenceMaps();
   1262   }
   1263 };
   1264 
   1265 
   1266 struct ConnectRangesPhase {
   1267   static const char* phase_name() { return "connect ranges"; }
   1268 
   1269   void Run(PipelineData* data, Zone* temp_zone) {
   1270     LiveRangeConnector connector(data->register_allocation_data());
   1271     connector.ConnectRanges(temp_zone);
   1272   }
   1273 };
   1274 
   1275 
   1276 struct ResolveControlFlowPhase {
   1277   static const char* phase_name() { return "resolve control flow"; }
   1278 
   1279   void Run(PipelineData* data, Zone* temp_zone) {
   1280     LiveRangeConnector connector(data->register_allocation_data());
   1281     connector.ResolveControlFlow(temp_zone);
   1282   }
   1283 };
   1284 
   1285 
   1286 struct OptimizeMovesPhase {
   1287   static const char* phase_name() { return "optimize moves"; }
   1288 
   1289   void Run(PipelineData* data, Zone* temp_zone) {
   1290     MoveOptimizer move_optimizer(temp_zone, data->sequence());
   1291     move_optimizer.Run();
   1292   }
   1293 };
   1294 
   1295 
   1296 struct FrameElisionPhase {
   1297   static const char* phase_name() { return "frame elision"; }
   1298 
   1299   void Run(PipelineData* data, Zone* temp_zone) {
   1300     FrameElider(data->sequence()).Run();
   1301   }
   1302 };
   1303 
   1304 
   1305 struct JumpThreadingPhase {
   1306   static const char* phase_name() { return "jump threading"; }
   1307 
   1308   void Run(PipelineData* data, Zone* temp_zone, bool frame_at_start) {
   1309     ZoneVector<RpoNumber> result(temp_zone);
   1310     if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence(),
   1311                                          frame_at_start)) {
   1312       JumpThreading::ApplyForwarding(result, data->sequence());
   1313     }
   1314   }
   1315 };
   1316 
   1317 
   1318 struct GenerateCodePhase {
   1319   static const char* phase_name() { return "generate code"; }
   1320 
   1321   void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
   1322     CodeGenerator generator(data->frame(), linkage, data->sequence(),
   1323                             data->info());
   1324     data->set_code(generator.GenerateCode());
   1325   }
   1326 };
   1327 
   1328 
struct PrintGraphPhase {
  // Not a timed pipeline phase; runs under RunPrintAndVerify only.
  static const char* phase_name() { return nullptr; }

  // Appends a JSON dump of the graph (labelled |phase|) to the TurboFan
  // trace file, and, under --trace-turbo-graph, also prints a textual RPO
  // dump to stdout.
  void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
    CompilationInfo* info = data->info();
    Graph* graph = data->graph();

    {  // Print JSON.
      AllowHandleDereference allow_deref;
      TurboJsonFile json_of(info, std::ios_base::app);
      json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
              << AsJSON(*graph, data->source_positions()) << "},\n";
    }

    if (FLAG_trace_turbo_graph) {  // Simple textual RPO.
      AllowHandleDereference allow_deref;
      OFStream os(stdout);
      os << "-- Graph after " << phase << " -- " << std::endl;
      os << AsRPO(*graph);
    }
  }
};
   1351 
   1352 
   1353 struct VerifyGraphPhase {
   1354   static const char* phase_name() { return nullptr; }
   1355 
   1356   void Run(PipelineData* data, Zone* temp_zone, const bool untyped,
   1357            bool values_only = false) {
   1358     Verifier::Run(data->graph(), !untyped ? Verifier::TYPED : Verifier::UNTYPED,
   1359                   values_only ? Verifier::kValuesOnly : Verifier::kAll);
   1360   }
   1361 };
   1362 
   1363 void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) {
   1364   if (FLAG_trace_turbo) {
   1365     Run<PrintGraphPhase>(phase);
   1366   }
   1367   if (FLAG_turbo_verify) {
   1368     Run<VerifyGraphPhase>(untyped);
   1369   }
   1370 }
   1371 
// Drives graph creation and the type-sensitive part of the pipeline:
// analysis, graph building, inlining, typing, typed lowering,
// representation selection and early optimization.  Returns false when
// graph building fails; the phase order here is fixed.
bool PipelineImpl::CreateGraph() {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("graph creation");

  if (FLAG_trace_turbo) {
    OFStream os(stdout);
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
    TurboCfgFile tcf(isolate());
    tcf << AsC1VCompilation(info());
  }

  // Decorate new nodes with source positions for the rest of graph
  // creation; removed again in OptimizeGraph.
  data->source_positions()->AddDecorator();

  if (FLAG_loop_assignment_analysis) {
    Run<LoopAssignmentAnalysisPhase>();
  }

  Run<TypeHintAnalysisPhase>();

  Run<GraphBuilderPhase>();
  if (data->compilation_failed()) {
    data->EndPhaseKind();
    return false;
  }
  RunPrintAndVerify("Initial untyped", true);

  // Perform OSR deconstruction.
  if (info()->is_osr()) {
    Run<OsrDeconstructionPhase>();
    RunPrintAndVerify("OSR deconstruction", true);
  }

  // Perform function context specialization and inlining (if enabled).
  Run<InliningPhase>();
  RunPrintAndVerify("Inlined", true);

  // Remove dead->live edges from the graph.
  Run<EarlyGraphTrimmingPhase>();
  RunPrintAndVerify("Early trimmed", true);

  if (FLAG_print_turbo_replay) {
    // Print a replay of the initial graph.
    GraphReplayPrinter::PrintReplay(data->graph());
  }

  // Run the type-sensitive lowerings and optimizations on the graph.
  {
    // Type the graph and keep the Typer running on newly created nodes within
    // this scope; the Typer is automatically unlinked from the Graph once we
    // leave this scope below.
    Typer typer(isolate(), data->graph(), info()->is_deoptimization_enabled()
                                              ? Typer::kDeoptimizationEnabled
                                              : Typer::kNoFlags,
                info()->dependencies());
    Run<TyperPhase>(&typer);
    RunPrintAndVerify("Typed");

    data->BeginPhaseKind("lowering");

    // Lower JSOperators where we can determine types.
    Run<TypedLoweringPhase>();
    RunPrintAndVerify("Lowered typed");

    if (FLAG_turbo_stress_loop_peeling) {
      Run<StressLoopPeelingPhase>();
      RunPrintAndVerify("Loop peeled");
    }

    if (FLAG_turbo_escape) {
      Run<EscapeAnalysisPhase>();
      RunPrintAndVerify("Escape Analysed");
    }

    // Select representations.
    Run<RepresentationSelectionPhase>();
    RunPrintAndVerify("Representations selected", true);
  }

#ifdef DEBUG
  // From now on it is invalid to look at types on the nodes, because:
  //
  //  (a) The remaining passes (might) run concurrent to the main thread and
  //      therefore must not access the Heap or the Isolate in an uncontrolled
  //      way (as done by the type system), and
  //  (b) the types on the nodes might not make sense after representation
  //      selection due to the way we handle truncations; if we'd want to look
  //      at types afterwards we'd essentially need to re-type (large portions
  //      of) the graph.
  //
  // In order to catch bugs related to type access after this point we remove
  // the types from the nodes at this point (currently only in Debug builds).
  Run<UntyperPhase>();
  RunPrintAndVerify("Untyped", true);
#endif

  // Run early optimization pass.
  Run<EarlyOptimizationPhase>();
  RunPrintAndVerify("Early optimized", true);

  data->EndPhaseKind();

  return true;
}
   1478 
// Runs the late (machine-level) optimization phases over the graph and then
// proceeds to scheduling and instruction selection. Returns false if a later
// stage fails (propagated from ScheduleAndSelectInstructions).
bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("block building");

  // Linearize the graph's effect and control dependencies before the
  // block-level phases below.
  Run<EffectControlLinearizationPhase>();
  RunPrintAndVerify("Effect and control linearized", true);

  if (FLAG_turbo_store_elimination) {
    Run<StoreStoreEliminationPhase>();
    RunPrintAndVerify("Store-store elimination", true);
  }

  Run<BranchEliminationPhase>();
  RunPrintAndVerify("Branch conditions eliminated", true);

  // Optimize control flow.
  if (FLAG_turbo_cf_optimization) {
    Run<ControlFlowOptimizationPhase>();
    RunPrintAndVerify("Control flow optimized", true);
  }

  // Optimize memory access and allocation operations.
  Run<MemoryOptimizationPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Memory optimized", true);

  // Lower changes that have been inserted before.
  Run<LateOptimizationPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Late optimized", true);

  Run<LateGraphTrimmingPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Late trimmed", true);

  // Source positions are no longer recorded on new nodes past this point.
  data->source_positions()->RemoveDecorator();

  return ScheduleAndSelectInstructions(linkage);
}
   1519 
// Compiles a code stub from a caller-provided machine-level graph and
// schedule: only verification, instruction selection, register allocation
// and final code generation run here (no JS-level phases).
Handle<Code> Pipeline::GenerateCodeForCodeStub(Isolate* isolate,
                                               CallDescriptor* call_descriptor,
                                               Graph* graph, Schedule* schedule,
                                               Code::Flags flags,
                                               const char* debug_name) {
  CompilationInfo info(CStrVector(debug_name), isolate, graph->zone(), flags);

  // Construct a pipeline for scheduling and code generation.
  ZonePool zone_pool(isolate->allocator());
  PipelineData data(&zone_pool, &info, graph, schedule);
  base::SmartPointer<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.Reset(new PipelineStatistics(&info, &zone_pool));
    pipeline_statistics->BeginPhaseKind("stub codegen");
  }

  PipelineImpl pipeline(&data);
  DCHECK_NOT_NULL(data.schedule());  // Stubs must arrive with a schedule.

  if (FLAG_trace_turbo) {
    {
      // Scoped so the JSON file is flushed/closed before the graph phase
      // appends to it.
      TurboJsonFile json_of(&info, std::ios_base::trunc);
      json_of << "{\"function\":\"" << info.GetDebugName().get()
              << "\", \"source\":\"\",\n\"phases\":[";
    }
    pipeline.Run<PrintGraphPhase>("Machine");
  }

  pipeline.Run<VerifyGraphPhase>(false, true);
  return pipeline.ScheduleAndGenerateCode(call_descriptor);
}
   1551 
// static
// Test-only entry point running the complete pipeline (graph building,
// optimization, code generation) for the given compilation info. Returns a
// null handle if any stage fails.
Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info) {
  ZonePool zone_pool(info->isolate()->allocator());
  base::SmartPointer<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(info, &zone_pool));
  PipelineData data(&zone_pool, info, pipeline_statistics.get());
  PipelineImpl pipeline(&data);

  Linkage linkage(Linkage::ComputeIncoming(data.instruction_zone(), info));

  // Bail out with a null handle as soon as any stage reports failure.
  if (!pipeline.CreateGraph()) return Handle<Code>::null();
  if (!pipeline.OptimizeGraph(&linkage)) return Handle<Code>::null();
  return pipeline.GenerateCode(&linkage);
}
   1566 
   1567 // static
   1568 Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
   1569                                               Graph* graph,
   1570                                               Schedule* schedule) {
   1571   CallDescriptor* call_descriptor =
   1572       Linkage::ComputeIncoming(info->zone(), info);
   1573   return GenerateCodeForTesting(info, call_descriptor, graph, schedule);
   1574 }
   1575 
// static
// Test-only entry point for a caller-provided machine-level graph. A
// pre-computed schedule may be passed; if it is null, one is computed
// during ScheduleAndGenerateCode.
Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
                                              CallDescriptor* call_descriptor,
                                              Graph* graph,
                                              Schedule* schedule) {
  // Construct a pipeline for scheduling and code generation.
  ZonePool zone_pool(info->isolate()->allocator());
  PipelineData data(&zone_pool, info, graph, schedule);
  base::SmartPointer<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.Reset(new PipelineStatistics(info, &zone_pool));
    pipeline_statistics->BeginPhaseKind("test codegen");
  }

  PipelineImpl pipeline(&data);

  if (FLAG_trace_turbo) {
    // Start a fresh JSON trace file for this compilation.
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }
  // TODO(rossberg): Should this really be untyped?
  pipeline.RunPrintAndVerify("Machine", true);

  return pipeline.ScheduleAndGenerateCode(call_descriptor);
}
   1602 
   1603 // static
   1604 CompilationJob* Pipeline::NewCompilationJob(Handle<JSFunction> function) {
   1605   return new PipelineCompilationJob(function->GetIsolate(), function);
   1606 }
   1607 
   1608 // static
   1609 CompilationJob* Pipeline::NewWasmCompilationJob(
   1610     CompilationInfo* info, Graph* graph, CallDescriptor* descriptor,
   1611     SourcePositionTable* source_positions) {
   1612   return new PipelineWasmCompilationJob(info, graph, descriptor,
   1613                                         source_positions);
   1614 }
   1615 
   1616 bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
   1617                                            InstructionSequence* sequence,
   1618                                            bool run_verifier) {
   1619   CompilationInfo info(ArrayVector("testing"), sequence->isolate(),
   1620                        sequence->zone());
   1621   ZonePool zone_pool(sequence->isolate()->allocator());
   1622   PipelineData data(&zone_pool, &info, sequence);
   1623   PipelineImpl pipeline(&data);
   1624   pipeline.data_->InitializeFrameData(nullptr);
   1625   pipeline.AllocateRegisters(config, nullptr, run_verifier);
   1626   return !data.compilation_failed();
   1627 }
   1628 
// Schedules the graph (unless a schedule was supplied), selects instructions,
// frees the graph zone, and runs register allocation plus late sequence-level
// phases. Returns false when register allocation fails.
bool PipelineImpl::ScheduleAndSelectInstructions(Linkage* linkage) {
  CallDescriptor* call_descriptor = linkage->GetIncomingDescriptor();
  PipelineData* data = this->data_;

  DCHECK_NOT_NULL(data->graph());

  // Stub pipelines arrive with a ready-made schedule; compute one otherwise.
  if (data->schedule() == nullptr) Run<ComputeSchedulePhase>();
  TraceSchedule(data->info(), data->schedule());

  if (FLAG_turbo_profiling) {
    // Instrument basic blocks for the basic-block profiler.
    data->set_profiler_data(BasicBlockInstrumentor::Instrument(
        info(), data->graph(), data->schedule()));
  }

  data->InitializeInstructionSequence(call_descriptor);

  data->InitializeFrameData(call_descriptor);
  // Select and schedule instructions covering the scheduled graph.
  Run<InstructionSelectionPhase>(linkage);

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    AllowHandleDereference allow_deref;
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }

  if (FLAG_trace_turbo) {
    std::ostringstream source_position_output;
    // Output source position information before the graph is deleted.
    data_->source_positions()->Print(source_position_output);
    data_->set_source_position_output(source_position_output.str());
  }

  // The graph is no longer needed beyond this point; release its memory.
  data->DeleteGraphZone();

  data->BeginPhaseKind("register allocation");

  bool run_verifier = FLAG_turbo_verify_allocation;

  // Allocate registers.
  AllocateRegisters(RegisterConfiguration::Turbofan(), call_descriptor,
                    run_verifier);
  Run<FrameElisionPhase>();
  if (data->compilation_failed()) {
    info()->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    data->EndPhaseKind();
    return false;
  }

  // TODO(mtrofin): move this off to the register allocator.
  bool generate_frame_at_start =
      data_->sequence()->instruction_blocks().front()->must_construct_frame();
  // Optimize jumps.
  if (FLAG_turbo_jt) {
    Run<JumpThreadingPhase>(generate_frame_at_start);
  }

  data->EndPhaseKind();

  return true;
}
   1691 
// Produces the final Code object from the fully-allocated instruction
// sequence and, depending on flags, emits disassembly to the profiler data
// and the JSON trace file.
Handle<Code> PipelineImpl::GenerateCode(Linkage* linkage) {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("code generation");

  // Generate final machine code.
  Run<GenerateCodePhase>(linkage);

  Handle<Code> code = data->code();
  if (data->profiler_data()) {
#if ENABLE_DISASSEMBLER
    // Attach the disassembly text to the basic-block profiler data.
    std::ostringstream os;
    code->Disassemble(nullptr, os);
    data->profiler_data()->SetCode(&os);
#endif
  }

  info()->SetCode(code);
  v8::internal::CodeGenerator::PrintCode(code, info());

  if (FLAG_trace_turbo) {
    // Append the disassembly and collected source positions to the JSON
    // trace file, closing the top-level object started earlier.
    TurboJsonFile json_of(info(), std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#if ENABLE_DISASSEMBLER
    std::stringstream disassembly_stream;
    code->Disassemble(nullptr, disassembly_stream);
    std::string disassembly_string(disassembly_stream.str());
    for (const auto& c : disassembly_string) {
      json_of << AsEscapedUC16ForJSON(c);  // Escape each char for JSON.
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n],\n";
    json_of << "\"nodePositions\":";
    json_of << data->source_position_output();
    json_of << "}";

    OFStream os(stdout);
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  return code;
}
   1736 
   1737 Handle<Code> PipelineImpl::ScheduleAndGenerateCode(
   1738     CallDescriptor* call_descriptor) {
   1739   Linkage linkage(call_descriptor);
   1740 
   1741   // Schedule the graph, perform instruction selection and register allocation.
   1742   if (!ScheduleAndSelectInstructions(&linkage)) return Handle<Code>();
   1743 
   1744   // Generate the final machine code.
   1745   return GenerateCode(&linkage);
   1746 }
   1747 
// Runs the complete register allocation pipeline over the instruction
// sequence: register constraints, live-range construction, linear-scan
// allocation for general and FP registers, spill-slot assignment and
// control-flow resolution. If |run_verifier| is set, an allocation verifier
// checks the result. On failure the pipeline data records compilation_failed.
void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
                                     CallDescriptor* descriptor,
                                     bool run_verifier) {
  PipelineData* data = this->data_;
  // Don't track usage for this zone in compiler stats.
  base::SmartPointer<Zone> verifier_zone;
  RegisterAllocatorVerifier* verifier = nullptr;
  if (run_verifier) {
    verifier_zone.Reset(new Zone(isolate()->allocator()));
    verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
        verifier_zone.get(), config, data->sequence());
  }

#ifdef DEBUG
  // Check structural invariants of the sequence before allocation begins.
  data_->sequence()->ValidateEdgeSplitForm();
  data_->sequence()->ValidateDeferredBlockEntryPaths();
  data_->sequence()->ValidateDeferredBlockExitPaths();
#endif

  data->InitializeRegisterAllocationData(config, descriptor);
  if (info()->is_osr()) {
    // On-stack replacement requires special frame setup.
    OsrHelper osr_helper(info());
    osr_helper.SetupFrame(data->frame());
  }

  Run<MeetRegisterConstraintsPhase>();
  Run<ResolvePhisPhase>();
  Run<BuildLiveRangesPhase>();
  if (FLAG_trace_turbo_graph) {
    AllowHandleDereference allow_deref;
    OFStream os(stdout);
    os << "----- Instruction sequence before register allocation -----\n"
       << PrintableInstructionSequence({config, data->sequence()});
  }
  if (verifier != nullptr) {
    // Pre-allocation sanity checks on the constructed live ranges.
    CHECK(!data->register_allocation_data()->ExistsUseWithoutDefinition());
    CHECK(data->register_allocation_data()
              ->RangesDefinedInDeferredStayInDeferred());
  }

  if (FLAG_turbo_preprocess_ranges) {
    // Splinter live ranges before allocation; merged back below.
    Run<SplinterLiveRangesPhase>();
  }

  Run<AllocateGeneralRegistersPhase<LinearScanAllocator>>();
  Run<AllocateFPRegistersPhase<LinearScanAllocator>>();

  if (FLAG_turbo_preprocess_ranges) {
    Run<MergeSplintersPhase>();
  }

  Run<AssignSpillSlotsPhase>();

  Run<CommitAssignmentPhase>();
  Run<PopulateReferenceMapsPhase>();
  Run<ConnectRangesPhase>();
  Run<ResolveControlFlowPhase>();
  if (FLAG_turbo_move_optimization) {
    Run<OptimizeMovesPhase>();
  }

  Run<LocateSpillSlotsPhase>();

  if (FLAG_trace_turbo_graph) {
    AllowHandleDereference allow_deref;
    OFStream os(stdout);
    os << "----- Instruction sequence after register allocation -----\n"
       << PrintableInstructionSequence({config, data->sequence()});
  }

  if (verifier != nullptr) {
    verifier->VerifyAssignment();
    verifier->VerifyGapMoves();
  }

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(data->isolate());
    tcf << AsC1VRegisterAllocationData("CodeGen",
                                       data->register_allocation_data());
  }

  data->DeleteRegisterAllocationZone();
}
   1831 
   1832 CompilationInfo* PipelineImpl::info() const { return data_->info(); }
   1833 
   1834 Isolate* PipelineImpl::isolate() const { return info()->isolate(); }
   1835 
   1836 }  // namespace compiler
   1837 }  // namespace internal
   1838 }  // namespace v8
   1839