// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_LITHIUM_CODEGEN_X64_H_
#define V8_X64_LITHIUM_CODEGEN_X64_H_

#include "x64/lithium-x64.h"

#include "checks.h"
#include "deoptimizer.h"
#include "safepoint-table.h"
#include "scopes.h"
#include "v8utils.h"
#include "x64/lithium-gap-resolver-x64.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen BASE_EMBEDDED {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : zone_(info->zone()),
        chunk_(static_cast<LPlatformChunk*>(chunk)),
        masm_(assembler),
        info_(info),
        current_block_(-1),
        current_instruction_(-1),
        instructions_(chunk->instructions()),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        status_(UNUSED),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        last_lazy_deopt_pc_(0),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple),
        old_position_(RelocInfo::kNoPosition) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  // Simple accessors.
  MacroAssembler* masm() const { return masm_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info_->isolate(); }
  Factory* factory() const { return isolate()->factory(); }
  Heap* heap() const { return isolate()->heap(); }
  Zone* zone() const { return zone_; }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

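  // Frame policy note: an eager frame is needed when the code spills to
  // stack slots, calls out in non-deferred code, is not a stub, or
  // explicitly requires a frame; otherwise (stubs only) a frame is built
  // lazily, just around deferred calls.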
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmiConstant(LConstantOperand* op) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;
  bool IsTaggedConstant(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);
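  // Typical driver sequence (a sketch, not the definitive call site):
  //
  //   LCodeGen generator(chunk, &assembler, info);
  //   if (generator.GenerateCode()) {
  //     Handle<Code> code = ...;  // assemble the buffer into a Code object
  //     generator.FinishCode(code);
  //   }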

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredNumberTagU(LNumberTagU* instr);
  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredRandom(LRandom* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
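  // For illustration: LITHIUM_CONCRETE_INSTRUCTION_LIST applies DECLARE_DO
  // once per concrete instruction, so e.g. DECLARE_DO(Goto) declares
  //   void DoGoto(LGoto* node);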

 private:
  enum Status {
    UNUSED,
    GENERATING,
    DONE,
    ABORTED
  };

  bool is_unused() const { return status_ == UNUSED; }
  bool is_generating() const { return status_ == GENERATING; }
  bool is_done() const { return status_ == DONE; }
  bool is_aborted() const { return status_ == ABORTED; }

  StrictModeFlag strict_mode_flag() const {
    return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
  }

  LPlatformChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk()->graph(); }

  int GetNextEmittedBlock() const;

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register scratch);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void Abort(BailoutReason reason);
  void FPRINTF_CHECKING Comment(const char* format, ...);

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Code generation passes.  Each returns true if code generation should
  // continue.
  bool GeneratePrologue();
  bool GenerateBody();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

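  // Selects how the safepoint after a call is recorded: a plain safepoint,
  // or one that also covers the registers saved by a
  // PushSafepointRegistersScope (see RecordSafepointWithRegisters below).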
  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS
  };

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode,
                       int argc);


  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr);

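  // Whether rdi is already known to hold the target function when
  // CallKnownFunction is reached; if it does, the call site can skip
  // reloading the function into rdi.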
  enum RDIState {
    RDI_UNINITIALIZED,
    RDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in rdi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         CallKind call_kind,
                         RDIState rdi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode,
                                    int argc);
  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);
  void ApplyCheckIf(Condition cc, LBoundsCheck* check);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  Operand BuildFastArrayOperand(
      LOperand* elements_pointer,
      LOperand* key,
      ElementsKind elements_kind,
      uint32_t offset,
      uint32_t additional_index = 0);

  void EmitIntegerMathAbs(LMathAbs* instr);
  void EmitSmiMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordPosition(int position);
  void RecordAndUpdatePosition(int position);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(
      Register input,
      XMMRegister result,
      bool allow_undefined_as_nan,
      bool deoptimize_on_minus_zero,
      LEnvironment* env,
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name);
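  // Intended call pattern for the Emit* split helpers above and below
  // (a sketch; labels belong to the branch instruction being compiled):
  //
  //   Condition final_branch = EmitTypeofIs(true_label, false_label,
  //                                         input, type_name);
  //   EmitBranch(instr, final_branch);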

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed);
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);
#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the whole allocated area
  // accessible at arbitrary offsets, we write an arbitrary value to each
  // page in the range rsp + offset - page_size .. rsp in turn.
  void MakeSureStackPagesMapped(int offset);
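  // A minimal sketch of the probing this implies (the page size and the
  // value written are assumptions, not the actual implementation):
  //
  //   for (int p = offset - kPageSize; p > 0; p -= kPageSize) {
  //     __ movl(Operand(rsp, p), Immediate(0));  // touch one word per page
  //   }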
#endif

  Zone* zone_;
  LPlatformChunk* const chunk_;
  MacroAssembler* const masm_;
  CompilationInfo* const info_;

  int current_block_;
  int current_instruction_;
  const ZoneList<LInstruction*>* instructions_;
  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  Status status_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  int last_lazy_deopt_pc_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  int old_position_;

  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
    }

    ~PushSafepointRegistersScope() {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
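  // Intended RAII usage inside deferred code (a sketch; the runtime call
  // shown is illustrative):
  //
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
  //   }  // registers popped, expected safepoint kind back to kSimple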

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
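// A sketch of how deferred code is typically defined in the corresponding
// .cc file (the concrete class and instruction are illustrative):
//
//   class DeferredStackCheck: public LDeferredCode {
//    public:
//     DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
//     virtual LInstruction* instr() { return instr_; }
//    private:
//     LStackCheck* instr_;
//   };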

} }  // namespace v8::internal

#endif  // V8_X64_LITHIUM_CODEGEN_X64_H_