// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_
#define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_

#include "src/arm64/lithium-arm64.h"

#include "src/arm64/lithium-gap-resolver-arm64.h"
#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;
class BranchGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple),
        after_push_argument_(false),
        inlined_arguments_(false) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  ~LCodeGen() {
    DCHECK(!after_push_argument_ || inlined_arguments_);
  }

  // Simple accessors.
  Scope* scope() const { return scope_; }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  Register ToRegister32(LOperand* op) const;
  Operand ToOperand(LOperand* op);
  Operand ToOperand32(LOperand* op);
  enum StackMode { kMustUseFramePointer, kCanUseStackPointer };
  MemOperand ToMemOperand(LOperand* op,
                          StackMode stack_mode = kCanUseStackPointer) const;
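  // Note: with the default kCanUseStackPointer mode the returned operand may
  // be based on jssp; kMustUseFramePointer forces an fp-based operand, which
  // is needed whenever jssp does not point at the end of the stack slots (see
  // after_push_argument_ and inlined_arguments_ below).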
  Handle<Object> ToHandle(LConstantOperand* op) const;

  template <class LI>
  Operand ToShiftedRightOperand32(LOperand* right, LI* shift_info);

  int JSShiftAmountFromLConstant(LOperand* constant) {
    return ToInteger32(LConstantOperand::cast(constant)) & 0x1f;
  }
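  // The 0x1f mask above implements JavaScript shift semantics: shift counts
  // are taken modulo 32, so, for example, "x << 33" is equivalent to
  // "x << 1".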

  // TODO(jbramley): Examine these helpers and check that they make sense.
  // IsInteger32Constant returns true for smi constants, for example.
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;

  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  // Return a double scratch register which can be used locally
  // when generating code for a lithium instruction.
  DoubleRegister double_scratch() { return crankshaft_fp_scratch; }

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredMathAbsTagged(LMathAbsTagged* instr,
                               Label* exit,
                               Label* allocation_entry);

  void DoDeferredNumberTagU(LInstruction* instr,
                            LOperand* value,
                            LOperand* temp1,
                            LOperand* temp2);
  void DoDeferredTaggedToI(LTaggedToI* instr,
                           LOperand* value,
                           LOperand* temp1,
                           LOperand* temp2);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  void DoGap(LGap* instr);

  // Generic version of EmitBranch. It avoids emitting a branch when the
  // target is the next emitted basic block and we can simply fall through.
  // You shouldn't use it directly; prefer one of the helpers such as
  // LCodeGen::EmitBranch or LCodeGen::EmitCompareAndBranch.
  template<class InstrType>
  void EmitBranchGeneric(InstrType instr,
                         const BranchGenerator& branch);

  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);

  template<class InstrType>
  void EmitCompareAndBranch(InstrType instr,
                            Condition condition,
                            const Register& lhs,
                            const Operand& rhs);

  template<class InstrType>
  void EmitTestAndBranch(InstrType instr,
                         Condition condition,
                         const Register& value,
                         uint64_t mask);

  template<class InstrType>
  void EmitBranchIfNonZeroNumber(InstrType instr,
                                 const FPRegister& value,
                                 const FPRegister& scratch);

  template<class InstrType>
  void EmitBranchIfHeapNumber(InstrType instr,
                              const Register& value);

  template<class InstrType>
  void EmitBranchIfRoot(InstrType instr,
                        const Register& value,
                        Heap::RootListIndex index);

  // Emits optimized code to deep-copy the contents of statically known object
  // graphs (e.g. object literal boilerplate). Expects a pointer to the
  // allocated destination object in the result register, and a pointer to the
  // source object in the source register.
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    Register scratch,
                    int* offset,
                    AllocationSiteMode mode);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

  // Emits optimized code for %_IsString(x).  Preserves the input register.
  // Returns the condition on which a final split to the true and false labels
  // should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);

  int DefineDeoptimizationLiteral(Handle<Object> literal);
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  MemOperand BuildSeqStringOperand(Register string,
                                   Register temp,
                                   LOperand* index,
                                   String::Encoding encoding);
  void DeoptimizeBranch(LInstruction* instr, const char* detail,
                        BranchType branch_type, Register reg = NoReg,
                        int bit = -1,
                        Deoptimizer::BailoutType* override_bailout_type = NULL);
  void Deoptimize(LInstruction* instr,
                  Deoptimizer::BailoutType* override_bailout_type = NULL,
                  const char* detail = NULL);
  void DeoptimizeIf(Condition cond, LInstruction* instr,
                    const char* detail = NULL);
  void DeoptimizeIfZero(Register rt, LInstruction* instr,
                        const char* detail = NULL);
  void DeoptimizeIfNotZero(Register rt, LInstruction* instr,
                           const char* detail = NULL);
  void DeoptimizeIfNegative(Register rt, LInstruction* instr,
                            const char* detail = NULL);
  void DeoptimizeIfSmi(Register rt, LInstruction* instr,
                       const char* detail = NULL);
  void DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
                          const char* detail = NULL);
  void DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
                        LInstruction* instr, const char* detail = NULL);
  void DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
                           LInstruction* instr, const char* detail = NULL);
  void DeoptimizeIfNotHeapNumber(Register object, LInstruction* instr);
  void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
                             const char* detail = NULL);
  void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr,
                          const char* detail = NULL);
  void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr,
                            const char* detail = NULL);

  MemOperand PrepareKeyedExternalArrayOperand(Register key,
                                              Register base,
                                              Register scratch,
                                              bool key_is_smi,
                                              bool key_is_constant,
                                              int constant_key,
                                              ElementsKind elements_kind,
                                              int base_offset);
  MemOperand PrepareKeyedArrayOperand(Register base,
                                      Register elements,
                                      Register key,
                                      bool key_is_tagged,
                                      ElementsKind elements_kind,
                                      Representation representation,
                                      int base_offset);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation steps.  Returns true if code generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  // Generate a direct call to a known function.
  // If the function is already loaded into x1 by the caller, function_reg may
  // be set to x1. Otherwise, it must be NoReg, and CallKnownFunction will
  // automatically load it.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         Register function_reg = NoReg);

  // Support for recording safepoint and position information.
  void RecordAndWritePosition(int position) OVERRIDE;
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE;

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table itself is
  // emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  // This flag is true when we are after a push (but before a call). In this
  // situation, jssp no longer references the end of the stack slots, so we
  // can only reference a stack slot via fp.
  bool after_push_argument_;
  // If we have inlined arguments, we can no longer use jssp because jssp is
  // modified and we never know whether we are in a block before or after the
  // pop of the arguments (which restores jssp).
  bool inlined_arguments_;

  int old_position_;

  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      UseScratchRegisterScope temps(codegen_->masm_);
      // Preserve the value of lr which must be saved on the stack (the call to
      // the stub will clobber it).
      Register to_be_pushed_lr =
          temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr());
      codegen_->masm_->Mov(to_be_pushed_lr, lr);
      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
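  // A sketch of typical usage from deferred code (the runtime function named
  // here is illustrative, and "__" is the usual masm-> shorthand from the
  // .cc file):
  //
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     __ CallRuntimeSaveDoubles(Runtime::kSomeRuntimeFunction);
  //     RecordSafepointWithRegisters(
  //         instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
  //   }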

  friend class LDeferredCode;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return (external_exit_ != NULL) ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
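
// A minimal sketch of the usual subclassing pattern, assuming an illustrative
// instruction "LFoo" (the real subclasses are local classes in
// lithium-codegen-arm64.cc):
//
//   class DeferredFoo FINAL : public LDeferredCode {
//    public:
//     DeferredFoo(LCodeGen* codegen, LFoo* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     void Generate() OVERRIDE { codegen()->DoDeferredFoo(instr_); }
//     LInstruction* instr() OVERRIDE { return instr_; }
//    private:
//     LFoo* instr_;
//   };
//
// The fast path branches to entry() to enter the slow case; the deferred code
// jumps back to exit() when it is done.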


// This is the abstract class used by EmitBranchGeneric.
// It is used to emit code for conditional branching. The Emit() function
// emits code to branch when the condition holds, and EmitInverted() emits
// the branch when the inverted condition holds.
//
// For concrete examples, see the implementations in
// lithium-codegen-arm64.cc (e.g. BranchOnCondition, CompareAndBranch).
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen)
    : codegen_(codegen) { }

  virtual ~BranchGenerator() { }

  virtual void Emit(Label* label) const = 0;
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};
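
// A minimal sketch of a concrete generator, along the lines of
// BranchOnCondition in lithium-codegen-arm64.cc (the exact implementation
// there may differ; "__" is the usual masm-> shorthand from the .cc file):
//
//   class BranchOnCondition : public BranchGenerator {
//    public:
//     BranchOnCondition(LCodeGen* codegen, Condition cond)
//         : BranchGenerator(codegen), cond_(cond) { }
//     virtual void Emit(Label* label) const { __ B(cond_, label); }
//     virtual void EmitInverted(Label* label) const {
//       if (cond_ != al) __ B(NegateCondition(cond_), label);
//     }
//    private:
//     Condition cond_;
//   };
//
// EmitBranchGeneric picks Emit() or EmitInverted() depending on which of the
// two target blocks (if either) immediately follows, so at most one branch
// instruction is emitted.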

} }  // namespace v8::internal

#endif  // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_