// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM64_LITHIUM_CODEGEN_ARM64_H_
#define V8_ARM64_LITHIUM_CODEGEN_ARM64_H_

#include "src/arm64/lithium-arm64.h"

#include "src/arm64/lithium-gap-resolver-arm64.h"
#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;
class BranchGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        deopt_jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple),
        after_push_argument_(false),
        inlined_arguments_(false) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  ~LCodeGen() {
    ASSERT(!after_push_argument_ || inlined_arguments_);
  }

  // Simple accessors.
  Scope* scope() const { return scope_; }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;
  Register ToRegister32(LOperand* op) const;
  Operand ToOperand(LOperand* op);
  Operand ToOperand32I(LOperand* op);
  Operand ToOperand32U(LOperand* op);
  enum StackMode { kMustUseFramePointer, kCanUseStackPointer };
  MemOperand ToMemOperand(LOperand* op,
                          StackMode stack_mode = kCanUseStackPointer) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  template<class LI>
  Operand ToShiftedRightOperand32I(LOperand* right,
                                   LI* shift_info) {
    return ToShiftedRightOperand32(right, shift_info, SIGNED_INT32);
  }
  template<class LI>
  Operand ToShiftedRightOperand32U(LOperand* right,
                                   LI* shift_info) {
    return ToShiftedRightOperand32(right, shift_info, UNSIGNED_INT32);
  }
  template<class LI>
  Operand ToShiftedRightOperand32(LOperand* right,
                                  LI* shift_info,
                                  IntegerSignedness signedness);

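  // ECMAScript shift operators (ES5 11.7) use only the low five bits of the
  // shift count, hence the mask with 0x1f below.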
  int JSShiftAmountFromLConstant(LOperand* constant) {
    return ToInteger32(LConstantOperand::cast(constant)) & 0x1f;
  }

  // TODO(jbramley): Examine these helpers and check that they make sense.
  // IsInteger32Constant returns true for smi constants, for example.
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;

  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // Declare methods that deal with the individual node types.
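  // For example, DECLARE_DO(Goto) expands to: void DoGoto(LGoto* node);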
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  // Return a double scratch register which can be used locally
  // when generating code for a lithium instruction.
  DoubleRegister double_scratch() { return crankshaft_fp_scratch; }

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredMathAbsTagged(LMathAbsTagged* instr,
                               Label* exit,
                               Label* allocation_entry);

  void DoDeferredNumberTagU(LInstruction* instr,
                            LOperand* value,
                            LOperand* temp1,
                            LOperand* temp2);
  void DoDeferredTaggedToI(LTaggedToI* instr,
                           LOperand* value,
                           LOperand* temp1,
                           LOperand* temp2);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  Operand ToOperand32(LOperand* op, IntegerSignedness signedness);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  void DoGap(LGap* instr);

  // Generic version of EmitBranch. It contains some code to avoid emitting a
  // branch on the next emitted basic block where we could just fall through.
  // You shouldn't use it directly; instead, use one of the helpers such as
  // LCodeGen::EmitBranch or LCodeGen::EmitCompareAndBranch.
  template<class InstrType>
  void EmitBranchGeneric(InstrType instr,
                         const BranchGenerator& branch);

  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);

  template<class InstrType>
  void EmitCompareAndBranch(InstrType instr,
                            Condition condition,
                            const Register& lhs,
                            const Operand& rhs);

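  // For example, a smi check can be expressed with the test-and-branch helper
  // below (a sketch; see lithium-codegen-arm64.cc for actual uses):
  //   EmitTestAndBranch(instr, eq, value, kSmiTagMask);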
  template<class InstrType>
  void EmitTestAndBranch(InstrType instr,
                         Condition condition,
                         const Register& value,
                         uint64_t mask);

  template<class InstrType>
  void EmitBranchIfNonZeroNumber(InstrType instr,
                                 const FPRegister& value,
                                 const FPRegister& scratch);

  template<class InstrType>
  void EmitBranchIfHeapNumber(InstrType instr,
                              const Register& value);

  template<class InstrType>
  void EmitBranchIfRoot(InstrType instr,
                        const Register& value,
                        Heap::RootListIndex index);

  // Emits optimized code to deep-copy the contents of statically known object
  // graphs (e.g. object literal boilerplate). Expects a pointer to the
  // allocated destination object in the result register, and a pointer to the
  // source object in the source register.
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    Register scratch,
                    int* offset,
                    AllocationSiteMode mode);

  // Emits optimized code for %_IsString(x).  Preserves the input register.
  // Returns the condition on which a final split to the true and false labels
  // should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);

  int DefineDeoptimizationLiteral(Handle<Object> literal);
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  MemOperand BuildSeqStringOperand(Register string,
                                   Register temp,
                                   LOperand* index,
                                   String::Encoding encoding);
  void DeoptimizeBranch(
      LEnvironment* environment,
      BranchType branch_type, Register reg = NoReg, int bit = -1,
      Deoptimizer::BailoutType* override_bailout_type = NULL);
  void Deoptimize(LEnvironment* environment,
                  Deoptimizer::BailoutType* override_bailout_type = NULL);
  void DeoptimizeIf(Condition cond, LEnvironment* environment);
  void DeoptimizeIfZero(Register rt, LEnvironment* environment);
  void DeoptimizeIfNotZero(Register rt, LEnvironment* environment);
  void DeoptimizeIfNegative(Register rt, LEnvironment* environment);
  void DeoptimizeIfSmi(Register rt, LEnvironment* environment);
  void DeoptimizeIfNotSmi(Register rt, LEnvironment* environment);
  void DeoptimizeIfRoot(Register rt,
                        Heap::RootListIndex index,
                        LEnvironment* environment);
  void DeoptimizeIfNotRoot(Register rt,
                           Heap::RootListIndex index,
                           LEnvironment* environment);
  void DeoptimizeIfMinusZero(DoubleRegister input, LEnvironment* environment);
  void DeoptimizeIfBitSet(Register rt, int bit, LEnvironment* environment);
  void DeoptimizeIfBitClear(Register rt, int bit, LEnvironment* environment);

  MemOperand PrepareKeyedExternalArrayOperand(Register key,
                                              Register base,
                                              Register scratch,
                                              bool key_is_smi,
                                              bool key_is_constant,
                                              int constant_key,
                                              ElementsKind elements_kind,
                                              int base_offset);
  MemOperand PrepareKeyedArrayOperand(Register base,
                                      Register elements,
                                      Register key,
                                      bool key_is_tagged,
                                      ElementsKind elements_kind,
                                      Representation representation,
                                      int base_offset);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation steps.  Returns true if code generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  // Generate a direct call to a known function.
  // If the function is already loaded into x1 by the caller, function_reg may
  // be set to x1. Otherwise, it must be NoReg, and CallKnownFunction will
  // automatically load it.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         Register function_reg = NoReg);

  // Support for recording safepoint and position information.
  void RecordAndWritePosition(int position) V8_OVERRIDE;
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
                                              int arguments,
                                              Safepoint::DeoptMode mode);
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry*> deopt_jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table itself is
  // emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  // This flag is true when we are after a push (but before a call).
  // In this situation, jssp no longer references the end of the stack slots,
  // so we can only reference a stack slot via fp.
  bool after_push_argument_;
  // If we have inlined arguments, we are no longer able to use jssp because
  // jssp is modified and we never know whether we are in a block after or
  // before the pop of the arguments (which restores jssp).
  bool inlined_arguments_;

  int old_position_;

  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    PushSafepointRegistersScope(LCodeGen* codegen,
                                Safepoint::Kind kind)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = kind;

      UseScratchRegisterScope temps(codegen_->masm_);
      // Preserve the value of lr which must be saved on the stack (the call to
      // the stub will clobber it).
      Register to_be_pushed_lr =
          temps.UnsafeAcquire(StoreRegistersStateStub::to_be_pushed_lr());
      codegen_->masm_->Mov(to_be_pushed_lr, lr);
      switch (codegen_->expected_safepoint_kind_) {
        case Safepoint::kWithRegisters: {
          StoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs);
          codegen_->masm_->CallStub(&stub);
          break;
        }
        case Safepoint::kWithRegistersAndDoubles: {
          StoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs);
          codegen_->masm_->CallStub(&stub);
          break;
        }
        default:
          UNREACHABLE();
      }
    }

    ~PushSafepointRegistersScope() {
      Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
      ASSERT((kind & Safepoint::kWithRegisters) != 0);
      switch (kind) {
        case Safepoint::kWithRegisters: {
          RestoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs);
          codegen_->masm_->CallStub(&stub);
          break;
        }
        case Safepoint::kWithRegistersAndDoubles: {
          RestoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs);
          codegen_->masm_->CallStub(&stub);
          break;
        }
        default:
          UNREACHABLE();
      }
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
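
  // A minimal usage sketch (hypothetical; for real uses see the deferred-code
  // paths in lithium-codegen-arm64.cc): the scope brackets a call so that
  // register state is stored before the call and restored afterwards.
  //
  //   {
  //     PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  //     // ... emit the call that needs registers recorded ...
  //   }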

  friend class LDeferredCode;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return (external_exit_ != NULL) ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
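
// A minimal sketch of a concrete subclass, following the local-class pattern
// used in lithium-codegen-arm64.cc (the names DeferredExample, LExample and
// DoDeferredExample are hypothetical):
//
//   class DeferredExample V8_FINAL : public LDeferredCode {
//    public:
//     DeferredExample(LCodeGen* codegen, LExample* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() V8_OVERRIDE {
//       codegen()->DoDeferredExample(instr_);
//     }
//     virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
//    private:
//     LExample* instr_;
//   };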


// This is the abstract class used by EmitBranchGeneric.
// It is used to emit code for conditional branching. The Emit() function
// emits code to branch when the condition holds, and EmitInverted() emits
// the branch when the inverted condition holds.
//
// For concrete examples, see the implementations in
// lithium-codegen-arm64.cc (e.g. BranchOnCondition, CompareAndBranch).
class BranchGenerator BASE_EMBEDDED {
 public:
  explicit BranchGenerator(LCodeGen* codegen)
    : codegen_(codegen) { }

  virtual ~BranchGenerator() { }

  virtual void Emit(Label* label) const = 0;
  virtual void EmitInverted(Label* label) const = 0;

 protected:
  MacroAssembler* masm() const { return codegen_->masm(); }

  LCodeGen* codegen_;
};
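
// A minimal sketch of a concrete generator, in the spirit of the
// BranchOnCondition implementation mentioned above (details assumed, not
// copied from the .cc file):
//
//   class BranchOnCondition : public BranchGenerator {
//    public:
//     BranchOnCondition(LCodeGen* codegen, Condition cond)
//         : BranchGenerator(codegen), cond_(cond) { }
//     virtual void Emit(Label* label) const {
//       __ B(cond_, label);
//     }
//     virtual void EmitInverted(Label* label) const {
//       if (cond_ != al) __ B(NegateCondition(cond_), label);
//     }
//    private:
//     Condition cond_;
//   };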

} }  // namespace v8::internal

#endif  // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_