// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_S390_LITHIUM_CODEGEN_S390_H_
#define V8_CRANKSHAFT_S390_LITHIUM_CODEGEN_S390_H_

#include "src/ast/scopes.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/crankshaft/s390/lithium-gap-resolver-s390.h"
#include "src/crankshaft/s390/lithium-s390.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen : public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LConstantOperand must be an Integer32 or Smi
  void EmitLoadIntegerConstant(LConstantOperand* const_op, Register dst);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  intptr_t ToRepresentation(LConstantOperand* op,
                            const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr, LOperand* value,
                             LOperand* temp1, LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, Register result,
                                   Register object, Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key, Register base,
                                 bool key_is_constant, bool key_is_tagged,
                                 int constant_key, int element_size_shift,
                                 int base_offset,
                                 bool keyMaybeNegative = true);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

// Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
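// For illustration only: DECLARE_DO expands each entry of
// LITHIUM_CONCRETE_INSTRUCTION_LIST into a declaration of the form
//   void DoBranch(LBranch* node);
//   void DoGoto(LGoto* node);
// (the instruction names shown here are just examples from the Lithium list).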

 private:
  Scope* scope() const { return scope_; }

  Register scratch0() { return kLithiumScratch; }
  DoubleRegister double_scratch0() { return kScratchDoubleReg; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true, Label* if_false,
                       Handle<String> class_name, Register input,
                       Register temporary, Register temporary2);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code, RelocInfo::Mode mode, LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code, RelocInfo::Mode mode,
                       LInstruction* instr, SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function, int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id, int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }
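
  // Illustrative call (the runtime id is just an example): the two-argument
  // overload takes the argument count from function->nargs, e.g.
  //   CallRuntime(Runtime::kDeclareGlobals, instr);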

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc,
                               LInstruction* instr, LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function.  Expects the function
  // to be in r4.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    DeoptimizeReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type, CRegister cr = cr7);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    DeoptimizeReason deopt_reason, CRegister cr = cr7);

  void AddToTranslation(LEnvironment* environment, Translation* translation,
                        LOperand* op, bool is_tagged, bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string, LOperand* index,
                                   String::Encoding encoding);

  void EmitMathAbs(LMathAbs* instr);
#if V8_TARGET_ARCH_S390X
  void EmitInteger32MathAbs(LMathAbs* instr);
#endif

  // Support for recording safepoint information.
  void RecordSafepoint(LPointerMap* pointers, Safepoint::Kind kind,
                       int arguments, Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers, int arguments,
                                    Safepoint::DeoptMode mode);

  static Condition TokenToCondition(Token::Value op);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template <class InstrType>
  void EmitBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition condition);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DoubleRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label, Label* false_label, Register input,
                         Handle<String> type_name);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input, Register temp1, Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object, Register result, Register source,
                    int* offset, AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen);

    ~PushSafepointRegistersScope();

   private:
    LCodeGen* codegen_;
  };
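
  // Typical shape of a use, sketched for illustration (see the deferred-code
  // helpers in lithium-codegen-s390.cc for the real call sites):
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr,
  //                             instr->context());
  //   }
  // While the scope is active, expected_safepoint_kind_ is
  // Safepoint::kWithRegisters.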

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};

class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
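
// A typical concrete subclass, sketched for illustration; the real ones are
// defined locally next to their Do* handlers in lithium-codegen-s390.cc:
//   class DeferredStackCheck final : public LDeferredCode {
//    public:
//     DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
//         : LDeferredCode(codegen), instr_(instr) {}
//     void Generate() override { codegen()->DoDeferredStackCheck(instr_); }
//     LInstruction* instr() override { return instr_; }
//
//    private:
//     LStackCheck* instr_;
//   };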
}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_S390_LITHIUM_CODEGEN_S390_H_