// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_
#define V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_

namespace v8 {
namespace internal {
namespace compiler {

// ARM64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
#define TARGET_ARCH_OPCODE_LIST(V) \
  V(Arm64Add)                      \
  V(Arm64Add32)                    \
  V(Arm64And)                      \
  V(Arm64And32)                    \
  V(Arm64Bic)                      \
  V(Arm64Bic32)                    \
  V(Arm64Clz)                      \
  V(Arm64Clz32)                    \
  V(Arm64Cmp)                      \
  V(Arm64Cmp32)                    \
  V(Arm64Cmn)                      \
  V(Arm64Cmn32)                    \
  V(Arm64Tst)                      \
  V(Arm64Tst32)                    \
  V(Arm64Or)                       \
  V(Arm64Or32)                     \
  V(Arm64Orn)                      \
  V(Arm64Orn32)                    \
  V(Arm64Eor)                      \
  V(Arm64Eor32)                    \
  V(Arm64Eon)                      \
  V(Arm64Eon32)                    \
  V(Arm64Sub)                      \
  V(Arm64Sub32)                    \
  V(Arm64Mul)                      \
  V(Arm64Mul32)                    \
  V(Arm64Smull)                    \
  V(Arm64Umull)                    \
  V(Arm64Madd)                     \
  V(Arm64Madd32)                   \
  V(Arm64Msub)                     \
  V(Arm64Msub32)                   \
  V(Arm64Mneg)                     \
  V(Arm64Mneg32)                   \
  V(Arm64Idiv)                     \
  V(Arm64Idiv32)                   \
  V(Arm64Udiv)                     \
  V(Arm64Udiv32)                   \
  V(Arm64Imod)                     \
  V(Arm64Imod32)                   \
  V(Arm64Umod)                     \
  V(Arm64Umod32)                   \
  V(Arm64Not)                      \
  V(Arm64Not32)                    \
  V(Arm64Lsl)                      \
  V(Arm64Lsl32)                    \
  V(Arm64Lsr)                      \
  V(Arm64Lsr32)                    \
  V(Arm64Asr)                      \
  V(Arm64Asr32)                    \
  V(Arm64Ror)                      \
  V(Arm64Ror32)                    \
  V(Arm64Mov32)                    \
  V(Arm64Sxtb32)                   \
  V(Arm64Sxth32)                   \
  V(Arm64Sxtw)                     \
  V(Arm64Sbfx32)                   \
  V(Arm64Ubfx)                     \
  V(Arm64Ubfx32)                   \
  V(Arm64Ubfiz32)                  \
  V(Arm64Bfi)                      \
  V(Arm64Rbit)                     \
  V(Arm64Rbit32)                   \
  V(Arm64TestAndBranch32)          \
  V(Arm64TestAndBranch)            \
  V(Arm64CompareAndBranch32)       \
  V(Arm64CompareAndBranch)         \
  V(Arm64ClaimCSP)                 \
  V(Arm64ClaimJSSP)                \
  V(Arm64PokeCSP)                  \
  V(Arm64PokeJSSP)                 \
  V(Arm64PokePair)                 \
  V(Arm64Float32Cmp)               \
  V(Arm64Float32Add)               \
  V(Arm64Float32Sub)               \
  V(Arm64Float32Mul)               \
  V(Arm64Float32Div)               \
  V(Arm64Float32Max)               \
  V(Arm64Float32Min)               \
  V(Arm64Float32Abs)               \
  V(Arm64Float32Neg)               \
  V(Arm64Float32Sqrt)              \
  V(Arm64Float32RoundDown)         \
  V(Arm64Float64Cmp)               \
  V(Arm64Float64Add)               \
  V(Arm64Float64Sub)               \
  V(Arm64Float64Mul)               \
  V(Arm64Float64Div)               \
  V(Arm64Float64Mod)               \
  V(Arm64Float64Max)               \
  V(Arm64Float64Min)               \
  V(Arm64Float64Abs)               \
  V(Arm64Float64Neg)               \
  V(Arm64Float64Sqrt)              \
  V(Arm64Float64RoundDown)         \
  V(Arm64Float32RoundUp)           \
  V(Arm64Float64RoundUp)           \
  V(Arm64Float64RoundTiesAway)     \
  V(Arm64Float32RoundTruncate)     \
  V(Arm64Float64RoundTruncate)     \
  V(Arm64Float32RoundTiesEven)     \
  V(Arm64Float64RoundTiesEven)     \
  V(Arm64Float64SilenceNaN)        \
  V(Arm64Float32ToFloat64)         \
  V(Arm64Float64ToFloat32)         \
  V(Arm64Float32ToInt32)           \
  V(Arm64Float64ToInt32)           \
  V(Arm64Float32ToUint32)          \
  V(Arm64Float64ToUint32)          \
  V(Arm64Float32ToInt64)           \
  V(Arm64Float64ToInt64)           \
  V(Arm64Float32ToUint64)          \
  V(Arm64Float64ToUint64)          \
  V(Arm64Int32ToFloat32)           \
  V(Arm64Int32ToFloat64)           \
  V(Arm64Int64ToFloat32)           \
  V(Arm64Int64ToFloat64)           \
  V(Arm64Uint32ToFloat32)          \
  V(Arm64Uint32ToFloat64)          \
  V(Arm64Uint64ToFloat32)          \
  V(Arm64Uint64ToFloat64)          \
  V(Arm64Float64ExtractLowWord32)  \
  V(Arm64Float64ExtractHighWord32) \
  V(Arm64Float64InsertLowWord32)   \
  V(Arm64Float64InsertHighWord32)  \
  V(Arm64Float64MoveU64)           \
  V(Arm64U64MoveFloat64)           \
  V(Arm64LdrS)                     \
  V(Arm64StrS)                     \
  V(Arm64LdrD)                     \
  V(Arm64StrD)                     \
  V(Arm64Ldrb)                     \
  V(Arm64Ldrsb)                    \
  V(Arm64Strb)                     \
  V(Arm64Ldrh)                     \
  V(Arm64Ldrsh)                    \
  V(Arm64Strh)                     \
  V(Arm64LdrW)                     \
  V(Arm64StrW)                     \
  V(Arm64Ldr)                      \
  V(Arm64Str)

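// For illustration (a sketch of the conventional X-macro expansion, not
// quoted from this header): a consumer supplies the V parameter to stamp
// out one item per opcode, along these lines:
//
//   #define DECLARE_ARCH_OPCODE(Name) k##Name,
//   enum ArchOpcode { TARGET_ARCH_OPCODE_LIST(DECLARE_ARCH_OPCODE) };
//   #undef DECLARE_ARCH_OPCODE
//
// Keeping the opcodes in a single macro lets the enum and any related
// helpers (printing, dispatch) stay in sync from one source of truth.
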
// Addressing modes represent the "shape" of inputs to an instruction.
// Many instructions support multiple addressing modes. Addressing modes
// are encoded into the InstructionCode of the instruction and tell the
// code generator after register allocation which assembler method to call.
//
// We use the following local notation for addressing modes:
//
// R = register
// O = register or stack slot
// D = double register
// I = immediate (handle, external, int32)
// MRI = [register + immediate]
// MRR = [register + register]
#define TARGET_ADDRESSING_MODE_LIST(V)                          \
  V(MRI)              /* [%r0 + K] */                           \
  V(MRR)              /* [%r0 + %r1] */                         \
  V(Operand2_R_LSL_I) /* %r0 LSL K */                           \
  V(Operand2_R_LSR_I) /* %r0 LSR K */                           \
  V(Operand2_R_ASR_I) /* %r0 ASR K */                           \
  V(Operand2_R_ROR_I) /* %r0 ROR K */                           \
  V(Operand2_R_UXTB)  /* %r0 UXTB (unsigned extend byte) */     \
  V(Operand2_R_UXTH)  /* %r0 UXTH (unsigned extend halfword) */ \
  V(Operand2_R_SXTB)  /* %r0 SXTB (signed extend byte) */       \
  V(Operand2_R_SXTH)  /* %r0 SXTH (signed extend halfword) */
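
// For illustration (a sketch; conventional expansion, not quoted from this
// header): the addressing-mode list is consumed the same way, for example
// to declare one AddressingMode enumerator per entry, typically alongside
// a "none" default:
//
//   #define DECLARE_ADDRESSING_MODE(Name) kMode_##Name,
//   enum AddressingMode {
//     kMode_None,
//     TARGET_ADDRESSING_MODE_LIST(DECLARE_ADDRESSING_MODE)
//   };
//   #undef DECLARE_ADDRESSING_MODE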
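
// Whether the JS stack pointer (jssp) needs to be restored after a call
// (inferred from the enumerator names; the authoritative use is in the
// arm64 instruction selector and code generator).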
enum ResetJSSPAfterCall { kNoResetJSSP, kResetJSSP };

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_ARM64_INSTRUCTION_CODES_ARM64_H_