// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_
#define V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_

namespace v8 {
namespace internal {
namespace compiler {

// ARM-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
#define TARGET_ARCH_OPCODE_LIST(V) \
  V(ArmAdd) \
  V(ArmAnd) \
  V(ArmBic) \
  V(ArmClz) \
  V(ArmCmp) \
  V(ArmCmn) \
  V(ArmTst) \
  V(ArmTeq) \
  V(ArmOrr) \
  V(ArmEor) \
  V(ArmSub) \
  V(ArmRsb) \
  V(ArmMul) \
  V(ArmMla) \
  V(ArmMls) \
  V(ArmSmull) \
  V(ArmSmmul) \
  V(ArmSmmla) \
  V(ArmUmull) \
  V(ArmSdiv) \
  V(ArmUdiv) \
  V(ArmMov) \
  V(ArmMvn) \
  V(ArmBfc) \
  V(ArmUbfx) \
  V(ArmSbfx) \
  V(ArmSxtb) \
  V(ArmSxth) \
  V(ArmSxtab) \
  V(ArmSxtah) \
  V(ArmUxtb) \
  V(ArmUxth) \
  V(ArmUxtab) \
  V(ArmRbit) \
  V(ArmRev) \
  V(ArmUxtah) \
  V(ArmAddPair) \
  V(ArmSubPair) \
  V(ArmMulPair) \
  V(ArmLslPair) \
  V(ArmLsrPair) \
  V(ArmAsrPair) \
  V(ArmVcmpF32) \
  V(ArmVaddF32) \
  V(ArmVsubF32) \
  V(ArmVmulF32) \
  V(ArmVmlaF32) \
  V(ArmVmlsF32) \
  V(ArmVdivF32) \
  V(ArmVabsF32) \
  V(ArmVnegF32) \
  V(ArmVsqrtF32) \
  V(ArmVcmpF64) \
  V(ArmVaddF64) \
  V(ArmVsubF64) \
  V(ArmVmulF64) \
  V(ArmVmlaF64) \
  V(ArmVmlsF64) \
  V(ArmVdivF64) \
  V(ArmVmodF64) \
  V(ArmVabsF64) \
  V(ArmVnegF64) \
  V(ArmVsqrtF64) \
  V(ArmVrintmF32) \
  V(ArmVrintmF64) \
  V(ArmVrintpF32) \
  V(ArmVrintpF64) \
  V(ArmVrintzF32) \
  V(ArmVrintzF64) \
  V(ArmVrintaF64) \
  V(ArmVrintnF32) \
  V(ArmVrintnF64) \
  V(ArmVcvtF32F64) \
  V(ArmVcvtF64F32) \
  V(ArmVcvtF32S32) \
  V(ArmVcvtF32U32) \
  V(ArmVcvtF64S32) \
  V(ArmVcvtF64U32) \
  V(ArmVcvtS32F32) \
  V(ArmVcvtU32F32) \
  V(ArmVcvtS32F64) \
  V(ArmVcvtU32F64) \
  V(ArmVmovU32F32) \
  V(ArmVmovF32U32) \
  V(ArmVmovLowU32F64) \
  V(ArmVmovLowF64U32) \
  V(ArmVmovHighU32F64) \
  V(ArmVmovHighF64U32) \
  V(ArmVmovF64U32U32) \
  V(ArmVmovU32U32F64) \
  V(ArmVldrF32) \
  V(ArmVstrF32) \
  V(ArmVldrF64) \
  V(ArmVld1F64) \
  V(ArmVstrF64) \
  V(ArmVst1F64) \
  V(ArmVld1S128) \
  V(ArmVst1S128) \
  V(ArmFloat32Max) \
  V(ArmFloat64Max) \
  V(ArmFloat32Min) \
  V(ArmFloat64Min) \
  V(ArmFloat64SilenceNaN) \
  V(ArmLdrb) \
  V(ArmLdrsb) \
  V(ArmStrb) \
  V(ArmLdrh) \
  V(ArmLdrsh) \
  V(ArmStrh) \
  V(ArmLdr) \
  V(ArmStr) \
  V(ArmPush) \
  V(ArmPoke) \
  V(ArmPeek) \
  V(ArmDsbIsb) \
  V(ArmF32x4Splat) \
  V(ArmF32x4ExtractLane) \
  V(ArmF32x4ReplaceLane) \
  V(ArmF32x4SConvertI32x4) \
  V(ArmF32x4UConvertI32x4) \
  V(ArmF32x4Abs) \
  V(ArmF32x4Neg) \
  V(ArmF32x4RecipApprox) \
  V(ArmF32x4RecipSqrtApprox) \
  V(ArmF32x4Add) \
  V(ArmF32x4AddHoriz) \
  V(ArmF32x4Sub) \
  V(ArmF32x4Mul) \
  V(ArmF32x4Min) \
  V(ArmF32x4Max) \
  V(ArmF32x4Eq) \
  V(ArmF32x4Ne) \
  V(ArmF32x4Lt) \
  V(ArmF32x4Le) \
  V(ArmI32x4Splat) \
  V(ArmI32x4ExtractLane) \
  V(ArmI32x4ReplaceLane) \
  V(ArmI32x4SConvertF32x4) \
  V(ArmI32x4SConvertI16x8Low) \
  V(ArmI32x4SConvertI16x8High) \
  V(ArmI32x4Neg) \
  V(ArmI32x4Shl) \
  V(ArmI32x4ShrS) \
  V(ArmI32x4Add) \
  V(ArmI32x4AddHoriz) \
  V(ArmI32x4Sub) \
  V(ArmI32x4Mul) \
  V(ArmI32x4MinS) \
  V(ArmI32x4MaxS) \
  V(ArmI32x4Eq) \
  V(ArmI32x4Ne) \
  V(ArmI32x4GtS) \
  V(ArmI32x4GeS) \
  V(ArmI32x4UConvertF32x4) \
  V(ArmI32x4UConvertI16x8Low) \
  V(ArmI32x4UConvertI16x8High) \
  V(ArmI32x4ShrU) \
  V(ArmI32x4MinU) \
  V(ArmI32x4MaxU) \
  V(ArmI32x4GtU) \
  V(ArmI32x4GeU) \
  V(ArmI16x8Splat) \
  V(ArmI16x8ExtractLane) \
  V(ArmI16x8ReplaceLane) \
  V(ArmI16x8SConvertI8x16Low) \
  V(ArmI16x8SConvertI8x16High) \
  V(ArmI16x8Neg) \
  V(ArmI16x8Shl) \
  V(ArmI16x8ShrS) \
  V(ArmI16x8SConvertI32x4) \
  V(ArmI16x8Add) \
  V(ArmI16x8AddSaturateS) \
  V(ArmI16x8AddHoriz) \
  V(ArmI16x8Sub) \
  V(ArmI16x8SubSaturateS) \
  V(ArmI16x8Mul) \
  V(ArmI16x8MinS) \
  V(ArmI16x8MaxS) \
  V(ArmI16x8Eq) \
  V(ArmI16x8Ne) \
  V(ArmI16x8GtS) \
  V(ArmI16x8GeS) \
  V(ArmI16x8UConvertI8x16Low) \
  V(ArmI16x8UConvertI8x16High) \
  V(ArmI16x8ShrU) \
  V(ArmI16x8UConvertI32x4) \
  V(ArmI16x8AddSaturateU) \
  V(ArmI16x8SubSaturateU) \
  V(ArmI16x8MinU) \
  V(ArmI16x8MaxU) \
  V(ArmI16x8GtU) \
  V(ArmI16x8GeU) \
  V(ArmI8x16Splat) \
  V(ArmI8x16ExtractLane) \
  V(ArmI8x16ReplaceLane) \
  V(ArmI8x16Neg) \
  V(ArmI8x16Shl) \
  V(ArmI8x16ShrS) \
  V(ArmI8x16SConvertI16x8) \
  V(ArmI8x16Add) \
  V(ArmI8x16AddSaturateS) \
  V(ArmI8x16Sub) \
  V(ArmI8x16SubSaturateS) \
  V(ArmI8x16Mul) \
  V(ArmI8x16MinS) \
  V(ArmI8x16MaxS) \
  V(ArmI8x16Eq) \
  V(ArmI8x16Ne) \
  V(ArmI8x16GtS) \
  V(ArmI8x16GeS) \
  V(ArmI8x16ShrU) \
  V(ArmI8x16UConvertI16x8) \
  V(ArmI8x16AddSaturateU) \
  V(ArmI8x16SubSaturateU) \
  V(ArmI8x16MinU) \
  V(ArmI8x16MaxU) \
  V(ArmI8x16GtU) \
  V(ArmI8x16GeU) \
  V(ArmS128Zero) \
  V(ArmS128Dup) \
  V(ArmS128And) \
  V(ArmS128Or) \
  V(ArmS128Xor) \
  V(ArmS128Not) \
  V(ArmS128Select) \
  V(ArmS32x4ZipLeft) \
  V(ArmS32x4ZipRight) \
  V(ArmS32x4UnzipLeft) \
  V(ArmS32x4UnzipRight) \
  V(ArmS32x4TransposeLeft) \
  V(ArmS32x4TransposeRight) \
  V(ArmS32x4Shuffle) \
  V(ArmS16x8ZipLeft) \
  V(ArmS16x8ZipRight) \
  V(ArmS16x8UnzipLeft) \
  V(ArmS16x8UnzipRight) \
  V(ArmS16x8TransposeLeft) \
  V(ArmS16x8TransposeRight) \
  V(ArmS8x16ZipLeft) \
  V(ArmS8x16ZipRight) \
  V(ArmS8x16UnzipLeft) \
  V(ArmS8x16UnzipRight) \
  V(ArmS8x16TransposeLeft) \
  V(ArmS8x16TransposeRight) \
  V(ArmS8x16Concat) \
  V(ArmS8x16Shuffle) \
  V(ArmS32x2Reverse) \
  V(ArmS16x4Reverse) \
  V(ArmS16x2Reverse) \
  V(ArmS8x8Reverse) \
  V(ArmS8x4Reverse) \
  V(ArmS8x2Reverse) \
  V(ArmS1x4AnyTrue) \
  V(ArmS1x4AllTrue) \
  V(ArmS1x8AnyTrue) \
  V(ArmS1x8AllTrue) \
  V(ArmS1x16AnyTrue) \
  V(ArmS1x16AllTrue) \
  V(ArmWord32AtomicPairLoad) \
  V(ArmWord32AtomicPairStore) \
  V(ArmWord32AtomicPairAdd) \
  V(ArmWord32AtomicPairSub) \
  V(ArmWord32AtomicPairAnd) \
  V(ArmWord32AtomicPairOr) \
  V(ArmWord32AtomicPairXor) \
  V(ArmWord32AtomicPairExchange) \
  V(ArmWord32AtomicPairCompareExchange) \
  V(ArmWord64AtomicNarrowAddUint8) \
  V(ArmWord64AtomicNarrowAddUint16) \
  V(ArmWord64AtomicNarrowAddUint32) \
  V(ArmWord64AtomicNarrowSubUint8) \
  V(ArmWord64AtomicNarrowSubUint16) \
  V(ArmWord64AtomicNarrowSubUint32) \
  V(ArmWord64AtomicNarrowAndUint8) \
  V(ArmWord64AtomicNarrowAndUint16) \
  V(ArmWord64AtomicNarrowAndUint32) \
  V(ArmWord64AtomicNarrowOrUint8) \
  V(ArmWord64AtomicNarrowOrUint16) \
  V(ArmWord64AtomicNarrowOrUint32) \
  V(ArmWord64AtomicNarrowXorUint8) \
  V(ArmWord64AtomicNarrowXorUint16) \
  V(ArmWord64AtomicNarrowXorUint32) \
  V(ArmWord64AtomicNarrowExchangeUint8) \
  V(ArmWord64AtomicNarrowExchangeUint16) \
  V(ArmWord64AtomicNarrowExchangeUint32) \
  V(ArmWord64AtomicNarrowCompareExchangeUint8) \
  V(ArmWord64AtomicNarrowCompareExchangeUint16) \
  V(ArmWord64AtomicNarrowCompareExchangeUint32)

// Addressing modes represent the "shape" of inputs to an instruction.
// Many instructions support multiple addressing modes. Addressing modes
// are encoded into the InstructionCode of the instruction and tell the
// code generator after register allocation which assembler method to call.
#define TARGET_ADDRESSING_MODE_LIST(V) \
  V(Offset_RI) /* [%r0 + K] */ \
  V(Offset_RR) /* [%r0 + %r1] */ \
  V(Operand2_I) /* K */ \
  V(Operand2_R) /* %r0 */ \
  V(Operand2_R_ASR_I) /* %r0 ASR K */ \
  V(Operand2_R_LSL_I) /* %r0 LSL K */ \
  V(Operand2_R_LSR_I) /* %r0 LSR K */ \
  V(Operand2_R_ROR_I) /* %r0 ROR K */ \
  V(Operand2_R_ASR_R) /* %r0 ASR %r1 */ \
  V(Operand2_R_LSL_R) /* %r0 LSL %r1 */ \
  V(Operand2_R_LSR_R) /* %r0 LSR %r1 */ \
  V(Operand2_R_ROR_R) /* %r0 ROR %r1 */

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_ARM_INSTRUCTION_CODES_ARM_H_
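
// A minimal usage sketch, kept in comment form so it does not affect
// compilation. It shows the usual X-macro expansion of a list such as
// TARGET_ARCH_OPCODE_LIST into an enum with one enumerator per entry; the
// macro name DECLARE_ARCH_OPCODE below is illustrative and is not necessarily
// the exact name used by the shared instruction-codes header:
//
//   #define DECLARE_ARCH_OPCODE(Name) k##Name,
//   enum ArchOpcode {
//     TARGET_ARCH_OPCODE_LIST(DECLARE_ARCH_OPCODE)
//   };
//   #undef DECLARE_ARCH_OPCODE
//
// TARGET_ADDRESSING_MODE_LIST is consumed the same way to build an
// AddressingMode enum; as noted above, the chosen addressing mode is encoded
// into the instruction's InstructionCode (typically as a bit field), and the
// code generator decodes it after register allocation to pick the assembler
// method to call.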