
/*---------------------------------------------------------------*/
/*--- begin                                 host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2013-2017 OpenWorks
      info (at) open-works.net

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.

   The GNU General Public License is contained in the file COPYING.
*/

#ifndef __VEX_HOST_ARM64_DEFS_H
#define __VEX_HOST_ARM64_DEFS_H

#include "libvex_basictypes.h"
#include "libvex.h"                      // VexArch
#include "host_generic_regs.h"           // HReg


/* --------- Registers. --------- */

#define ST_IN static inline
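/* Editorial note, based on the mkHReg interface in
   host_generic_regs.h: the arguments below are, in order, a
   virtual-register flag (False throughout, since these are all real
   registers), the register class, the hardware encoding, and the
   register's index in the real-register universe.  The indices must
   be unique and dense, and their order appears to determine the
   allocator's preference order. */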
ST_IN HReg hregARM64_X22 ( void ) { return mkHReg(False, HRcInt64,  22,  0); }
ST_IN HReg hregARM64_X23 ( void ) { return mkHReg(False, HRcInt64,  23,  1); }
ST_IN HReg hregARM64_X24 ( void ) { return mkHReg(False, HRcInt64,  24,  2); }
ST_IN HReg hregARM64_X25 ( void ) { return mkHReg(False, HRcInt64,  25,  3); }
ST_IN HReg hregARM64_X26 ( void ) { return mkHReg(False, HRcInt64,  26,  4); }
ST_IN HReg hregARM64_X27 ( void ) { return mkHReg(False, HRcInt64,  27,  5); }
ST_IN HReg hregARM64_X28 ( void ) { return mkHReg(False, HRcInt64,  28,  6); }

ST_IN HReg hregARM64_X0  ( void ) { return mkHReg(False, HRcInt64,  0,   7); }
ST_IN HReg hregARM64_X1  ( void ) { return mkHReg(False, HRcInt64,  1,   8); }
ST_IN HReg hregARM64_X2  ( void ) { return mkHReg(False, HRcInt64,  2,   9); }
ST_IN HReg hregARM64_X3  ( void ) { return mkHReg(False, HRcInt64,  3,  10); }
ST_IN HReg hregARM64_X4  ( void ) { return mkHReg(False, HRcInt64,  4,  11); }
ST_IN HReg hregARM64_X5  ( void ) { return mkHReg(False, HRcInt64,  5,  12); }
ST_IN HReg hregARM64_X6  ( void ) { return mkHReg(False, HRcInt64,  6,  13); }
ST_IN HReg hregARM64_X7  ( void ) { return mkHReg(False, HRcInt64,  7,  14); }

ST_IN HReg hregARM64_Q16 ( void ) { return mkHReg(False, HRcVec128, 16, 15); }
ST_IN HReg hregARM64_Q17 ( void ) { return mkHReg(False, HRcVec128, 17, 16); }
ST_IN HReg hregARM64_Q18 ( void ) { return mkHReg(False, HRcVec128, 18, 17); }
ST_IN HReg hregARM64_Q19 ( void ) { return mkHReg(False, HRcVec128, 19, 18); }
ST_IN HReg hregARM64_Q20 ( void ) { return mkHReg(False, HRcVec128, 20, 19); }

ST_IN HReg hregARM64_D8  ( void ) { return mkHReg(False, HRcFlt64,  8,  20); }
ST_IN HReg hregARM64_D9  ( void ) { return mkHReg(False, HRcFlt64,  9,  21); }
ST_IN HReg hregARM64_D10 ( void ) { return mkHReg(False, HRcFlt64,  10, 22); }
ST_IN HReg hregARM64_D11 ( void ) { return mkHReg(False, HRcFlt64,  11, 23); }
ST_IN HReg hregARM64_D12 ( void ) { return mkHReg(False, HRcFlt64,  12, 24); }
ST_IN HReg hregARM64_D13 ( void ) { return mkHReg(False, HRcFlt64,  13, 25); }

ST_IN HReg hregARM64_X8  ( void ) { return mkHReg(False, HRcInt64,  8,  26); }
ST_IN HReg hregARM64_X9  ( void ) { return mkHReg(False, HRcInt64,  9,  27); }
ST_IN HReg hregARM64_X21 ( void ) { return mkHReg(False, HRcInt64, 21,  28); }
#undef ST_IN

extern void ppHRegARM64 ( HReg );

/* Number of registers used for arg passing in function calls */
#define ARM64_N_ARGREGS 8   /* x0 .. x7 */


/* --------- Condition codes. --------- */

typedef
   enum {
      ARM64cc_EQ  = 0,  /* equal                         : Z=1 */
      ARM64cc_NE  = 1,  /* not equal                     : Z=0 */

      ARM64cc_CS  = 2,  /* >=u (higher or same)          : C=1 */
      ARM64cc_CC  = 3,  /* <u  (lower)                   : C=0 */

      ARM64cc_MI  = 4,  /* minus (negative)              : N=1 */
      ARM64cc_PL  = 5,  /* plus (zero or +ve)            : N=0 */

      ARM64cc_VS  = 6,  /* overflow                      : V=1 */
      ARM64cc_VC  = 7,  /* no overflow                   : V=0 */

      ARM64cc_HI  = 8,  /* >u   (higher)                 :   C=1 && Z=0 */
      ARM64cc_LS  = 9,  /* <=u  (lower or same)          : !(C=1 && Z=0) */

      ARM64cc_GE  = 10, /* >=s (signed greater or equal) :   N=V */
      ARM64cc_LT  = 11, /* <s  (signed less than)        : !(N=V) */

      ARM64cc_GT  = 12, /* >s  (signed greater)          :   Z=0 && N=V */
      ARM64cc_LE  = 13, /* <=s (signed less or equal)    : !(Z=0 && N=V) */

      ARM64cc_AL  = 14, /* always (unconditional) */
      ARM64cc_NV  = 15  /* in 64-bit mode also means "always" */
   }
   ARM64CondCode;
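
/* For instance (an illustrative reading of the table above): after a
   64-bit "cmp x1, x2", the unsigned test "x1 >u x2" corresponds to
   ARM64cc_HI, and the signed test "x1 <=s x2" to ARM64cc_LE. */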


/* --------- Memory address expressions (amodes). --------- */

typedef
   enum {
      ARM64am_RI9=10, /* reg + simm9 */
      ARM64am_RI12,   /* reg + uimm12 * szB (iow, scaled by access size) */
      ARM64am_RR      /* reg1 + reg2 */
   }
   ARM64AModeTag;

typedef
   struct {
      ARM64AModeTag tag;
      union {
         struct {
            HReg reg;
            Int  simm9; /* -256 .. +255 */
         } RI9;
         struct {
            HReg  reg;
            UInt  uimm12; /* 0 .. 4095 */
            UChar szB;    /* 1, 2, 4, 8 (16 ?) */
         } RI12;
         struct {
            HReg base;
            HReg index;
         } RR;
      } ARM64am;
   }
   ARM64AMode;

extern ARM64AMode* ARM64AMode_RI9  ( HReg reg, Int simm9 );
extern ARM64AMode* ARM64AMode_RI12 ( HReg reg, Int uimm12, UChar szB );
extern ARM64AMode* ARM64AMode_RR   ( HReg base, HReg index );
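
/* A usage sketch (illustrative only): because RI12's uimm12 is scaled
   by the access size, the amode [x21, #128] for an 8-byte access
   would be built as

      ARM64AMode* am = ARM64AMode_RI12(hregARM64_X21(), 16, 8);

   since 16 * 8 == 128. */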


/* --------- Reg or uimm12 or (uimm12 << 12) operands --------- */

typedef
   enum {
      ARM64riA_I12=20, /* uimm12 << 0 or 12 only */
      ARM64riA_R       /* reg */
   }
   ARM64RIATag;

typedef
   struct {
      ARM64RIATag tag;
      union {
         struct {
            UShort imm12;  /* 0 .. 4095 */
            UChar  shift;  /* 0 or 12 only */
         } I12;
         struct {
            HReg reg;
         } R;
      } ARM64riA;
   }
   ARM64RIA;

extern ARM64RIA* ARM64RIA_I12 ( UShort imm12, UChar shift );
extern ARM64RIA* ARM64RIA_R   ( HReg );
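
/* For example (again illustrative): the constant 0x5000 does not fit
   in a plain uimm12, but is representable with the 12-bit shift, as
   ARM64RIA_I12(5, 12), since 5 << 12 == 0x5000. */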


/* --------- Reg or "bitfield" (logic immediate) operands --------- */

typedef
   enum {
      ARM64riL_I13=6, /* weird-o bitfield immediate, 13 bits in total */
      ARM64riL_R      /* reg */
   }
   ARM64RILTag;

typedef
   struct {
      ARM64RILTag tag;
      union {
         struct {
            UChar bitN; /* 0 .. 1 */
            UChar immR; /* 0 .. 63 */
            UChar immS; /* 0 .. 63 */
         } I13;
         struct {
            HReg reg;
         } R;
      } ARM64riL;
   }
   ARM64RIL;

extern ARM64RIL* ARM64RIL_I13 ( UChar bitN, UChar immR, UChar immS );
extern ARM64RIL* ARM64RIL_R   ( HReg );
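
/* A worked example (editorial; this follows the standard A64
   logical-immediate scheme, so treat it as a sketch): the 64-bit
   constant 0xFF is a run of 8 ones with no rotation, which encodes
   as bitN=1, immR=0, immS=7, i.e. ARM64RIL_I13(1, 0, 7). */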


/* --------------- Reg or uimm6 operands --------------- */

typedef
   enum {
      ARM64ri6_I6=30, /* uimm6, 1 .. 63 only */
      ARM64ri6_R      /* reg */
   }
   ARM64RI6Tag;

typedef
   struct {
      ARM64RI6Tag tag;
      union {
         struct {
            UInt imm6;   /* 1 .. 63 */
         } I6;
         struct {
            HReg reg;
         } R;
      } ARM64ri6;
   }
   ARM64RI6;

extern ARM64RI6* ARM64RI6_I6 ( UInt imm6 );
extern ARM64RI6* ARM64RI6_R  ( HReg );


/* --------------------- Instructions --------------------- */

typedef
   enum {
      ARM64lo_AND=40,
      ARM64lo_OR,
      ARM64lo_XOR
   }
   ARM64LogicOp;

typedef
   enum {
      ARM64sh_SHL=50,
      ARM64sh_SHR,
      ARM64sh_SAR
   }
   ARM64ShiftOp;

typedef
   enum {
      ARM64un_NEG=60,
      ARM64un_NOT,
      ARM64un_CLZ
   }
   ARM64UnaryOp;

typedef
   enum {
      ARM64mul_PLAIN=70, /* lo64(64 * 64)  */
      ARM64mul_ZX,       /* hi64(64 *u 64) */
      ARM64mul_SX        /* hi64(64 *s 64) */
   }
   ARM64MulOp;

typedef
   /* These characterise an integer-FP conversion, but don't imply any
      particular direction. */
   enum {
      ARM64cvt_F32_I32S=80,
      ARM64cvt_F64_I32S,
      ARM64cvt_F32_I64S,
      ARM64cvt_F64_I64S,
      ARM64cvt_F32_I32U,
      ARM64cvt_F64_I32U,
      ARM64cvt_F32_I64U,
      ARM64cvt_F64_I64U,
      ARM64cvt_INVALID
   }
   ARM64CvtOp;
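
/* For instance (an editorial reading): ARM64cvt_F64_I32S used with
   VCvtI2F below denotes an "scvtf Dd, Wn" style conversion (signed
   32-bit int to double), while the same op used with VCvtF2I denotes
   "fcvt{n,p,m,z}s Wd, Dn" (double to signed 32-bit int, with the
   rounding mode selected by armRM). */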

typedef
   enum {
      ARM64fpb_ADD=100,
      ARM64fpb_SUB,
      ARM64fpb_MUL,
      ARM64fpb_DIV,
      ARM64fpb_INVALID
   }
   ARM64FpBinOp;

typedef
   enum {
      ARM64fpu_NEG=110,
      ARM64fpu_ABS,
      ARM64fpu_SQRT,
      ARM64fpu_RINT,
      ARM64fpu_RECPX,
      ARM64fpu_INVALID
   }
   ARM64FpUnaryOp;

typedef
   enum {
      ARM64vecb_ADD64x2=120, ARM64vecb_ADD32x4,
      ARM64vecb_ADD16x8,     ARM64vecb_ADD8x16,
      ARM64vecb_SUB64x2,     ARM64vecb_SUB32x4,
      ARM64vecb_SUB16x8,     ARM64vecb_SUB8x16,
                             ARM64vecb_MUL32x4,
      ARM64vecb_MUL16x8,     ARM64vecb_MUL8x16,
      ARM64vecb_FADD64x2,    ARM64vecb_FADD32x4,
      ARM64vecb_FSUB64x2,    ARM64vecb_FSUB32x4,
      ARM64vecb_FMUL64x2,    ARM64vecb_FMUL32x4,
      ARM64vecb_FDIV64x2,    ARM64vecb_FDIV32x4,
      ARM64vecb_FMAX64x2,    ARM64vecb_FMAX32x4,
      ARM64vecb_FMIN64x2,    ARM64vecb_FMIN32x4,
                             ARM64vecb_UMAX32x4,
      ARM64vecb_UMAX16x8,    ARM64vecb_UMAX8x16,
                             ARM64vecb_UMIN32x4,
      ARM64vecb_UMIN16x8,    ARM64vecb_UMIN8x16,
                             ARM64vecb_SMAX32x4,
      ARM64vecb_SMAX16x8,    ARM64vecb_SMAX8x16,
                             ARM64vecb_SMIN32x4,
      ARM64vecb_SMIN16x8,    ARM64vecb_SMIN8x16,
      ARM64vecb_AND,
      ARM64vecb_ORR,
      ARM64vecb_XOR,
      ARM64vecb_CMEQ64x2,    ARM64vecb_CMEQ32x4,
      ARM64vecb_CMEQ16x8,    ARM64vecb_CMEQ8x16,
      ARM64vecb_CMHI64x2,    ARM64vecb_CMHI32x4, /* >u */
      ARM64vecb_CMHI16x8,    ARM64vecb_CMHI8x16,
      ARM64vecb_CMGT64x2,    ARM64vecb_CMGT32x4, /* >s */
      ARM64vecb_CMGT16x8,    ARM64vecb_CMGT8x16,
      ARM64vecb_FCMEQ64x2,   ARM64vecb_FCMEQ32x4,
      ARM64vecb_FCMGE64x2,   ARM64vecb_FCMGE32x4,
      ARM64vecb_FCMGT64x2,   ARM64vecb_FCMGT32x4,
      ARM64vecb_TBL1,
      ARM64vecb_UZP164x2,    ARM64vecb_UZP132x4,
      ARM64vecb_UZP116x8,    ARM64vecb_UZP18x16,
      ARM64vecb_UZP264x2,    ARM64vecb_UZP232x4,
      ARM64vecb_UZP216x8,    ARM64vecb_UZP28x16,
      ARM64vecb_ZIP132x4,    ARM64vecb_ZIP116x8,
      ARM64vecb_ZIP18x16,    ARM64vecb_ZIP232x4,
      ARM64vecb_ZIP216x8,    ARM64vecb_ZIP28x16,
                             ARM64vecb_PMUL8x16,
                             ARM64vecb_PMULL8x8,
                             ARM64vecb_UMULL2DSS,
      ARM64vecb_UMULL4SHH,   ARM64vecb_UMULL8HBB,
                             ARM64vecb_SMULL2DSS,
      ARM64vecb_SMULL4SHH,   ARM64vecb_SMULL8HBB,
      ARM64vecb_SQADD64x2,   ARM64vecb_SQADD32x4,
      ARM64vecb_SQADD16x8,   ARM64vecb_SQADD8x16,
      ARM64vecb_UQADD64x2,   ARM64vecb_UQADD32x4,
      ARM64vecb_UQADD16x8,   ARM64vecb_UQADD8x16,
      ARM64vecb_SQSUB64x2,   ARM64vecb_SQSUB32x4,
      ARM64vecb_SQSUB16x8,   ARM64vecb_SQSUB8x16,
      ARM64vecb_UQSUB64x2,   ARM64vecb_UQSUB32x4,
      ARM64vecb_UQSUB16x8,   ARM64vecb_UQSUB8x16,
                             ARM64vecb_SQDMULL2DSS,
      ARM64vecb_SQDMULL4SHH,
                             ARM64vecb_SQDMULH32x4,
      ARM64vecb_SQDMULH16x8,
                             ARM64vecb_SQRDMULH32x4,
      ARM64vecb_SQRDMULH16x8,
      ARM64vecb_SQSHL64x2,   ARM64vecb_SQSHL32x4,
      ARM64vecb_SQSHL16x8,   ARM64vecb_SQSHL8x16,
      ARM64vecb_UQSHL64x2,   ARM64vecb_UQSHL32x4,
      ARM64vecb_UQSHL16x8,   ARM64vecb_UQSHL8x16,
      ARM64vecb_SQRSHL64x2,  ARM64vecb_SQRSHL32x4,
      ARM64vecb_SQRSHL16x8,  ARM64vecb_SQRSHL8x16,
      ARM64vecb_UQRSHL64x2,  ARM64vecb_UQRSHL32x4,
      ARM64vecb_UQRSHL16x8,  ARM64vecb_UQRSHL8x16,
      ARM64vecb_SSHL64x2,    ARM64vecb_SSHL32x4,
      ARM64vecb_SSHL16x8,    ARM64vecb_SSHL8x16,
      ARM64vecb_USHL64x2,    ARM64vecb_USHL32x4,
      ARM64vecb_USHL16x8,    ARM64vecb_USHL8x16,
      ARM64vecb_SRSHL64x2,   ARM64vecb_SRSHL32x4,
      ARM64vecb_SRSHL16x8,   ARM64vecb_SRSHL8x16,
      ARM64vecb_URSHL64x2,   ARM64vecb_URSHL32x4,
      ARM64vecb_URSHL16x8,   ARM64vecb_URSHL8x16,
      ARM64vecb_FRECPS64x2,  ARM64vecb_FRECPS32x4,
      ARM64vecb_FRSQRTS64x2, ARM64vecb_FRSQRTS32x4,
      ARM64vecb_INVALID
   }
   ARM64VecBinOp;

typedef
   enum {
      ARM64vecmo_SUQADD64x2=300, ARM64vecmo_SUQADD32x4,
      ARM64vecmo_SUQADD16x8,     ARM64vecmo_SUQADD8x16,
      ARM64vecmo_USQADD64x2,     ARM64vecmo_USQADD32x4,
      ARM64vecmo_USQADD16x8,     ARM64vecmo_USQADD8x16,
      ARM64vecmo_INVALID
   }
   ARM64VecModifyOp;

typedef
   enum {
      ARM64vecu_FNEG64x2=350, ARM64vecu_FNEG32x4,
      ARM64vecu_FABS64x2,     ARM64vecu_FABS32x4,
      ARM64vecu_NOT,
      ARM64vecu_ABS64x2,      ARM64vecu_ABS32x4,
      ARM64vecu_ABS16x8,      ARM64vecu_ABS8x16,
      ARM64vecu_CLS32x4,      ARM64vecu_CLS16x8,      ARM64vecu_CLS8x16,
      ARM64vecu_CLZ32x4,      ARM64vecu_CLZ16x8,      ARM64vecu_CLZ8x16,
      ARM64vecu_CNT8x16,
      ARM64vecu_RBIT,
      ARM64vecu_REV1616B,
      ARM64vecu_REV3216B,     ARM64vecu_REV328H,
      ARM64vecu_REV6416B,     ARM64vecu_REV648H,      ARM64vecu_REV644S,
      ARM64vecu_URECPE32x4,
      ARM64vecu_URSQRTE32x4,
      ARM64vecu_FRECPE64x2,   ARM64vecu_FRECPE32x4,
      ARM64vecu_FRSQRTE64x2,  ARM64vecu_FRSQRTE32x4,
      ARM64vecu_FSQRT64x2,    ARM64vecu_FSQRT32x4,
      ARM64vecu_INVALID
   }
   ARM64VecUnaryOp;

typedef
   enum {
      ARM64vecshi_USHR64x2=400, ARM64vecshi_USHR32x4,
      ARM64vecshi_USHR16x8,     ARM64vecshi_USHR8x16,
      ARM64vecshi_SSHR64x2,     ARM64vecshi_SSHR32x4,
      ARM64vecshi_SSHR16x8,     ARM64vecshi_SSHR8x16,
      ARM64vecshi_SHL64x2,      ARM64vecshi_SHL32x4,
      ARM64vecshi_SHL16x8,      ARM64vecshi_SHL8x16,
      /* These narrowing shifts zero out the top half of the destination
         register. */
      ARM64vecshi_SQSHRN2SD,    ARM64vecshi_SQSHRN4HS,   ARM64vecshi_SQSHRN8BH,
      ARM64vecshi_UQSHRN2SD,    ARM64vecshi_UQSHRN4HS,   ARM64vecshi_UQSHRN8BH,
      ARM64vecshi_SQSHRUN2SD,   ARM64vecshi_SQSHRUN4HS,  ARM64vecshi_SQSHRUN8BH,
      ARM64vecshi_SQRSHRN2SD,   ARM64vecshi_SQRSHRN4HS,  ARM64vecshi_SQRSHRN8BH,
      ARM64vecshi_UQRSHRN2SD,   ARM64vecshi_UQRSHRN4HS,  ARM64vecshi_UQRSHRN8BH,
      ARM64vecshi_SQRSHRUN2SD,  ARM64vecshi_SQRSHRUN4HS, ARM64vecshi_SQRSHRUN8BH,
      /* Saturating left shifts, of various flavours. */
      ARM64vecshi_UQSHL64x2,    ARM64vecshi_UQSHL32x4,
      ARM64vecshi_UQSHL16x8,    ARM64vecshi_UQSHL8x16,
      ARM64vecshi_SQSHL64x2,    ARM64vecshi_SQSHL32x4,
      ARM64vecshi_SQSHL16x8,    ARM64vecshi_SQSHL8x16,
      ARM64vecshi_SQSHLU64x2,   ARM64vecshi_SQSHLU32x4,
      ARM64vecshi_SQSHLU16x8,   ARM64vecshi_SQSHLU8x16,
      ARM64vecshi_INVALID
   }
   ARM64VecShiftImmOp;

typedef
   enum {
      ARM64vecna_XTN=450,
      ARM64vecna_SQXTN,
      ARM64vecna_UQXTN,
      ARM64vecna_SQXTUN,
      ARM64vecna_INVALID
   }
   ARM64VecNarrowOp;

typedef
   enum {
      /* baseline */
      ARM64in_Arith=1220,
      ARM64in_Cmp,
      ARM64in_Logic,
      ARM64in_Test,
      ARM64in_Shift,
      ARM64in_Unary,
      ARM64in_MovI,        /* int reg-reg move */
      ARM64in_Imm64,
      ARM64in_LdSt64,
      ARM64in_LdSt32,      /* w/ ZX loads */
      ARM64in_LdSt16,      /* w/ ZX loads */
      ARM64in_LdSt8,       /* w/ ZX loads */
      ARM64in_XDirect,     /* direct transfer to GA */
      ARM64in_XIndir,      /* indirect transfer to GA */
      ARM64in_XAssisted,   /* assisted transfer to GA */
      ARM64in_CSel,
      ARM64in_Call,
      ARM64in_AddToSP,     /* move SP by small, signed constant */
      ARM64in_FromSP,      /* move SP to integer register */
      ARM64in_Mul,
      ARM64in_LdrEX,
      ARM64in_StrEX,
      ARM64in_CAS,
      ARM64in_MFence,
      ARM64in_ClrEX,
      /* ARM64in_V*: scalar ops involving vector registers */
      ARM64in_VLdStH,   /* ld/st to/from low 16 bits of vec reg, imm offset */
      ARM64in_VLdStS,   /* ld/st to/from low 32 bits of vec reg, imm offset */
      ARM64in_VLdStD,   /* ld/st to/from low 64 bits of vec reg, imm offset */
      ARM64in_VLdStQ,   /* ld/st to/from all 128 bits of vec reg, no offset */
      ARM64in_VCvtI2F,
      ARM64in_VCvtF2I,
      ARM64in_VCvtSD,   /* scalar 32 bit FP <--> 64 bit FP */
      ARM64in_VCvtHS,   /* scalar 16 bit FP <--> 32 bit FP */
      ARM64in_VCvtHD,   /* scalar 16 bit FP <--> 64 bit FP */
      ARM64in_VUnaryD,
      ARM64in_VUnaryS,
      ARM64in_VBinD,
      ARM64in_VBinS,
      ARM64in_VCmpD,
      ARM64in_VCmpS,
      ARM64in_VFCSel,
      ARM64in_FPCR,
      ARM64in_FPSR,
      /* ARM64in_V*V: vector ops on vector registers */
      ARM64in_VBinV,
      ARM64in_VModifyV,
      ARM64in_VUnaryV,
      ARM64in_VNarrowV,
      ARM64in_VShiftImmV,
      ARM64in_VExtV,
      ARM64in_VImmQ,
      ARM64in_VDfromX,    /* Move an Xreg to a Dreg */
      ARM64in_VQfromX,    /* Move an Xreg to a Qreg lo64, and zero hi64 */
      ARM64in_VQfromXX,   /* Move 2 Xregs to a Qreg */
      ARM64in_VXfromQ,    /* Move half a Qreg to an Xreg */
      ARM64in_VXfromDorS, /* Move Dreg or Sreg(ZX) to an Xreg */
      ARM64in_VMov,       /* vector reg-reg move, 16, 8 or 4 bytes */
      /* infrastructure */
      ARM64in_EvCheck,    /* Event check */
      ARM64in_ProfInc     /* 64-bit profile counter increment */
   }
   ARM64InstrTag;

/* Destinations are on the LEFT (first operand) */

typedef
   struct {
      ARM64InstrTag tag;
      union {
         /* --- INTEGER INSTRUCTIONS --- */
         /* 64 bit ADD/SUB reg, reg or uimm12<<{0,12} */
         struct {
            HReg      dst;
            HReg      argL;
            ARM64RIA* argR;
            Bool      isAdd;
         } Arith;
         /* 64 or 32 bit CMP reg, reg or aimm (SUB and set flags) */
         struct {
            HReg      argL;
            ARM64RIA* argR;
            Bool      is64;
         } Cmp;
         /* 64 bit AND/OR/XOR reg, reg or bitfield-immediate */
         struct {
            HReg         dst;
            HReg         argL;
            ARM64RIL*    argR;
            ARM64LogicOp op;
         } Logic;
         /* 64 bit TST reg, reg or bimm (AND and set flags) */
         struct {
            HReg      argL;
            ARM64RIL* argR;
         } Test;
         /* 64 bit SHL/SHR/SAR, 2nd arg is reg or imm */
         struct {
            HReg         dst;
            HReg         argL;
            ARM64RI6*    argR;
            ARM64ShiftOp op;
         } Shift;
         /* NOT/NEG/CLZ, 64 bit only */
         struct {
            HReg         dst;
            HReg         src;
            ARM64UnaryOp op;
         } Unary;
         /* MOV dst, src -- reg-reg move for integer registers */
         struct {
            HReg dst;
            HReg src;
         } MovI;
         /* Pseudo-insn; make a 64-bit immediate */
         struct {
            HReg  dst;
            ULong imm64;
         } Imm64;
         /* 64-bit load or store */
         struct {
            Bool        isLoad;
            HReg        rD;
            ARM64AMode* amode;
         } LdSt64;
         /* zx-32-to-64-bit load, or 32-bit store */
         struct {
            Bool        isLoad;
            HReg        rD;
            ARM64AMode* amode;
         } LdSt32;
         /* zx-16-to-64-bit load, or 16-bit store */
         struct {
            Bool        isLoad;
            HReg        rD;
            ARM64AMode* amode;
         } LdSt16;
         /* zx-8-to-64-bit load, or 8-bit store */
         struct {
            Bool        isLoad;
            HReg        rD;
            ARM64AMode* amode;
         } LdSt8;
         /* Update the guest PC value, then exit requesting to chain
            to it.  May be conditional.  Urr, use of Addr64 implicitly
            assumes that wordsize(guest) == wordsize(host). */
         struct {
            Addr64        dstGA;    /* next guest address */
            ARM64AMode*   amPC;     /* amode in guest state for PC */
            ARM64CondCode cond;     /* can be ARM64cc_AL */
            Bool          toFastEP; /* chain to the slow or fast point? */
         } XDirect;
         /* Boring transfer to a guest address not known at JIT time.
            Not chainable.  May be conditional. */
         struct {
            HReg          dstGA;
            ARM64AMode*   amPC;
            ARM64CondCode cond; /* can be ARM64cc_AL */
         } XIndir;
         /* Assisted transfer to a guest address, most general case.
            Not chainable.  May be conditional. */
         struct {
            HReg          dstGA;
            ARM64AMode*   amPC;
            ARM64CondCode cond; /* can be ARM64cc_AL */
            IRJumpKind    jk;
         } XAssisted;
         /* CSEL: dst = if cond then argL else argR.  cond may be anything. */
         struct {
            HReg          dst;
            HReg          argL;
            HReg          argR;
            ARM64CondCode cond;
         } CSel;
         /* Pseudo-insn.  Call target (an absolute address), on given
            condition (which could be ARM64cc_AL). */
         struct {
            RetLoc        rloc;     /* where the return value will be */
            Addr64        target;
            ARM64CondCode cond;
            Int           nArgRegs; /* # regs carrying args: 0 .. 8 */
         } Call;
         /* move SP by small, signed constant */
         struct {
            Int simm; /* needs to be 0 % 16 and in the range -4095
                         .. 4095 inclusive */
         } AddToSP;
         /* move SP to integer register */
         struct {
            HReg dst;
         } FromSP;
         /* Integer multiply, with 3 variants:
              (PLAIN) lo64(64 *  64)
              (ZX)    hi64(64 *u 64)
              (SX)    hi64(64 *s 64)
         */
         struct {
            HReg       dst;
            HReg       argL;
            HReg       argR;
            ARM64MulOp op;
         } Mul;
         /* LDXR{,H,B} x2, [x4] */
         struct {
            Int  szB; /* 1, 2, 4 or 8 */
         } LdrEX;
         /* STXR{,H,B} w0, x2, [x4] */
         struct {
            Int  szB; /* 1, 2, 4 or 8 */
         } StrEX;
         /* x1 = CAS(x3(addr), x5(expected) -> x7(new)),
            where x1[8*szB-1 : 0] == x5[8*szB-1 : 0] indicates success,
                  x1[8*szB-1 : 0] != x5[8*szB-1 : 0] indicates failure.
            Uses x8 as scratch (but that's not allocatable).
            Hence: RD x3, x5, x7; WR x1

            (szB=8)  mov  x8, x5
            (szB=4)  and  x8, x5, #0xFFFFFFFF
            (szB=2)  and  x8, x5, #0xFFFF
            (szB=1)  and  x8, x5, #0xFF
            -- x8 is correctly zero-extended expected value
            ldxr    x1, [x3]
            -- x1 is correctly zero-extended actual value
            cmp     x1, x8
            bne     after
            -- if branch taken, failure; x1[8*szB-1 : 0] holds old value
            -- attempt to store
            stxr    w1, x7, [x3]
            -- if store successful, x1==0, so the eor is "x1 := x5"
            -- if store failed,     x1==1, so the eor makes x1 != x5
            eor     x1, x5, x1
           after:
         */
         struct {
            Int szB; /* 1, 2, 4 or 8 */
         } CAS;
         /* Mem fence.  An insn which fences all loads and stores as
            much as possible before continuing.  On ARM64 we emit the
            sequence "dsb sy ; dmb sy ; isb sy", which is probably
            total nuclear overkill, but better safe than sorry. */
         struct {
         } MFence;
         /* A CLREX instruction. */
         struct {
         } ClrEX;
         /* --- INSTRUCTIONS INVOLVING VECTOR REGISTERS --- */
         /* ld/st to/from low 16 bits of vec reg, imm offset */
         struct {
            Bool isLoad;
            HReg hD;
            HReg rN;
            UInt uimm12;  /* 0 .. 8190 inclusive, 0 % 2 */
         } VLdStH;
         /* ld/st to/from low 32 bits of vec reg, imm offset */
         struct {
            Bool isLoad;
            HReg sD;
            HReg rN;
            UInt uimm12;  /* 0 .. 16380 inclusive, 0 % 4 */
         } VLdStS;
         /* ld/st to/from low 64 bits of vec reg, imm offset */
         struct {
            Bool isLoad;
            HReg dD;
            HReg rN;
            UInt uimm12;  /* 0 .. 32760 inclusive, 0 % 8 */
         } VLdStD;
         /* ld/st to/from all 128 bits of vec reg, no offset */
         struct {
            Bool isLoad;
            HReg rQ; // data
            HReg rN; // address
         } VLdStQ;
         /* Scalar conversion of int to float. */
         struct {
            ARM64CvtOp how;
            HReg       rD; // dst, a D or S register
            HReg       rS; // src, a W or X register
         } VCvtI2F;
         /* Scalar conversion of float to int, w/ specified RM. */
         struct {
            ARM64CvtOp how;
            HReg       rD; // dst, a W or X register
            HReg       rS; // src, a D or S register
            UChar      armRM; // ARM encoded RM:
                              // 00=nearest, 01=+inf, 10=-inf, 11=zero
         } VCvtF2I;
         /* Convert between 32-bit and 64-bit FP values (both ways). (FCVT) */
         struct {
            Bool sToD; /* True: F32->F64.  False: F64->F32 */
            HReg dst;
            HReg src;
         } VCvtSD;
         /* Convert between 16-bit and 32-bit FP values (both ways). (FCVT) */
         struct {
            Bool hToS; /* True: F16->F32.  False: F32->F16 */
            HReg dst;
            HReg src;
         } VCvtHS;
         /* Convert between 16-bit and 64-bit FP values (both ways). (FCVT) */
         struct {
            Bool hToD; /* True: F16->F64.  False: F64->F16 */
            HReg dst;
            HReg src;
         } VCvtHD;
         /* 64-bit FP unary */
         struct {
            ARM64FpUnaryOp op;
            HReg           dst;
            HReg           src;
         } VUnaryD;
         /* 32-bit FP unary */
         struct {
            ARM64FpUnaryOp op;
            HReg           dst;
            HReg           src;
         } VUnaryS;
         /* 64-bit FP binary arithmetic */
         struct {
            ARM64FpBinOp op;
            HReg         dst;
            HReg         argL;
            HReg         argR;
         } VBinD;
         /* 32-bit FP binary arithmetic */
         struct {
            ARM64FpBinOp op;
            HReg         dst;
            HReg         argL;
            HReg         argR;
         } VBinS;
         /* 64-bit FP compare */
         struct {
            HReg argL;
            HReg argR;
         } VCmpD;
         /* 32-bit FP compare */
         struct {
            HReg argL;
            HReg argR;
         } VCmpS;
         /* 32- or 64-bit FP conditional select */
         struct {
            HReg          dst;
            HReg          argL;
            HReg          argR;
            ARM64CondCode cond;
            Bool          isD;
         } VFCSel;
         /* Move a 32-bit value to/from the FPCR */
         struct {
            Bool toFPCR;
            HReg iReg;
         } FPCR;
         /* Move a 32-bit value to/from the FPSR */
         struct {
            Bool toFPSR;
            HReg iReg;
         } FPSR;
         /* binary vector operation on vector registers */
         struct {
            ARM64VecBinOp op;
            HReg          dst;
            HReg          argL;
            HReg          argR;
         } VBinV;
         /* binary vector operation on vector registers.
            Dst reg is also a src. */
         struct {
            ARM64VecModifyOp op;
            HReg             mod;
            HReg             arg;
         } VModifyV;
         /* unary vector operation on vector registers */
         struct {
            ARM64VecUnaryOp op;
            HReg            dst;
            HReg            arg;
         } VUnaryV;
         /* vector narrowing, Q -> Q.  Result goes in the bottom half
            of dst and the top half is zeroed out.  Iow one of the
            XTN family. */
         struct {
            ARM64VecNarrowOp op;
            UInt             dszBlg2; // 0: 16to8_x8  1: 32to16_x4  2: 64to32_x2
            HReg             dst;     // Q reg
            HReg             src;     // Q reg
         } VNarrowV;
         /* Vector shift by immediate.  For left shifts, |amt| must be
            >= 0 and < implied lane size of |op|.  For right shifts,
            |amt| must be > 0 and <= implied lane size of |op|.  Shifts
            beyond these ranges are not allowed. */
         struct {
            ARM64VecShiftImmOp op;
            HReg               dst;
            HReg               src;
            UInt               amt;
         } VShiftImmV;
         struct {
            HReg dst;
            HReg srcLo;
            HReg srcHi;
            UInt amtB;
         } VExtV;
         struct {
            HReg   rQ;
            UShort imm; /* Same 1-bit-per-byte encoding as IR */
         } VImmQ;
         struct {
            HReg rD;
            HReg rX;
         } VDfromX;
         struct {
            HReg rQ;
            HReg rXlo;
         } VQfromX;
         struct {
            HReg rQ;
            HReg rXhi;
            HReg rXlo;
         } VQfromXX;
         struct {
            HReg rX;
            HReg rQ;
            UInt laneNo; /* either 0 or 1 */
         } VXfromQ;
         struct {
            HReg rX;
            HReg rDorS;
            Bool fromD;
         } VXfromDorS;
         /* MOV dst, src -- reg-reg move for vector registers */
         struct {
            UInt szB; // 16=mov qD,qS;  8=mov dD,dS;  4=mov sD,sS
            HReg dst;
            HReg src;
         } VMov;
         struct {
            ARM64AMode* amCounter;
            ARM64AMode* amFailAddr;
         } EvCheck;
         struct {
            /* No fields.  The address of the counter to inc is
               installed later, post-translation, by patching it in,
               as it is not known at translation time. */
         } ProfInc;
      } ARM64in;
   }
   ARM64Instr;


extern ARM64Instr* ARM64Instr_Arith   ( HReg, HReg, ARM64RIA*, Bool isAdd );
extern ARM64Instr* ARM64Instr_Cmp     ( HReg, ARM64RIA*, Bool is64 );
extern ARM64Instr* ARM64Instr_Logic   ( HReg, HReg, ARM64RIL*, ARM64LogicOp );
extern ARM64Instr* ARM64Instr_Test    ( HReg, ARM64RIL* );
extern ARM64Instr* ARM64Instr_Shift   ( HReg, HReg, ARM64RI6*, ARM64ShiftOp );
extern ARM64Instr* ARM64Instr_Unary   ( HReg, HReg, ARM64UnaryOp );
extern ARM64Instr* ARM64Instr_MovI    ( HReg, HReg );
extern ARM64Instr* ARM64Instr_Imm64   ( HReg, ULong );
extern ARM64Instr* ARM64Instr_LdSt64  ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt32  ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt16  ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt8   ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_XDirect ( Addr64 dstGA, ARM64AMode* amPC,
                                        ARM64CondCode cond, Bool toFastEP );
extern ARM64Instr* ARM64Instr_XIndir  ( HReg dstGA, ARM64AMode* amPC,
                                        ARM64CondCode cond );
extern ARM64Instr* ARM64Instr_XAssisted ( HReg dstGA, ARM64AMode* amPC,
                                          ARM64CondCode cond, IRJumpKind jk );
extern ARM64Instr* ARM64Instr_CSel    ( HReg dst, HReg argL, HReg argR,
                                        ARM64CondCode cond );
extern ARM64Instr* ARM64Instr_Call    ( ARM64CondCode, Addr64, Int nArgRegs,
                                        RetLoc rloc );
extern ARM64Instr* ARM64Instr_AddToSP ( Int simm );
extern ARM64Instr* ARM64Instr_FromSP  ( HReg dst );
extern ARM64Instr* ARM64Instr_Mul     ( HReg dst, HReg argL, HReg argR,
                                        ARM64MulOp op );
extern ARM64Instr* ARM64Instr_LdrEX   ( Int szB );
extern ARM64Instr* ARM64Instr_StrEX   ( Int szB );
extern ARM64Instr* ARM64Instr_CAS     ( Int szB );
extern ARM64Instr* ARM64Instr_MFence  ( void );
extern ARM64Instr* ARM64Instr_ClrEX   ( void );
extern ARM64Instr* ARM64Instr_VLdStH  ( Bool isLoad, HReg hD, HReg rN,
                                        UInt uimm12 /* 0 .. 8190, 0 % 2 */ );
extern ARM64Instr* ARM64Instr_VLdStS  ( Bool isLoad, HReg sD, HReg rN,
                                        UInt uimm12 /* 0 .. 16380, 0 % 4 */ );
extern ARM64Instr* ARM64Instr_VLdStD  ( Bool isLoad, HReg dD, HReg rN,
                                        UInt uimm12 /* 0 .. 32760, 0 % 8 */ );
extern ARM64Instr* ARM64Instr_VLdStQ  ( Bool isLoad, HReg rQ, HReg rN );
extern ARM64Instr* ARM64Instr_VCvtI2F ( ARM64CvtOp how, HReg rD, HReg rS );
extern ARM64Instr* ARM64Instr_VCvtF2I ( ARM64CvtOp how, HReg rD, HReg rS,
                                        UChar armRM );
extern ARM64Instr* ARM64Instr_VCvtSD  ( Bool sToD, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VCvtHS  ( Bool hToS, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VCvtHD  ( Bool hToD, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryD ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryS ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VBinD   ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VBinS   ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VCmpD   ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VCmpS   ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VFCSel  ( HReg dst, HReg argL, HReg argR,
                                        ARM64CondCode cond, Bool isD );
extern ARM64Instr* ARM64Instr_FPCR    ( Bool toFPCR, HReg iReg );
extern ARM64Instr* ARM64Instr_FPSR    ( Bool toFPSR, HReg iReg );
extern ARM64Instr* ARM64Instr_VBinV   ( ARM64VecBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VModifyV ( ARM64VecModifyOp, HReg, HReg );
extern ARM64Instr* ARM64Instr_VUnaryV ( ARM64VecUnaryOp op, HReg, HReg );
extern ARM64Instr* ARM64Instr_VNarrowV ( ARM64VecNarrowOp op, UInt dszBlg2,
                                         HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VShiftImmV ( ARM64VecShiftImmOp op,
                                           HReg dst, HReg src, UInt amt );
extern ARM64Instr* ARM64Instr_VExtV   ( HReg dst,
                                        HReg srcLo, HReg srcHi, UInt amtB );
extern ARM64Instr* ARM64Instr_VImmQ   ( HReg, UShort );
extern ARM64Instr* ARM64Instr_VDfromX ( HReg rD, HReg rX );
extern ARM64Instr* ARM64Instr_VQfromX ( HReg rQ, HReg rXlo );
extern ARM64Instr* ARM64Instr_VQfromXX( HReg rQ, HReg rXhi, HReg rXlo );
extern ARM64Instr* ARM64Instr_VXfromQ ( HReg rX, HReg rQ, UInt laneNo );
extern ARM64Instr* ARM64Instr_VXfromDorS ( HReg rX, HReg rDorS, Bool fromD );
extern ARM64Instr* ARM64Instr_VMov    ( UInt szB, HReg dst, HReg src );

extern ARM64Instr* ARM64Instr_EvCheck ( ARM64AMode* amCounter,
                                        ARM64AMode* amFailAddr );
extern ARM64Instr* ARM64Instr_ProfInc ( void );
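
/* A usage sketch (hypothetical, for illustration only; real
   instruction selection would normally use virtual registers): to
   build "x0 = x1 + 42" one could write

      ARM64Instr* i
         = ARM64Instr_Arith( hregARM64_X0(), hregARM64_X1(),
                             ARM64RIA_I12(42, 0), True/*isAdd*/ );

   and later hand |i| to emit_ARM64Instr below. */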

extern void ppARM64Instr ( const ARM64Instr* );


/* Some functions that insulate the register allocator from details
   of the underlying instruction set. */
extern void getRegUsage_ARM64Instr ( HRegUsage*, const ARM64Instr*, Bool );
extern void mapRegs_ARM64Instr     ( HRegRemap*, ARM64Instr*, Bool );
extern Bool isMove_ARM64Instr      ( const ARM64Instr*, HReg*, HReg* );
extern Int  emit_ARM64Instr        ( /*MB_MOD*/Bool* is_profInc,
                                     UChar* buf, Int nbuf, const ARM64Instr* i,
                                     Bool mode64,
                                     VexEndness endness_host,
                                     const void* disp_cp_chain_me_to_slowEP,
                                     const void* disp_cp_chain_me_to_fastEP,
                                     const void* disp_cp_xindir,
                                     const void* disp_cp_xassisted );

extern void genSpill_ARM64  ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );
extern void genReload_ARM64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );

extern const RRegUniverse* getRRegUniverse_ARM64 ( void );

extern HInstrArray* iselSB_ARM64 ( const IRSB*,
                                   VexArch,
                                   const VexArchInfo*,
                                   const VexAbiInfo*,
                                   Int offs_Host_EvC_Counter,
                                   Int offs_Host_EvC_FailAddr,
                                   Bool chainingAllowed,
                                   Bool addProfInc,
                                   Addr max_ga );

/* How big is an event check?  This is kind of a kludge because it
   depends on the offsets of host_EvC_FAILADDR and
   host_EvC_COUNTER. */
extern Int evCheckSzB_ARM64 ( void );

/* Perform chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
                                          void* place_to_chain,
                                          const void* disp_cp_chain_me_EXPECTED,
                                          const void* place_to_jump_to );

extern VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
                                            void* place_to_unchain,
                                            const void* place_to_jump_to_EXPECTED,
                                            const void* disp_cp_chain_me );

/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_ARM64 ( VexEndness endness_host,
                                          void*  place_to_patch,
                                          const ULong* location_of_counter );


#endif /* ndef __VEX_HOST_ARM64_DEFS_H */

/*---------------------------------------------------------------*/
/*--- end                                   host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/