Home | History | Annotate | Download | only in sparc
      1 /*
      2  * Tiny Code Generator for QEMU
      3  *
      4  * Copyright (c) 2008 Fabrice Bellard
      5  *
      6  * Permission is hereby granted, free of charge, to any person obtaining a copy
      7  * of this software and associated documentation files (the "Software"), to deal
      8  * in the Software without restriction, including without limitation the rights
      9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
     10  * copies of the Software, and to permit persons to whom the Software is
     11  * furnished to do so, subject to the following conditions:
     12  *
     13  * The above copyright notice and this permission notice shall be included in
     14  * all copies or substantial portions of the Software.
     15  *
     16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
     17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
     18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
     19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
     20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
     21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
     22  * THE SOFTWARE.
     23  */
     24 
#ifndef NDEBUG
/* Register names for debug/dump output, indexed by TCG register number:
   %g0-%g7 (globals), %o0-%o7 (outs), %l0-%l7 (locals), %i0-%i7 (ins).  */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%g0",
    "%g1",
    "%g2",
    "%g3",
    "%g4",
    "%g5",
    "%g6",
    "%g7",
    "%o0",
    "%o1",
    "%o2",
    "%o3",
    "%o4",
    "%o5",
    "%o6",
    "%o7",
    "%l0",
    "%l1",
    "%l2",
    "%l3",
    "%l4",
    "%l5",
    "%l6",
    "%l7",
    "%i0",
    "%i1",
    "%i2",
    "%i3",
    "%i4",
    "%i5",
    "%i6",
    "%i7",
};
#endif
     61 
/* Register allocation preference order.  %l0-%l7 and %i0-%i4 only:
   %i5 is excluded because the ldst/addi/andi helpers below use it as
   an internal scratch register, and %i6/%i7 are the frame pointer and
   return address.  NOTE(review): %i4 is also clobbered as a scratch by
   the 64-bit path of tcg_out_movi yet remains allocatable — confirm
   this is intentional.  */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_L0,
    TCG_REG_L1,
    TCG_REG_L2,
    TCG_REG_L3,
    TCG_REG_L4,
    TCG_REG_L5,
    TCG_REG_L6,
    TCG_REG_L7,
    TCG_REG_I0,
    TCG_REG_I1,
    TCG_REG_I2,
    TCG_REG_I3,
    TCG_REG_I4,
};
     77 
/* Registers used to pass integer function-call arguments (%o0-%o5).  */
static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_O0,
    TCG_REG_O1,
    TCG_REG_O2,
    TCG_REG_O3,
    TCG_REG_O4,
    TCG_REG_O5,
};
     86 
/* Registers in which function-call results are returned.  */
static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_O0,
    TCG_REG_O1,
};
     91 
     92 static inline int check_fit_tl(tcg_target_long val, unsigned int bits)
     93 {
     94     return (val << ((sizeof(tcg_target_long) * 8 - bits))
     95             >> (sizeof(tcg_target_long) * 8 - bits)) == val;
     96 }
     97 
     98 static inline int check_fit_i32(uint32_t val, unsigned int bits)
     99 {
    100     return ((val << (32 - bits)) >> (32 - bits)) == val;
    101 }
    102 
    103 static void patch_reloc(uint8_t *code_ptr, int type,
    104                         tcg_target_long value, tcg_target_long addend)
    105 {
    106     value += addend;
    107     switch (type) {
    108     case R_SPARC_32:
    109         if (value != (uint32_t)value)
    110             tcg_abort();
    111         *(uint32_t *)code_ptr = value;
    112         break;
    113     case R_SPARC_WDISP22:
    114         value -= (long)code_ptr;
    115         value >>= 2;
    116         if (!check_fit_tl(value, 22))
    117             tcg_abort();
    118         *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x3fffff) | value;
    119         break;
    120     case R_SPARC_WDISP19:
    121         value -= (long)code_ptr;
    122         value >>= 2;
    123         if (!check_fit_tl(value, 19))
    124             tcg_abort();
    125         *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x7ffff) | value;
    126         break;
    127     default:
    128         tcg_abort();
    129     }
    130 }
    131 
    132 /* maximum number of register used for input function arguments */
    133 static inline int tcg_target_get_call_iarg_regs_count(int flags)
    134 {
    135     return 6;
    136 }
    137 
    138 /* parse target specific constraints */
    139 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
    140 {
    141     const char *ct_str;
    142 
    143     ct_str = *pct_str;
    144     switch (ct_str[0]) {
    145     case 'r':
    146     case 'L': /* qemu_ld/st constraint */
    147         ct->ct |= TCG_CT_REG;
    148         tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
    149         // Helper args
    150         tcg_regset_reset_reg(ct->u.regs, TCG_REG_O0);
    151         tcg_regset_reset_reg(ct->u.regs, TCG_REG_O1);
    152         tcg_regset_reset_reg(ct->u.regs, TCG_REG_O2);
    153         break;
    154     case 'I':
    155         ct->ct |= TCG_CT_CONST_S11;
    156         break;
    157     case 'J':
    158         ct->ct |= TCG_CT_CONST_S13;
    159         break;
    160     default:
    161         return -1;
    162     }
    163     ct_str++;
    164     *pct_str = ct_str;
    165     return 0;
    166 }
    167 
    168 /* test if a constant matches the constraint */
    169 static inline int tcg_target_const_match(tcg_target_long val,
    170                                          const TCGArgConstraint *arg_ct)
    171 {
    172     int ct;
    173 
    174     ct = arg_ct->ct;
    175     if (ct & TCG_CT_CONST)
    176         return 1;
    177     else if ((ct & TCG_CT_CONST_S11) && check_fit_tl(val, 11))
    178         return 1;
    179     else if ((ct & TCG_CT_CONST_S13) && check_fit_tl(val, 13))
    180         return 1;
    181     else
    182         return 0;
    183 }
    184 
/* Instruction-word field encoders (SPARC 32-bit instruction formats).  */
#define INSN_OP(x)  ((x) << 30)
#define INSN_OP2(x) ((x) << 22)
#define INSN_OP3(x) ((x) << 19)
#define INSN_OPF(x) ((x) << 5)
#define INSN_RD(x)  ((x) << 25)
#define INSN_RS1(x) ((x) << 14)
#define INSN_RS2(x) (x)
#define INSN_ASI(x) ((x) << 5)

/* Immediate and branch-offset encoders.  INSN_IMM13 also sets the 'i'
   bit (bit 13) selecting the register+immediate form; branch offsets
   are byte offsets, scaled to words here.  */
#define INSN_IMM13(x) ((1 << 13) | ((x) & 0x1fff))
#define INSN_OFF19(x) (((x) >> 2) & 0x07ffff)
#define INSN_OFF22(x) (((x) >> 2) & 0x3fffff)

/* Branch condition field; 'a' is the annul bit.  */
#define INSN_COND(x, a) (((x) << 25) | ((a) << 29))
/* Integer condition codes.  */
#define COND_N     0x0
#define COND_E     0x1
#define COND_LE    0x2
#define COND_L     0x3
#define COND_LEU   0x4
#define COND_CS    0x5
#define COND_NEG   0x6
#define COND_VS    0x7
#define COND_A     0x8
#define COND_NE    0x9
#define COND_G     0xa
#define COND_GE    0xb
#define COND_GU    0xc
#define COND_CC    0xd
#define COND_POS   0xe
#define COND_VC    0xf
/* "ba": branch always, not annulled.  */
#define BA         (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2))

/* Format-3 arithmetic/logic opcodes.  */
#define ARITH_ADD  (INSN_OP(2) | INSN_OP3(0x00))
#define ARITH_AND  (INSN_OP(2) | INSN_OP3(0x01))
#define ARITH_OR   (INSN_OP(2) | INSN_OP3(0x02))
#define ARITH_ORCC (INSN_OP(2) | INSN_OP3(0x12))
#define ARITH_XOR  (INSN_OP(2) | INSN_OP3(0x03))
#define ARITH_SUB  (INSN_OP(2) | INSN_OP3(0x04))
#define ARITH_SUBCC (INSN_OP(2) | INSN_OP3(0x14))
#define ARITH_ADDX (INSN_OP(2) | INSN_OP3(0x10))
#define ARITH_SUBX (INSN_OP(2) | INSN_OP3(0x0c))
#define ARITH_UMUL (INSN_OP(2) | INSN_OP3(0x0a))
#define ARITH_UDIV (INSN_OP(2) | INSN_OP3(0x0e))
#define ARITH_SDIV (INSN_OP(2) | INSN_OP3(0x0f))
#define ARITH_MULX (INSN_OP(2) | INSN_OP3(0x09))
#define ARITH_UDIVX (INSN_OP(2) | INSN_OP3(0x0d))
#define ARITH_SDIVX (INSN_OP(2) | INSN_OP3(0x2d))

/* Shifts; bit 12 selects the 64-bit (x-suffixed) variants.  */
#define SHIFT_SLL  (INSN_OP(2) | INSN_OP3(0x25))
#define SHIFT_SRL  (INSN_OP(2) | INSN_OP3(0x26))
#define SHIFT_SRA  (INSN_OP(2) | INSN_OP3(0x27))

#define SHIFT_SLLX (INSN_OP(2) | INSN_OP3(0x25) | (1 << 12))
#define SHIFT_SRLX (INSN_OP(2) | INSN_OP3(0x26) | (1 << 12))
#define SHIFT_SRAX (INSN_OP(2) | INSN_OP3(0x27) | (1 << 12))

/* Control transfer, register-window management, sethi and call.  */
#define WRY        (INSN_OP(2) | INSN_OP3(0x30))
#define JMPL       (INSN_OP(2) | INSN_OP3(0x38))
#define SAVE       (INSN_OP(2) | INSN_OP3(0x3c))
#define RESTORE    (INSN_OP(2) | INSN_OP3(0x3d))
#define SETHI      (INSN_OP(0) | INSN_OP2(0x4))
#define CALL       INSN_OP(1)
/* Loads and stores; the *A forms access an alternate address space.  */
#define LDUB       (INSN_OP(3) | INSN_OP3(0x01))
#define LDSB       (INSN_OP(3) | INSN_OP3(0x09))
#define LDUH       (INSN_OP(3) | INSN_OP3(0x02))
#define LDSH       (INSN_OP(3) | INSN_OP3(0x0a))
#define LDUW       (INSN_OP(3) | INSN_OP3(0x00))
#define LDSW       (INSN_OP(3) | INSN_OP3(0x08))
#define LDX        (INSN_OP(3) | INSN_OP3(0x0b))
#define STB        (INSN_OP(3) | INSN_OP3(0x05))
#define STH        (INSN_OP(3) | INSN_OP3(0x06))
#define STW        (INSN_OP(3) | INSN_OP3(0x04))
#define STX        (INSN_OP(3) | INSN_OP3(0x0e))
#define LDUBA      (INSN_OP(3) | INSN_OP3(0x11))
#define LDSBA      (INSN_OP(3) | INSN_OP3(0x19))
#define LDUHA      (INSN_OP(3) | INSN_OP3(0x12))
#define LDSHA      (INSN_OP(3) | INSN_OP3(0x1a))
#define LDUWA      (INSN_OP(3) | INSN_OP3(0x10))
#define LDSWA      (INSN_OP(3) | INSN_OP3(0x18))
#define LDXA       (INSN_OP(3) | INSN_OP3(0x1b))
#define STBA       (INSN_OP(3) | INSN_OP3(0x15))
#define STHA       (INSN_OP(3) | INSN_OP3(0x16))
#define STWA       (INSN_OP(3) | INSN_OP3(0x14))
#define STXA       (INSN_OP(3) | INSN_OP3(0x1e))

/* Little-endian access to the primary address space (v9).  */
#ifndef ASI_PRIMARY_LITTLE
#define ASI_PRIMARY_LITTLE 0x88
#endif
    273 
    274 static inline void tcg_out_arith(TCGContext *s, int rd, int rs1, int rs2,
    275                                  int op)
    276 {
    277     tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
    278               INSN_RS2(rs2));
    279 }
    280 
    281 static inline void tcg_out_arithi(TCGContext *s, int rd, int rs1,
    282                                   uint32_t offset, int op)
    283 {
    284     tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
    285               INSN_IMM13(offset));
    286 }
    287 
/* Register-to-register move, implemented as "or %g0, arg, ret".  */
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_arith(s, ret, arg, TCG_REG_G0, ARITH_OR);
}
    292 
    293 static inline void tcg_out_sethi(TCGContext *s, int ret, uint32_t arg)
    294 {
    295     tcg_out32(s, SETHI | INSN_RD(ret) | ((arg & 0xfffffc00) >> 10));
    296 }
    297 
/* Load a (sign-extended) 13-bit immediate: "or %g0, arg, ret".  */
static inline void tcg_out_movi_imm13(TCGContext *s, int ret, uint32_t arg)
{
    tcg_out_arithi(s, ret, TCG_REG_G0, arg, ARITH_OR);
}
    302 
    303 static inline void tcg_out_movi_imm32(TCGContext *s, int ret, uint32_t arg)
    304 {
    305     if (check_fit_tl(arg, 12))
    306         tcg_out_movi_imm13(s, ret, arg);
    307     else {
    308         tcg_out_sethi(s, ret, arg);
    309         if (arg & 0x3ff)
    310             tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
    311     }
    312 }
    313 
/* Load the constant ARG into register RET.  */
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    if (!check_fit_tl(arg, 32) && (arg & ~0xffffffffULL) != 0) {
        /* Genuine 64-bit constant: build the high half in scratch %i4,
           shift it up, then OR in the low 32 bits.  */
        tcg_out_movi_imm32(s, TCG_REG_I4, arg >> 32);
        tcg_out_arithi(s, TCG_REG_I4, TCG_REG_I4, 32, SHIFT_SLLX);
        tcg_out_movi_imm32(s, ret, arg);
        tcg_out_arith(s, ret, ret, TCG_REG_I4, ARITH_OR);
    } else if (check_fit_tl(arg, 12))
        tcg_out_movi_imm13(s, ret, arg);
    else {
        /* NOTE(review): for a negative value with check_fit_tl(arg, 32)
           true, sethi+or leaves the upper 32 bits of RET clear rather
           than sign-extended — confirm no caller relies on sign
           extension here.  */
        tcg_out_sethi(s, ret, arg);
        if (arg & 0x3ff)
            tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
    }
#else
    tcg_out_movi_imm32(s, ret, arg);
#endif
}
    334 
    335 static inline void tcg_out_ld_raw(TCGContext *s, int ret,
    336                                   tcg_target_long arg)
    337 {
    338     tcg_out_sethi(s, ret, arg);
    339     tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
    340               INSN_IMM13(arg & 0x3ff));
    341 }
    342 
/* Load a pointer-sized value from the absolute address ARG into RET.  */
static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
    /* NOTE(review): if ARG fits in 10 bits no movi is emitted and RET
       is then used *uninitialized* as the base register below —
       presumably such small absolute addresses never occur in
       practice; confirm before relying on this path.  */
    if (!check_fit_tl(arg, 10))
        tcg_out_movi(s, TCG_TYPE_PTR, ret, arg & ~0x3ffULL);
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    /* 64-bit host ABI: pointers are 64 bits wide, use ldx.  */
    tcg_out32(s, LDX | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));
#else
    tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));
#endif
}
    356 
    357 static inline void tcg_out_ldst(TCGContext *s, int ret, int addr, int offset, int op)
    358 {
    359     if (check_fit_tl(offset, 13))
    360         tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(addr) |
    361                   INSN_IMM13(offset));
    362     else {
    363         tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
    364         tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
    365                   INSN_RS2(addr));
    366     }
    367 }
    368 
    369 static inline void tcg_out_ldst_asi(TCGContext *s, int ret, int addr,
    370                                     int offset, int op, int asi)
    371 {
    372     tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
    373     tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
    374               INSN_ASI(asi) | INSN_RS2(addr));
    375 }
    376 
    377 static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
    378                               int arg1, tcg_target_long arg2)
    379 {
    380     if (type == TCG_TYPE_I32)
    381         tcg_out_ldst(s, ret, arg1, arg2, LDUW);
    382     else
    383         tcg_out_ldst(s, ret, arg1, arg2, LDX);
    384 }
    385 
    386 static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
    387                               int arg1, tcg_target_long arg2)
    388 {
    389     if (type == TCG_TYPE_I32)
    390         tcg_out_ldst(s, arg, arg1, arg2, STW);
    391     else
    392         tcg_out_ldst(s, arg, arg1, arg2, STX);
    393 }
    394 
    395 static inline void tcg_out_sety(TCGContext *s, tcg_target_long val)
    396 {
    397     if (val == 0 || val == -1)
    398         tcg_out32(s, WRY | INSN_IMM13(val));
    399     else
    400         fprintf(stderr, "unimplemented sety %ld\n", (long)val);
    401 }
    402 
    403 static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
    404 {
    405     if (val != 0) {
    406         if (check_fit_tl(val, 13))
    407             tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
    408         else {
    409             tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, val);
    410             tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_ADD);
    411         }
    412     }
    413 }
    414 
    415 static inline void tcg_out_andi(TCGContext *s, int reg, tcg_target_long val)
    416 {
    417     if (val != 0) {
    418         if (check_fit_tl(val, 13))
    419             tcg_out_arithi(s, reg, reg, val, ARITH_AND);
    420         else {
    421             tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, val);
    422             tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_AND);
    423         }
    424     }
    425 }
    426 
/* Emit a nop; "sethi 0, %g0" is the canonical SPARC nop encoding.  */
static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out_sethi(s, TCG_REG_G0, 0);
}
    431 
    432 static void tcg_out_branch_i32(TCGContext *s, int opc, int label_index)
    433 {
    434     int32_t val;
    435     TCGLabel *l = &s->labels[label_index];
    436 
    437     if (l->has_value) {
    438         val = l->u.value - (tcg_target_long)s->code_ptr;
    439         tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2)
    440                       | INSN_OFF22(l->u.value - (unsigned long)s->code_ptr)));
    441     } else {
    442         tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP22, label_index, 0);
    443         tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2) | 0));
    444     }
    445 }
    446 
    447 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    448 static void tcg_out_branch_i64(TCGContext *s, int opc, int label_index)
    449 {
    450     int32_t val;
    451     TCGLabel *l = &s->labels[label_index];
    452 
    453     if (l->has_value) {
    454         val = l->u.value - (tcg_target_long)s->code_ptr;
    455         tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x1) |
    456                       (0x5 << 19) |
    457                       INSN_OFF19(l->u.value - (unsigned long)s->code_ptr)));
    458     } else {
    459         tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP19, label_index, 0);
    460         tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x1) |
    461                       (0x5 << 19) | 0));
    462     }
    463 }
    464 #endif
    465 
/* Map TCG comparison conditions to SPARC branch condition codes.  */
static const uint8_t tcg_cond_to_bcond[10] = {
    [TCG_COND_EQ] = COND_E,
    [TCG_COND_NE] = COND_NE,
    [TCG_COND_LT] = COND_L,
    [TCG_COND_GE] = COND_GE,
    [TCG_COND_LE] = COND_LE,
    [TCG_COND_GT] = COND_G,
    [TCG_COND_LTU] = COND_CS,
    [TCG_COND_GEU] = COND_CC,
    [TCG_COND_LEU] = COND_LEU,
    [TCG_COND_GTU] = COND_GU,
};
    478 
    479 static void tcg_out_brcond_i32(TCGContext *s, int cond,
    480                                TCGArg arg1, TCGArg arg2, int const_arg2,
    481                                int label_index)
    482 {
    483     if (const_arg2 && arg2 == 0)
    484         /* orcc %g0, r, %g0 */
    485         tcg_out_arith(s, TCG_REG_G0, TCG_REG_G0, arg1, ARITH_ORCC);
    486     else
    487         /* subcc r1, r2, %g0 */
    488         tcg_out_arith(s, TCG_REG_G0, arg1, arg2, ARITH_SUBCC);
    489     tcg_out_branch_i32(s, tcg_cond_to_bcond[cond], label_index);
    490     tcg_out_nop(s);
    491 }
    492 
    493 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    494 static void tcg_out_brcond_i64(TCGContext *s, int cond,
    495                                TCGArg arg1, TCGArg arg2, int const_arg2,
    496                                int label_index)
    497 {
    498     if (const_arg2 && arg2 == 0)
    499         /* orcc %g0, r, %g0 */
    500         tcg_out_arith(s, TCG_REG_G0, TCG_REG_G0, arg1, ARITH_ORCC);
    501     else
    502         /* subcc r1, r2, %g0 */
    503         tcg_out_arith(s, TCG_REG_G0, arg1, arg2, ARITH_SUBCC);
    504     tcg_out_branch_i64(s, tcg_cond_to_bcond[cond], label_index);
    505     tcg_out_nop(s);
    506 }
    507 #endif
    508 
/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
{
    /* "save %sp, -TCG_TARGET_STACK_MINFRAME, %sp": open a new register
       window and allocate the minimal stack frame.  */
    tcg_out32(s, SAVE | INSN_RD(TCG_REG_O6) | INSN_RS1(TCG_REG_O6) |
              INSN_IMM13(-TCG_TARGET_STACK_MINFRAME));
    /* "jmpl %i0, %g0": jump to the translated-code entry point passed
       as the first argument, discarding the link address.  */
    tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I0) |
              INSN_RS2(TCG_REG_G0));
    /* Jump delay slot.  */
    tcg_out_nop(s);
}
    518 
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* Softmmu slow-path load helpers, indexed by log2 of the access size.  */
static const void * const qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

/* Softmmu slow-path store helpers, indexed by log2 of the access size.  */
static const void * const qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
    537 
/* Load opcode used to read a guest virtual address from the TLB.  */
#if TARGET_LONG_BITS == 32
#define TARGET_LD_OP LDUW
#else
#define TARGET_LD_OP LDX
#endif

/* Load opcode used to read the TLB entry's addend field.  */
#if TARGET_PHYS_ADDR_BITS == 32
#define TARGET_ADDEND_LD_OP LDUW
#else
#define TARGET_ADDEND_LD_OP LDX
#endif

/* Host-word-sized load/store and shift opcodes.  */
#ifdef __arch64__
#define HOST_LD_OP LDX
#define HOST_ST_OP STX
#define HOST_SLL_OP SHIFT_SLLX
#define HOST_SRA_OP SHIFT_SRAX
#else
#define HOST_LD_OP LDUW
#define HOST_ST_OP STW
#define HOST_SLL_OP SHIFT_SLL
#define HOST_SRA_OP SHIFT_SRA
#endif
    561 
/* Emit code for a guest memory load.  args = (data_reg, addr_reg,
   mem_index).  The low two bits of OPC give log2 of the access size;
   bit 2 requests sign extension.  With CONFIG_SOFTMMU an inline TLB
   lookup is emitted, falling back to the __ld*_mmu helper on a miss;
   without it the guest address is used directly.  */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;  /* log2 of the access size */

    /* Fixed helper-call argument registers (excluded from allocation
       by the 'L' constraint).  */
    arg0 = TCG_REG_O0;
    arg1 = TCG_REG_O1;
    arg2 = TCG_REG_O2;

#if defined(CONFIG_SOFTMMU)
    /* TLB index from the page number: srl addr_reg, x, arg1 */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,
                   SHIFT_SRL);
    /* Page address plus alignment bits to compare: and addr_reg, x, arg0 */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
                   ARITH_AND);

    /* Mask the index into the TLB: and arg1, x, arg1 */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* Offset of the addr_read field: add arg1, x, arg1 */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_read));

    /* add env, arg1, arg1 */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* Fetch the TLB comparator: ld [arg1], arg2 */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* Compare against the page address: subcc arg0, arg2, %g0 */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);

    /* Placeholder, back-patched below once the hit path address is
       known.  will become:
       be label1
        or
       be,pt %xcc label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);

    /* First helper argument: mov (delay slot) */
    tcg_out_mov(s, arg0, addr_reg);

    /* Second helper argument: mov */
    tcg_out_movi(s, TCG_TYPE_I32, arg1, mem_index);

    /* XXX: move that code at the end of the TB */
    /* qemu_ld_helper[s_bits](arg0, arg1) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_ld_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)
                         & 0x3fffffff));
    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    // delay slot
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_ST_OP);
    /* Reload AREG0 after the helper returns.  */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_LD_OP);

    /* Helper result is in arg0 (%o0): data_reg = sign_extend(arg0).
       The shift amounts depend on the host word size (HOST_*_OP).  */
    switch(opc) {
    case 0 | 4:
        /* sll arg0, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, arg0, (int)sizeof(tcg_target_long) * 8 - 8,
                       HOST_SLL_OP);
        /* sra data_reg, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 8, HOST_SRA_OP);
        break;
    case 1 | 4:
        /* sll arg0, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, arg0,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SLL_OP);
        /* sra data_reg, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SRA_OP);
        break;
    case 2 | 4:
        /* sll arg0, 32, data_reg */
        tcg_out_arithi(s, data_reg, arg0, 32, HOST_SLL_OP);
        /* sra data_reg, 32, data_reg */
        tcg_out_arithi(s, data_reg, data_reg, 32, HOST_SRA_OP);
        break;
    case 0:
    case 1:
    case 2:
    case 3:
    default:
        /* Unsigned loads need no extension: mov */
        tcg_out_mov(s, data_reg, arg0);
        break;
    }

    /* Skip the fast path; placeholder back-patched at label2 below.
       will become:
       ba label2 */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, 0);

    /* nop (delay slot */
    tcg_out_nop(s);

    /* label1: TLB hit fast path starts here.  */
#if TARGET_LONG_BITS == 32
    /* be label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
#else
    /* be,pt %xcc label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
                   (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
#endif

    /* Load the TLB addend: ld [arg1 + x], arg1 */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_read), TARGET_ADDEND_LD_OP);

#if TARGET_LONG_BITS == 32
    /* Truncate the guest address to 32 bits: and addr_reg, x, arg0 */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
#else
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);
#endif

#else
    /* No softmmu: the guest address is used directly.  */
    arg0 = addr_reg;
#endif

    /* Actual memory access; little-endian guests on this big-endian
       host use the ASI_PRIMARY_LITTLE alternate-space forms.  */
    switch(opc) {
    case 0:
        /* ldub [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUB);
        break;
    case 0 | 4:
        /* ldsb [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSB);
        break;
    case 1:
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUH);
#else
        /* lduha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 1 | 4:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSH);
#else
        /* ldsha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2:
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUW);
#else
        /* lduwa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2 | 4:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSW);
#else
        /* ldswa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 3:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldx [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDX);
#else
        /* ldxa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDXA, ASI_PRIMARY_LITTLE);
#endif
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: back-patch the unconditional branch past the fast path.  */
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));
#endif
}
    771 
    772 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
    773                             int opc)
    774 {
    775     int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
    776 #if defined(CONFIG_SOFTMMU)
    777     uint32_t *label1_ptr, *label2_ptr;
    778 #endif
    779 
    780     data_reg = *args++;
    781     addr_reg = *args++;
    782     mem_index = *args;
    783 
    784     s_bits = opc;
    785 
    786     arg0 = TCG_REG_O0;
    787     arg1 = TCG_REG_O1;
    788     arg2 = TCG_REG_O2;
    789 
    790 #if defined(CONFIG_SOFTMMU)
    791     /* srl addr_reg, x, arg1 */
    792     tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,
    793                    SHIFT_SRL);
    794 
    795     /* and addr_reg, x, arg0 */
    796     tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
    797                    ARITH_AND);
    798 
    799     /* and arg1, x, arg1 */
    800     tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    801 
    802     /* add arg1, x, arg1 */
    803     tcg_out_addi(s, arg1, offsetof(CPUState,
    804                                    tlb_table[mem_index][0].addr_write));
    805 
    806     /* add env, arg1, arg1 */
    807     tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);
    808 
    809     /* ld [arg1], arg2 */
    810     tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
    811               INSN_RS2(TCG_REG_G0));
    812 
    813     /* subcc arg0, arg2, %g0 */
    814     tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);
    815 
    816     /* will become:
    817        be label1
    818         or
    819        be,pt %xcc label1 */
    820     label1_ptr = (uint32_t *)s->code_ptr;
    821     tcg_out32(s, 0);
    822 
    823     /* mov (delay slot) */
    824     tcg_out_mov(s, arg0, addr_reg);
    825 
    826     /* mov */
    827     tcg_out_mov(s, arg1, data_reg);
    828 
    829     /* mov */
    830     tcg_out_movi(s, TCG_TYPE_I32, arg2, mem_index);
    831 
    832     /* XXX: move that code at the end of the TB */
    833     /* qemu_st_helper[s_bits](arg0, arg1, arg2) */
    834     tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_st_helpers[s_bits]
    835                            - (tcg_target_ulong)s->code_ptr) >> 2)
    836                          & 0x3fffffff));
    837     /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
    838        global registers */
    839     // delay slot
    840     tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
    841                  TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
    842                  sizeof(long), HOST_ST_OP);
    843     tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
    844                  TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
    845                  sizeof(long), HOST_LD_OP);
    846 
    847     /* will become:
    848        ba label2 */
    849     label2_ptr = (uint32_t *)s->code_ptr;
    850     tcg_out32(s, 0);
    851 
    852     /* nop (delay slot) */
    853     tcg_out_nop(s);
    854 
    855 #if TARGET_LONG_BITS == 32
    856     /* be label1 */
    857     *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
    858                    INSN_OFF22((unsigned long)s->code_ptr -
    859                               (unsigned long)label1_ptr));
    860 #else
    861     /* be,pt %xcc label1 */
    862     *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
    863                    (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
    864                               (unsigned long)label1_ptr));
    865 #endif
    866 
    867     /* ld [arg1 + x], arg1 */
    868     tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
    869                  offsetof(CPUTLBEntry, addr_write), TARGET_ADDEND_LD_OP);
    870 
    871 #if TARGET_LONG_BITS == 32
    872     /* and addr_reg, x, arg0 */
    873     tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    874     tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    875     /* add arg0, arg1, arg0 */
    876     tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
    877 #else
    878     /* add addr_reg, arg1, arg0 */
    879     tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);
    880 #endif
    881 
    882 #else
    883     arg0 = addr_reg;
    884 #endif
    885 
    886     switch(opc) {
    887     case 0:
    888         /* stb data_reg, [arg0] */
    889         tcg_out_ldst(s, data_reg, arg0, 0, STB);
    890         break;
    891     case 1:
    892 #ifdef TARGET_WORDS_BIGENDIAN
    893         /* sth data_reg, [arg0] */
    894         tcg_out_ldst(s, data_reg, arg0, 0, STH);
    895 #else
    896         /* stha data_reg, [arg0] ASI_PRIMARY_LITTLE */
    897         tcg_out_ldst_asi(s, data_reg, arg0, 0, STHA, ASI_PRIMARY_LITTLE);
    898 #endif
    899         break;
    900     case 2:
    901 #ifdef TARGET_WORDS_BIGENDIAN
    902         /* stw data_reg, [arg0] */
    903         tcg_out_ldst(s, data_reg, arg0, 0, STW);
    904 #else
    905         /* stwa data_reg, [arg0] ASI_PRIMARY_LITTLE */
    906         tcg_out_ldst_asi(s, data_reg, arg0, 0, STWA, ASI_PRIMARY_LITTLE);
    907 #endif
    908         break;
    909     case 3:
    910 #ifdef TARGET_WORDS_BIGENDIAN
    911         /* stx data_reg, [arg0] */
    912         tcg_out_ldst(s, data_reg, arg0, 0, STX);
    913 #else
    914         /* stxa data_reg, [arg0] ASI_PRIMARY_LITTLE */
    915         tcg_out_ldst_asi(s, data_reg, arg0, 0, STXA, ASI_PRIMARY_LITTLE);
    916 #endif
    917         break;
    918     default:
    919         tcg_abort();
    920     }
    921 
    922 #if defined(CONFIG_SOFTMMU)
    923     /* label2: */
    924     *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
    925                    INSN_OFF22((unsigned long)s->code_ptr -
    926                               (unsigned long)label2_ptr));
    927 #endif
    928 }
    929 
/* Translate one TCG opcode into SPARC machine code.
 *
 * s          - TCG context; instructions are appended at s->code_ptr.
 * opc        - the INDEX_op_* constant identifying the operation.
 * args       - operand array (outputs first, then inputs/immediates/labels).
 * const_args - const_args[i] non-zero means args[i] is a constant value
 *              rather than a register number.
 *
 * Calls tcg_abort() for any opcode this backend does not implement.
 */
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;  /* SPARC arith/shift opcode selected for the gen_arith32 tail */

    switch (opc) {
    case INDEX_op_exit_tb:
        /* Put the TB return value in %i0, then return to the TB's caller:
           "jmpl %i7+8, %g0" with "restore" in the branch delay slot to pop
           the register window. */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I0, args[0]);
        tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I7) |
                  INSN_IMM13(8));
        tcg_out32(s, RESTORE | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_G0) |
                      INSN_RS2(TCG_REG_G0));
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method: build the target address in %i5
               (sethi for the high bits + imm13 for the low bits) and
               jump through it; record the offset for later patching. */
            tcg_out_sethi(s, TCG_REG_I5, args[0] & 0xffffe000);
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_IMM13((args[0] & 0x1fff)));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
        } else {
            /* indirect jump method: load the target from tb_next[] and
               jump through %i5. */
            tcg_out_ld_ptr(s, TCG_REG_I5, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        }
        tcg_out_nop(s);  /* fill the branch delay slot */
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0])
            /* Direct call: PC-relative CALL with a 30-bit word offset. */
            tcg_out32(s, CALL | ((((tcg_target_ulong)args[0]
                                   - (tcg_target_ulong)s->code_ptr) >> 2)
                                 & 0x3fffffff));
        else {
            /* Indirect call: load the target into %i5, then
               "jmpl %i5, %o7" so the return address lands in %o7. */
            tcg_out_ld_ptr(s, TCG_REG_I5,
                           (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_O7) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        }
        /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
           global registers */
        // delay slot: spill AREG0 to the stack around the call...
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                     sizeof(long), HOST_ST_OP);
        /* ...and reload it once the callee returns. */
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                     sizeof(long), HOST_LD_OP);
        break;
    case INDEX_op_jmp:
    case INDEX_op_br:
        tcg_out_branch_i32(s, COND_A, args[0]);  /* branch-always to label */
        tcg_out_nop(s);                          /* delay slot */
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;

#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    /* On a pure 64-bit host the same instruction serves both the _i32
       and _i64 form of many ops, so emit both case labels at once. */
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32:)    \
        glue(glue(case INDEX_op_, x), _i64:)
#else
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32:)
#endif
        /* Loads: args[0] = dest reg, args[1] = base reg, args[2] = offset. */
        OP_32_64(ld8u);
        tcg_out_ldst(s, args[0], args[1], args[2], LDUB);
        break;
        OP_32_64(ld8s);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSB);
        break;
        OP_32_64(ld16u);
        tcg_out_ldst(s, args[0], args[1], args[2], LDUH);
        break;
        OP_32_64(ld16s);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSH);
        break;
    case INDEX_op_ld_i32:
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    case INDEX_op_ld32u_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], LDUW);
        break;
        /* Stores: args[0] = src reg, args[1] = base reg, args[2] = offset. */
        OP_32_64(st8);
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
        break;
        OP_32_64(st16);
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
        break;
    case INDEX_op_st_i32:
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    case INDEX_op_st32_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], STW);
        break;
        /* Two-operand arithmetic: pick the SPARC opcode, then share the
           register-vs-immediate emission in gen_arith32 below. */
        OP_32_64(add);
        c = ARITH_ADD;
        goto gen_arith32;
        OP_32_64(sub);
        c = ARITH_SUB;
        goto gen_arith32;
        OP_32_64(and);
        c = ARITH_AND;
        goto gen_arith32;
        OP_32_64(or);
        c = ARITH_OR;
        goto gen_arith32;
        OP_32_64(xor);
        c = ARITH_XOR;
        goto gen_arith32;
    case INDEX_op_shl_i32:
        c = SHIFT_SLL;
        goto gen_arith32;
    case INDEX_op_shr_i32:
        c = SHIFT_SRL;
        goto gen_arith32;
    case INDEX_op_sar_i32:
        c = SHIFT_SRA;
        goto gen_arith32;
    case INDEX_op_mul_i32:
        c = ARITH_UMUL;
        goto gen_arith32;
    case INDEX_op_div2_i32:
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
        c = ARITH_SDIVX;
        goto gen_arith32;
#else
        /* Pre-v9 32-bit divide takes the high half of the dividend from
           the %y register, so clear it first. */
        tcg_out_sety(s, 0);
        c = ARITH_SDIV;
        goto gen_arith32;
#endif
    case INDEX_op_divu2_i32:
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
        c = ARITH_UDIVX;
        goto gen_arith32;
#else
        tcg_out_sety(s, 0);  /* clear %y, as for the signed divide above */
        c = ARITH_UDIV;
        goto gen_arith32;
#endif

    case INDEX_op_brcond_i32:
        /* args: [0]=arg1, [1]=arg2 (maybe const), [2]=cond, [3]=label */
        tcg_out_brcond_i32(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;

    /* Guest memory accesses; the low 2 bits of the second argument of
       tcg_out_qemu_ld/st encode the access size (log2 bytes), bit 2
       selects sign extension for loads. */
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;

#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    /* 64-bit-only ops, available on a pure v9 host. */
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDSW);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDX);
        break;
    case INDEX_op_st_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STX);
        break;
    case INDEX_op_shl_i64:
        c = SHIFT_SLLX;
        goto gen_arith32;
    case INDEX_op_shr_i64:
        c = SHIFT_SRLX;
        goto gen_arith32;
    case INDEX_op_sar_i64:
        c = SHIFT_SRAX;
        goto gen_arith32;
    case INDEX_op_mul_i64:
        c = ARITH_MULX;
        goto gen_arith32;
    case INDEX_op_div2_i64:
        c = ARITH_SDIVX;
        goto gen_arith32;
    case INDEX_op_divu2_i64:
        c = ARITH_UDIVX;
        goto gen_arith32;

    case INDEX_op_brcond_i64:
        tcg_out_brcond_i64(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

#endif
    /* Shared tail for the arithmetic/shift cases above: emit the
       immediate form when args[2] is a constant, else the register
       form.  args[0] = dest, args[1] = src1, args[2] = src2, c = opcode. */
    gen_arith32:
        if (const_args[2]) {
            tcg_out_arithi(s, args[0], args[1], args[2], c);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], c);
        }
        break;

    default:
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
        tcg_abort();
    }
}
   1163 
/* Operand constraint table for the SPARC backend, consumed by
 * tcg_add_target_add_op_defs() below.  One entry per supported opcode,
 * terminated by { -1 }.
 *
 * Constraint letters are interpreted by this target's constraint parser
 * (not visible in this file -- confirm against target_parse_constraint):
 *   "r"   - any general register
 *   "ri"  - register or immediate
 *   "rJ"  - register or immediate; "J" presumably restricts the constant
 *           to the simm13 range of SPARC arithmetic instructions -- verify
 *   "L"   - register class restricted for qemu_ld/qemu_st operands
 *   "0"/"1" - matching constraint: operand must share the register of
 *           operand 0 / operand 1
 */
static const TCGTargetOpDef sparc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "rJ" } },
    { INDEX_op_mul_i32, { "r", "r", "rJ" } },
    { INDEX_op_div2_i32, { "r", "r", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "r", "r", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "rJ" } },
    { INDEX_op_and_i32, { "r", "r", "rJ" } },
    { INDEX_op_or_i32, { "r", "r", "rJ" } },
    { INDEX_op_xor_i32, { "r", "r", "rJ" } },

    { INDEX_op_shl_i32, { "r", "r", "rJ" } },
    { INDEX_op_shr_i32, { "r", "r", "rJ" } },
    { INDEX_op_sar_i32, { "r", "r", "rJ" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },

#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    /* 64-bit ops, registered only on a pure v9 host. */
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },
    { INDEX_op_qemu_ld64, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L" } },

    { INDEX_op_add_i64, { "r", "r", "rJ" } },
    { INDEX_op_mul_i64, { "r", "r", "rJ" } },
    { INDEX_op_div2_i64, { "r", "r", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "r", "r", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "r", "rJ" } },
    { INDEX_op_and_i64, { "r", "r", "rJ" } },
    { INDEX_op_or_i64, { "r", "r", "rJ" } },
    { INDEX_op_xor_i64, { "r", "r", "rJ" } },

    { INDEX_op_shl_i64, { "r", "r", "rJ" } },
    { INDEX_op_shr_i64, { "r", "r", "rJ" } },
    { INDEX_op_sar_i64, { "r", "r", "rJ" } },

    { INDEX_op_brcond_i64, { "r", "ri" } },
#endif
    { -1 },  /* table terminator */
};
   1242 
   1243 void tcg_target_init(TCGContext *s)
   1244 {
   1245     tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
   1246 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
   1247     tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffffffff);
   1248 #endif
   1249     tcg_regset_set32(tcg_target_call_clobber_regs, 0,
   1250                      (1 << TCG_REG_G1) |
   1251                      (1 << TCG_REG_G2) |
   1252                      (1 << TCG_REG_G3) |
   1253                      (1 << TCG_REG_G4) |
   1254                      (1 << TCG_REG_G5) |
   1255                      (1 << TCG_REG_G6) |
   1256                      (1 << TCG_REG_G7) |
   1257                      (1 << TCG_REG_O0) |
   1258                      (1 << TCG_REG_O1) |
   1259                      (1 << TCG_REG_O2) |
   1260                      (1 << TCG_REG_O3) |
   1261                      (1 << TCG_REG_O4) |
   1262                      (1 << TCG_REG_O5) |
   1263                      (1 << TCG_REG_O7));
   1264 
   1265     tcg_regset_clear(s->reserved_regs);
   1266     tcg_regset_set_reg(s->reserved_regs, TCG_REG_G0);
   1267 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
   1268     tcg_regset_set_reg(s->reserved_regs, TCG_REG_I4); // for internal use
   1269 #endif
   1270     tcg_regset_set_reg(s->reserved_regs, TCG_REG_I5); // for internal use
   1271     tcg_regset_set_reg(s->reserved_regs, TCG_REG_I6);
   1272     tcg_regset_set_reg(s->reserved_regs, TCG_REG_I7);
   1273     tcg_regset_set_reg(s->reserved_regs, TCG_REG_O6);
   1274     tcg_regset_set_reg(s->reserved_regs, TCG_REG_O7);
   1275     tcg_add_target_add_op_defs(sparc_op_defs);
   1276 }
   1277