1 /* 2 * Tiny Code Generator for QEMU 3 * 4 * Copyright (c) 2008 Fabrice Bellard 5 * 6 * Permission is hereby granted, free of charge, to any person obtaining a copy 7 * of this software and associated documentation files (the "Software"), to deal 8 * in the Software without restriction, including without limitation the rights 9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 * copies of the Software, and to permit persons to whom the Software is 11 * furnished to do so, subject to the following conditions: 12 * 13 * The above copyright notice and this permission notice shall be included in 14 * all copies or substantial portions of the Software. 15 * 16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 * THE SOFTWARE. 
 */

/* Constraint flag: constant must fit in an unsigned 32-bit immediate.  */
#define TCG_CT_CONST_U32 0x100

/* Address of the epilogue emitted by tcg_target_qemu_prologue;
   INDEX_op_exit_tb branches back here.  */
static uint8_t *tb_ret_addr;

/* When defined, qemu_ld/qemu_st emit an inline TLB-hit fast path in
   addition to the out-of-line helper call.  */
#define FAST_PATH

/* Opcode used to load the TLB addend field: 32-bit load when physical
   addresses are 32-bit, 64-bit load otherwise.  */
#if TARGET_PHYS_ADDR_BITS == 32
#define LD_ADDEND LWZ
#else
#define LD_ADDEND LD
#endif

/* Opcode used to load the TLB comparator (with update), and the "L" bit
   for cmp/cmpl selecting a 64-bit comparison.  */
#if TARGET_LONG_BITS == 32
#define LD_ADDR LWZU
#define CMP_L 0
#else
#define LD_ADDR LDU
#define CMP_L (1<<21)
#endif

#ifndef NDEBUG
/* Register names for debug dumps, indexed by TCG register number.  */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r0",
    "r1",
    "rp",
    "r3",
    "r4",
    "r5",
    "r6",
    "r7",
    "r8",
    "r9",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
};
#endif

/* Allocation preference order: callee-saved registers first (they survive
   helper calls), then the volatile argument registers.  */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    TCG_REG_R27
};

/* Integer function arguments are passed in r3..r10.  */
static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10
};

/* Function results are returned in r3.  */
static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_R3
};

/* Registers saved/restored by the prologue/epilogue.  */
static const int tcg_target_callee_save_regs[] = {
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    /* TCG_REG_R27, */ /* currently used for the global env, so no
                          need to save */
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31
};
/* Compute the 24-bit "LI" field of a B-form branch: the displacement from
   PC to TARGET must fit in a signed 26-bit value (checked by the
   sign-extend-and-compare), and the low two bits are dropped.  */
static uint32_t reloc_pc24_val (void *pc, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) pc;
    if ((disp << 38) >> 38 != disp)
        tcg_abort ();

    return disp & 0x3fffffc;
}

/* Patch the 24-bit branch displacement field of the instruction at PC.  */
static void reloc_pc24 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3fffffc)
        | reloc_pc24_val (pc, target);
}

/* Compute the 14-bit "BD" field of a conditional branch: displacement
   must fit in a signed 16-bit value; low two bits dropped.  */
static uint16_t reloc_pc14_val (void *pc, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) pc;
    if (disp != (int16_t) disp)
        tcg_abort ();

    return disp & 0xfffc;
}

/* Patch the 14-bit branch displacement field of the instruction at PC.  */
static void reloc_pc14 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xfffc)
        | reloc_pc14_val (pc, target);
}

/* Resolve a pending relocation recorded by tcg_out_reloc.  Only the two
   PPC branch relocation types are supported.  */
static void patch_reloc (uint8_t *code_ptr, int type,
                         tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch (type) {
    case R_PPC_REL14:
        reloc_pc14 (code_ptr, value);
        break;
    case R_PPC_REL24:
        reloc_pc24 (code_ptr, value);
        break;
    default:
        tcg_abort ();
    }
}

/* maximum number of register used for input function arguments */
static int tcg_target_get_call_iarg_regs_count (int flags)
{
    return ARRAY_SIZE (tcg_target_call_iarg_regs);
}

/* parse target specific constraints */
static int target_parse_constraint (TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'A': case 'B': case 'C': case 'D':
        /* 'A'..'D' pin the operand to r3..r6 respectively.  */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg (ct->u.regs, 3 + ct_str[0] - 'A');
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32 (ct->u.regs, 0, 0xffffffff);
        break;
    case 'L':                   /* qemu_ld constraint */
        /* Exclude registers clobbered by the slow-path helper call.  */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32 (ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg (ct->u.regs, TCG_REG_R3);
#ifdef CONFIG_SOFTMMU
        tcg_regset_reset_reg (ct->u.regs, TCG_REG_R4);
#endif
        break;
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32 (ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg (ct->u.regs, TCG_REG_R3);
#ifdef CONFIG_SOFTMMU
        tcg_regset_reset_reg (ct->u.regs, TCG_REG_R4);
        tcg_regset_reset_reg (ct->u.regs, TCG_REG_R5);
#endif
        break;
    case 'Z':
        ct->ct |= TCG_CT_CONST_U32;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static int tcg_target_const_match (tcg_target_long val,
                                   const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_U32) && (val == (uint32_t) val))
        return 1;
    return 0;
}

/* Instruction encoding helpers: primary opcode and extended-opcode
   forms for the various PowerPC instruction formats.  */
#define OPCD(opc) ((opc)<<26)
#define XO19(opc) (OPCD(19)|((opc)<<1))
#define XO30(opc) (OPCD(30)|((opc)<<2))
#define XO31(opc) (OPCD(31)|((opc)<<1))
#define XO58(opc) (OPCD(58)|(opc))
#define XO62(opc) (OPCD(62)|(opc))

#define B      OPCD( 18)
#define BC     OPCD( 16)
#define LBZ    OPCD( 34)
#define LHZ    OPCD( 40)
#define LHA    OPCD( 42)
#define LWZ    OPCD( 32)
#define STB    OPCD( 38)
#define STH    OPCD( 44)
#define STW    OPCD( 36)

#define STD    XO62(  0)
#define STDU   XO62(  1)
#define STDX   XO31(149)

#define LD     XO58(  0)
#define LDX    XO31( 21)
#define LDU    XO58(  1)
#define LWA    XO58(  2)
#define LWAX   XO31(341)

#define ADDI   OPCD( 14)
#define ADDIS  OPCD( 15)
#define ORI    OPCD( 24)
#define ORIS   OPCD( 25)
#define XORI   OPCD( 26)
#define XORIS  OPCD( 27)
#define ANDI   OPCD( 28)
#define ANDIS  OPCD( 29)
#define MULLI  OPCD(  7)
#define CMPLI  OPCD( 10)
#define CMPI   OPCD( 11)

#define LWZU   OPCD( 33)
#define STWU   OPCD( 37)

#define RLWINM OPCD( 21)

#define RLDICL XO30(  0)
#define RLDICR XO30(  1)
#define RLDIMI XO30(  3)

#define BCLR   XO19( 16)
#define BCCTR  XO19(528)
#define CRAND  XO19(257)
#define CRANDC XO19(129)
#define CRNAND XO19(225)
#define CROR   XO19(449)

#define EXTSB  XO31(954)
#define EXTSH  XO31(922)
#define EXTSW  XO31(986)
#define ADD    XO31(266)
#define ADDE   XO31(138)
#define ADDC   XO31( 10)
#define AND    XO31( 28)
#define SUBF   XO31( 40)
#define SUBFC  XO31(  8)
#define SUBFE  XO31(136)
#define OR     XO31(444)
#define XOR    XO31(316)
#define MULLW  XO31(235)
#define MULHWU XO31( 11)
#define DIVW   XO31(491)
#define DIVWU  XO31(459)
#define CMP    XO31(  0)
#define CMPL   XO31( 32)
#define LHBRX  XO31(790)
#define LWBRX  XO31(534)
#define STHBRX XO31(918)
#define STWBRX XO31(662)
#define MFSPR  XO31(339)
#define MTSPR  XO31(467)
#define SRAWI  XO31(824)
#define NEG    XO31(104)

#define MULLD  XO31(233)
#define MULHD  XO31( 73)
#define MULHDU XO31(  9)
#define DIVD   XO31(489)
#define DIVDU  XO31(457)

#define LBZX   XO31( 87)
#define LHZX   XO31(276)
#define LHAX   XO31(343)
#define LWZX   XO31( 23)
#define STBX   XO31(215)
#define STHX   XO31(407)
#define STWX   XO31(151)

/* Special-purpose register encodings for mfspr/mtspr.  */
#define SPR(a,b) ((((a)<<5)|(b))<<11)
#define LR     SPR(8, 0)
#define CTR    SPR(9, 0)

#define SLW    XO31( 24)
#define SRW    XO31(536)
#define SRAW   XO31(792)

#define SLD    XO31( 27)
#define SRD    XO31(539)
#define SRAD   XO31(794)
#define SRADI  XO31(413<<1)

#define TW     XO31( 4)
#define TRAP   (TW | TO (31))

/* Instruction field placement helpers.  */
#define RT(r) ((r)<<21)
#define RS(r) ((r)<<21)
#define RA(r) ((r)<<16)
#define RB(r) ((r)<<11)
#define TO(t) ((t)<<21)
#define SH(s) ((s)<<11)
#define MB(b) ((b)<<6)
#define ME(e) ((e)<<1)
#define BO(o) ((o)<<21)
#define MB64(b) ((b)<<5)

/* Link bit: record the return address in LR.  */
#define LK    1

#define TAB(t,a,b) (RT(t) | RA(a) | RB(b))
#define SAB(s,a,b) (RS(s) | RA(a) | RB(b))

/* Condition-register field/bit helpers: field N, bit C within it.  */
#define BF(n)    ((n)<<23)
#define BI(n, c) (((c)+((n)*4))<<16)
#define BT(n, c) (((c)+((n)*4))<<21)
#define BA(n, c) (((c)+((n)*4))<<16)
#define BB(n, c) (((c)+((n)*4))<<11)

#define BO_COND_TRUE  BO (12)
#define BO_COND_FALSE BO ( 4)
#define BO_ALWAYS     BO (20)

/* Bit positions within a CR field.  */
enum {
    CR_LT,
    CR_GT,
    CR_EQ,
    CR_SO
};

/* Map each TCG comparison to the conditional branch that takes it,
   testing CR field 7 as set up by tcg_out_cmp.  Signed and unsigned
   variants test the same CR bits; the compare instruction differs.  */
static const uint32_t tcg_to_bc[10] = {
    [TCG_COND_EQ]  = BC | BI (7, CR_EQ) | BO_COND_TRUE,
    [TCG_COND_NE]  = BC | BI (7, CR_EQ) | BO_COND_FALSE,
    [TCG_COND_LT]  = BC | BI (7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GE]  = BC | BI (7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LE]  = BC | BI (7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GT]  = BC | BI (7, CR_GT) | BO_COND_TRUE,
    [TCG_COND_LTU] = BC | BI (7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GEU] = BC | BI (7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LEU] = BC | BI (7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GTU] = BC | BI (7, CR_GT) | BO_COND_TRUE,
};

/* Register move via "or ret,arg,arg".  */
static void tcg_out_mov (TCGContext *s, int ret, int arg)
{
    tcg_out32 (s, OR | SAB (arg, ret, arg));
}

/* Emit a 64-bit rotate-and-mask (rldicl/rldicr/rldimi): split the 6-bit
   shift and mask-begin values into their non-contiguous field encodings.  */
static void tcg_out_rld (TCGContext *s, int op, int ra, int rs, int sh, int mb)
{
    sh = SH (sh & 0x1f) | (((sh >> 5) & 1) << 1);
    mb = MB64 ((mb >> 5) | ((mb << 1) & 0x3f));
    tcg_out32 (s, op | RA (ra) | RS (rs) | sh | mb);
}

/* Load a 32-bit constant: one addi when it fits in a signed 16-bit
   immediate, otherwise addis + optional ori.  */
static void tcg_out_movi32 (TCGContext *s, int ret, int32_t arg)
{
    if (arg == (int16_t) arg)
        tcg_out32 (s, ADDI | RT (ret) | RA (0) | (arg & 0xffff));
    else {
        tcg_out32 (s, ADDIS | RT (ret) | RA (0) | ((arg >> 16) & 0xffff));
        if (arg & 0xffff)
            tcg_out32 (s, ORI | RS (ret) | RA (ret) | (arg & 0xffff));
    }
}

/* Load an arbitrary constant of the given TCG type.  64-bit values with
   a non-zero high half are built as high-32 / shift-left-32 / oris / ori;
   values that are 32-bit but would sign-extend negatively are cleared
   with rldicl.  */
static void tcg_out_movi (TCGContext *s, TCGType type,
                          int ret, tcg_target_long arg)
{
    int32_t arg32 = arg;

    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_movi32 (s, ret, arg32);
    }
    else {
        if ((uint64_t) arg >> 32) {
            uint16_t h16 = arg >> 16;
            uint16_t l16 = arg;

            tcg_out_movi32 (s, ret, arg >> 32);
            tcg_out_rld (s, RLDICR, ret, ret, 32, 31);
            if (h16) tcg_out32 (s, ORIS | RS (ret) | RA (ret) | h16);
            if (l16) tcg_out32 (s, ORI | RS (ret) | RA (ret) | l16);
        }
        else {
            tcg_out_movi32 (s, ret, arg32);
            if (arg32 < 0)
                tcg_out_rld (s, RLDICL, ret, ret, 0, 32);
        }
    }
}

/* Emit an indirect call through a function descriptor: load the entry
   point (offset 0) into CTR, the environment pointer (offset 16) into
   r11 and the TOC (offset 8) into r2, then bctrl.  This matches the
   ppc64 ELFv1-style descriptor layout.  */
static void tcg_out_call (TCGContext *s, tcg_target_long arg, int const_arg)
{
    int reg;

    if (const_arg) {
        reg = 2;
        tcg_out_movi (s, TCG_TYPE_I64, reg, arg);
    }
    else reg = arg;

    tcg_out32 (s, LD | RT (0) | RA (reg));
    tcg_out32 (s, MTSPR | RA (0) | CTR);
    tcg_out32 (s, LD | RT (11) | RA (reg) | 16);
    tcg_out32 (s, LD | RT (2) | RA (reg) | 8);
    tcg_out32 (s, BCCTR | BO_ALWAYS | LK);
}

/* Load/store with a D-form immediate when the offset fits in 16 bits,
   otherwise materialize the offset in r0 and use the X-form op.  */
static void tcg_out_ldst (TCGContext *s, int ret, int addr,
                          int offset, int op1, int op2)
{
    if (offset == (int16_t) offset)
        tcg_out32 (s, op1 | RT (ret) | RA (addr) | (offset & 0xffff));
    else {
        tcg_out_movi (s, TCG_TYPE_I64, 0, offset);
        tcg_out32 (s, op2 | RT (ret) | RA (addr) | RB (0));
    }
}

/* Same as tcg_out_ldst but for DS-form ops (ld/std/lwa) whose immediate
   must have the low two bits clear.  */
static void tcg_out_ldsta (TCGContext *s, int ret, int addr,
                           int offset, int op1, int op2)
{
    if (offset == (int16_t) (offset & ~3))
        tcg_out32 (s, op1 | RT (ret) | RA (addr) | (offset & 0xffff));
    else {
        tcg_out_movi (s, TCG_TYPE_I64, 0, offset);
        tcg_out32 (s, op2 | RT (ret) | RA (addr) | RB (0));
    }
}

/* Emit a branch to TARGET: a single relative B when it is in range,
   otherwise load the address into CTR and bcctr.  MASK carries the LK
   bit if a call is wanted.  */
static void tcg_out_b (TCGContext *s, int mask, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) s->code_ptr;
    if ((disp << 38) >> 38 == disp)
        tcg_out32 (s, B | (disp & 0x3fffffc) | mask);
    else {
        tcg_out_movi (s, TCG_TYPE_I64, 0, (tcg_target_long) target);
        tcg_out32 (s, MTSPR | RS (0) | CTR);
        tcg_out32 (s, BCCTR | BO_ALWAYS | mask);
    }
}

#if defined (CONFIG_SOFTMMU)
524 525 #include "../../softmmu_defs.h" 526 527 static void *qemu_ld_helpers[4] = { 528 __ldb_mmu, 529 __ldw_mmu, 530 __ldl_mmu, 531 __ldq_mmu, 532 }; 533 534 static void *qemu_st_helpers[4] = { 535 __stb_mmu, 536 __stw_mmu, 537 __stl_mmu, 538 __stq_mmu, 539 }; 540 541 static void tcg_out_tlb_read (TCGContext *s, int r0, int r1, int r2, 542 int addr_reg, int s_bits, int offset) 543 { 544 #if TARGET_LONG_BITS == 32 545 tcg_out_rld (s, RLDICL, addr_reg, addr_reg, 0, 32); 546 547 tcg_out32 (s, (RLWINM 548 | RA (r0) 549 | RS (addr_reg) 550 | SH (32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS)) 551 | MB (32 - (CPU_TLB_BITS + CPU_TLB_ENTRY_BITS)) 552 | ME (31 - CPU_TLB_ENTRY_BITS) 553 ) 554 ); 555 tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (TCG_AREG0)); 556 tcg_out32 (s, (LWZU | RT (r1) | RA (r0) | offset)); 557 tcg_out32 (s, (RLWINM 558 | RA (r2) 559 | RS (addr_reg) 560 | SH (0) 561 | MB ((32 - s_bits) & 31) 562 | ME (31 - TARGET_PAGE_BITS) 563 ) 564 ); 565 #else 566 tcg_out_rld (s, RLDICL, r0, addr_reg, 567 64 - TARGET_PAGE_BITS, 568 64 - CPU_TLB_BITS); 569 tcg_out_rld (s, RLDICR, r0, r0, 570 CPU_TLB_ENTRY_BITS, 571 63 - CPU_TLB_ENTRY_BITS); 572 573 tcg_out32 (s, ADD | TAB (r0, r0, TCG_AREG0)); 574 tcg_out32 (s, LD_ADDR | RT (r1) | RA (r0) | offset); 575 576 if (!s_bits) { 577 tcg_out_rld (s, RLDICR, r2, addr_reg, 0, 63 - TARGET_PAGE_BITS); 578 } 579 else { 580 tcg_out_rld (s, RLDICL, r2, addr_reg, 581 64 - TARGET_PAGE_BITS, 582 TARGET_PAGE_BITS - s_bits); 583 tcg_out_rld (s, RLDICL, r2, r2, TARGET_PAGE_BITS, 0); 584 } 585 #endif 586 } 587 #endif 588 589 static void tcg_out_qemu_ld (TCGContext *s, const TCGArg *args, int opc) 590 { 591 int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap; 592 #ifdef CONFIG_SOFTMMU 593 int r2; 594 void *label1_ptr, *label2_ptr; 595 #endif 596 597 data_reg = *args++; 598 addr_reg = *args++; 599 mem_index = *args; 600 s_bits = opc & 3; 601 602 #ifdef CONFIG_SOFTMMU 603 r0 = 3; 604 r1 = 4; 605 r2 = 0; 606 607 tcg_out_tlb_read (s, r0, r1, 
r2, addr_reg, s_bits, 608 offsetof (CPUState, tlb_table[mem_index][0].addr_read)); 609 610 tcg_out32 (s, CMP | BF (7) | RA (r2) | RB (r1) | CMP_L); 611 612 label1_ptr = s->code_ptr; 613 #ifdef FAST_PATH 614 tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE); 615 #endif 616 617 /* slow path */ 618 tcg_out_mov (s, 3, addr_reg); 619 tcg_out_movi (s, TCG_TYPE_I64, 4, mem_index); 620 621 tcg_out_call (s, (tcg_target_long) qemu_ld_helpers[s_bits], 1); 622 623 switch (opc) { 624 case 0|4: 625 tcg_out32 (s, EXTSB | RA (data_reg) | RS (3)); 626 break; 627 case 1|4: 628 tcg_out32 (s, EXTSH | RA (data_reg) | RS (3)); 629 break; 630 case 2|4: 631 tcg_out32 (s, EXTSW | RA (data_reg) | RS (3)); 632 break; 633 case 0: 634 case 1: 635 case 2: 636 case 3: 637 if (data_reg != 3) 638 tcg_out_mov (s, data_reg, 3); 639 break; 640 } 641 label2_ptr = s->code_ptr; 642 tcg_out32 (s, B); 643 644 /* label1: fast path */ 645 #ifdef FAST_PATH 646 reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr); 647 #endif 648 649 /* r0 now contains &env->tlb_table[mem_index][index].addr_read */ 650 tcg_out32 (s, (LD_ADDEND 651 | RT (r0) 652 | RA (r0) 653 | (offsetof (CPUTLBEntry, addend) 654 - offsetof (CPUTLBEntry, addr_read)) 655 )); 656 /* r0 = env->tlb_table[mem_index][index].addend */ 657 tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg)); 658 /* r0 = env->tlb_table[mem_index][index].addend + addr */ 659 660 #else /* !CONFIG_SOFTMMU */ 661 #if TARGET_LONG_BITS == 32 662 tcg_out_rld (s, RLDICL, addr_reg, addr_reg, 0, 32); 663 #endif 664 r0 = addr_reg; 665 r1 = 3; 666 #endif 667 668 #ifdef TARGET_WORDS_BIGENDIAN 669 bswap = 0; 670 #else 671 bswap = 1; 672 #endif 673 switch (opc) { 674 default: 675 case 0: 676 tcg_out32 (s, LBZ | RT (data_reg) | RA (r0)); 677 break; 678 case 0|4: 679 tcg_out32 (s, LBZ | RT (data_reg) | RA (r0)); 680 tcg_out32 (s, EXTSB | RA (data_reg) | RS (data_reg)); 681 break; 682 case 1: 683 if (bswap) tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0)); 684 else tcg_out32 (s, LHZ | 
RT (data_reg) | RA (r0)); 685 break; 686 case 1|4: 687 if (bswap) { 688 tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0)); 689 tcg_out32 (s, EXTSH | RA (data_reg) | RS (data_reg)); 690 } 691 else tcg_out32 (s, LHA | RT (data_reg) | RA (r0)); 692 break; 693 case 2: 694 if (bswap) tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0)); 695 else tcg_out32 (s, LWZ | RT (data_reg)| RA (r0)); 696 break; 697 case 2|4: 698 if (bswap) { 699 tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0)); 700 tcg_out32 (s, EXTSW | RA (data_reg) | RS (data_reg)); 701 } 702 else tcg_out32 (s, LWA | RT (data_reg)| RA (r0)); 703 break; 704 case 3: 705 if (bswap) { 706 tcg_out_movi32 (s, 0, 4); 707 tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0)); 708 tcg_out32 (s, LWBRX | RT ( r1) | RA (r0)); 709 tcg_out_rld (s, RLDIMI, data_reg, r1, 32, 0); 710 } 711 else tcg_out32 (s, LD | RT (data_reg) | RA (r0)); 712 break; 713 } 714 715 #ifdef CONFIG_SOFTMMU 716 reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr); 717 #endif 718 } 719 720 static void tcg_out_qemu_st (TCGContext *s, const TCGArg *args, int opc) 721 { 722 int addr_reg, r0, r1, data_reg, mem_index, bswap; 723 #ifdef CONFIG_SOFTMMU 724 int r2; 725 void *label1_ptr, *label2_ptr; 726 #endif 727 728 data_reg = *args++; 729 addr_reg = *args++; 730 mem_index = *args; 731 732 #ifdef CONFIG_SOFTMMU 733 r0 = 3; 734 r1 = 4; 735 r2 = 0; 736 737 tcg_out_tlb_read (s, r0, r1, r2, addr_reg, opc, 738 offsetof (CPUState, tlb_table[mem_index][0].addr_write)); 739 740 tcg_out32 (s, CMP | BF (7) | RA (r2) | RB (r1) | CMP_L); 741 742 label1_ptr = s->code_ptr; 743 #ifdef FAST_PATH 744 tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE); 745 #endif 746 747 /* slow path */ 748 tcg_out_mov (s, 3, addr_reg); 749 tcg_out_rld (s, RLDICL, 4, data_reg, 0, 64 - (1 << (3 + opc))); 750 tcg_out_movi (s, TCG_TYPE_I64, 5, mem_index); 751 752 tcg_out_call (s, (tcg_target_long) qemu_st_helpers[opc], 1); 753 754 label2_ptr = s->code_ptr; 755 tcg_out32 (s, B); 756 757 /* label1: fast path */ 758 
#ifdef FAST_PATH 759 reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr); 760 #endif 761 762 tcg_out32 (s, (LD_ADDEND 763 | RT (r0) 764 | RA (r0) 765 | (offsetof (CPUTLBEntry, addend) 766 - offsetof (CPUTLBEntry, addr_write)) 767 )); 768 /* r0 = env->tlb_table[mem_index][index].addend */ 769 tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg)); 770 /* r0 = env->tlb_table[mem_index][index].addend + addr */ 771 772 #else /* !CONFIG_SOFTMMU */ 773 #if TARGET_LONG_BITS == 32 774 tcg_out_rld (s, RLDICL, addr_reg, addr_reg, 0, 32); 775 #endif 776 r1 = 3; 777 r0 = addr_reg; 778 #endif 779 780 #ifdef TARGET_WORDS_BIGENDIAN 781 bswap = 0; 782 #else 783 bswap = 1; 784 #endif 785 switch (opc) { 786 case 0: 787 tcg_out32 (s, STB | RS (data_reg) | RA (r0)); 788 break; 789 case 1: 790 if (bswap) tcg_out32 (s, STHBRX | RS (data_reg) | RA (0) | RB (r0)); 791 else tcg_out32 (s, STH | RS (data_reg) | RA (r0)); 792 break; 793 case 2: 794 if (bswap) tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0)); 795 else tcg_out32 (s, STW | RS (data_reg) | RA (r0)); 796 break; 797 case 3: 798 if (bswap) { 799 tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0)); 800 tcg_out32 (s, ADDI | RT (r1) | RA (r0) | 4); 801 tcg_out_rld (s, RLDICL, 0, data_reg, 32, 0); 802 tcg_out32 (s, STWBRX | RS (0) | RA (0) | RB (r1)); 803 } 804 else tcg_out32 (s, STD | RS (data_reg) | RA (r0)); 805 break; 806 } 807 808 #ifdef CONFIG_SOFTMMU 809 reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr); 810 #endif 811 } 812 813 void tcg_target_qemu_prologue (TCGContext *s) 814 { 815 int i, frame_size; 816 uint64_t addr; 817 818 frame_size = 0 819 + 8 /* back chain */ 820 + 8 /* CR */ 821 + 8 /* LR */ 822 + 8 /* compiler doubleword */ 823 + 8 /* link editor doubleword */ 824 + 8 /* TOC save area */ 825 + TCG_STATIC_CALL_ARGS_SIZE 826 + ARRAY_SIZE (tcg_target_callee_save_regs) * 8 827 ; 828 frame_size = (frame_size + 15) & ~15; 829 830 /* First emit adhoc function descriptor */ 831 addr = (uint64_t) 
s->code_ptr + 24; 832 tcg_out32 (s, addr >> 32); tcg_out32 (s, addr); /* entry point */ 833 s->code_ptr += 16; /* skip TOC and environment pointer */ 834 835 /* Prologue */ 836 tcg_out32 (s, MFSPR | RT (0) | LR); 837 tcg_out32 (s, STDU | RS (1) | RA (1) | (-frame_size & 0xffff)); 838 for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i) 839 tcg_out32 (s, (STD 840 | RS (tcg_target_callee_save_regs[i]) 841 | RA (1) 842 | (i * 8 + 48 + TCG_STATIC_CALL_ARGS_SIZE) 843 ) 844 ); 845 tcg_out32 (s, STD | RS (0) | RA (1) | (frame_size + 16)); 846 847 tcg_out32 (s, MTSPR | RS (3) | CTR); 848 tcg_out32 (s, BCCTR | BO_ALWAYS); 849 850 /* Epilogue */ 851 tb_ret_addr = s->code_ptr; 852 853 for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i) 854 tcg_out32 (s, (LD 855 | RT (tcg_target_callee_save_regs[i]) 856 | RA (1) 857 | (i * 8 + 48 + TCG_STATIC_CALL_ARGS_SIZE) 858 ) 859 ); 860 tcg_out32 (s, LD | RT (0) | RA (1) | (frame_size + 16)); 861 tcg_out32 (s, MTSPR | RS (0) | LR); 862 tcg_out32 (s, ADDI | RT (1) | RA (1) | frame_size); 863 tcg_out32 (s, BCLR | BO_ALWAYS); 864 } 865 866 static void tcg_out_ld (TCGContext *s, TCGType type, int ret, int arg1, 867 tcg_target_long arg2) 868 { 869 if (type == TCG_TYPE_I32) 870 tcg_out_ldst (s, ret, arg1, arg2, LWZ, LWZX); 871 else 872 tcg_out_ldsta (s, ret, arg1, arg2, LD, LDX); 873 } 874 875 static void tcg_out_st (TCGContext *s, TCGType type, int arg, int arg1, 876 tcg_target_long arg2) 877 { 878 if (type == TCG_TYPE_I32) 879 tcg_out_ldst (s, arg, arg1, arg2, STW, STWX); 880 else 881 tcg_out_ldsta (s, arg, arg1, arg2, STD, STDX); 882 } 883 884 static void ppc_addi32 (TCGContext *s, int rt, int ra, tcg_target_long si) 885 { 886 if (!si && rt == ra) 887 return; 888 889 if (si == (int16_t) si) 890 tcg_out32 (s, ADDI | RT (rt) | RA (ra) | (si & 0xffff)); 891 else { 892 uint16_t h = ((si >> 16) & 0xffff) + ((uint16_t) si >> 15); 893 tcg_out32 (s, ADDIS | RT (rt) | RA (ra) | h); 894 tcg_out32 (s, ADDI | RT (rt) | RA (rt) | (si 
& 0xffff)); 895 } 896 } 897 898 static void ppc_addi64 (TCGContext *s, int rt, int ra, tcg_target_long si) 899 { 900 /* XXX: suboptimal */ 901 if (si == (int16_t) si 902 || ((((uint64_t) si >> 31) == 0) && (si & 0x8000) == 0)) 903 ppc_addi32 (s, rt, ra, si); 904 else { 905 tcg_out_movi (s, TCG_TYPE_I64, 0, si); 906 tcg_out32 (s, ADD | RT (rt) | RA (ra)); 907 } 908 } 909 910 static void tcg_out_addi (TCGContext *s, int reg, tcg_target_long val) 911 { 912 ppc_addi64 (s, reg, reg, val); 913 } 914 915 static void tcg_out_cmp (TCGContext *s, int cond, TCGArg arg1, TCGArg arg2, 916 int const_arg2, int cr, int arch64) 917 { 918 int imm; 919 uint32_t op; 920 921 switch (cond) { 922 case TCG_COND_EQ: 923 case TCG_COND_NE: 924 if (const_arg2) { 925 if ((int16_t) arg2 == arg2) { 926 op = CMPI; 927 imm = 1; 928 break; 929 } 930 else if ((uint16_t) arg2 == arg2) { 931 op = CMPLI; 932 imm = 1; 933 break; 934 } 935 } 936 op = CMPL; 937 imm = 0; 938 break; 939 940 case TCG_COND_LT: 941 case TCG_COND_GE: 942 case TCG_COND_LE: 943 case TCG_COND_GT: 944 if (const_arg2) { 945 if ((int16_t) arg2 == arg2) { 946 op = CMPI; 947 imm = 1; 948 break; 949 } 950 } 951 op = CMP; 952 imm = 0; 953 break; 954 955 case TCG_COND_LTU: 956 case TCG_COND_GEU: 957 case TCG_COND_LEU: 958 case TCG_COND_GTU: 959 if (const_arg2) { 960 if ((uint16_t) arg2 == arg2) { 961 op = CMPLI; 962 imm = 1; 963 break; 964 } 965 } 966 op = CMPL; 967 imm = 0; 968 break; 969 970 default: 971 tcg_abort (); 972 } 973 op |= BF (cr) | (arch64 << 21); 974 975 if (imm) 976 tcg_out32 (s, op | RA (arg1) | (arg2 & 0xffff)); 977 else { 978 if (const_arg2) { 979 tcg_out_movi (s, TCG_TYPE_I64, 0, arg2); 980 tcg_out32 (s, op | RA (arg1) | RB (0)); 981 } 982 else 983 tcg_out32 (s, op | RA (arg1) | RB (arg2)); 984 } 985 986 } 987 988 static void tcg_out_bc (TCGContext *s, int bc, int label_index) 989 { 990 TCGLabel *l = &s->labels[label_index]; 991 992 if (l->has_value) 993 tcg_out32 (s, bc | reloc_pc14_val (s->code_ptr, l->u.value)); 994 
else { 995 uint16_t val = *(uint16_t *) &s->code_ptr[2]; 996 997 /* Thanks to Andrzej Zaborowski */ 998 tcg_out32 (s, bc | (val & 0xfffc)); 999 tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL14, label_index, 0); 1000 } 1001 } 1002 1003 static void tcg_out_brcond (TCGContext *s, int cond, 1004 TCGArg arg1, TCGArg arg2, int const_arg2, 1005 int label_index, int arch64) 1006 { 1007 tcg_out_cmp (s, cond, arg1, arg2, const_arg2, 7, arch64); 1008 tcg_out_bc (s, tcg_to_bc[cond], label_index); 1009 } 1010 1011 void ppc_tb_set_jmp_target (unsigned long jmp_addr, unsigned long addr) 1012 { 1013 TCGContext s; 1014 unsigned long patch_size; 1015 1016 s.code_ptr = (uint8_t *) jmp_addr; 1017 tcg_out_b (&s, 0, addr); 1018 patch_size = s.code_ptr - (uint8_t *) jmp_addr; 1019 flush_icache_range (jmp_addr, jmp_addr + patch_size); 1020 } 1021 1022 static void tcg_out_op (TCGContext *s, int opc, const TCGArg *args, 1023 const int *const_args) 1024 { 1025 int c; 1026 1027 switch (opc) { 1028 case INDEX_op_exit_tb: 1029 tcg_out_movi (s, TCG_TYPE_I64, TCG_REG_R3, args[0]); 1030 tcg_out_b (s, 0, (tcg_target_long) tb_ret_addr); 1031 break; 1032 case INDEX_op_goto_tb: 1033 if (s->tb_jmp_offset) { 1034 /* direct jump method */ 1035 1036 s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf; 1037 s->code_ptr += 28; 1038 } 1039 else { 1040 tcg_abort (); 1041 } 1042 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf; 1043 break; 1044 case INDEX_op_br: 1045 { 1046 TCGLabel *l = &s->labels[args[0]]; 1047 1048 if (l->has_value) { 1049 tcg_out_b (s, 0, l->u.value); 1050 } 1051 else { 1052 uint32_t val = *(uint32_t *) s->code_ptr; 1053 1054 /* Thanks to Andrzej Zaborowski */ 1055 tcg_out32 (s, B | (val & 0x3fffffc)); 1056 tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL24, args[0], 0); 1057 } 1058 } 1059 break; 1060 case INDEX_op_call: 1061 tcg_out_call (s, args[0], const_args[0]); 1062 break; 1063 case INDEX_op_jmp: 1064 if (const_args[0]) { 1065 tcg_out_b (s, 0, args[0]); 1066 } 1067 else { 1068 
tcg_out32 (s, MTSPR | RS (args[0]) | CTR); 1069 tcg_out32 (s, BCCTR | BO_ALWAYS); 1070 } 1071 break; 1072 case INDEX_op_movi_i32: 1073 tcg_out_movi (s, TCG_TYPE_I32, args[0], args[1]); 1074 break; 1075 case INDEX_op_movi_i64: 1076 tcg_out_movi (s, TCG_TYPE_I64, args[0], args[1]); 1077 break; 1078 case INDEX_op_ld8u_i32: 1079 case INDEX_op_ld8u_i64: 1080 tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX); 1081 break; 1082 case INDEX_op_ld8s_i32: 1083 case INDEX_op_ld8s_i64: 1084 tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX); 1085 tcg_out32 (s, EXTSB | RS (args[0]) | RA (args[0])); 1086 break; 1087 case INDEX_op_ld16u_i32: 1088 case INDEX_op_ld16u_i64: 1089 tcg_out_ldst (s, args[0], args[1], args[2], LHZ, LHZX); 1090 break; 1091 case INDEX_op_ld16s_i32: 1092 case INDEX_op_ld16s_i64: 1093 tcg_out_ldst (s, args[0], args[1], args[2], LHA, LHAX); 1094 break; 1095 case INDEX_op_ld_i32: 1096 case INDEX_op_ld32u_i64: 1097 tcg_out_ldst (s, args[0], args[1], args[2], LWZ, LWZX); 1098 break; 1099 case INDEX_op_ld32s_i64: 1100 tcg_out_ldsta (s, args[0], args[1], args[2], LWA, LWAX); 1101 break; 1102 case INDEX_op_ld_i64: 1103 tcg_out_ldsta (s, args[0], args[1], args[2], LD, LDX); 1104 break; 1105 case INDEX_op_st8_i32: 1106 case INDEX_op_st8_i64: 1107 tcg_out_ldst (s, args[0], args[1], args[2], STB, STBX); 1108 break; 1109 case INDEX_op_st16_i32: 1110 case INDEX_op_st16_i64: 1111 tcg_out_ldst (s, args[0], args[1], args[2], STH, STHX); 1112 break; 1113 case INDEX_op_st_i32: 1114 case INDEX_op_st32_i64: 1115 tcg_out_ldst (s, args[0], args[1], args[2], STW, STWX); 1116 break; 1117 case INDEX_op_st_i64: 1118 tcg_out_ldsta (s, args[0], args[1], args[2], STD, STDX); 1119 break; 1120 1121 case INDEX_op_add_i32: 1122 if (const_args[2]) 1123 ppc_addi32 (s, args[0], args[1], args[2]); 1124 else 1125 tcg_out32 (s, ADD | TAB (args[0], args[1], args[2])); 1126 break; 1127 case INDEX_op_sub_i32: 1128 if (const_args[2]) 1129 ppc_addi32 (s, args[0], args[1], -args[2]); 1130 else 
1131 tcg_out32 (s, SUBF | TAB (args[0], args[2], args[1])); 1132 break; 1133 1134 case INDEX_op_and_i64: 1135 case INDEX_op_and_i32: 1136 if (const_args[2]) { 1137 if ((args[2] & 0xffff) == args[2]) 1138 tcg_out32 (s, ANDI | RS (args[1]) | RA (args[0]) | args[2]); 1139 else if ((args[2] & 0xffff0000) == args[2]) 1140 tcg_out32 (s, ANDIS | RS (args[1]) | RA (args[0]) 1141 | ((args[2] >> 16) & 0xffff)); 1142 else { 1143 tcg_out_movi (s, (opc == INDEX_op_and_i32 1144 ? TCG_TYPE_I32 1145 : TCG_TYPE_I64), 1146 0, args[2]); 1147 tcg_out32 (s, AND | SAB (args[1], args[0], 0)); 1148 } 1149 } 1150 else 1151 tcg_out32 (s, AND | SAB (args[1], args[0], args[2])); 1152 break; 1153 case INDEX_op_or_i64: 1154 case INDEX_op_or_i32: 1155 if (const_args[2]) { 1156 if (args[2] & 0xffff) { 1157 tcg_out32 (s, ORI | RS (args[1]) | RA (args[0]) 1158 | (args[2] & 0xffff)); 1159 if (args[2] >> 16) 1160 tcg_out32 (s, ORIS | RS (args[0]) | RA (args[0]) 1161 | ((args[2] >> 16) & 0xffff)); 1162 } 1163 else { 1164 tcg_out32 (s, ORIS | RS (args[1]) | RA (args[0]) 1165 | ((args[2] >> 16) & 0xffff)); 1166 } 1167 } 1168 else 1169 tcg_out32 (s, OR | SAB (args[1], args[0], args[2])); 1170 break; 1171 case INDEX_op_xor_i64: 1172 case INDEX_op_xor_i32: 1173 if (const_args[2]) { 1174 if ((args[2] & 0xffff) == args[2]) 1175 tcg_out32 (s, XORI | RS (args[1]) | RA (args[0]) 1176 | (args[2] & 0xffff)); 1177 else if ((args[2] & 0xffff0000) == args[2]) 1178 tcg_out32 (s, XORIS | RS (args[1]) | RA (args[0]) 1179 | ((args[2] >> 16) & 0xffff)); 1180 else { 1181 tcg_out_movi (s, (opc == INDEX_op_and_i32 1182 ? 
TCG_TYPE_I32 1183 : TCG_TYPE_I64), 1184 0, args[2]); 1185 tcg_out32 (s, XOR | SAB (args[1], args[0], 0)); 1186 } 1187 } 1188 else 1189 tcg_out32 (s, XOR | SAB (args[1], args[0], args[2])); 1190 break; 1191 1192 case INDEX_op_mul_i32: 1193 if (const_args[2]) { 1194 if (args[2] == (int16_t) args[2]) 1195 tcg_out32 (s, MULLI | RT (args[0]) | RA (args[1]) 1196 | (args[2] & 0xffff)); 1197 else { 1198 tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]); 1199 tcg_out32 (s, MULLW | TAB (args[0], args[1], 0)); 1200 } 1201 } 1202 else 1203 tcg_out32 (s, MULLW | TAB (args[0], args[1], args[2])); 1204 break; 1205 1206 case INDEX_op_div_i32: 1207 tcg_out32 (s, DIVW | TAB (args[0], args[1], args[2])); 1208 break; 1209 1210 case INDEX_op_divu_i32: 1211 tcg_out32 (s, DIVWU | TAB (args[0], args[1], args[2])); 1212 break; 1213 1214 case INDEX_op_rem_i32: 1215 tcg_out32 (s, DIVW | TAB (0, args[1], args[2])); 1216 tcg_out32 (s, MULLW | TAB (0, 0, args[2])); 1217 tcg_out32 (s, SUBF | TAB (args[0], 0, args[1])); 1218 break; 1219 1220 case INDEX_op_remu_i32: 1221 tcg_out32 (s, DIVWU | TAB (0, args[1], args[2])); 1222 tcg_out32 (s, MULLW | TAB (0, 0, args[2])); 1223 tcg_out32 (s, SUBF | TAB (args[0], 0, args[1])); 1224 break; 1225 1226 case INDEX_op_shl_i32: 1227 if (const_args[2]) { 1228 tcg_out32 (s, (RLWINM 1229 | RA (args[0]) 1230 | RS (args[1]) 1231 | SH (args[2]) 1232 | MB (0) 1233 | ME (31 - args[2]) 1234 ) 1235 ); 1236 } 1237 else 1238 tcg_out32 (s, SLW | SAB (args[1], args[0], args[2])); 1239 break; 1240 case INDEX_op_shr_i32: 1241 if (const_args[2]) { 1242 tcg_out32 (s, (RLWINM 1243 | RA (args[0]) 1244 | RS (args[1]) 1245 | SH (32 - args[2]) 1246 | MB (args[2]) 1247 | ME (31) 1248 ) 1249 ); 1250 } 1251 else 1252 tcg_out32 (s, SRW | SAB (args[1], args[0], args[2])); 1253 break; 1254 case INDEX_op_sar_i32: 1255 if (const_args[2]) 1256 tcg_out32 (s, SRAWI | RS (args[1]) | RA (args[0]) | SH (args[2])); 1257 else 1258 tcg_out32 (s, SRAW | SAB (args[1], args[0], args[2])); 1259 break; 1260 
    case INDEX_op_brcond_i32:
        tcg_out_brcond (s, args[2], args[0], args[1], const_args[1], args[3], 0);
        break;

    case INDEX_op_brcond_i64:
        tcg_out_brcond (s, args[2], args[0], args[1], const_args[1], args[3], 1);
        break;

    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
        tcg_out32 (s, NEG | RT (args[0]) | RA (args[1]));
        break;

    /* 64-bit add/sub: constant operands go through the ppc_addi64
       helper (sub folded as add of the negated constant).  */
    case INDEX_op_add_i64:
        if (const_args[2])
            ppc_addi64 (s, args[0], args[1], args[2]);
        else
            tcg_out32 (s, ADD | TAB (args[0], args[1], args[2]));
        break;
    case INDEX_op_sub_i64:
        if (const_args[2])
            ppc_addi64 (s, args[0], args[1], -args[2]);
        else
            tcg_out32 (s, SUBF | TAB (args[0], args[2], args[1]));
        break;

    /* 64-bit constant shifts are expressed as rotate-and-mask
       (rldicr/rldicl); variable shifts use sld/srd/srad.  */
    case INDEX_op_shl_i64:
        if (const_args[2])
            tcg_out_rld (s, RLDICR, args[0], args[1], args[2], 63 - args[2]);
        else
            tcg_out32 (s, SLD | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_shr_i64:
        if (const_args[2])
            tcg_out_rld (s, RLDICL, args[0], args[1], 64 - args[2], args[2]);
        else
            tcg_out32 (s, SRD | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_sar_i64:
        if (const_args[2]) {
            /* sradi splits its 6-bit shift count: bits 0-4 in the SH
               field, bit 5 one position above the field built here.  */
            int sh = SH (args[2] & 0x1f) | (((args[2] >> 5) & 1) << 1);
            tcg_out32 (s, SRADI | RA (args[0]) | RS (args[1]) | sh);
        }
        else
            tcg_out32 (s, SRAD | SAB (args[1], args[0], args[2]));
        break;

    case INDEX_op_mul_i64:
        tcg_out32 (s, MULLD | TAB (args[0], args[1], args[2]));
        break;
    case INDEX_op_div_i64:
        tcg_out32 (s, DIVD | TAB (args[0], args[1], args[2]));
        break;
    case INDEX_op_divu_i64:
        tcg_out32 (s, DIVDU | TAB (args[0], args[1], args[2]));
        break;
    /* 64-bit remainder via r0, as in the 32-bit cases:
       rem = dividend - (dividend / divisor) * divisor.  */
    case INDEX_op_rem_i64:
        tcg_out32 (s, DIVD | TAB (0, args[1], args[2]));
        tcg_out32 (s, MULLD | TAB (0, 0, args[2]));
        tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
        break;
    case INDEX_op_remu_i64:
        tcg_out32 (s, DIVDU | TAB (0, args[1], args[2]));
        tcg_out32 (s, MULLD | TAB (0, 0, args[2]));
        tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
        break;

    /* Guest memory accesses: the low two bits of the size code select
       the access width (0=8, 1=16, 2=32, 3=64); bit 2 requests sign
       extension on loads.  */
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld (s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld (s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld (s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld (s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld (s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld (s, args, 2 | 4);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld (s, args, 3);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st (s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st (s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st (s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st (s, args, 3);
        break;

    /* Sign extensions share one emitter; each case just picks the
       opcode (extsb/extsh/extsw) and jumps to gen_ext.  */
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
        c = EXTSB;
        goto gen_ext;
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
        c = EXTSH;
        goto gen_ext;
    case INDEX_op_ext32s_i64:
        c = EXTSW;
        goto gen_ext;
    gen_ext:
        tcg_out32 (s, c | RS (args[1]) | RA (args[0]));
        break;

    default:
        /* Unhandled opcode: dump the ops for debugging, then abort.  */
        tcg_dump_ops (s, stderr);
        tcg_abort ();
    }
}

/* Operand-constraint table for this backend: one entry per supported
   TCG opcode, with a constraint string per operand ("r" = any register,
   "i" = immediate allowed, "L"/"S" = registers usable around the
   qemu_ld/st slow path, "Z" per this backend's constraint parser).
   Terminated by the { -1 } sentinel.  */
static const TCGTargetOpDef ppc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_movi_i64, { "r" } },

    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },

    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    /* NOTE(review): INDEX_op_ld_i64 is also listed above; harmless
       duplicate entry kept as-is.  */
    { INDEX_op_ld_i64, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "ri" } },
    { INDEX_op_mul_i32, { "r", "r", "ri" } },
    { INDEX_op_div_i32, { "r", "r", "r" } },
    { INDEX_op_divu_i32, { "r", "r", "r" } },
    { INDEX_op_rem_i32, { "r", "r", "r" } },
    { INDEX_op_remu_i32, { "r", "r", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "ri" } },
    { INDEX_op_and_i32, { "r", "r", "ri" } },
    { INDEX_op_or_i32, { "r", "r", "ri" } },
    { INDEX_op_xor_i32, { "r", "r", "ri" } },

    { INDEX_op_shl_i32, { "r", "r", "ri" } },
    { INDEX_op_shr_i32, { "r", "r", "ri" } },
    { INDEX_op_sar_i32, { "r", "r", "ri" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },
    { INDEX_op_brcond_i64, { "r", "ri" } },

    { INDEX_op_neg_i32, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "r", "ri" } },
    { INDEX_op_sub_i64, { "r", "r", "ri" } },
    { INDEX_op_and_i64, { "r", "r", "rZ" } },
    { INDEX_op_or_i64, { "r", "r", "rZ" } },
    { INDEX_op_xor_i64, { "r", "r", "rZ" } },

    { INDEX_op_shl_i64, { "r", "r", "ri" } },
    { INDEX_op_shr_i64, { "r", "r", "ri" } },
    { INDEX_op_sar_i64, { "r", "r", "ri" } },

    { INDEX_op_mul_i64, { "r", "r", "r" } },
    { INDEX_op_div_i64, { "r", "r", "r" } },
    { INDEX_op_divu_i64, { "r", "r", "r" } },
    { INDEX_op_rem_i64, { "r", "r", "r" } },
    { INDEX_op_remu_i64, { "r", "r", "r" } },

    { INDEX_op_neg_i64, { "r", "r" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "S", "S" } },
    { INDEX_op_qemu_st16, { "S", "S" } },
    { INDEX_op_qemu_st32, { "S", "S" } },
    { INDEX_op_qemu_st64, { "S", "S", "S" } },

    { INDEX_op_ext8s_i32, { "r", "r" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },
    { INDEX_op_ext8s_i64, { "r", "r" } },
    { INDEX_op_ext16s_i64, { "r", "r" } },
    { INDEX_op_ext32s_i64, { "r", "r" } },

    { -1 },  /* sentinel terminating the table */
};

/* Per-target TCG initialization: declare which registers exist, which
   are call-clobbered, which are reserved, and register the op table.  */
void tcg_target_init (TCGContext *s)
{
    /* All 32 GPRs are usable for both 32- and 64-bit values.  */
    tcg_regset_set32 (tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
    tcg_regset_set32 (tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffffffff);
    /* R0 and R3-R12 are clobbered across calls.  */
    tcg_regset_set32 (tcg_target_call_clobber_regs, 0,
                      (1 << TCG_REG_R0) |
                      (1 << TCG_REG_R3) |
                      (1 << TCG_REG_R4) |
                      (1 << TCG_REG_R5) |
                      (1 << TCG_REG_R6) |
                      (1 << TCG_REG_R7) |
                      (1 << TCG_REG_R8) |
                      (1 << TCG_REG_R9) |
                      (1 << TCG_REG_R10) |
                      (1 << TCG_REG_R11) |
                      (1 << TCG_REG_R12)
        );

    /* R0 is used as a scratch register by the emitters above;
       R1/R2/R13 are kept off-limits (presumably the ABI stack
       pointer, TOC, and thread pointer -- confirm against the
       platform ABI).  */
    tcg_regset_clear (s->reserved_regs);
    tcg_regset_set_reg (s->reserved_regs, TCG_REG_R0);
    tcg_regset_set_reg (s->reserved_regs, TCG_REG_R1);
    tcg_regset_set_reg (s->reserved_regs, TCG_REG_R2);
    tcg_regset_set_reg (s->reserved_regs, TCG_REG_R13);

    tcg_add_target_add_op_defs (ppc_op_defs);
}