/*
 * Stack-less Just-In-Time compiler
 *
 * Copyright 2009-2012 Zoltan Herczeg (hzmester (at) freemail.hu). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/* Latest MIPS architecture. */
/* Automatically detect SLJIT_MIPS_R1 */

SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
{
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	return "MIPS32-R1" SLJIT_CPUINFO;
#else
	return "MIPS64-R1" SLJIT_CPUINFO;
#endif
#else /* SLJIT_MIPS_R1 */
	return "MIPS III" SLJIT_CPUINFO;
#endif
}

/* Length of an instruction word
   Both for mips-32 and mips-64 */
typedef sljit_ui sljit_ins;

#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
#define TMP_REG3 (SLJIT_NUMBER_OF_REGISTERS + 4)

/* For position independent code, t9 must contain the function address. */
#define PIC_ADDR_REG TMP_REG2

/* Floating point status register. */
#define FCSR_REG 31
/* Return address register. */
#define RETURN_ADDR_REG 31

/* Flags are kept in volatile registers. */
#define EQUAL_FLAG 12
/* And carry flag as well. */
#define ULESS_FLAG 13
#define UGREATER_FLAG 14
#define LESS_FLAG 15
#define GREATER_FLAG 31
#define OVERFLOW_FLAG 1

#define TMP_FREG1 (0)
#define TMP_FREG2 ((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1) << 1)

static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
	0, 2, 5, 6, 7, 8, 9, 10, 11, 24, 23, 22, 21, 20, 19, 18, 17, 16, 29, 3, 25, 4
};

/* --------------------------------------------------------------------- */
/*  Instruction forms                                                     */
/* --------------------------------------------------------------------- */

#define S(s) (reg_map[s] << 21)
#define T(t) (reg_map[t] << 16)
#define D(d) (reg_map[d] << 11)
/* Absolute registers. */
#define SA(s) ((s) << 21)
#define TA(t) ((t) << 16)
#define DA(d) ((d) << 11)
#define FT(t) ((t) << 16)
#define FS(s) ((s) << 11)
#define FD(d) ((d) << 6)
#define IMM(imm) ((imm) & 0xffff)
#define SH_IMM(imm) ((imm) << 6)

#define DR(dr) (reg_map[dr])
#define HI(opcode) ((opcode) << 26)
#define LO(opcode) (opcode)
/* S = (16 << 21) D = (17 << 21) */
#define FMT_S (16 << 21)

#define ABS_S (HI(17) | FMT_S | LO(5))
#define ADD_S (HI(17) | FMT_S | LO(0))
#define ADDIU (HI(9))
#define ADDU (HI(0) | LO(33))
#define AND (HI(0) | LO(36))
#define ANDI (HI(12))
#define B (HI(4))
#define BAL (HI(1) | (17 << 16))
#define BC1F (HI(17) | (8 << 21))
#define BC1T (HI(17) | (8 << 21) | (1 << 16))
#define BEQ (HI(4))
#define BGEZ (HI(1) | (1 << 16))
#define BGTZ (HI(7))
#define BLEZ (HI(6))
#define BLTZ (HI(1) | (0 << 16))
#define BNE (HI(5))
#define BREAK (HI(0) | LO(13))
#define CFC1 (HI(17) | (2 << 21))
#define C_UN_S (HI(17) | FMT_S | LO(49))
#define C_UEQ_S (HI(17) | FMT_S | LO(51))
#define C_ULE_S (HI(17) | FMT_S | LO(55))
#define C_ULT_S (HI(17) | FMT_S | LO(53))
#define CVT_S_S (HI(17) | FMT_S | LO(32))
#define DADDIU (HI(25))
#define DADDU (HI(0) | LO(45))
#define DDIV (HI(0) | LO(30))
#define DDIVU (HI(0) | LO(31))
#define DIV (HI(0) | LO(26))
#define DIVU (HI(0) | LO(27))
#define DIV_S (HI(17) | FMT_S | LO(3))
#define DMULT (HI(0) | LO(28))
#define DMULTU (HI(0) | LO(29))
#define DSLL (HI(0) | LO(56))
#define DSLL32 (HI(0) | LO(60))
#define DSLLV (HI(0) | LO(20))
#define DSRA (HI(0) | LO(59))
#define DSRA32 (HI(0) | LO(63))
#define DSRAV (HI(0) | LO(23))
#define DSRL (HI(0) | LO(58))
#define DSRL32 (HI(0) | LO(62))
#define DSRLV (HI(0) | LO(22))
#define DSUBU (HI(0) | LO(47))
#define J (HI(2))
#define JAL (HI(3))
#define JALR (HI(0) | LO(9))
#define JR (HI(0) | LO(8))
#define LD (HI(55))
#define LUI (HI(15))
#define LW (HI(35))
#define MFC1 (HI(17))
#define MFHI (HI(0) | LO(16))
#define MFLO (HI(0) | LO(18))
#define MOV_S (HI(17) | FMT_S | LO(6))
#define MTC1 (HI(17) | (4 << 21))
#define MUL_S (HI(17) | FMT_S | LO(2))
#define MULT (HI(0) | LO(24))
#define MULTU (HI(0) | LO(25))
#define NEG_S (HI(17) | FMT_S | LO(7))
#define NOP (HI(0) | LO(0))
#define NOR (HI(0) | LO(39))
#define OR (HI(0) | LO(37))
#define ORI (HI(13))
#define SD (HI(63))
#define SLT (HI(0) | LO(42))
#define SLTI (HI(10))
#define SLTIU (HI(11))
#define SLTU (HI(0) | LO(43))
#define SLL (HI(0) | LO(0))
#define SLLV (HI(0) | LO(4))
#define SRL (HI(0) | LO(2))
#define SRLV (HI(0) | LO(6))
#define SRA (HI(0) | LO(3))
#define SRAV (HI(0) | LO(7))
#define SUB_S (HI(17) | FMT_S | LO(1))
#define SUBU (HI(0) | LO(35))
#define SW (HI(43))
#define TRUNC_W_S (HI(17) | FMT_S | LO(13))
#define XOR (HI(0) | LO(38))
#define XORI (HI(14))

#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
#define CLZ (HI(28) | LO(32))
#define DCLZ (HI(28) | LO(36))
#define MUL (HI(28) | LO(2))
#define SEB (HI(31) | (16 << 6) | LO(32))
#define SEH (HI(31) | (24 << 6) | LO(32))
#endif

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define ADDU_W ADDU
#define ADDIU_W ADDIU
#define SLL_W SLL
#define SUBU_W SUBU
#else
#define ADDU_W DADDU
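/* Illustrative sketch, not part of the compiler: how the field and opcode
   macros above compose a 32-bit instruction word. S/T/D expect SLJIT register
   indices (they go through reg_map), while SA/TA/DA take hardware register
   numbers directly. The helper name below is made up for this example. */
static SLJIT_INLINE sljit_ins example_encode_addu(sljit_si dst, sljit_si src1, sljit_si src2)
{
	/* R-type word: opcode 0 from HI(0), function 33 from LO(33), plus the
	   rs/rt/rd fields. For dst = src1 = SLJIT_R0 and src2 = SLJIT_R1
	   (reg_map values 2, 2 and 5) this yields
	   (2 << 21) | (5 << 16) | (2 << 11) | 33 = 0x00451021. */
	return ADDU | S(src1) | T(src2) | D(dst);
}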
#define ADDIU_W DADDIU 194 #define SLL_W DSLL 195 #define SUBU_W DSUBU 196 #endif 197 198 #define SIMM_MAX (0x7fff) 199 #define SIMM_MIN (-0x8000) 200 #define UIMM_MAX (0xffff) 201 202 /* dest_reg is the absolute name of the register 203 Useful for reordering instructions in the delay slot. */ 204 static sljit_si push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_si delay_slot) 205 { 206 SLJIT_ASSERT(delay_slot == MOVABLE_INS || delay_slot >= UNMOVABLE_INS 207 || delay_slot == ((ins >> 11) & 0x1f) || delay_slot == ((ins >> 16) & 0x1f)); 208 sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins)); 209 FAIL_IF(!ptr); 210 *ptr = ins; 211 compiler->size++; 212 compiler->delay_slot = delay_slot; 213 return SLJIT_SUCCESS; 214 } 215 216 static SLJIT_INLINE sljit_ins invert_branch(sljit_si flags) 217 { 218 return (flags & IS_BIT26_COND) ? (1 << 26) : (1 << 16); 219 } 220 221 static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code) 222 { 223 sljit_sw diff; 224 sljit_uw target_addr; 225 sljit_ins *inst; 226 sljit_ins saved_inst; 227 228 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 229 if (jump->flags & (SLJIT_REWRITABLE_JUMP | IS_CALL)) 230 return code_ptr; 231 #else 232 if (jump->flags & SLJIT_REWRITABLE_JUMP) 233 return code_ptr; 234 #endif 235 236 if (jump->flags & JUMP_ADDR) 237 target_addr = jump->u.target; 238 else { 239 SLJIT_ASSERT(jump->flags & JUMP_LABEL); 240 target_addr = (sljit_uw)(code + jump->u.label->size); 241 } 242 inst = (sljit_ins*)jump->addr; 243 if (jump->flags & IS_COND) 244 inst--; 245 246 #if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) 247 if (jump->flags & IS_CALL) 248 goto keep_address; 249 #endif 250 251 /* B instructions. */ 252 if (jump->flags & IS_MOVABLE) { 253 diff = ((sljit_sw)target_addr - (sljit_sw)(inst)) >> 2; 254 if (diff <= SIMM_MAX && diff >= SIMM_MIN) { 255 jump->flags |= PATCH_B; 256 257 if (!(jump->flags & IS_COND)) { 258 inst[0] = inst[-1]; 259 inst[-1] = (jump->flags & IS_JAL) ? BAL : B; 260 jump->addr -= sizeof(sljit_ins); 261 return inst; 262 } 263 saved_inst = inst[0]; 264 inst[0] = inst[-1]; 265 inst[-1] = saved_inst ^ invert_branch(jump->flags); 266 jump->addr -= 2 * sizeof(sljit_ins); 267 return inst; 268 } 269 } 270 else { 271 diff = ((sljit_sw)target_addr - (sljit_sw)(inst + 1)) >> 2; 272 if (diff <= SIMM_MAX && diff >= SIMM_MIN) { 273 jump->flags |= PATCH_B; 274 275 if (!(jump->flags & IS_COND)) { 276 inst[0] = (jump->flags & IS_JAL) ? BAL : B; 277 inst[1] = NOP; 278 return inst + 1; 279 } 280 inst[0] = inst[0] ^ invert_branch(jump->flags); 281 inst[1] = NOP; 282 jump->addr -= sizeof(sljit_ins); 283 return inst + 1; 284 } 285 } 286 287 if (jump->flags & IS_COND) { 288 if ((jump->flags & IS_MOVABLE) && (target_addr & ~0xfffffff) == ((jump->addr + 2 * sizeof(sljit_ins)) & ~0xfffffff)) { 289 jump->flags |= PATCH_J; 290 saved_inst = inst[0]; 291 inst[0] = inst[-1]; 292 inst[-1] = (saved_inst & 0xffff0000) | 3; 293 inst[1] = J; 294 inst[2] = NOP; 295 return inst + 2; 296 } 297 else if ((target_addr & ~0xfffffff) == ((jump->addr + 3 * sizeof(sljit_ins)) & ~0xfffffff)) { 298 jump->flags |= PATCH_J; 299 inst[0] = (inst[0] & 0xffff0000) | 3; 300 inst[1] = NOP; 301 inst[2] = J; 302 inst[3] = NOP; 303 jump->addr += sizeof(sljit_ins); 304 return inst + 3; 305 } 306 } 307 else { 308 /* J instuctions. 
*/ 309 if ((jump->flags & IS_MOVABLE) && (target_addr & ~0xfffffff) == (jump->addr & ~0xfffffff)) { 310 jump->flags |= PATCH_J; 311 inst[0] = inst[-1]; 312 inst[-1] = (jump->flags & IS_JAL) ? JAL : J; 313 jump->addr -= sizeof(sljit_ins); 314 return inst; 315 } 316 317 if ((target_addr & ~0xfffffff) == ((jump->addr + sizeof(sljit_ins)) & ~0xfffffff)) { 318 jump->flags |= PATCH_J; 319 inst[0] = (jump->flags & IS_JAL) ? JAL : J; 320 inst[1] = NOP; 321 return inst + 1; 322 } 323 } 324 325 #if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) 326 keep_address: 327 if (target_addr <= 0x7fffffff) { 328 jump->flags |= PATCH_ABS32; 329 if (jump->flags & IS_COND) { 330 inst[0] -= 4; 331 inst++; 332 } 333 inst[2] = inst[6]; 334 inst[3] = inst[7]; 335 return inst + 3; 336 } 337 if (target_addr <= 0x7fffffffffffl) { 338 jump->flags |= PATCH_ABS48; 339 if (jump->flags & IS_COND) { 340 inst[0] -= 2; 341 inst++; 342 } 343 inst[4] = inst[6]; 344 inst[5] = inst[7]; 345 return inst + 5; 346 } 347 #endif 348 349 return code_ptr; 350 } 351 352 #ifdef __GNUC__ 353 static __attribute__ ((noinline)) void sljit_cache_flush(void* code, void* code_ptr) 354 { 355 SLJIT_CACHE_FLUSH(code, code_ptr); 356 } 357 #endif 358 359 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler) 360 { 361 struct sljit_memory_fragment *buf; 362 sljit_ins *code; 363 sljit_ins *code_ptr; 364 sljit_ins *buf_ptr; 365 sljit_ins *buf_end; 366 sljit_uw word_count; 367 sljit_uw addr; 368 369 struct sljit_label *label; 370 struct sljit_jump *jump; 371 struct sljit_const *const_; 372 373 CHECK_ERROR_PTR(); 374 CHECK_PTR(check_sljit_generate_code(compiler)); 375 reverse_buf(compiler); 376 377 code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins)); 378 PTR_FAIL_WITH_EXEC_IF(code); 379 buf = compiler->buf; 380 381 code_ptr = code; 382 word_count = 0; 383 label = compiler->labels; 384 jump = compiler->jumps; 385 const_ = compiler->consts; 386 do { 387 buf_ptr = (sljit_ins*)buf->memory; 388 buf_end = buf_ptr + (buf->used_size >> 2); 389 do { 390 *code_ptr = *buf_ptr++; 391 SLJIT_ASSERT(!label || label->size >= word_count); 392 SLJIT_ASSERT(!jump || jump->addr >= word_count); 393 SLJIT_ASSERT(!const_ || const_->addr >= word_count); 394 /* These structures are ordered by their address. */ 395 if (label && label->size == word_count) { 396 /* Just recording the address. */ 397 label->addr = (sljit_uw)code_ptr; 398 label->size = code_ptr - code; 399 label = label->next; 400 } 401 if (jump && jump->addr == word_count) { 402 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 403 jump->addr = (sljit_uw)(code_ptr - 3); 404 #else 405 jump->addr = (sljit_uw)(code_ptr - 7); 406 #endif 407 code_ptr = detect_jump_type(jump, code_ptr, code); 408 jump = jump->next; 409 } 410 if (const_ && const_->addr == word_count) { 411 /* Just recording the address. */ 412 const_->addr = (sljit_uw)code_ptr; 413 const_ = const_->next; 414 } 415 code_ptr ++; 416 word_count ++; 417 } while (buf_ptr < buf_end); 418 419 buf = buf->next; 420 } while (buf); 421 422 if (label && label->size == word_count) { 423 label->addr = (sljit_uw)code_ptr; 424 label->size = code_ptr - code; 425 label = label->next; 426 } 427 428 SLJIT_ASSERT(!label); 429 SLJIT_ASSERT(!jump); 430 SLJIT_ASSERT(!const_); 431 SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size); 432 433 jump = compiler->jumps; 434 while (jump) { 435 do { 436 addr = (jump->flags & JUMP_LABEL) ? 
jump->u.label->addr : jump->u.target; 437 buf_ptr = (sljit_ins*)jump->addr; 438 439 if (jump->flags & PATCH_B) { 440 addr = (sljit_sw)(addr - (jump->addr + sizeof(sljit_ins))) >> 2; 441 SLJIT_ASSERT((sljit_sw)addr <= SIMM_MAX && (sljit_sw)addr >= SIMM_MIN); 442 buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | (addr & 0xffff); 443 break; 444 } 445 if (jump->flags & PATCH_J) { 446 SLJIT_ASSERT((addr & ~0xfffffff) == ((jump->addr + sizeof(sljit_ins)) & ~0xfffffff)); 447 buf_ptr[0] |= (addr >> 2) & 0x03ffffff; 448 break; 449 } 450 451 /* Set the fields of immediate loads. */ 452 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 453 buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 16) & 0xffff); 454 buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | (addr & 0xffff); 455 #else 456 if (jump->flags & PATCH_ABS32) { 457 SLJIT_ASSERT(addr <= 0x7fffffff); 458 buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 16) & 0xffff); 459 buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | (addr & 0xffff); 460 } 461 else if (jump->flags & PATCH_ABS48) { 462 SLJIT_ASSERT(addr <= 0x7fffffffffffl); 463 buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 32) & 0xffff); 464 buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | ((addr >> 16) & 0xffff); 465 buf_ptr[3] = (buf_ptr[3] & 0xffff0000) | (addr & 0xffff); 466 } 467 else { 468 buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 48) & 0xffff); 469 buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | ((addr >> 32) & 0xffff); 470 buf_ptr[3] = (buf_ptr[3] & 0xffff0000) | ((addr >> 16) & 0xffff); 471 buf_ptr[5] = (buf_ptr[5] & 0xffff0000) | (addr & 0xffff); 472 } 473 #endif 474 } while (0); 475 jump = jump->next; 476 } 477 478 compiler->error = SLJIT_ERR_COMPILED; 479 compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins); 480 #ifndef __GNUC__ 481 SLJIT_CACHE_FLUSH(code, code_ptr); 482 #else 483 /* GCC workaround for invalid code generation with -O2. */ 484 sljit_cache_flush(code, code_ptr); 485 #endif 486 return code; 487 } 488 489 /* --------------------------------------------------------------------- */ 490 /* Entry, exit */ 491 /* --------------------------------------------------------------------- */ 492 493 /* Creates an index in data_transfer_insts array. */ 494 #define LOAD_DATA 0x01 495 #define WORD_DATA 0x00 496 #define BYTE_DATA 0x02 497 #define HALF_DATA 0x04 498 #define INT_DATA 0x06 499 #define SIGNED_DATA 0x08 500 /* Separates integer and floating point registers */ 501 #define GPR_REG 0x0f 502 #define DOUBLE_DATA 0x10 503 #define SINGLE_DATA 0x12 504 505 #define MEM_MASK 0x1f 506 507 #define WRITE_BACK 0x00020 508 #define ARG_TEST 0x00040 509 #define ALT_KEEP_CACHE 0x00080 510 #define CUMULATIVE_OP 0x00100 511 #define LOGICAL_OP 0x00200 512 #define IMM_OP 0x00400 513 #define SRC2_IMM 0x00800 514 515 #define UNUSED_DEST 0x01000 516 #define REG_DEST 0x02000 517 #define REG1_SOURCE 0x04000 518 #define REG2_SOURCE 0x08000 519 #define SLOW_SRC1 0x10000 520 #define SLOW_SRC2 0x20000 521 #define SLOW_DEST 0x40000 522 523 /* Only these flags are set. UNUSED_DEST is not set when no flags should be set. 
*/ 524 #define CHECK_FLAGS(list) \ 525 (!(flags & UNUSED_DEST) || (op & GET_FLAGS(~(list)))) 526 527 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 528 #define STACK_STORE SW 529 #define STACK_LOAD LW 530 #else 531 #define STACK_STORE SD 532 #define STACK_LOAD LD 533 #endif 534 535 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 536 #include "sljitNativeMIPS_32.c" 537 #else 538 #include "sljitNativeMIPS_64.c" 539 #endif 540 541 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler, 542 sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds, 543 sljit_si fscratches, sljit_si fsaveds, sljit_si local_size) 544 { 545 sljit_ins base; 546 sljit_si i, tmp, offs; 547 548 CHECK_ERROR(); 549 CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size)); 550 set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size); 551 552 local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1) + SLJIT_LOCALS_OFFSET; 553 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 554 local_size = (local_size + 15) & ~0xf; 555 #else 556 local_size = (local_size + 31) & ~0x1f; 557 #endif 558 compiler->local_size = local_size; 559 560 if (local_size <= SIMM_MAX) { 561 /* Frequent case. */ 562 FAIL_IF(push_inst(compiler, ADDIU_W | S(SLJIT_SP) | T(SLJIT_SP) | IMM(-local_size), DR(SLJIT_SP))); 563 base = S(SLJIT_SP); 564 } 565 else { 566 FAIL_IF(load_immediate(compiler, DR(TMP_REG1), local_size)); 567 FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | TA(0) | D(TMP_REG2), DR(TMP_REG2))); 568 FAIL_IF(push_inst(compiler, SUBU_W | S(SLJIT_SP) | T(TMP_REG1) | D(SLJIT_SP), DR(SLJIT_SP))); 569 base = S(TMP_REG2); 570 local_size = 0; 571 } 572 573 offs = local_size - (sljit_sw)(sizeof(sljit_sw)); 574 FAIL_IF(push_inst(compiler, STACK_STORE | base | TA(RETURN_ADDR_REG) | IMM(offs), MOVABLE_INS)); 575 576 tmp = saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? 
(SLJIT_S0 + 1 - saveds) : SLJIT_FIRST_SAVED_REG; 577 for (i = SLJIT_S0; i >= tmp; i--) { 578 offs -= (sljit_si)(sizeof(sljit_sw)); 579 FAIL_IF(push_inst(compiler, STACK_STORE | base | T(i) | IMM(offs), MOVABLE_INS)); 580 } 581 582 for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) { 583 offs -= (sljit_si)(sizeof(sljit_sw)); 584 FAIL_IF(push_inst(compiler, STACK_STORE | base | T(i) | IMM(offs), MOVABLE_INS)); 585 } 586 587 if (args >= 1) 588 FAIL_IF(push_inst(compiler, ADDU_W | SA(4) | TA(0) | D(SLJIT_S0), DR(SLJIT_S0))); 589 if (args >= 2) 590 FAIL_IF(push_inst(compiler, ADDU_W | SA(5) | TA(0) | D(SLJIT_S1), DR(SLJIT_S1))); 591 if (args >= 3) 592 FAIL_IF(push_inst(compiler, ADDU_W | SA(6) | TA(0) | D(SLJIT_S2), DR(SLJIT_S2))); 593 594 return SLJIT_SUCCESS; 595 } 596 597 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler, 598 sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds, 599 sljit_si fscratches, sljit_si fsaveds, sljit_si local_size) 600 { 601 CHECK_ERROR(); 602 CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size)); 603 set_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size); 604 605 local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1) + SLJIT_LOCALS_OFFSET; 606 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 607 compiler->local_size = (local_size + 15) & ~0xf; 608 #else 609 compiler->local_size = (local_size + 31) & ~0x1f; 610 #endif 611 return SLJIT_SUCCESS; 612 } 613 614 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw) 615 { 616 sljit_si local_size, i, tmp, offs; 617 sljit_ins base; 618 619 CHECK_ERROR(); 620 CHECK(check_sljit_emit_return(compiler, op, src, srcw)); 621 622 FAIL_IF(emit_mov_before_return(compiler, op, src, srcw)); 623 624 local_size = compiler->local_size; 625 if (local_size <= SIMM_MAX) 626 base = S(SLJIT_SP); 627 else { 628 FAIL_IF(load_immediate(compiler, DR(TMP_REG1), local_size)); 629 FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_SP) | T(TMP_REG1) | D(TMP_REG1), DR(TMP_REG1))); 630 base = S(TMP_REG1); 631 local_size = 0; 632 } 633 634 FAIL_IF(push_inst(compiler, STACK_LOAD | base | TA(RETURN_ADDR_REG) | IMM(local_size - (sljit_si)sizeof(sljit_sw)), RETURN_ADDR_REG)); 635 offs = local_size - (sljit_si)GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 1); 636 637 tmp = compiler->scratches; 638 for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) { 639 FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(i) | IMM(offs), DR(i))); 640 offs += (sljit_si)(sizeof(sljit_sw)); 641 } 642 643 tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? 
(SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG; 644 for (i = tmp; i <= SLJIT_S0; i++) { 645 FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(i) | IMM(offs), DR(i))); 646 offs += (sljit_si)(sizeof(sljit_sw)); 647 } 648 649 SLJIT_ASSERT(offs == local_size - (sljit_sw)(sizeof(sljit_sw))); 650 651 FAIL_IF(push_inst(compiler, JR | SA(RETURN_ADDR_REG), UNMOVABLE_INS)); 652 if (compiler->local_size <= SIMM_MAX) 653 return push_inst(compiler, ADDIU_W | S(SLJIT_SP) | T(SLJIT_SP) | IMM(compiler->local_size), UNMOVABLE_INS); 654 else 655 return push_inst(compiler, ADDU_W | S(TMP_REG1) | TA(0) | D(SLJIT_SP), UNMOVABLE_INS); 656 } 657 658 #undef STACK_STORE 659 #undef STACK_LOAD 660 661 /* --------------------------------------------------------------------- */ 662 /* Operators */ 663 /* --------------------------------------------------------------------- */ 664 665 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 666 #define ARCH_32_64(a, b) a 667 #else 668 #define ARCH_32_64(a, b) b 669 #endif 670 671 static SLJIT_CONST sljit_ins data_transfer_insts[16 + 4] = { 672 /* u w s */ ARCH_32_64(HI(43) /* sw */, HI(63) /* sd */), 673 /* u w l */ ARCH_32_64(HI(35) /* lw */, HI(55) /* ld */), 674 /* u b s */ HI(40) /* sb */, 675 /* u b l */ HI(36) /* lbu */, 676 /* u h s */ HI(41) /* sh */, 677 /* u h l */ HI(37) /* lhu */, 678 /* u i s */ HI(43) /* sw */, 679 /* u i l */ ARCH_32_64(HI(35) /* lw */, HI(39) /* lwu */), 680 681 /* s w s */ ARCH_32_64(HI(43) /* sw */, HI(63) /* sd */), 682 /* s w l */ ARCH_32_64(HI(35) /* lw */, HI(55) /* ld */), 683 /* s b s */ HI(40) /* sb */, 684 /* s b l */ HI(32) /* lb */, 685 /* s h s */ HI(41) /* sh */, 686 /* s h l */ HI(33) /* lh */, 687 /* s i s */ HI(43) /* sw */, 688 /* s i l */ HI(35) /* lw */, 689 690 /* d s */ HI(61) /* sdc1 */, 691 /* d l */ HI(53) /* ldc1 */, 692 /* s s */ HI(57) /* swc1 */, 693 /* s l */ HI(49) /* lwc1 */, 694 }; 695 696 #undef ARCH_32_64 697 698 /* reg_ar is an absoulute register! */ 699 700 /* Can perform an operation using at most 1 instruction. */ 701 static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg_ar, sljit_si arg, sljit_sw argw) 702 { 703 SLJIT_ASSERT(arg & SLJIT_MEM); 704 705 if ((!(flags & WRITE_BACK) || !(arg & REG_MASK)) && !(arg & OFFS_REG_MASK) && argw <= SIMM_MAX && argw >= SIMM_MIN) { 706 /* Works for both absoulte and relative addresses. */ 707 if (SLJIT_UNLIKELY(flags & ARG_TEST)) 708 return 1; 709 FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(arg & REG_MASK) 710 | TA(reg_ar) | IMM(argw), ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) ? reg_ar : MOVABLE_INS)); 711 return -1; 712 } 713 return 0; 714 } 715 716 /* See getput_arg below. 717 Note: can_cache is called only for binary operators. Those 718 operators always uses word arguments without write back. */ 719 static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw) 720 { 721 SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM)); 722 723 /* Simple operation except for updates. */ 724 if (arg & OFFS_REG_MASK) { 725 argw &= 0x3; 726 next_argw &= 0x3; 727 if (argw && argw == next_argw && (arg == next_arg || (arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK))) 728 return 1; 729 return 0; 730 } 731 732 if (arg == next_arg) { 733 if (((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN)) 734 return 1; 735 return 0; 736 } 737 738 return 0; 739 } 740 741 /* Emit the necessary instructions. See can_cache above. 
*/ 742 static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg_ar, sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw) 743 { 744 sljit_si tmp_ar, base, delay_slot; 745 746 SLJIT_ASSERT(arg & SLJIT_MEM); 747 if (!(next_arg & SLJIT_MEM)) { 748 next_arg = 0; 749 next_argw = 0; 750 } 751 752 if ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) { 753 tmp_ar = reg_ar; 754 delay_slot = reg_ar; 755 } else { 756 tmp_ar = DR(TMP_REG1); 757 delay_slot = MOVABLE_INS; 758 } 759 base = arg & REG_MASK; 760 761 if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) { 762 argw &= 0x3; 763 if ((flags & WRITE_BACK) && reg_ar == DR(base)) { 764 SLJIT_ASSERT(!(flags & LOAD_DATA) && DR(TMP_REG1) != reg_ar); 765 FAIL_IF(push_inst(compiler, ADDU_W | SA(reg_ar) | TA(0) | D(TMP_REG1), DR(TMP_REG1))); 766 reg_ar = DR(TMP_REG1); 767 } 768 769 /* Using the cache. */ 770 if (argw == compiler->cache_argw) { 771 if (!(flags & WRITE_BACK)) { 772 if (arg == compiler->cache_arg) 773 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot); 774 if ((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) { 775 if (arg == next_arg && argw == (next_argw & 0x3)) { 776 compiler->cache_arg = arg; 777 compiler->cache_argw = argw; 778 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(TMP_REG3), DR(TMP_REG3))); 779 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot); 780 } 781 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | DA(tmp_ar), tmp_ar)); 782 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot); 783 } 784 } 785 else { 786 if ((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) { 787 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(base), DR(base))); 788 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), delay_slot); 789 } 790 } 791 } 792 793 if (SLJIT_UNLIKELY(argw)) { 794 compiler->cache_arg = SLJIT_MEM | (arg & OFFS_REG_MASK); 795 compiler->cache_argw = argw; 796 FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(arg)) | D(TMP_REG3) | SH_IMM(argw), DR(TMP_REG3))); 797 } 798 799 if (!(flags & WRITE_BACK)) { 800 if (arg == next_arg && argw == (next_argw & 0x3)) { 801 compiler->cache_arg = arg; 802 compiler->cache_argw = argw; 803 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? OFFS_REG(arg) : TMP_REG3) | D(TMP_REG3), DR(TMP_REG3))); 804 tmp_ar = DR(TMP_REG3); 805 } 806 else 807 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? OFFS_REG(arg) : TMP_REG3) | DA(tmp_ar), tmp_ar)); 808 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot); 809 } 810 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? OFFS_REG(arg) : TMP_REG3) | D(base), DR(base))); 811 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), delay_slot); 812 } 813 814 if (SLJIT_UNLIKELY(flags & WRITE_BACK) && base) { 815 /* Update only applies if a base register exists. 
*/ 816 if (reg_ar == DR(base)) { 817 SLJIT_ASSERT(!(flags & LOAD_DATA) && DR(TMP_REG1) != reg_ar); 818 if (argw <= SIMM_MAX && argw >= SIMM_MIN) { 819 FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar) | IMM(argw), MOVABLE_INS)); 820 if (argw) 821 return push_inst(compiler, ADDIU_W | S(base) | T(base) | IMM(argw), DR(base)); 822 return SLJIT_SUCCESS; 823 } 824 FAIL_IF(push_inst(compiler, ADDU_W | SA(reg_ar) | TA(0) | D(TMP_REG1), DR(TMP_REG1))); 825 reg_ar = DR(TMP_REG1); 826 } 827 828 if (argw <= SIMM_MAX && argw >= SIMM_MIN) { 829 if (argw) 830 FAIL_IF(push_inst(compiler, ADDIU_W | S(base) | T(base) | IMM(argw), DR(base))); 831 } 832 else { 833 if (compiler->cache_arg == SLJIT_MEM && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) { 834 if (argw != compiler->cache_argw) { 835 FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3))); 836 compiler->cache_argw = argw; 837 } 838 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(base), DR(base))); 839 } 840 else { 841 compiler->cache_arg = SLJIT_MEM; 842 compiler->cache_argw = argw; 843 FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw)); 844 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(base), DR(base))); 845 } 846 } 847 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), delay_slot); 848 } 849 850 if (compiler->cache_arg == arg && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) { 851 if (argw != compiler->cache_argw) { 852 FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3))); 853 compiler->cache_argw = argw; 854 } 855 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot); 856 } 857 858 if (compiler->cache_arg == SLJIT_MEM && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) { 859 if (argw != compiler->cache_argw) 860 FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3))); 861 } 862 else { 863 compiler->cache_arg = SLJIT_MEM; 864 FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw)); 865 } 866 compiler->cache_argw = argw; 867 868 if (!base) 869 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot); 870 871 if (arg == next_arg && next_argw - argw <= SIMM_MAX && next_argw - argw >= SIMM_MIN) { 872 compiler->cache_arg = arg; 873 FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | D(TMP_REG3), DR(TMP_REG3))); 874 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot); 875 } 876 877 FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | DA(tmp_ar), tmp_ar)); 878 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot); 879 } 880 881 static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg_ar, sljit_si arg, sljit_sw argw) 882 { 883 if (getput_arg_fast(compiler, flags, reg_ar, arg, argw)) 884 return compiler->error; 885 compiler->cache_arg = 0; 886 compiler->cache_argw = 0; 887 return getput_arg(compiler, flags, reg_ar, arg, argw, 0, 0); 888 } 889 890 static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg1, sljit_sw arg1w, sljit_si arg2, sljit_sw 
arg2w) 891 { 892 if (getput_arg_fast(compiler, flags, reg, arg1, arg1w)) 893 return compiler->error; 894 return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w); 895 } 896 897 static sljit_si emit_op(struct sljit_compiler *compiler, sljit_si op, sljit_si flags, 898 sljit_si dst, sljit_sw dstw, 899 sljit_si src1, sljit_sw src1w, 900 sljit_si src2, sljit_sw src2w) 901 { 902 /* arg1 goes to TMP_REG1 or src reg 903 arg2 goes to TMP_REG2, imm or src reg 904 TMP_REG3 can be used for caching 905 result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */ 906 sljit_si dst_r = TMP_REG2; 907 sljit_si src1_r; 908 sljit_sw src2_r = 0; 909 sljit_si sugg_src2_r = TMP_REG2; 910 911 if (!(flags & ALT_KEEP_CACHE)) { 912 compiler->cache_arg = 0; 913 compiler->cache_argw = 0; 914 } 915 916 if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) { 917 if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI && !(src2 & SLJIT_MEM)) 918 return SLJIT_SUCCESS; 919 if (GET_FLAGS(op)) 920 flags |= UNUSED_DEST; 921 } 922 else if (FAST_IS_REG(dst)) { 923 dst_r = dst; 924 flags |= REG_DEST; 925 if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI) 926 sugg_src2_r = dst_r; 927 } 928 else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, DR(TMP_REG1), dst, dstw)) 929 flags |= SLOW_DEST; 930 931 if (flags & IMM_OP) { 932 if ((src2 & SLJIT_IMM) && src2w) { 933 if ((!(flags & LOGICAL_OP) && (src2w <= SIMM_MAX && src2w >= SIMM_MIN)) 934 || ((flags & LOGICAL_OP) && !(src2w & ~UIMM_MAX))) { 935 flags |= SRC2_IMM; 936 src2_r = src2w; 937 } 938 } 939 if (!(flags & SRC2_IMM) && (flags & CUMULATIVE_OP) && (src1 & SLJIT_IMM) && src1w) { 940 if ((!(flags & LOGICAL_OP) && (src1w <= SIMM_MAX && src1w >= SIMM_MIN)) 941 || ((flags & LOGICAL_OP) && !(src1w & ~UIMM_MAX))) { 942 flags |= SRC2_IMM; 943 src2_r = src1w; 944 945 /* And swap arguments. */ 946 src1 = src2; 947 src1w = src2w; 948 src2 = SLJIT_IMM; 949 /* src2w = src2_r unneeded. */ 950 } 951 } 952 } 953 954 /* Source 1. */ 955 if (FAST_IS_REG(src1)) { 956 src1_r = src1; 957 flags |= REG1_SOURCE; 958 } 959 else if (src1 & SLJIT_IMM) { 960 if (src1w) { 961 FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w)); 962 src1_r = TMP_REG1; 963 } 964 else 965 src1_r = 0; 966 } 967 else { 968 if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w)) 969 FAIL_IF(compiler->error); 970 else 971 flags |= SLOW_SRC1; 972 src1_r = TMP_REG1; 973 } 974 975 /* Source 2. 
*/ 976 if (FAST_IS_REG(src2)) { 977 src2_r = src2; 978 flags |= REG2_SOURCE; 979 if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_SI) 980 dst_r = src2_r; 981 } 982 else if (src2 & SLJIT_IMM) { 983 if (!(flags & SRC2_IMM)) { 984 if (src2w) { 985 FAIL_IF(load_immediate(compiler, DR(sugg_src2_r), src2w)); 986 src2_r = sugg_src2_r; 987 } 988 else { 989 src2_r = 0; 990 if ((op >= SLJIT_MOV && op <= SLJIT_MOVU_SI) && (dst & SLJIT_MEM)) 991 dst_r = 0; 992 } 993 } 994 } 995 else { 996 if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w)) 997 FAIL_IF(compiler->error); 998 else 999 flags |= SLOW_SRC2; 1000 src2_r = sugg_src2_r; 1001 } 1002 1003 if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) { 1004 SLJIT_ASSERT(src2_r == TMP_REG2); 1005 if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) { 1006 FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG2), src2, src2w, src1, src1w)); 1007 FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw)); 1008 } 1009 else { 1010 FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, src2, src2w)); 1011 FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG2), src2, src2w, dst, dstw)); 1012 } 1013 } 1014 else if (flags & SLOW_SRC1) 1015 FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw)); 1016 else if (flags & SLOW_SRC2) 1017 FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w, dst, dstw)); 1018 1019 FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r)); 1020 1021 if (dst & SLJIT_MEM) { 1022 if (!(flags & SLOW_DEST)) { 1023 getput_arg_fast(compiler, flags, DR(dst_r), dst, dstw); 1024 return compiler->error; 1025 } 1026 return getput_arg(compiler, flags, DR(dst_r), dst, dstw, 0, 0); 1027 } 1028 1029 return SLJIT_SUCCESS; 1030 } 1031 1032 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op) 1033 { 1034 #if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) 1035 sljit_si int_op = op & SLJIT_INT_OP; 1036 #endif 1037 1038 CHECK_ERROR(); 1039 CHECK(check_sljit_emit_op0(compiler, op)); 1040 1041 op = GET_OPCODE(op); 1042 switch (op) { 1043 case SLJIT_BREAKPOINT: 1044 return push_inst(compiler, BREAK, UNMOVABLE_INS); 1045 case SLJIT_NOP: 1046 return push_inst(compiler, NOP, UNMOVABLE_INS); 1047 case SLJIT_LUMUL: 1048 case SLJIT_LSMUL: 1049 #if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) 1050 FAIL_IF(push_inst(compiler, (op == SLJIT_LUMUL ? DMULTU : DMULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS)); 1051 #else 1052 FAIL_IF(push_inst(compiler, (op == SLJIT_LUMUL ? MULTU : MULT) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS)); 1053 #endif 1054 FAIL_IF(push_inst(compiler, MFLO | D(SLJIT_R0), DR(SLJIT_R0))); 1055 return push_inst(compiler, MFHI | D(SLJIT_R1), DR(SLJIT_R1)); 1056 case SLJIT_UDIVMOD: 1057 case SLJIT_SDIVMOD: 1058 case SLJIT_UDIVI: 1059 case SLJIT_SDIVI: 1060 SLJIT_COMPILE_ASSERT((SLJIT_UDIVMOD & 0x2) == 0 && SLJIT_UDIVI - 0x2 == SLJIT_UDIVMOD, bad_div_opcode_assignments); 1061 #if !(defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1) 1062 FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS)); 1063 FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS)); 1064 #endif 1065 1066 #if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) 1067 if (int_op) 1068 FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_UDIVI ? 
DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS)); 1069 else 1070 FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_UDIVI ? DDIVU : DDIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS)); 1071 #else 1072 FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_UDIVI ? DIVU : DIV) | S(SLJIT_R0) | T(SLJIT_R1), MOVABLE_INS)); 1073 #endif 1074 1075 FAIL_IF(push_inst(compiler, MFLO | D(SLJIT_R0), DR(SLJIT_R0))); 1076 return (op >= SLJIT_UDIVI) ? SLJIT_SUCCESS : push_inst(compiler, MFHI | D(SLJIT_R1), DR(SLJIT_R1)); 1077 } 1078 1079 return SLJIT_SUCCESS; 1080 } 1081 1082 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op, 1083 sljit_si dst, sljit_sw dstw, 1084 sljit_si src, sljit_sw srcw) 1085 { 1086 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1087 # define flags 0 1088 #else 1089 sljit_si flags = 0; 1090 #endif 1091 1092 CHECK_ERROR(); 1093 CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw)); 1094 ADJUST_LOCAL_OFFSET(dst, dstw); 1095 ADJUST_LOCAL_OFFSET(src, srcw); 1096 1097 #if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) 1098 if ((op & SLJIT_INT_OP) && GET_OPCODE(op) >= SLJIT_NOT) { 1099 flags |= INT_DATA | SIGNED_DATA; 1100 if (src & SLJIT_IMM) 1101 srcw = (sljit_si)srcw; 1102 } 1103 #endif 1104 1105 switch (GET_OPCODE(op)) { 1106 case SLJIT_MOV: 1107 case SLJIT_MOV_P: 1108 return emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw); 1109 1110 case SLJIT_MOV_UI: 1111 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1112 return emit_op(compiler, SLJIT_MOV_UI, INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw); 1113 #else 1114 return emit_op(compiler, SLJIT_MOV_UI, INT_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ui)srcw : srcw); 1115 #endif 1116 1117 case SLJIT_MOV_SI: 1118 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1119 return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw); 1120 #else 1121 return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_si)srcw : srcw); 1122 #endif 1123 1124 case SLJIT_MOV_UB: 1125 return emit_op(compiler, SLJIT_MOV_UB, BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub)srcw : srcw); 1126 1127 case SLJIT_MOV_SB: 1128 return emit_op(compiler, SLJIT_MOV_SB, BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb)srcw : srcw); 1129 1130 case SLJIT_MOV_UH: 1131 return emit_op(compiler, SLJIT_MOV_UH, HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh)srcw : srcw); 1132 1133 case SLJIT_MOV_SH: 1134 return emit_op(compiler, SLJIT_MOV_SH, HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh)srcw : srcw); 1135 1136 case SLJIT_MOVU: 1137 case SLJIT_MOVU_P: 1138 return emit_op(compiler, SLJIT_MOV, WORD_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw); 1139 1140 case SLJIT_MOVU_UI: 1141 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1142 return emit_op(compiler, SLJIT_MOV_UI, INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw); 1143 #else 1144 return emit_op(compiler, SLJIT_MOV_UI, INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? 
(sljit_ui)srcw : srcw); 1145 #endif 1146 1147 case SLJIT_MOVU_SI: 1148 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1149 return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw); 1150 #else 1151 return emit_op(compiler, SLJIT_MOV_SI, INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_si)srcw : srcw); 1152 #endif 1153 1154 case SLJIT_MOVU_UB: 1155 return emit_op(compiler, SLJIT_MOV_UB, BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_ub)srcw : srcw); 1156 1157 case SLJIT_MOVU_SB: 1158 return emit_op(compiler, SLJIT_MOV_SB, BYTE_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sb)srcw : srcw); 1159 1160 case SLJIT_MOVU_UH: 1161 return emit_op(compiler, SLJIT_MOV_UH, HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_uh)srcw : srcw); 1162 1163 case SLJIT_MOVU_SH: 1164 return emit_op(compiler, SLJIT_MOV_SH, HALF_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_sh)srcw : srcw); 1165 1166 case SLJIT_NOT: 1167 return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw); 1168 1169 case SLJIT_NEG: 1170 return emit_op(compiler, SLJIT_SUB | GET_ALL_FLAGS(op), flags | IMM_OP, dst, dstw, SLJIT_IMM, 0, src, srcw); 1171 1172 case SLJIT_CLZ: 1173 return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw); 1174 } 1175 1176 return SLJIT_SUCCESS; 1177 1178 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1179 # undef flags 1180 #endif 1181 } 1182 1183 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op, 1184 sljit_si dst, sljit_sw dstw, 1185 sljit_si src1, sljit_sw src1w, 1186 sljit_si src2, sljit_sw src2w) 1187 { 1188 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1189 # define flags 0 1190 #else 1191 sljit_si flags = 0; 1192 #endif 1193 1194 CHECK_ERROR(); 1195 CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w)); 1196 ADJUST_LOCAL_OFFSET(dst, dstw); 1197 ADJUST_LOCAL_OFFSET(src1, src1w); 1198 ADJUST_LOCAL_OFFSET(src2, src2w); 1199 1200 #if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64) 1201 if (op & SLJIT_INT_OP) { 1202 flags |= INT_DATA | SIGNED_DATA; 1203 if (src1 & SLJIT_IMM) 1204 src1w = (sljit_si)src1w; 1205 if (src2 & SLJIT_IMM) 1206 src2w = (sljit_si)src2w; 1207 } 1208 #endif 1209 1210 switch (GET_OPCODE(op)) { 1211 case SLJIT_ADD: 1212 case SLJIT_ADDC: 1213 return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w); 1214 1215 case SLJIT_SUB: 1216 case SLJIT_SUBC: 1217 return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w); 1218 1219 case SLJIT_MUL: 1220 return emit_op(compiler, op, flags | CUMULATIVE_OP, dst, dstw, src1, src1w, src2, src2w); 1221 1222 case SLJIT_AND: 1223 case SLJIT_OR: 1224 case SLJIT_XOR: 1225 return emit_op(compiler, op, flags | CUMULATIVE_OP | LOGICAL_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w); 1226 1227 case SLJIT_SHL: 1228 case SLJIT_LSHR: 1229 case SLJIT_ASHR: 1230 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1231 if (src2 & SLJIT_IMM) 1232 src2w &= 0x1f; 1233 #else 1234 if (src2 & SLJIT_IMM) { 1235 if (op & SLJIT_INT_OP) 1236 src2w &= 0x1f; 1237 else 1238 src2w &= 0x3f; 1239 } 1240 #endif 1241 return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w); 1242 } 1243 1244 return SLJIT_SUCCESS; 1245 1246 
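	/* Usage sketch (illustrative only): a front end reaches the dispatcher
	   above through the public API, e.g.

	       sljit_emit_op2(compiler, SLJIT_ADD, SLJIT_R0, 0, SLJIT_R0, 0, SLJIT_IMM, 42);
	       sljit_emit_op2(compiler, SLJIT_SHL | SLJIT_INT_OP, SLJIT_R1, 0, SLJIT_R1, 0, SLJIT_IMM, 3);

	   The first call takes the IMM_OP path (42 fits in SIMM_MAX, so
	   emit_single_op can use a single ADDIU/DADDIU); the second masks the
	   shift amount with 0x1f because SLJIT_INT_OP selects the 32-bit shift
	   encodings. */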
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1247 # undef flags 1248 #endif 1249 } 1250 1251 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg) 1252 { 1253 CHECK_REG_INDEX(check_sljit_get_register_index(reg)); 1254 return reg_map[reg]; 1255 } 1256 1257 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg) 1258 { 1259 CHECK_REG_INDEX(check_sljit_get_float_register_index(reg)); 1260 return reg << 1; 1261 } 1262 1263 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler, 1264 void *instruction, sljit_si size) 1265 { 1266 CHECK_ERROR(); 1267 CHECK(check_sljit_emit_op_custom(compiler, instruction, size)); 1268 1269 return push_inst(compiler, *(sljit_ins*)instruction, UNMOVABLE_INS); 1270 } 1271 1272 /* --------------------------------------------------------------------- */ 1273 /* Floating point operators */ 1274 /* --------------------------------------------------------------------- */ 1275 1276 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void) 1277 { 1278 #ifdef SLJIT_IS_FPU_AVAILABLE 1279 return SLJIT_IS_FPU_AVAILABLE; 1280 #elif defined(__GNUC__) 1281 sljit_sw fir; 1282 asm ("cfc1 %0, $0" : "=r"(fir)); 1283 return (fir >> 22) & 0x1; 1284 #else 1285 #error "FIR check is not implemented for this architecture" 1286 #endif 1287 } 1288 1289 #define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_SINGLE_OP) >> 7)) 1290 #define FMT(op) (((op & SLJIT_SINGLE_OP) ^ SLJIT_SINGLE_OP) << (21 - 8)) 1291 1292 static SLJIT_INLINE sljit_si sljit_emit_fop1_convw_fromd(struct sljit_compiler *compiler, sljit_si op, 1293 sljit_si dst, sljit_sw dstw, 1294 sljit_si src, sljit_sw srcw) 1295 { 1296 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1297 # define flags 0 1298 #else 1299 sljit_si flags = (GET_OPCODE(op) == SLJIT_CONVW_FROMD) << 21; 1300 #endif 1301 1302 if (src & SLJIT_MEM) { 1303 FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw)); 1304 src = TMP_FREG1; 1305 } 1306 else 1307 src <<= 1; 1308 1309 FAIL_IF(push_inst(compiler, (TRUNC_W_S ^ (flags >> 19)) | FMT(op) | FS(src) | FD(TMP_FREG1), MOVABLE_INS)); 1310 1311 if (dst == SLJIT_UNUSED) 1312 return SLJIT_SUCCESS; 1313 1314 if (FAST_IS_REG(dst)) 1315 return push_inst(compiler, MFC1 | flags | T(dst) | FS(TMP_FREG1), MOVABLE_INS); 1316 1317 /* Store the integer value from a VFP register. */ 1318 return emit_op_mem2(compiler, flags ? DOUBLE_DATA : SINGLE_DATA, TMP_FREG1, dst, dstw, 0, 0); 1319 1320 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1321 # undef is_long 1322 #endif 1323 } 1324 1325 static SLJIT_INLINE sljit_si sljit_emit_fop1_convd_fromw(struct sljit_compiler *compiler, sljit_si op, 1326 sljit_si dst, sljit_sw dstw, 1327 sljit_si src, sljit_sw srcw) 1328 { 1329 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1330 # define flags 0 1331 #else 1332 sljit_si flags = (GET_OPCODE(op) == SLJIT_CONVD_FROMW) << 21; 1333 #endif 1334 1335 sljit_si dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG1; 1336 1337 if (FAST_IS_REG(src)) 1338 FAIL_IF(push_inst(compiler, MTC1 | flags | T(src) | FS(TMP_FREG1), MOVABLE_INS)); 1339 else if (src & SLJIT_MEM) { 1340 /* Load the integer value into a VFP register. */ 1341 FAIL_IF(emit_op_mem2(compiler, ((flags) ? 
DOUBLE_DATA : SINGLE_DATA) | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw)); 1342 } 1343 else { 1344 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) 1345 if (GET_OPCODE(op) == SLJIT_CONVD_FROMI) 1346 srcw = (sljit_si)srcw; 1347 #endif 1348 FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw)); 1349 FAIL_IF(push_inst(compiler, MTC1 | flags | T(TMP_REG1) | FS(TMP_FREG1), MOVABLE_INS)); 1350 } 1351 1352 FAIL_IF(push_inst(compiler, CVT_S_S | flags | (4 << 21) | (((op & SLJIT_SINGLE_OP) ^ SLJIT_SINGLE_OP) >> 8) | FS(TMP_FREG1) | FD(dst_r), MOVABLE_INS)); 1353 1354 if (dst & SLJIT_MEM) 1355 return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0); 1356 return SLJIT_SUCCESS; 1357 1358 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1359 # undef flags 1360 #endif 1361 } 1362 1363 static SLJIT_INLINE sljit_si sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_si op, 1364 sljit_si src1, sljit_sw src1w, 1365 sljit_si src2, sljit_sw src2w) 1366 { 1367 if (src1 & SLJIT_MEM) { 1368 FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w)); 1369 src1 = TMP_FREG1; 1370 } 1371 else 1372 src1 <<= 1; 1373 1374 if (src2 & SLJIT_MEM) { 1375 FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, 0, 0)); 1376 src2 = TMP_FREG2; 1377 } 1378 else 1379 src2 <<= 1; 1380 1381 /* src2 and src1 are swapped. */ 1382 if (op & SLJIT_SET_E) { 1383 FAIL_IF(push_inst(compiler, C_UEQ_S | FMT(op) | FT(src2) | FS(src1), UNMOVABLE_INS)); 1384 FAIL_IF(push_inst(compiler, CFC1 | TA(EQUAL_FLAG) | DA(FCSR_REG), EQUAL_FLAG)); 1385 FAIL_IF(push_inst(compiler, SRL | TA(EQUAL_FLAG) | DA(EQUAL_FLAG) | SH_IMM(23), EQUAL_FLAG)); 1386 FAIL_IF(push_inst(compiler, ANDI | SA(EQUAL_FLAG) | TA(EQUAL_FLAG) | IMM(1), EQUAL_FLAG)); 1387 } 1388 if (op & SLJIT_SET_S) { 1389 /* Mixing the instructions for the two checks. */ 1390 FAIL_IF(push_inst(compiler, C_ULT_S | FMT(op) | FT(src2) | FS(src1), UNMOVABLE_INS)); 1391 FAIL_IF(push_inst(compiler, CFC1 | TA(ULESS_FLAG) | DA(FCSR_REG), ULESS_FLAG)); 1392 FAIL_IF(push_inst(compiler, C_ULT_S | FMT(op) | FT(src1) | FS(src2), UNMOVABLE_INS)); 1393 FAIL_IF(push_inst(compiler, SRL | TA(ULESS_FLAG) | DA(ULESS_FLAG) | SH_IMM(23), ULESS_FLAG)); 1394 FAIL_IF(push_inst(compiler, ANDI | SA(ULESS_FLAG) | TA(ULESS_FLAG) | IMM(1), ULESS_FLAG)); 1395 FAIL_IF(push_inst(compiler, CFC1 | TA(UGREATER_FLAG) | DA(FCSR_REG), UGREATER_FLAG)); 1396 FAIL_IF(push_inst(compiler, SRL | TA(UGREATER_FLAG) | DA(UGREATER_FLAG) | SH_IMM(23), UGREATER_FLAG)); 1397 FAIL_IF(push_inst(compiler, ANDI | SA(UGREATER_FLAG) | TA(UGREATER_FLAG) | IMM(1), UGREATER_FLAG)); 1398 } 1399 return push_inst(compiler, C_UN_S | FMT(op) | FT(src2) | FS(src1), FCSR_FCC); 1400 } 1401 1402 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op, 1403 sljit_si dst, sljit_sw dstw, 1404 sljit_si src, sljit_sw srcw) 1405 { 1406 sljit_si dst_r; 1407 1408 CHECK_ERROR(); 1409 compiler->cache_arg = 0; 1410 compiler->cache_argw = 0; 1411 1412 SLJIT_COMPILE_ASSERT((SLJIT_SINGLE_OP == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error); 1413 SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw); 1414 1415 if (GET_OPCODE(op) == SLJIT_CONVD_FROMS) 1416 op ^= SLJIT_SINGLE_OP; 1417 1418 dst_r = FAST_IS_REG(dst) ? 
(dst << 1) : TMP_FREG1; 1419 1420 if (src & SLJIT_MEM) { 1421 FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, dst_r, src, srcw, dst, dstw)); 1422 src = dst_r; 1423 } 1424 else 1425 src <<= 1; 1426 1427 switch (GET_OPCODE(op)) { 1428 case SLJIT_DMOV: 1429 if (src != dst_r) { 1430 if (dst_r != TMP_FREG1) 1431 FAIL_IF(push_inst(compiler, MOV_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS)); 1432 else 1433 dst_r = src; 1434 } 1435 break; 1436 case SLJIT_DNEG: 1437 FAIL_IF(push_inst(compiler, NEG_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS)); 1438 break; 1439 case SLJIT_DABS: 1440 FAIL_IF(push_inst(compiler, ABS_S | FMT(op) | FS(src) | FD(dst_r), MOVABLE_INS)); 1441 break; 1442 case SLJIT_CONVD_FROMS: 1443 FAIL_IF(push_inst(compiler, CVT_S_S | ((op & SLJIT_SINGLE_OP) ? 1 : (1 << 21)) | FS(src) | FD(dst_r), MOVABLE_INS)); 1444 op ^= SLJIT_SINGLE_OP; 1445 break; 1446 } 1447 1448 if (dst & SLJIT_MEM) 1449 return emit_op_mem2(compiler, FLOAT_DATA(op), dst_r, dst, dstw, 0, 0); 1450 return SLJIT_SUCCESS; 1451 } 1452 1453 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op, 1454 sljit_si dst, sljit_sw dstw, 1455 sljit_si src1, sljit_sw src1w, 1456 sljit_si src2, sljit_sw src2w) 1457 { 1458 sljit_si dst_r, flags = 0; 1459 1460 CHECK_ERROR(); 1461 CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w)); 1462 ADJUST_LOCAL_OFFSET(dst, dstw); 1463 ADJUST_LOCAL_OFFSET(src1, src1w); 1464 ADJUST_LOCAL_OFFSET(src2, src2w); 1465 1466 compiler->cache_arg = 0; 1467 compiler->cache_argw = 0; 1468 1469 dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG2; 1470 1471 if (src1 & SLJIT_MEM) { 1472 if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w)) { 1473 FAIL_IF(compiler->error); 1474 src1 = TMP_FREG1; 1475 } else 1476 flags |= SLOW_SRC1; 1477 } 1478 else 1479 src1 <<= 1; 1480 1481 if (src2 & SLJIT_MEM) { 1482 if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w)) { 1483 FAIL_IF(compiler->error); 1484 src2 = TMP_FREG2; 1485 } else 1486 flags |= SLOW_SRC2; 1487 } 1488 else 1489 src2 <<= 1; 1490 1491 if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) { 1492 if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) { 1493 FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, src1, src1w)); 1494 FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw)); 1495 } 1496 else { 1497 FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w)); 1498 FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw)); 1499 } 1500 } 1501 else if (flags & SLOW_SRC1) 1502 FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw)); 1503 else if (flags & SLOW_SRC2) 1504 FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw)); 1505 1506 if (flags & SLOW_SRC1) 1507 src1 = TMP_FREG1; 1508 if (flags & SLOW_SRC2) 1509 src2 = TMP_FREG2; 1510 1511 switch (GET_OPCODE(op)) { 1512 case SLJIT_DADD: 1513 FAIL_IF(push_inst(compiler, ADD_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS)); 1514 break; 1515 1516 case SLJIT_DSUB: 1517 FAIL_IF(push_inst(compiler, SUB_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS)); 1518 break; 1519 1520 case SLJIT_DMUL: 1521 FAIL_IF(push_inst(compiler, MUL_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS)); 1522 break; 1523 1524 
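	/* Note on FMT(op), as used by the cases around this point (illustrative):
	   the *_S opcodes already contain FMT_S (fmt field = 16, single
	   precision). Since SLJIT_SINGLE_OP is 0x100, FMT(op) evaluates to
	   ((op & 0x100) ^ 0x100) << 13, i.e. it adds (1 << 21) only for double
	   precision operations, turning the fmt field into 17 (.d). So, for
	   example, ADD_S | FMT(op) encodes add.s when SLJIT_SINGLE_OP is set
	   and add.d otherwise. */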
case SLJIT_DDIV: 1525 FAIL_IF(push_inst(compiler, DIV_S | FMT(op) | FT(src2) | FS(src1) | FD(dst_r), MOVABLE_INS)); 1526 break; 1527 } 1528 1529 if (dst_r == TMP_FREG2) 1530 FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG2, dst, dstw, 0, 0)); 1531 1532 return SLJIT_SUCCESS; 1533 } 1534 1535 /* --------------------------------------------------------------------- */ 1536 /* Other instructions */ 1537 /* --------------------------------------------------------------------- */ 1538 1539 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw) 1540 { 1541 CHECK_ERROR(); 1542 CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw)); 1543 ADJUST_LOCAL_OFFSET(dst, dstw); 1544 1545 /* For UNUSED dst. Uncommon, but possible. */ 1546 if (dst == SLJIT_UNUSED) 1547 return SLJIT_SUCCESS; 1548 1549 if (FAST_IS_REG(dst)) 1550 return push_inst(compiler, ADDU_W | SA(RETURN_ADDR_REG) | TA(0) | D(dst), DR(dst)); 1551 1552 /* Memory. */ 1553 return emit_op_mem(compiler, WORD_DATA, RETURN_ADDR_REG, dst, dstw); 1554 } 1555 1556 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw) 1557 { 1558 CHECK_ERROR(); 1559 CHECK(check_sljit_emit_fast_return(compiler, src, srcw)); 1560 ADJUST_LOCAL_OFFSET(src, srcw); 1561 1562 if (FAST_IS_REG(src)) 1563 FAIL_IF(push_inst(compiler, ADDU_W | S(src) | TA(0) | DA(RETURN_ADDR_REG), RETURN_ADDR_REG)); 1564 else if (src & SLJIT_MEM) 1565 FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, RETURN_ADDR_REG, src, srcw)); 1566 else if (src & SLJIT_IMM) 1567 FAIL_IF(load_immediate(compiler, RETURN_ADDR_REG, srcw)); 1568 1569 FAIL_IF(push_inst(compiler, JR | SA(RETURN_ADDR_REG), UNMOVABLE_INS)); 1570 return push_inst(compiler, NOP, UNMOVABLE_INS); 1571 } 1572 1573 /* --------------------------------------------------------------------- */ 1574 /* Conditional instructions */ 1575 /* --------------------------------------------------------------------- */ 1576 1577 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler) 1578 { 1579 struct sljit_label *label; 1580 1581 CHECK_ERROR_PTR(); 1582 CHECK_PTR(check_sljit_emit_label(compiler)); 1583 1584 if (compiler->last_label && compiler->last_label->size == compiler->size) 1585 return compiler->last_label; 1586 1587 label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label)); 1588 PTR_FAIL_IF(!label); 1589 set_label(label, compiler); 1590 compiler->delay_slot = UNMOVABLE_INS; 1591 return label; 1592 } 1593 1594 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32) 1595 #define JUMP_LENGTH 4 1596 #else 1597 #define JUMP_LENGTH 8 1598 #endif 1599 1600 #define BR_Z(src) \ 1601 inst = BEQ | SA(src) | TA(0) | JUMP_LENGTH; \ 1602 flags = IS_BIT26_COND; \ 1603 delay_check = src; 1604 1605 #define BR_NZ(src) \ 1606 inst = BNE | SA(src) | TA(0) | JUMP_LENGTH; \ 1607 flags = IS_BIT26_COND; \ 1608 delay_check = src; 1609 1610 #define BR_T() \ 1611 inst = BC1T | JUMP_LENGTH; \ 1612 flags = IS_BIT16_COND; \ 1613 delay_check = FCSR_FCC; 1614 1615 #define BR_F() \ 1616 inst = BC1F | JUMP_LENGTH; \ 1617 flags = IS_BIT16_COND; \ 1618 delay_check = FCSR_FCC; 1619 1620 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type) 1621 { 1622 struct sljit_jump *jump; 1623 sljit_ins inst; 1624 sljit_si flags = 0; 1625 sljit_si delay_check = UNMOVABLE_INS; 1626 1627 CHECK_ERROR_PTR(); 1628 
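	/* How the conditional cases below work (a sketch): the BR_* macros above
	   emit the *inverse* of the requested condition with an immediate of
	   JUMP_LENGTH, so the branch steps over the emit_const + JR/JALR + delay
	   slot sequence that follows whenever the condition does not hold. The
	   flag registers are set up by the arithmetic emitters so that, e.g.,
	   EQUAL_FLAG is zero exactly when the tested values were equal; hence
	   SLJIT_EQUAL uses BR_NZ(EQUAL_FLAG):

	       BNE | SA(EQUAL_FLAG) | TA(0) | JUMP_LENGTH

	   falls through into the long jump only on equality. detect_jump_type()
	   may later rewrite this pair into a single PC-relative conditional
	   branch. */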
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
{
	struct sljit_jump *jump;
	sljit_ins inst;
	sljit_si flags = 0;
	sljit_si delay_check = UNMOVABLE_INS;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_jump(compiler, type));

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
	type &= 0xff;

	switch (type) {
	case SLJIT_EQUAL:
	case SLJIT_D_NOT_EQUAL:
		BR_NZ(EQUAL_FLAG);
		break;
	case SLJIT_NOT_EQUAL:
	case SLJIT_D_EQUAL:
		BR_Z(EQUAL_FLAG);
		break;
	case SLJIT_LESS:
	case SLJIT_D_LESS:
		BR_Z(ULESS_FLAG);
		break;
	case SLJIT_GREATER_EQUAL:
	case SLJIT_D_GREATER_EQUAL:
		BR_NZ(ULESS_FLAG);
		break;
	case SLJIT_GREATER:
	case SLJIT_D_GREATER:
		BR_Z(UGREATER_FLAG);
		break;
	case SLJIT_LESS_EQUAL:
	case SLJIT_D_LESS_EQUAL:
		BR_NZ(UGREATER_FLAG);
		break;
	case SLJIT_SIG_LESS:
		BR_Z(LESS_FLAG);
		break;
	case SLJIT_SIG_GREATER_EQUAL:
		BR_NZ(LESS_FLAG);
		break;
	case SLJIT_SIG_GREATER:
		BR_Z(GREATER_FLAG);
		break;
	case SLJIT_SIG_LESS_EQUAL:
		BR_NZ(GREATER_FLAG);
		break;
	case SLJIT_OVERFLOW:
	case SLJIT_MUL_OVERFLOW:
		BR_Z(OVERFLOW_FLAG);
		break;
	case SLJIT_NOT_OVERFLOW:
	case SLJIT_MUL_NOT_OVERFLOW:
		BR_NZ(OVERFLOW_FLAG);
		break;
	case SLJIT_D_UNORDERED:
		BR_F();
		break;
	case SLJIT_D_ORDERED:
		BR_T();
		break;
	default:
		/* Not conditional branch. */
		inst = 0;
		break;
	}

	jump->flags |= flags;
	if (compiler->delay_slot == MOVABLE_INS || (compiler->delay_slot != UNMOVABLE_INS && compiler->delay_slot != delay_check))
		jump->flags |= IS_MOVABLE;

	if (inst)
		PTR_FAIL_IF(push_inst(compiler, inst, UNMOVABLE_INS));

	PTR_FAIL_IF(emit_const(compiler, TMP_REG2, 0));
	if (type <= SLJIT_JUMP) {
		PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
		jump->addr = compiler->size;
		PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	} else {
		SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25 && PIC_ADDR_REG == TMP_REG2);
		/* Cannot be optimized out if type is >= CALL0. */
		jump->flags |= IS_JAL | (type >= SLJIT_CALL0 ? IS_CALL : 0);
		PTR_FAIL_IF(push_inst(compiler, JALR | S(TMP_REG2) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));
		jump->addr = compiler->size;
		/* A NOP if type < CALL1. */
		PTR_FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_R0) | TA(0) | DA(4), UNMOVABLE_INS));
	}
	return jump;
}

#define RESOLVE_IMM1() \
	if (src1 & SLJIT_IMM) { \
		if (src1w) { \
			PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w)); \
			src1 = TMP_REG1; \
		} \
		else \
			src1 = 0; \
	}

#define RESOLVE_IMM2() \
	if (src2 & SLJIT_IMM) { \
		if (src2w) { \
			PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG2), src2w)); \
			src2 = TMP_REG2; \
		} \
		else \
			src2 = 0; \
	}
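/* Compare and jump. Memory operands are loaded into the temporary registers
   first; (not) equal uses BEQ/BNE directly, signed comparisons against zero use
   BLTZ/BGEZ/BLEZ/BGTZ, and the remaining cases are synthesized with SLT/SLTU
   (or SLTI/SLTIU for small immediates) followed by a branch on the result. */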
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_si type,
	sljit_si src1, sljit_sw src1w,
	sljit_si src2, sljit_sw src2w)
{
	struct sljit_jump *jump;
	sljit_si flags;
	sljit_ins inst;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_cmp(compiler, type, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;
	flags = ((type & SLJIT_INT_OP) ? INT_DATA : WORD_DATA) | LOAD_DATA;
	if (src1 & SLJIT_MEM) {
		PTR_FAIL_IF(emit_op_mem2(compiler, flags, DR(TMP_REG1), src1, src1w, src2, src2w));
		src1 = TMP_REG1;
	}
	if (src2 & SLJIT_MEM) {
		PTR_FAIL_IF(emit_op_mem2(compiler, flags, DR(TMP_REG2), src2, src2w, 0, 0));
		src2 = TMP_REG2;
	}

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
	type &= 0xff;

	if (type <= SLJIT_NOT_EQUAL) {
		RESOLVE_IMM1();
		RESOLVE_IMM2();
		jump->flags |= IS_BIT26_COND;
		if (compiler->delay_slot == MOVABLE_INS || (compiler->delay_slot != UNMOVABLE_INS && compiler->delay_slot != DR(src1) && compiler->delay_slot != DR(src2)))
			jump->flags |= IS_MOVABLE;
		PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_EQUAL ? BNE : BEQ) | S(src1) | T(src2) | JUMP_LENGTH, UNMOVABLE_INS));
	}
	else if (type >= SLJIT_SIG_LESS && (((src1 & SLJIT_IMM) && (src1w == 0)) || ((src2 & SLJIT_IMM) && (src2w == 0)))) {
		inst = NOP;
		if ((src1 & SLJIT_IMM) && (src1w == 0)) {
			RESOLVE_IMM2();
			switch (type) {
			case SLJIT_SIG_LESS:
				inst = BLEZ;
				jump->flags |= IS_BIT26_COND;
				break;
			case SLJIT_SIG_GREATER_EQUAL:
				inst = BGTZ;
				jump->flags |= IS_BIT26_COND;
				break;
			case SLJIT_SIG_GREATER:
				inst = BGEZ;
				jump->flags |= IS_BIT16_COND;
				break;
			case SLJIT_SIG_LESS_EQUAL:
				inst = BLTZ;
				jump->flags |= IS_BIT16_COND;
				break;
			}
			src1 = src2;
		}
		else {
			RESOLVE_IMM1();
			switch (type) {
			case SLJIT_SIG_LESS:
				inst = BGEZ;
				jump->flags |= IS_BIT16_COND;
				break;
			case SLJIT_SIG_GREATER_EQUAL:
				inst = BLTZ;
				jump->flags |= IS_BIT16_COND;
				break;
			case SLJIT_SIG_GREATER:
				inst = BLEZ;
				jump->flags |= IS_BIT26_COND;
				break;
			case SLJIT_SIG_LESS_EQUAL:
				inst = BGTZ;
				jump->flags |= IS_BIT26_COND;
				break;
			}
		}
		PTR_FAIL_IF(push_inst(compiler, inst | S(src1) | JUMP_LENGTH, UNMOVABLE_INS));
	}
	else {
		if (type == SLJIT_LESS || type == SLJIT_GREATER_EQUAL || type == SLJIT_SIG_LESS || type == SLJIT_SIG_GREATER_EQUAL) {
			RESOLVE_IMM1();
			if ((src2 & SLJIT_IMM) && src2w <= SIMM_MAX && src2w >= SIMM_MIN)
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTIU : SLTI) | S(src1) | T(TMP_REG1) | IMM(src2w), DR(TMP_REG1)));
			else {
				RESOLVE_IMM2();
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTU : SLT) | S(src1) | T(src2) | D(TMP_REG1), DR(TMP_REG1)));
			}
			type = (type == SLJIT_LESS || type == SLJIT_SIG_LESS) ? SLJIT_NOT_EQUAL : SLJIT_EQUAL;
		}
		else {
			RESOLVE_IMM2();
			if ((src1 & SLJIT_IMM) && src1w <= SIMM_MAX && src1w >= SIMM_MIN)
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTIU : SLTI) | S(src2) | T(TMP_REG1) | IMM(src1w), DR(TMP_REG1)));
			else {
				RESOLVE_IMM1();
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_LESS_EQUAL ? SLTU : SLT) | S(src2) | T(src1) | D(TMP_REG1), DR(TMP_REG1)));
			}
			type = (type == SLJIT_GREATER || type == SLJIT_SIG_GREATER) ? SLJIT_NOT_EQUAL : SLJIT_EQUAL;
		}

		jump->flags |= IS_BIT26_COND;
		PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_EQUAL ? BNE : BEQ) | S(TMP_REG1) | TA(0) | JUMP_LENGTH, UNMOVABLE_INS));
	}
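	/* Far jump: load the absolute target through a patchable constant, then
	   take it with an indirect jump and a NOP in the delay slot. */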
	PTR_FAIL_IF(emit_const(compiler, TMP_REG2, 0));
	PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
	jump->addr = compiler->size;
	PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	return jump;
}

#undef RESOLVE_IMM1
#undef RESOLVE_IMM2

SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_si type,
	sljit_si src1, sljit_sw src1w,
	sljit_si src2, sljit_sw src2w)
{
	struct sljit_jump *jump;
	sljit_ins inst;
	sljit_si if_true;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_fcmp(compiler, type, src1, src1w, src2, src2w));

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;

	if (src1 & SLJIT_MEM) {
		PTR_FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(type) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
		src1 = TMP_FREG1;
	}
	else
		src1 <<= 1;

	if (src2 & SLJIT_MEM) {
		PTR_FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(type) | LOAD_DATA, TMP_FREG2, src2, src2w, 0, 0));
		src2 = TMP_FREG2;
	}
	else
		src2 <<= 1;

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
	jump->flags |= IS_BIT16_COND;

	switch (type & 0xff) {
	case SLJIT_D_EQUAL:
		inst = C_UEQ_S;
		if_true = 1;
		break;
	case SLJIT_D_NOT_EQUAL:
		inst = C_UEQ_S;
		if_true = 0;
		break;
	case SLJIT_D_LESS:
		inst = C_ULT_S;
		if_true = 1;
		break;
	case SLJIT_D_GREATER_EQUAL:
		inst = C_ULT_S;
		if_true = 0;
		break;
	case SLJIT_D_GREATER:
		inst = C_ULE_S;
		if_true = 0;
		break;
	case SLJIT_D_LESS_EQUAL:
		inst = C_ULE_S;
		if_true = 1;
		break;
	case SLJIT_D_UNORDERED:
		inst = C_UN_S;
		if_true = 1;
		break;
	default: /* Make compilers happy. */
		SLJIT_ASSERT_STOP();
	case SLJIT_D_ORDERED:
		inst = C_UN_S;
		if_true = 0;
		break;
	}

	PTR_FAIL_IF(push_inst(compiler, inst | FMT(type) | FT(src2) | FS(src1), UNMOVABLE_INS));
	/* Intentionally the other opcode. */
	PTR_FAIL_IF(push_inst(compiler, (if_true ? BC1F : BC1T) | JUMP_LENGTH, UNMOVABLE_INS));
	PTR_FAIL_IF(emit_const(compiler, TMP_REG2, 0));
	PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
	jump->addr = compiler->size;
	PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	return jump;
}

#undef JUMP_LENGTH
#undef BR_Z
#undef BR_NZ
#undef BR_T
#undef BR_F

#undef FLOAT_DATA
#undef FMT
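/* Indirect jumps and calls. For calls the target is moved into PIC_ADDR_REG
   first (asserted below to map to register 25), then taken with JALR; plain
   jumps go through JR with a NOP in the delay slot. */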
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
{
	sljit_si src_r = TMP_REG2;
	struct sljit_jump *jump = NULL;

	CHECK_ERROR();
	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	if (FAST_IS_REG(src)) {
		if (DR(src) != 4)
			src_r = src;
		else
			FAIL_IF(push_inst(compiler, ADDU_W | S(src) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
	}

	if (type >= SLJIT_CALL0) {
		SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25 && PIC_ADDR_REG == TMP_REG2);
		if (src & (SLJIT_IMM | SLJIT_MEM)) {
			if (src & SLJIT_IMM)
				FAIL_IF(load_immediate(compiler, DR(PIC_ADDR_REG), srcw));
			else {
				SLJIT_ASSERT(src_r == TMP_REG2 && (src & SLJIT_MEM));
				FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, TMP_REG2, 0, TMP_REG1, 0, src, srcw));
			}
			FAIL_IF(push_inst(compiler, JALR | S(PIC_ADDR_REG) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));
			/* We need an extra instruction in any case. */
			return push_inst(compiler, ADDU_W | S(SLJIT_R0) | TA(0) | DA(4), UNMOVABLE_INS);
		}

		/* Register input. */
		if (type >= SLJIT_CALL1)
			FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_R0) | TA(0) | DA(4), 4));
		FAIL_IF(push_inst(compiler, JALR | S(src_r) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));
		return push_inst(compiler, ADDU_W | S(src_r) | TA(0) | D(PIC_ADDR_REG), UNMOVABLE_INS);
	}

	if (src & SLJIT_IMM) {
		jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
		FAIL_IF(!jump);
		set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_JAL : 0));
		jump->u.target = srcw;

		if (compiler->delay_slot != UNMOVABLE_INS)
			jump->flags |= IS_MOVABLE;

		FAIL_IF(emit_const(compiler, TMP_REG2, 0));
	}
	else if (src & SLJIT_MEM)
		FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, TMP_REG2, 0, TMP_REG1, 0, src, srcw));

	FAIL_IF(push_inst(compiler, JR | S(src_r), UNMOVABLE_INS));
	if (jump)
		jump->addr = compiler->size;
	FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	return SLJIT_SUCCESS;
}
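/* Materializes the status flag selected by 'type' as 0 or 1. With a MOV opcode
   the bit is written to dst directly; with op >= SLJIT_ADD it is combined with
   dst through emit_op. */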
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
	sljit_si dst, sljit_sw dstw,
	sljit_si src, sljit_sw srcw,
	sljit_si type)
{
	sljit_si sugg_dst_ar, dst_ar;
	sljit_si flags = GET_ALL_FLAGS(op);
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
# define mem_type WORD_DATA
#else
	sljit_si mem_type = (op & SLJIT_INT_OP) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	if (dst == SLJIT_UNUSED)
		return SLJIT_SUCCESS;

	op = GET_OPCODE(op);
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if (op == SLJIT_MOV_SI || op == SLJIT_MOV_UI)
		mem_type = INT_DATA | SIGNED_DATA;
#endif
	sugg_dst_ar = DR((op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2);

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;
	if (op >= SLJIT_ADD && (src & SLJIT_MEM)) {
		ADJUST_LOCAL_OFFSET(src, srcw);
		FAIL_IF(emit_op_mem2(compiler, mem_type | LOAD_DATA, DR(TMP_REG1), src, srcw, dst, dstw));
		src = TMP_REG1;
		srcw = 0;
	}

	switch (type & 0xff) {
	case SLJIT_EQUAL:
	case SLJIT_NOT_EQUAL:
		FAIL_IF(push_inst(compiler, SLTIU | SA(EQUAL_FLAG) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
		break;
	case SLJIT_LESS:
	case SLJIT_GREATER_EQUAL:
	case SLJIT_D_LESS:
	case SLJIT_D_GREATER_EQUAL:
		dst_ar = ULESS_FLAG;
		break;
	case SLJIT_GREATER:
	case SLJIT_LESS_EQUAL:
	case SLJIT_D_GREATER:
	case SLJIT_D_LESS_EQUAL:
		dst_ar = UGREATER_FLAG;
		break;
	case SLJIT_SIG_LESS:
	case SLJIT_SIG_GREATER_EQUAL:
		dst_ar = LESS_FLAG;
		break;
	case SLJIT_SIG_GREATER:
	case SLJIT_SIG_LESS_EQUAL:
		dst_ar = GREATER_FLAG;
		break;
	case SLJIT_OVERFLOW:
	case SLJIT_NOT_OVERFLOW:
		dst_ar = OVERFLOW_FLAG;
		break;
	case SLJIT_MUL_OVERFLOW:
	case SLJIT_MUL_NOT_OVERFLOW:
		FAIL_IF(push_inst(compiler, SLTIU | SA(OVERFLOW_FLAG) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
		type ^= 0x1; /* Flip type bit for the XORI below. */
		break;
	case SLJIT_D_EQUAL:
	case SLJIT_D_NOT_EQUAL:
		dst_ar = EQUAL_FLAG;
		break;

	case SLJIT_D_UNORDERED:
	case SLJIT_D_ORDERED:
		FAIL_IF(push_inst(compiler, CFC1 | TA(sugg_dst_ar) | DA(FCSR_REG), sugg_dst_ar));
		FAIL_IF(push_inst(compiler, SRL | TA(sugg_dst_ar) | DA(sugg_dst_ar) | SH_IMM(23), sugg_dst_ar));
		FAIL_IF(push_inst(compiler, ANDI | SA(sugg_dst_ar) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
		break;

	default:
		SLJIT_ASSERT_STOP();
		dst_ar = sugg_dst_ar;
		break;
	}
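	/* The conditions come in pairs and the odd member of each pair is the
	   negation of the even one, so an odd 'type' inverts the computed bit. */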
	if (type & 0x1) {
		FAIL_IF(push_inst(compiler, XORI | SA(dst_ar) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
	}

	if (op >= SLJIT_ADD) {
		if (DR(TMP_REG2) != dst_ar)
			FAIL_IF(push_inst(compiler, ADDU_W | SA(dst_ar) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
		return emit_op(compiler, op | flags, mem_type | CUMULATIVE_OP | LOGICAL_OP | IMM_OP | ALT_KEEP_CACHE, dst, dstw, src, srcw, TMP_REG2, 0);
	}

	if (dst & SLJIT_MEM)
		return emit_op_mem(compiler, mem_type, dst_ar, dst, dstw);

	if (sugg_dst_ar != dst_ar)
		return push_inst(compiler, ADDU_W | SA(dst_ar) | TA(0) | DA(sugg_dst_ar), sugg_dst_ar);
	return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
# undef mem_type
#endif
}

SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
{
	struct sljit_const *const_;
	sljit_si reg;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
	PTR_FAIL_IF(!const_);
	set_const(const_, compiler);

	reg = SLOW_IS_REG(dst) ? dst : TMP_REG2;

	PTR_FAIL_IF(emit_const(compiler, reg, init_value));

	if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0));
	return const_;
}