
Lines Matching refs:reg_map

69 static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
92 static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
95 /* low-map. reg_map & 0x7. */
101 static SLJIT_CONST sljit_ub reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
104 /* low-map. reg_map & 0x7. */
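
The three declarations above define the translation from SLJIT's abstract register numbers to hardware register encodings: the smaller table (line 69) is the x86-32 variant, while the two larger ones (lines 92/101), annotated with the low-map comment, are evidently the x86-64 ABI variants. The low-map exists because only 3 bits of a register encoding fit into a ModRM or opcode field; the 4th bit has to travel in a REX prefix. A minimal conceptual sketch of that split, with invented helper names:

    /* Conceptual sketch only -- the names are invented for illustration and the
       relationship shown is the standard x86-64 encoding rule, not a copy of
       sljit's internals. */
    typedef unsigned char sljit_ub;

    static sljit_ub demo_lmap(sljit_ub encoding)
    {
        return encoding & 0x7;     /* the "low-map" value: fits in ModRM/opcode  */
    }

    static int demo_needs_rex_bit(sljit_ub encoding)
    {
        return encoding >= 8;      /* r8-r15: the extra bit goes in REX.B/REX.R  */
    }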
675 return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
682 return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, MOV_r_i32 + reg_lmap[dst], srcw);
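
Lines 675 and 682 load an immediate using the B8+rd form of MOV, where the register encoding is folded into the opcode byte itself; on x86-64, an encoding of 8 or above cannot fit, so a REX.B prefix is emitted and only reg_lmap (the low 3 bits) is added to MOV_r_i32, which is what the reg_map[dst] >= 8 test selects. A hedged sketch of that pattern (the helper name and buffer handling are invented; the opcode and prefix values are the standard x86 ones):

    #include <string.h>

    /* Emit "mov reg32, imm32" (opcode B8+rd).  Illustrative only. */
    static unsigned char *demo_mov_reg_imm32(unsigned char *p,
                                             unsigned char enc,   /* reg_map value */
                                             unsigned int imm)
    {
        if (enc >= 8)
            *p++ = 0x41;               /* REX.B: carries the encoding's 4th bit */
        *p++ = 0xB8 + (enc & 0x7);     /* MOV_r_i32 + low 3 bits (reg_lmap)     */
        memcpy(p, &imm, 4);            /* 32-bit little-endian immediate        */
        return p + 4;
    }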
750 reg_map[SLJIT_R0] == 0
751 && reg_map[SLJIT_R1] == 2
752 && reg_map[TMP_REG1] > 7,
756 reg_map[SLJIT_R0] == 0
757 && reg_map[SLJIT_R1] < 7
758 && reg_map[TMP_REG1] == 2,
807 *inst = MOD_REG | ((op >= SLJIT_UDIV) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
828 *inst = MOD_REG | reg_map[SLJIT_R1];
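
The divide sequence around lines 750-828 leans on x86's implicit operands: DIV and IDIV always consume EDX:EAX and leave the quotient in EAX and the remainder in EDX, which is why the compile-time asserts pin reg_map[SLJIT_R0] to 0 (EAX) and reg_map[SLJIT_R1] to 2 (EDX); only the divisor is encoded explicitly, in the rm field of the ModRM byte. A sketch of that encoding step (32-bit register form, invented helper name):

    /* Emit the divide itself: F7 /6 = DIV r/m32, F7 /7 = IDIV r/m32.
       EAX/EDX never appear in the encoding -- they are implicit.
       (On x86-64, a divisor in r8-r15 would additionally need REX.B.) */
    static unsigned char *demo_emit_div(unsigned char *p,
                                        unsigned char divisor_enc,
                                        int is_signed)
    {
        *p++ = 0xF7;                                      /* group-3 opcode      */
        *p++ = 0xC0 | ((is_signed ? 7 : 6) << 3)          /* opcode extension    */
                    | (divisor_enc & 0x7);                /* divisor in ModRM.rm */
        return p;
    }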
882 return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
900 if (reg_map[src] >= 4) {
910 else if (FAST_IS_REG(src) && reg_map[src] >= 4) {
913 if (reg_map[dst] < 4) {
944 /* src can be memory addr or reg_map[src] < 4 on x86_32 architectures. */
954 /* Find a non-used register, whose reg_map[src] < 4. */
971 ENCODE_PREFIX(XCHG_EAX_r + reg_map[TMP_REG1]);
984 ENCODE_PREFIX(XCHG_EAX_r + reg_map[TMP_REG1]);
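
The reg_map[src] >= 4 and reg_map[dst] < 4 tests around lines 900-954 reflect an x86-32 limitation: only encodings 0-3 (EAX, ECX, EDX, EBX) have a directly addressable low byte (AL, CL, DL, BL), so byte-sized operations involving other registers have to be staged through a byte-addressable one, which appears to be what the one-byte XCHG_EAX_r prefix emitted at lines 971/984 is for. Two illustrative helpers (invented names, standard encodings):

    /* x86-32: can this register encoding be used as an 8-bit operand directly? */
    static int demo_has_low_byte_form(unsigned char enc)
    {
        return enc < 4;            /* AL=0, CL=1, DL=2, BL=3 */
    }

    /* One-byte "xchg eax, r32": opcode 90+rd, no ModRM byte needed. */
    static unsigned char demo_xchg_eax_opcode(unsigned char enc)
    {
        return (unsigned char)(0x90 + enc);
    }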
1024 return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
1211 *inst++ = MOD_REG | (reg_map[dst_r] << 3) | reg_map[TMP_REG1];
1219 *inst++ = REX_W | (reg_map[dst_r] >= 8 ? REX_R : 0) | (reg_map[TMP_REG1] >= 8 ? REX_B : 0);
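
Lines 1211 and 1219 show the general register-to-register pattern: MOD_REG sets mod = 11 (register direct), the first operand's encoding goes into bits 5:3 (ModRM.reg) and the second into bits 2:0 (ModRM.rm), and on x86-64 an encoding of 8 or more moves its top bit into REX.R or REX.B respectively, which is how line 1219 builds the prefix. A compact sketch of that composition (invented helper, conventional REX/MOD_REG values):

    /* Build the optional REX prefix and the ModRM byte for a reg,reg form.
       Returns the number of bytes written. */
    static int demo_emit_modrm_rr(unsigned char *p,
                                  unsigned char reg_enc,   /* goes into ModRM.reg */
                                  unsigned char rm_enc,    /* goes into ModRM.rm  */
                                  int is_64bit)
    {
        int n = 0;
        unsigned char rex = is_64bit ? 0x48 : 0;   /* REX.W for 64-bit operands  */
        if (reg_enc >= 8) rex |= 0x44;             /* REX.R extends ModRM.reg    */
        if (rm_enc >= 8)  rex |= 0x41;             /* REX.B extends ModRM.rm     */
        if (rex) p[n++] = rex;
        p[n++] = 0xC0 | ((reg_enc & 0x7) << 3) | (rm_enc & 0x7);  /* mod = 11    */
        return n;
    }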
2217 return reg_map[reg];
2682 *inst++ = (reg_map[TMP_REG1] <= 7) ? REX : REX_B;
2686 *inst++ = REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B);
2698 *inst++ = (reg_map[reg] <= 7) ? REX : REX_B;
2702 *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R));
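
Lines 2682-2702 emit a REX prefix even when the register encoding already fits in 3 bits, choosing between a plain REX and one with B/R bits. The likely reason (an inference, not something stated in this listing) is that these sequences operate on byte registers: on x86-64 the mere presence of a REX prefix makes encodings 4-7 select SPL/BPL/SIL/DIL instead of AH/CH/DH/BH, so forcing a prefix keeps the byte form consistent for whatever value reg_map holds. The selection boils down to:

    /* Pick a REX prefix for a byte-register operand: always emit one, and add
       the B bit only when the encoding needs its 4th bit (r8b-r15b). */
    static unsigned char demo_rex_for_byte_reg(unsigned char enc)
    {
        return (enc <= 7) ? 0x40 /* REX */ : 0x41 /* REX | REX.B */;
    }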
2720 if (reg_map[dst] <= 4) {
2728 *inst++ = MOD_REG | reg_map[dst];
2732 *inst = MOD_REG | (reg_map[dst] << 3) | reg_map[dst];
2752 *inst++ = MOD_REG | (reg_map[dst] << 3) | reg_map[TMP_REG1];
2759 *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
2767 *inst++ = MOD_REG | (reg_map[dst] << 3) | 0 /* eax */;
2768 *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
2772 if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && dst == src && reg_map[dst] <= 4) {
2773 SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R0] == 0, scratch_reg1_must_be_eax);
2779 *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
2784 *inst++ = MOD_REG | (0 /* eax */ << 3) | reg_map[dst];
2785 *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
2793 *inst++ = MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1];
2800 *inst++ = MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1];
2809 *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
2819 *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
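
The tail of the listing (lines 2752-2819) stores a condition flag into a destination register on x86-32, where SETcc can only write a low-byte register; when reg_map[dst] has no byte form the value is staged through EAX, with the one-byte XCHG_EAX_r (90+rd) exchange before and after. A rough sketch of that swap-around idea, not sljit's exact byte sequence:

    /* x86-32: dst = (condition) ? 1 : 0 for a dst with no low-byte form.
       Swap dst into EAX, SETcc AL, zero-extend, swap back.  cc is the 4-bit
       condition code of the SETcc/Jcc family (0x0-0xF). */
    static unsigned char *demo_set_cond_via_xchg(unsigned char *p,
                                                 unsigned char dst_enc,  /* e.g. ESI = 6 */
                                                 unsigned char cc)
    {
        *p++ = 0x90 + dst_enc;           /* XCHG_EAX_r + dst: dst <-> EAX        */
        *p++ = 0x0F; *p++ = 0x90 | cc;   /* SETcc r/m8                           */
        *p++ = 0xC0;                     /* ModRM: mod=11, rm=AL                 */
        *p++ = 0x0F; *p++ = 0xB6;        /* MOVZX r32, r/m8                      */
        *p++ = 0xC0;                     /* reg=EAX, rm=AL                       */
        *p++ = 0x90 + dst_enc;           /* swap back: result lands in dst,      */
        return p;                        /* EAX is restored                      */
    }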