Lines Matching refs:reg  (cross-reference listing: each match below is prefixed with its line number in the searched source file)
47 void ppHRegAMD64 ( HReg reg )
54 if (hregIsVirtual(reg)) {
55 ppHReg(reg);
59 switch (hregClass(reg)) {
61 r = hregNumber(reg);
66 r = hregNumber(reg);
71 r = hregNumber(reg);
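Lines 47-71 together give the printer's whole shape: virtual registers go through the generic ppHReg, real registers are switched on by class and printed by number. A rough reconstruction of that shape (the name table and the panic default are my guesses, not shown by the matches):

    void ppHRegAMD64 ( HReg reg )
    {
       Int r;
       static const HChar* ireg64_names[16]
          = { "%rax", "%rcx", "%rdx", "%rbx", "%rsp", "%rbp", "%rsi", "%rdi",
              "%r8",  "%r9",  "%r10", "%r11", "%r12", "%r13", "%r14", "%r15" };
       /* virtual regs: print generically (lines 54-55) */
       if (hregIsVirtual(reg)) {
          ppHReg(reg);
          return;
       }
       /* real regs: dispatch on class (lines 59-71) */
       switch (hregClass(reg)) {
          case HRcInt64:
             r = hregNumber(reg);
             vassert(r >= 0 && r < 16);
             vex_printf("%s", ireg64_names[r]);
             return;
          /* further classes (vector regs etc.) repeat the pattern */
          default:
             vpanic("ppHRegAMD64");
       }
    }

ppHRegAMD64_lo32 (lines 80-95) is evidently the same routine specialised to 32-bit names ("%eax" and so on); the pair of calls at 324/326 suggests the RMI printer picks between the two so 32-bit contexts print 32-bit register names.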
80 static void ppHRegAMD64_lo32 ( HReg reg )
87 if (hregIsVirtual(reg)) {
88 ppHReg(reg);
93 switch (hregClass(reg)) {
95 r = hregNumber(reg);
216 AMD64AMode* AMD64AMode_IR ( UInt imm32, HReg reg ) {
220 am->Aam.IR.reg = reg;
237 //.. return AMD64AMode_IR( am->Xam.IR.imm, am->Xam.IR.reg );
253 ppHRegAMD64(am->Aam.IR.reg);
271 addHRegUse(u, HRmRead, am->Aam.IR.reg);
285 am->Aam.IR.reg = lookupHRegRemap(m, am->Aam.IR.reg);
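Lines 216-285 cover one amode variant from construction through register allocation. The constructor at 216-220 is the usual tagged-union builder; a plausible completion (LibVEX_Alloc is the standard VEX allocator, the field layout is inferred from the matches):

    AMD64AMode* AMD64AMode_IR ( UInt imm32, HReg reg ) {
       AMD64AMode* am = LibVEX_Alloc(sizeof(AMD64AMode));
       am->tag        = Aam_IR;
       am->Aam.IR.imm = imm32;
       am->Aam.IR.reg = reg;
       return am;
    }

Lines 271 and 285 are the two traversals every host-side structure here needs: one reports the base register to the allocator as read (addHRegUse with HRmRead), the other rewrites it after allocation (lookupHRegRemap maps a virtual register to the real one chosen for it).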
296 /* --------- Operand, which can be reg, immediate or memory. --------- */
304 AMD64RMI* AMD64RMI_Reg ( HReg reg ) {
307 op->Armi.Reg.reg = reg;
324 ppHRegAMD64_lo32(op->Armi.Reg.reg);
326 ppHRegAMD64(op->Armi.Reg.reg);
350 addHRegUse(u, HRmRead, op->Armi.Reg.reg);
365 op->Armi.Reg.reg = lookupHRegRemap(m, op->Armi.Reg.reg);
376 /* --------- Operand, which can be reg or immediate only. --------- */
384 AMD64RI* AMD64RI_Reg ( HReg reg ) {
387 op->Ari.Reg.reg = reg;
397 ppHRegAMD64(op->Ari.Reg.reg);
412 addHRegUse(u, HRmRead, op->Ari.Reg.reg);
424 op->Ari.Reg.reg = lookupHRegRemap(m, op->Ari.Reg.reg);
432 /* --------- Operand, which can be reg or memory only. --------- */
434 AMD64RM* AMD64RM_Reg ( HReg reg ) {
437 op->Arm.Reg.reg = reg;
453 ppHRegAMD64(op->Arm.Reg.reg);
471 /* reg is read, written or modified. Add it in the
473 addHRegUse(u, mode, op->Arm.Reg.reg);
487 op->Arm.Reg.reg = lookupHRegRemap(m, op->Arm.Reg.reg);
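The three operand flavours, RMI (reg/immediate/memory, lines 296-365), RI (reg/immediate, 376-424) and RM (reg/memory, 432-487), all repeat the constructor / printer / usage / remap quartet above. The one real variation shows at lines 471-473: an RM operand can sit on either side of an instruction, so its usage function takes the mode from the caller instead of hard-coding HRmRead. A sketch under the naming conventions visible in the matches (the Arm_Mem arm is assumed):

    static void addRegUsage_AMD64RM ( HRegUsage* u, AMD64RM* op, HRegMode mode )
    {
       switch (op->tag) {
          case Arm_Mem:
             /* memory form: only the amode's base register is read */
             addRegUsage_AMD64AMode(u, op->Arm.Mem.am);
             return;
          case Arm_Reg:
             /* reg is read, written or modified; the caller knows which */
             addHRegUse(u, mode, op->Arm.Reg.reg);
             return;
          default:
             vpanic("addRegUsage_AMD64RM");
       }
    }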
884 //.. AMD64Instr* AMD64Instr_FpLdSt ( Bool isLoad, UChar sz, HReg reg, AMD64AMode* addr ) {
889 //.. i->Xin.FpLdSt.reg = reg;
895 //.. HReg reg, AMD64AMode* addr ) {
900 //.. i->Xin.FpLdStI.reg = reg;
983 HReg reg, AMD64AMode* addr ) {
988 i->Ain.SseLdSt.reg = reg;
993 AMD64Instr* AMD64Instr_SseLdzLO ( Int sz, HReg reg, AMD64AMode* addr )
998 i->Ain.SseLdzLO.reg = reg;
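Lines 983-998 are the constructors for the two SSE memory instructions. The SseLdSt signature is wrapped across source lines, so its leading parameters are off-screen; a plausible shape, assuming the elided parameter is an isLoad direction flag:

    AMD64Instr* AMD64Instr_SseLdSt ( Bool isLoad, HReg reg, AMD64AMode* addr )
    {
       AMD64Instr* i         = LibVEX_Alloc(sizeof(AMD64Instr));
       i->tag                = Ain_SseLdSt;
       i->Ain.SseLdSt.isLoad = isLoad;
       i->Ain.SseLdSt.reg    = reg;
       i->Ain.SseLdSt.addr   = addr;
       return i;
    }

SseLdzLO (993-998) additionally records a transfer size sz, consistent with "load and zero the upper lanes" existing in 4- and 8-byte variants (movss/movsd).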
1263 //.. ppHRegAMD64(i->Xin.FpLdSt.reg);
1266 //.. ppHRegAMD64(i->Xin.FpLdSt.reg);
1277 //.. ppHRegAMD64(i->Xin.FpLdStI.reg);
1281 //.. ppHRegAMD64(i->Xin.FpLdStI.reg);
1351 ppHRegAMD64(i->Ain.SseLdSt.reg);
1353 ppHRegAMD64(i->Ain.SseLdSt.reg);
1362 ppHRegAMD64(i->Ain.SseLdzLO.reg);
1609 //.. i->Xin.FpLdSt.reg);
1614 //.. i->Xin.FpLdStI.reg);
1650 i->Ain.SseLdSt.reg);
1654 addHRegUse(u, HRmWrite, i->Ain.SseLdzLO.reg);
1699 /* reg-alloc needs to understand 'xor r,r' and 'cmpeqd
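Lines 1650-1654 report SSE memory-op usage to the allocator, and 1699 flags the classic subtlety: 'xor r,r' and 'cmpeqd r,r' syntactically read r, but their result is independent of r's old value, so they must be reported as a pure write (HRmWrite), not a modify; otherwise the allocator would keep a dead value live across the idiom. A sketch of both, with the Asse_* opcode names and the scalar src==dst comparison assumed from context:

    case Ain_SseLdSt:
       addRegUsage_AMD64AMode(u, i->Ain.SseLdSt.addr);
       addHRegUse(u, i->Ain.SseLdSt.isLoad ? HRmWrite : HRmRead,
                     i->Ain.SseLdSt.reg);
       return;
    case Ain_SseReRg:
       if ( (i->Ain.SseReRg.op == Asse_XOR || i->Ain.SseReRg.op == Asse_CMPEQ32)
            && i->Ain.SseReRg.src == i->Ain.SseReRg.dst ) {
          /* result is independent of the old value: pure write */
          addHRegUse(u, HRmWrite, i->Ain.SseReRg.dst);
       } else {
          addHRegUse(u, HRmRead, i->Ain.SseReRg.src);
          addHRegUse(u, i->Ain.SseReRg.op == Asse_MOV ? HRmWrite : HRmModify,
                        i->Ain.SseReRg.dst);
       }
       return;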
1836 //.. mapReg(m, &i->Xin.FpLdSt.reg);
1840 //.. mapReg(m, &i->Xin.FpLdStI.reg);
1876 mapReg(m, &i->Ain.SseLdSt.reg);
1880 mapReg(m, &i->Ain.SseLdzLO.reg);
1917 /* Figure out if i represents a reg-reg move, and if so assign the
1929 *src = i->Ain.Alu64R.src->Armi.Reg.reg;
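Lines 1917-1929 are the move-coalescing hook: the allocator asks whether an instruction is a plain reg-reg copy so it can try to give src and dst the same register and delete the copy. From the match at 1929, the integer case is a 64-bit MOV whose source operand is in register form; a likely shape:

    Bool isMove_AMD64Instr ( AMD64Instr* i, HReg* src, HReg* dst )
    {
       /* int-to-int copies: movq %r1, %r2 */
       if (i->tag == Ain_Alu64R) {
          if (i->Ain.Alu64R.op != Aalu_MOV)
             return False;
          if (i->Ain.Alu64R.src->tag != Armi_Reg)
             return False;
          *src = i->Ain.Alu64R.src->Armi.Reg.reg;
          *dst = i->Ain.Alu64R.dst;
          return True;
       }
       /* an SSE reg-reg MOV case presumably follows */
       return False;
    }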
2044 static UChar mkModRegRM ( UChar mod, UChar reg, UChar regmem )
2047 | ((reg & 7) << 3)
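Line 2047 is the middle term of a three-way OR; the helper almost certainly just packs the standard x86 ModRM byte, 2 mod bits, 3 reg bits, 3 r/m bits (the lines around 2047 are reconstructed):

    static UChar mkModRegRM ( UChar mod, UChar reg, UChar regmem )
    {
       return toUChar( ((mod    & 3) << 6)
                     | ((reg    & 7) << 3)
                     | ((regmem & 7) << 0) );
    }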
2094 /* Forming mod-reg-rm bytes and scale-index-base bytes.
2127 && am->Aam.IR.reg != hregAMD64_RSP()
2128 && am->Aam.IR.reg != hregAMD64_RBP()
2129 && am->Aam.IR.reg != hregAMD64_R12()
2130 && am->Aam.IR.reg != hregAMD64_R13()
2133 iregBits210(am->Aam.IR.reg));
2137 && am->Aam.IR.reg != hregAMD64_RSP()
2138 && am->Aam.IR.reg != hregAMD64_R12()
2141 iregBits210(am->Aam.IR.reg));
2145 if (am->Aam.IR.reg != hregAMD64_RSP()
2146 && am->Aam.IR.reg != hregAMD64_R12()
2149 iregBits210(am->Aam.IR.reg));
2153 if ((am->Aam.IR.reg == hregAMD64_RSP()
2154 || am->Aam.IR.reg == hregAMD64_R12())
2161 if (/* (am->Aam.IR.reg == hregAMD64_RSP()
2163 am->Aam.IR.reg == hregAMD64_R12()) {
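The cluster at 2127-2163 is the amode encoder choosing a ModRM 'mod' form, and the excluded registers are forced by the x86-64 encoding itself: r/m=100 (RSP, R12) means "a SIB byte follows", and mod=00 with r/m=101 (RBP, R13) means rip-relative or plain disp32, so those bases cannot use the compact forms and fall through to the SIB and displacement cases at 2153-2163. A sketch of the first case, names taken from the matches (greg and p are the usual emitter parameters, assumed here):

    /* mod = 00: [reg], no displacement byte */
    if (am->Aam.IR.imm == 0
        && am->Aam.IR.reg != hregAMD64_RSP()
        && am->Aam.IR.reg != hregAMD64_RBP()
        && am->Aam.IR.reg != hregAMD64_R12()
        && am->Aam.IR.reg != hregAMD64_R13()) {
       *p++ = mkModRegRM(0, iregBits210(greg), iregBits210(am->Aam.IR.reg));
       return p;
    }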
2198 /* Emit a mod-reg-rm byte when the rm bit denotes a reg. */
2221 UChar B = iregBit3(am->Aam.IR.reg);
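Line 2221, inside the REX-prefix builder for memory forms, shows the other half of the encoding story: bit 3 of a register number does not fit in the 3-bit ModRM fields, so it travels in the REX prefix, with R extending ModRM.reg, X the SIB index, and B the base/rm. A plausible shape for the reg-reg variant used throughout the emitter below:

    static UChar rexAMode_R ( HReg greg, HReg ereg )
    {
       UChar W = 1;               /* 64-bit operand size          */
       UChar R = iregBit3(greg);  /* extends the ModRM.reg field  */
       UChar X = 0;               /* no SIB index in reg-reg form */
       UChar B = iregBit3(ereg);  /* extends the ModRM.rm field   */
       return toUChar(0x40 + ((W << 3) | (R << 2) | (X << 1) | B));
    }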
2353 UInt reg;
2372 /* Use the short form (load into 32 bit reg, + default
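The comment at 2372 relies on a basic x86-64 rule: writing a 32-bit register implicitly zeroes its upper 32 bits, so small constants can be loaded with a short movl instead of a 10-byte movabsq. A sketch of that path (emit32 is assumed to be the emitter's little-endian 32-bit writer):

    /* short form: movl $imm32, %lo32(dst); hardware zero-extends */
    if (1 & iregBit3(i->Ain.Imm64.dst))
       *p++ = 0x41;                    /* REX.B, only for r8d..r15d */
    *p++ = toUChar(0xB8 + iregBits210(i->Ain.Imm64.dst));
    p = emit32(p, (UInt)i->Ain.Imm64.imm64);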
2414 *p++ = rexAMode_R( i->Ain.Alu64R.src->Armi.Reg.reg,
2417 p = doAMode_R(p, i->Ain.Alu64R.src->Armi.Reg.reg,
2436 i->Ain.Alu64R.src->Armi.Reg.reg);
2440 i->Ain.Alu64R.src->Armi.Reg.reg);
2509 *p++ = rexAMode_R( i->Ain.Alu64R.src->Armi.Reg.reg,
2512 p = doAMode_R(p, i->Ain.Alu64R.src->Armi.Reg.reg,
2532 *p++ = rexAMode_M(i->Ain.Alu64M.src->Ari.Reg.reg,
2535 p = doAMode_M(p, i->Ain.Alu64M.src->Ari.Reg.reg,
2559 //.. p = doAMode_M(p, i->Xin.Alu32M.src->Xri.Reg.reg,
2602 /* testq sign-extend($imm32), %reg */
2672 rexAMode_R( i->Ain.Alu32R.src->Armi.Reg.reg,
2676 p = doAMode_R(p, i->Ain.Alu32R.src->Armi.Reg.reg,
2705 i->Ain.MulL.src->Arm.Reg.reg);
2708 i->Ain.MulL.src->Arm.Reg.reg);
2728 rexAMode_R( fake(0), i->Ain.Div.src->Arm.Reg.reg));
2731 i->Ain.Div.src->Arm.Reg.reg);
2748 i->Ain.Div.src->Arm.Reg.reg);
2751 i->Ain.Div.src->Arm.Reg.reg);
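The fake(0) at 2728 deserves a gloss: unary group opcodes like F7 /6 (div) keep a real register only in the ModRM.rm field and reuse the reg field as an opcode-extension digit. Rather than grow a second doAMode_R variant, the emitter appears to wrap the digit up as a pretend register; presumably something like:

    /* treat an opcode-extension digit as if it were a register, so
       doAMode_R / rexAMode_R can drop it into the ModRM.reg slot */
    #define fake(_n) mkHReg((_n), HRcInt64, False)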
2787 *p++ = toUChar(0x40 + (1 & iregBit3(i->Ain.Push.src->Armi.Reg.reg)));
2788 *p++ = toUChar(0x50 + iregBits210(i->Ain.Push.src->Armi.Reg.reg));
2906 /* movq %reg, %rax ; ret */
2907 if (i->Ain.Goto.dst->Ari.Reg.reg != hregAMD64_RAX()) {
2908 *p++ = rexAMode_R(i->Ain.Goto.dst->Ari.Reg.reg, hregAMD64_RAX());
2910 p = doAMode_R(p, i->Ain.Goto.dst->Ari.Reg.reg, hregAMD64_RAX());
2947 *p++ = rexAMode_R(i->Ain.CMov64.dst, i->Ain.CMov64.src->Arm.Reg.reg);
2950 p = doAMode_R(p, i->Ain.CMov64.dst, i->Ain.CMov64.src->Arm.Reg.reg);
2971 /* Produce a 32-bit reg-reg move, since the implicit
3017 reg = iregBits3210(i->Ain.Set64.dst);
3018 vassert(reg < 16);
3021 *p++ = toUChar(reg >= 8 ? 0x49 : 0x48);
3023 *p++ = toUChar(0xC0 + (reg & 7));
3028 *p++ = toUChar(reg >= 8 ? 0x41 : 0x40);
3031 *p++ = toUChar(0xC0 + (reg & 7));
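Lines 3017-3031 are the Ain_Set64 case. The detour through "movq $0, %dst" exists because setCC writes only the low byte, while an "xorq %dst,%dst" would clobber the very flags about to be tested; the REX prefixes (0x49/0x48, then 0x41/0x40) carry bit 3 of the register number and, on the setCC, make the low-byte registers sil/dil/r8b..r15b addressable at all. (The truncated comment at 2971 is the related MovZLQ trick: a plain 32-bit reg-reg move zero-extends for free.) Reconstructed from the matches:

    reg = iregBits3210(i->Ain.Set64.dst);
    vassert(reg < 16);
    /* movq $0, %dst -- cannot use xor, it would kill the flags */
    *p++ = toUChar(reg >= 8 ? 0x49 : 0x48);   /* REX.W (+B) */
    *p++ = 0xC7;
    *p++ = toUChar(0xC0 + (reg & 7));
    p = emit32(p, 0);
    /* setCC %lo8(dst): 0F 90+cond */
    *p++ = toUChar(reg >= 8 ? 0x41 : 0x40);   /* REX (+B)   */
    *p++ = 0x0F;
    *p++ = toUChar(0x90 + (0x0F & i->Ain.Set64.cond));
    *p++ = toUChar(0xC0 + (reg & 7));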
3250 //.. p = do_fstp_st(p, 1+hregNumber(i->Xin.FpLdSt.reg));
3257 //.. p = do_fld_st(p, 0+hregNumber(i->Xin.FpLdSt.reg));
3278 //.. p = do_fstp_st(p, 1+hregNumber(i->Xin.FpLdStI.reg));
3291 //.. p = do_fld_st(p, 0+hregNumber(i->Xin.FpLdStI.reg));
3451 rexAMode_M( vreg2ireg(i->Ain.SseLdSt.reg), i->Ain.SseLdSt.addr));
3454 p = doAMode_M(p, vreg2ireg(i->Ain.SseLdSt.reg), i->Ain.SseLdSt.addr);
3462 rexAMode_M(vreg2ireg(i->Ain.SseLdzLO.reg),
3466 p = doAMode_M(p, vreg2ireg(i->Ain.SseLdzLO.reg),
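The final cluster (3451-3466) explains vreg2ireg: SSE registers form their own class, but the ModRM/REX encoders above speak only integer-style 0-15 numbers, so the vector register is mapped onto an integer-register carrier before encoding. Rough shape of the SseLdSt case, assuming movups opcodes 0F 10/11 and a clearWBit helper that strips REX.W (a 128-bit vector move wants no 64-bit integer semantics):

    case Ain_SseLdSt:
       *p++ = clearWBit(
              rexAMode_M(vreg2ireg(i->Ain.SseLdSt.reg), i->Ain.SseLdSt.addr));
       *p++ = 0x0F;
       *p++ = toUChar(i->Ain.SseLdSt.isLoad ? 0x10 : 0x11);
       p = doAMode_M(p, vreg2ireg(i->Ain.SseLdSt.reg), i->Ain.SseLdSt.addr);
       goto done;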