
Lines Matching refs:HReg

47 void ppHRegAMD64 ( HReg reg ) 
80 static void ppHRegAMD64_lo32 ( HReg reg )
104 HReg hregAMD64_RAX ( void ) { return mkHReg( 0, HRcInt64, False); }
105 HReg hregAMD64_RCX ( void ) { return mkHReg( 1, HRcInt64, False); }
106 HReg hregAMD64_RDX ( void ) { return mkHReg( 2, HRcInt64, False); }
107 HReg hregAMD64_RBX ( void ) { return mkHReg( 3, HRcInt64, False); }
108 HReg hregAMD64_RSP ( void ) { return mkHReg( 4, HRcInt64, False); }
109 HReg hregAMD64_RBP ( void ) { return mkHReg( 5, HRcInt64, False); }
110 HReg hregAMD64_RSI ( void ) { return mkHReg( 6, HRcInt64, False); }
111 HReg hregAMD64_RDI ( void ) { return mkHReg( 7, HRcInt64, False); }
112 HReg hregAMD64_R8 ( void ) { return mkHReg( 8, HRcInt64, False); }
113 HReg hregAMD64_R9 ( void ) { return mkHReg( 9, HRcInt64, False); }
114 HReg hregAMD64_R10 ( void ) { return mkHReg(10, HRcInt64, False); }
115 HReg hregAMD64_R11 ( void ) { return mkHReg(11, HRcInt64, False); }
116 HReg hregAMD64_R12 ( void ) { return mkHReg(12, HRcInt64, False); }
117 HReg hregAMD64_R13 ( void ) { return mkHReg(13, HRcInt64, False); }
118 HReg hregAMD64_R14 ( void ) { return mkHReg(14, HRcInt64, False); }
119 HReg hregAMD64_R15 ( void ) { return mkHReg(15, HRcInt64, False); }
121 HReg hregAMD64_XMM0 ( void ) { return mkHReg( 0, HRcVec128, False); }
122 HReg hregAMD64_XMM1 ( void ) { return mkHReg( 1, HRcVec128, False); }
123 HReg hregAMD64_XMM3 ( void ) { return mkHReg( 3, HRcVec128, False); }
124 HReg hregAMD64_XMM4 ( void ) { return mkHReg( 4, HRcVec128, False); }
125 HReg hregAMD64_XMM5 ( void ) { return mkHReg( 5, HRcVec128, False); }
126 HReg hregAMD64_XMM6 ( void ) { return mkHReg( 6, HRcVec128, False); }
127 HReg hregAMD64_XMM7 ( void ) { return mkHReg( 7, HRcVec128, False); }
128 HReg hregAMD64_XMM8 ( void ) { return mkHReg( 8, HRcVec128, False); }
129 HReg hregAMD64_XMM9 ( void ) { return mkHReg( 9, HRcVec128, False); }
130 HReg hregAMD64_XMM10 ( void ) { return mkHReg(10, HRcVec128, False); }
131 HReg hregAMD64_XMM11 ( void ) { return mkHReg(11, HRcVec128, False); }
132 HReg hregAMD64_XMM12 ( void ) { return mkHReg(12, HRcVec128, False); }
135 void getAllocableRegs_AMD64 ( Int* nregs, HReg** arr )
139 *arr = LibVEX_Alloc(*nregs * sizeof(HReg));
150 *arr = LibVEX_Alloc(*nregs * sizeof(HReg));
205 AMD64AMode* AMD64AMode_IR ( UInt imm32, HReg reg ) {
212 AMD64AMode* AMD64AMode_IRRS ( UInt imm32, HReg base, HReg indEx, Int shift ) {
281 AMD64RMI* AMD64RMI_Reg ( HReg reg ) {
361 AMD64RI* AMD64RI_Reg ( HReg reg ) {
411 AMD64RM* AMD64RM_Reg ( HReg reg ) {
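A similar sketch for the operand constructors above (same header assumptions; AMD64RMI_Imm is assumed from the same file and does not appear in this match list): AMD64AMode_IR builds the imm(reg) form, AMD64AMode_IRRS the imm(base,index,1<<shift) form, and the AMD64RMI/AMD64RI/AMD64RM wrappers package a register, immediate or memory operand for use in instructions.

static void build_operands ( void )
{
   /* 0x10(%rbp) : displacement plus base register */
   AMD64AMode* am1 = AMD64AMode_IR(16, hregAMD64_RBP());

   /* 0(%rsi,%rcx,8) : base plus scaled index; shift 3 means scale 8 */
   AMD64AMode* am2 = AMD64AMode_IRRS(0, hregAMD64_RSI(), hregAMD64_RCX(), 3);

   /* operand wrappers: a register source, and an immediate source
      (AMD64RMI_Imm assumed, not shown in this match list) */
   AMD64RMI* src1 = AMD64RMI_Reg(hregAMD64_RDX());
   AMD64RMI* src2 = AMD64RMI_Imm(0x1234);

   (void)am1; (void)am2; (void)src1; (void)src2;
}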
608 AMD64Instr* AMD64Instr_Imm64 ( ULong imm64, HReg dst ) {
615 AMD64Instr* AMD64Instr_Alu64R ( AMD64AluOp op, AMD64RMI* src, HReg dst ) {
632 AMD64Instr* AMD64Instr_Sh64 ( AMD64ShiftOp op, UInt src, HReg dst ) {
640 AMD64Instr* AMD64Instr_Test64 ( UInt imm32, HReg dst ) {
647 AMD64Instr* AMD64Instr_Unary64 ( AMD64UnaryOp op, HReg dst ) {
654 AMD64Instr* AMD64Instr_Lea64 ( AMD64AMode* am, HReg dst ) {
661 AMD64Instr* AMD64Instr_Alu32R ( AMD64AluOp op, AMD64RMI* src, HReg dst ) {
719 AMD64Instr* AMD64Instr_XIndir ( HReg dstGA, AMD64AMode* amRIP,
728 AMD64Instr* AMD64Instr_XAssisted ( HReg dstGA, AMD64AMode* amRIP,
739 AMD64Instr* AMD64Instr_CMov64 ( AMD64CondCode cond, AMD64RM* src, HReg dst ) {
748 AMD64Instr* AMD64Instr_MovxLQ ( Bool syned, HReg src, HReg dst ) {
757 AMD64AMode* src, HReg dst ) {
767 AMD64Instr* AMD64Instr_Store ( UChar sz, HReg src, AMD64AMode* dst ) {
776 AMD64Instr* AMD64Instr_Set64 ( AMD64CondCode cond, HReg dst ) {
783 AMD64Instr* AMD64Instr_Bsfr64 ( Bool isFwds, HReg src, HReg dst ) {
858 AMD64Instr* AMD64Instr_SseUComIS ( Int sz, HReg srcL, HReg srcR, HReg dst ) {
868 AMD64Instr* AMD64Instr_SseSI2SF ( Int szS, Int szD, HReg src, HReg dst ) {
879 AMD64Instr* AMD64Instr_SseSF2SI ( Int szS, Int szD, HReg src, HReg dst ) {
890 AMD64Instr* AMD64Instr_SseSDSS ( Bool from64, HReg src, HReg dst )
900 HReg reg, AMD64AMode* addr ) {
910 AMD64Instr* AMD64Instr_SseLdzLO ( Int sz, HReg reg, AMD64AMode* addr )
920 AMD64Instr* AMD64Instr_Sse32Fx4 ( AMD64SseOp op, HReg src, HReg dst ) {
929 AMD64Instr* AMD64Instr_Sse32FLo ( AMD64SseOp op, HReg src, HReg dst ) {
938 AMD64Instr* AMD64Instr_Sse64Fx2 ( AMD64SseOp op, HReg src, HReg dst ) {
947 AMD64Instr* AMD64Instr_Sse64FLo ( AMD64SseOp op, HReg src, HReg dst ) {
956 AMD64Instr* AMD64Instr_SseReRg ( AMD64SseOp op, HReg re, HReg rg ) {
964 AMD64Instr* AMD64Instr_SseCMov ( AMD64CondCode cond, HReg src, HReg dst ) {
973 AMD64Instr* AMD64Instr_SseShuf ( Int order, HReg src, HReg dst ) {
983 //uu HReg reg, AMD64AMode* addr ) {
991 //uu AMD64Instr* AMD64Instr_AvxReRg ( AMD64SseOp op, HReg re, HReg rg ) {
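And a sketch combining the pieces into instructions (assuming the usual AMD64AluOp enumerator names such as Aalu_ADD, which are not in this match list); in the instruction selector these would normally be handed to addInstr rather than built in isolation.

static void build_insns ( void )
{
   /* addq %rcx, %rax : ALU op with a reg/mem/imm source and a register dest */
   AMD64Instr* add = AMD64Instr_Alu64R(Aalu_ADD,
                                       AMD64RMI_Reg(hregAMD64_RCX()),
                                       hregAMD64_RAX());

   /* movabsq $0x1122334455667788, %r11 */
   AMD64Instr* imm = AMD64Instr_Imm64(0x1122334455667788ULL, hregAMD64_R11());

   (void)add; (void)imm;
}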
1635 static inline void mapReg(HRegRemap* m, HReg* r)
1821 Bool isMove_AMD64Instr ( AMD64Instr* i, HReg* src, HReg* dst )
1859 HReg rreg, Int offsetB, Bool mode64 )
1881 HReg rreg, Int offsetB, Bool mode64 )
1906 static UChar iregBits210 ( HReg r )
1917 static UChar iregBit3 ( HReg r )
1928 static UChar iregBits3210 ( HReg r )
1943 static HReg vreg2ireg ( HReg r )
1954 //uu static HReg dvreg2ireg ( HReg r )
2047 static UChar* doAMode_M ( UChar* p, HReg greg, AMD64AMode* am )
2123 static UChar* doAMode_R ( UChar* p, HReg greg, HReg ereg )
2139 static UChar rexAMode_M ( HReg greg, AMD64AMode* am )
2160 static UChar rexAMode_R ( HReg greg, HReg ereg )
2204 //uu static UInt vexAMode_M ( HReg greg, AMD64AMode* am )
2723 HReg r11 = hregAMD64_R11();
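The emission helpers near the end (iregBits210, iregBit3, iregBits3210, rexAMode_*) presumably split a real register's hardware number for ModRM/REX encoding. A standalone toy in plain C (not VEX code) of that split:

#include <stdio.h>

/* An AMD64 GPR numbered 0..15 is encoded with its low three bits in the
   ModRM reg/rm field and bit 3 in the REX prefix (REX.R / REX.B). */
static unsigned char bits210 ( unsigned int regno ) { return regno & 7; }
static unsigned char bit3    ( unsigned int regno ) { return (regno >> 3) & 1; }

int main ( void )
{
   /* %r11 has hardware number 11: ModRM field 3, REX bit 1 */
   printf("r11 -> modrm bits %u, rex bit %u\n", bits210(11), bit3(11));
   return 0;
}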