
Lines matching references to 'op' in priv/host_amd64_defs.c

276    AMD64RMI* op       = LibVEX_Alloc(sizeof(AMD64RMI));
277 op->tag = Armi_Imm;
278 op->Armi.Imm.imm32 = imm32;
279 return op;
282 AMD64RMI* op = LibVEX_Alloc(sizeof(AMD64RMI));
283 op->tag = Armi_Reg;
284 op->Armi.Reg.reg = reg;
285 return op;
288 AMD64RMI* op = LibVEX_Alloc(sizeof(AMD64RMI));
289 op->tag = Armi_Mem;
290 op->Armi.Mem.am = am;
291 return op;
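
The first group of hits covers the three constructors of the AMD64RMI operand type: a tagged union holding either an immediate, a register, or a memory amode. Below is a minimal self-contained sketch of that idiom with stand-in types (malloc replaces LibVEX_Alloc, and HReg/AMD64AMode are simplified); the constructor name AMD64RMI_Imm is inferred from the field accesses, since the signature lines themselves are not among the matches.

    #include <stdlib.h>

    typedef unsigned int UInt;
    typedef struct { UInt index; } HReg;          /* stand-in for VEX's HReg    */
    typedef struct AMD64AMode_ AMD64AMode;        /* left opaque in this sketch */

    typedef enum { Armi_Imm, Armi_Reg, Armi_Mem } AMD64RMITag;

    typedef struct {
       AMD64RMITag tag;
       union {
          struct { UInt imm32; }     Imm;
          struct { HReg reg; }       Reg;
          struct { AMD64AMode* am; } Mem;
       } Armi;
    } AMD64RMI;

    /* Each constructor allocates a node, sets the tag, and fills only the
       union arm selected by that tag, as in the matched lines above.      */
    AMD64RMI* AMD64RMI_Imm ( UInt imm32 ) {
       AMD64RMI* op       = malloc(sizeof(AMD64RMI));   /* LibVEX_Alloc in VEX */
       op->tag            = Armi_Imm;
       op->Armi.Imm.imm32 = imm32;
       return op;
    }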
294 static void ppAMD64RMI_wrk ( AMD64RMI* op, Bool lo32 ) {
295 switch (op->tag) {
297 vex_printf("$0x%x", op->Armi.Imm.imm32);
301 ppHRegAMD64_lo32(op->Armi.Reg.reg);
303 ppHRegAMD64(op->Armi.Reg.reg);
306 ppAMD64AMode(op->Armi.Mem.am);
312 void ppAMD64RMI ( AMD64RMI* op ) {
313 ppAMD64RMI_wrk(op, False/*!lo32*/);
315 void ppAMD64RMI_lo32 ( AMD64RMI* op ) {
316 ppAMD64RMI_wrk(op, True/*lo32*/);
322 static void addRegUsage_AMD64RMI ( HRegUsage* u, AMD64RMI* op ) {
323 switch (op->tag) {
327 addHRegUse(u, HRmRead, op->Armi.Reg.reg);
330 addRegUsage_AMD64AMode(u, op->Armi.Mem.am);
337 static void mapRegs_AMD64RMI ( HRegRemap* m, AMD64RMI* op ) {
338 switch (op->tag) {
342 op->Armi.Reg.reg = lookupHRegRemap(m, op->Armi.Reg.reg);
345 mapRegs_AMD64AMode(m, op->Armi.Mem.am);
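
The helpers that follow (ppAMD64RMI_wrk, addRegUsage_AMD64RMI, mapRegs_AMD64RMI) all dispatch on op->tag and touch only the arm that tag selects: an immediate contributes no registers, a register arm is printed, read, or remapped directly, and a memory arm delegates to the AMD64AMode helpers. A compact sketch of the switch-on-tag consumer, reusing the stand-in AMD64RMI definition from the previous sketch (printf stands in for vex_printf and the VEX printing helpers):

    #include <stdio.h>

    void ppAMD64RMI_sketch ( AMD64RMI* op )
    {
       switch (op->tag) {
          case Armi_Imm:
             printf("$0x%x", op->Armi.Imm.imm32);        /* immediate operand   */
             break;
          case Armi_Reg:
             printf("%%r%u", op->Armi.Reg.reg.index);    /* ppHRegAMD64 in VEX  */
             break;
          case Armi_Mem:
             printf("<amode>");                          /* ppAMD64AMode in VEX */
             break;
          default:
             break;   /* unreachable for well-formed operands; VEX vpanic()s */
       }
    }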
356 AMD64RI* op = LibVEX_Alloc(sizeof(AMD64RI));
357 op->tag = Ari_Imm;
358 op->Ari.Imm.imm32 = imm32;
359 return op;
362 AMD64RI* op = LibVEX_Alloc(sizeof(AMD64RI));
363 op->tag = Ari_Reg;
364 op->Ari.Reg.reg = reg;
365 return op;
368 void ppAMD64RI ( AMD64RI* op ) {
369 switch (op->tag) {
371 vex_printf("$0x%x", op->Ari.Imm.imm32);
374 ppHRegAMD64(op->Ari.Reg.reg);
384 static void addRegUsage_AMD64RI ( HRegUsage* u, AMD64RI* op ) {
385 switch (op->tag) {
389 addHRegUse(u, HRmRead, op->Ari.Reg.reg);
396 static void mapRegs_AMD64RI ( HRegRemap* m, AMD64RI* op ) {
397 switch (op->tag) {
401 op->Ari.Reg.reg = lookupHRegRemap(m, op->Ari.Reg.reg);
412 AMD64RM* op = LibVEX_Alloc(sizeof(AMD64RM));
413 op->tag = Arm_Reg;
414 op->Arm.Reg.reg = reg;
415 return op;
418 AMD64RM* op = LibVEX_Alloc(sizeof(AMD64RM));
419 op->tag = Arm_Mem;
420 op->Arm.Mem.am = am;
421 return op;
424 void ppAMD64RM ( AMD64RM* op ) {
425 switch (op->tag) {
427 ppAMD64AMode(op->Arm.Mem.am);
430 ppHRegAMD64(op->Arm.Reg.reg);
440 static void addRegUsage_AMD64RM ( HRegUsage* u, AMD64RM* op, HRegMode mode ) {
441 switch (op->tag) {
445 addRegUsage_AMD64AMode(u, op->Arm.Mem.am);
450 addHRegUse(u, mode, op->Arm.Reg.reg);
457 static void mapRegs_AMD64RM ( HRegRemap* m, AMD64RM* op )
459 switch (op->tag) {
461 mapRegs_AMD64AMode(m, op->Arm.Mem.am);
464 op->Arm.Reg.reg = lookupHRegRemap(m, op->Arm.Reg.reg);
483 HChar* showAMD64UnaryOp ( AMD64UnaryOp op ) {
484 switch (op) {
491 HChar* showAMD64AluOp ( AMD64AluOp op ) {
492 switch (op) {
507 HChar* showAMD64ShiftOp ( AMD64ShiftOp op ) {
508 switch (op) {
516 HChar* showA87FpOp ( A87FpOp op ) {
517 switch (op) {
534 HChar* showAMD64SseOp ( AMD64SseOp op ) {
535 switch (op) {
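
showAMD64UnaryOp, showAMD64AluOp, showAMD64ShiftOp, showA87FpOp and showAMD64SseOp are plain enum-to-mnemonic mappers used by the instruction pretty-printer. A self-contained sketch of that shape, with a made-up three-value enum standing in for the real AMD64AluOp (which covers the usual two-operand ALU ops such as mov, cmp, add, sub and mul):

    /* Enum-to-mnemonic sketch; VEX's show functions vpanic() on an unknown tag. */
    typedef enum { Aalu_ADD_sk, Aalu_SUB_sk, Aalu_CMP_sk } AluOpSketch;

    const char* showAluOpSketch ( AluOpSketch op ) {
       switch (op) {
          case Aalu_ADD_sk: return "add";
          case Aalu_SUB_sk: return "sub";
          case Aalu_CMP_sk: return "cmp";
          default:          return "???";
       }
    }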
615 AMD64Instr* AMD64Instr_Alu64R ( AMD64AluOp op, AMD64RMI* src, HReg dst ) {
618 i->Ain.Alu64R.op = op;
623 AMD64Instr* AMD64Instr_Alu64M ( AMD64AluOp op, AMD64RI* src, AMD64AMode* dst ) {
626 i->Ain.Alu64M.op = op;
629 vassert(op != Aalu_MUL);
632 AMD64Instr* AMD64Instr_Sh64 ( AMD64ShiftOp op, UInt src, HReg dst ) {
635 i->Ain.Sh64.op = op;
647 AMD64Instr* AMD64Instr_Unary64 ( AMD64UnaryOp op, HReg dst ) {
650 i->Ain.Unary64.op = op;
661 AMD64Instr* AMD64Instr_Alu32R ( AMD64AluOp op, AMD64RMI* src, HReg dst ) {
664 i->Ain.Alu32R.op = op;
667 switch (op) {
828 AMD64Instr* AMD64Instr_A87FpOp ( A87FpOp op )
832 i->Ain.A87FpOp.op = op;
917 AMD64Instr* AMD64Instr_Sse32Fx4 ( AMD64SseOp op, HReg src, HReg dst ) {
920 i->Ain.Sse32Fx4.op = op;
923 vassert(op != Asse_MOV);
926 AMD64Instr* AMD64Instr_Sse32FLo ( AMD64SseOp op, HReg src, HReg dst ) {
929 i->Ain.Sse32FLo.op = op;
932 vassert(op != Asse_MOV);
935 AMD64Instr* AMD64Instr_Sse64Fx2 ( AMD64SseOp op, HReg src, HReg dst ) {
938 i->Ain.Sse64Fx2.op = op;
941 vassert(op != Asse_MOV);
944 AMD64Instr* AMD64Instr_Sse64FLo ( AMD64SseOp op, HReg src, HReg dst ) {
947 i->Ain.Sse64FLo.op = op;
950 vassert(op != Asse_MOV);
953 AMD64Instr* AMD64Instr_SseReRg ( AMD64SseOp op, HReg re, HReg rg ) {
956 i->Ain.SseReRg.op = op;
988 //uu AMD64Instr* AMD64Instr_AvxReRg ( AMD64SseOp op, HReg re, HReg rg ) {
991 //uu i->Ain.AvxReRg.op = op;
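
The AMD64Instr_* constructors each allocate an instruction node, set its tag, store the opcode in the matching Ain.<variant>.op field, and vassert away combinations the backend never produces (Aalu_MUL in Alu64M, Asse_MOV in the SSE arithmetic forms, since register moves are represented by dedicated instructions such as SseReRg). A guarded-constructor sketch with stand-in types and assert() in place of vassert():

    #include <assert.h>
    #include <stdlib.h>

    /* Stand-ins for VEX's HReg / AMD64SseOp / AMD64Instr; the real code stores
       the fields under i->Ain.Sse32Fx4 and allocates with LibVEX_Alloc().      */
    typedef struct { unsigned index; } HRegS;
    typedef enum { AsseS_MOV, AsseS_ADDF, AsseS_SUBF, AsseS_MULF } SseOpS;

    typedef struct {
       int tag;                                     /* selects the Ain variant */
       struct { SseOpS op; HRegS src, dst; } Sse32Fx4;
    } InstrS;

    InstrS* Instr_Sse32Fx4_sketch ( SseOpS op, HRegS src, HRegS dst )
    {
       InstrS* i       = malloc(sizeof(InstrS));
       i->tag          = 0;        /* the real code sets the Sse32Fx4 variant tag */
       i->Sse32Fx4.op  = op;
       i->Sse32Fx4.src = src;
       i->Sse32Fx4.dst = dst;
       assert(op != AsseS_MOV);    /* vassert: moves go through SseReRg instead   */
       return i;
    }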
1019 vex_printf("%sq ", showAMD64AluOp(i->Ain.Alu64R.op));
1025 vex_printf("%sq ", showAMD64AluOp(i->Ain.Alu64M.op));
1031 vex_printf("%sq ", showAMD64ShiftOp(i->Ain.Sh64.op));
1043 vex_printf("%sq ", showAMD64UnaryOp(i->Ain.Unary64.op));
1053 vex_printf("%sl ", showAMD64AluOp(i->Ain.Alu32R.op));
1184 vex_printf("f%s", showA87FpOp(i->Ain.A87FpOp.op));
1250 vex_printf("%sps ", showAMD64SseOp(i->Ain.Sse32Fx4.op));
1256 vex_printf("%sss ", showAMD64SseOp(i->Ain.Sse32FLo.op));
1262 vex_printf("%spd ", showAMD64SseOp(i->Ain.Sse64Fx2.op));
1268 vex_printf("%ssd ", showAMD64SseOp(i->Ain.Sse64FLo.op));
1274 vex_printf("%s ", showAMD64SseOp(i->Ain.SseReRg.op));
1304 //uu vex_printf("v%s ", showAMD64SseOp(i->Ain.SseReRg.op));
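
In the pretty-printer the mnemonic returned by the show helper is combined with a size or lane suffix directly in the format string: "%sq" for the 64-bit ALU, shift and unary forms, "%sl" for the 32-bit ALU form, and "%sps"/"%sss"/"%spd"/"%ssd" for the SSE forms. A tiny usage sketch, assuming showAluOpSketch from the mapper sketch above:

    #include <stdio.h>

    int main ( void )
    {
       printf("%sq ...\n", showAluOpSketch(Aalu_ADD_sk));   /* prints "addq ..." */
       printf("%sl ...\n", showAluOpSketch(Aalu_ADD_sk));   /* prints "addl ..." */
       return 0;
    }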
1337 if (i->Ain.Alu64R.op == Aalu_MOV) {
1341 if (i->Ain.Alu64R.op == Aalu_CMP) {
1367 vassert(i->Ain.Alu32R.op != Aalu_MOV);
1369 if (i->Ain.Alu32R.op == Aalu_CMP) {
1538 vassert(i->Ain.Sse32Fx4.op != Asse_MOV);
1539 unary = toBool( i->Ain.Sse32Fx4.op == Asse_RCPF
1540 || i->Ain.Sse32Fx4.op == Asse_RSQRTF
1541 || i->Ain.Sse32Fx4.op == Asse_SQRTF );
1547 vassert(i->Ain.Sse32FLo.op != Asse_MOV);
1548 unary = toBool( i->Ain.Sse32FLo.op == Asse_RCPF
1549 || i->Ain.Sse32FLo.op == Asse_RSQRTF
1550 || i->Ain.Sse32FLo.op == Asse_SQRTF );
1556 vassert(i->Ain.Sse64Fx2.op != Asse_MOV);
1557 unary = toBool( i->Ain.Sse64Fx2.op == Asse_RCPF
1558 || i->Ain.Sse64Fx2.op == Asse_RSQRTF
1559 || i->Ain.Sse64Fx2.op == Asse_SQRTF );
1565 vassert(i->Ain.Sse64FLo.op != Asse_MOV);
1566 unary = toBool( i->Ain.Sse64FLo.op == Asse_RCPF
1567 || i->Ain.Sse64FLo.op == Asse_RSQRTF
1568 || i->Ain.Sse64FLo.op == Asse_SQRTF );
1574 if ( (i->Ain.SseReRg.op == Asse_XOR
1575 || i->Ain.SseReRg.op == Asse_CMPEQ32)
1584 addHRegUse(u, i->Ain.SseReRg.op == Asse_MOV
1603 //uu if ( (i->Ain.AvxReRg.op == Asse_XOR
1604 //uu || i->Ain.AvxReRg.op == Asse_CMPEQ32)
1610 //uu addHRegUse(u, i->Ain.AvxReRg.op == Asse_MOV
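
The register-usage hits encode two allocator-facing refinements: the SSE Fx4/FLo/Fx2 arithmetic forms treat RCPF/RSQRTF/SQRTF as unary (the destination is only written, not also read), and a reg-reg XOR or CMPEQ32 of a register with itself is recorded as a pure write, since the result does not depend on the register's previous value; a plain Asse_MOV likewise writes rather than modifies its destination. A self-contained sketch of the SseReRg case, with stand-ins for VEX's HRegUsage machinery (addHRegUse, sameHReg):

    typedef enum { HRmRead_s, HRmWrite_s, HRmModify_s } HRegModeS;
    typedef enum { Op_MOV, Op_XOR, Op_CMPEQ32, Op_ADDF } SseRROpS;

    typedef struct { SseRROpS op; unsigned re, rg; } SseReRgS;

    void getRegUsage_SseReRg_sketch ( SseReRgS* i,
                                      void (*use)(HRegModeS, unsigned) )
    {
       if ((i->op == Op_XOR || i->op == Op_CMPEQ32) && i->re == i->rg) {
          use(HRmWrite_s, i->rg);             /* old value irrelevant: write only */
          return;
       }
       use(HRmRead_s, i->re);                                  /* source          */
       use(i->op == Op_MOV ? HRmWrite_s : HRmModify_s, i->rg); /* destination     */
    }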
1822 if (i->Ain.Alu64R.op != Aalu_MOV)
1831 if (i->Ain.SseReRg.op != Asse_MOV)
1838 //uu if (i->Ain.AvxReRg.op != Asse_MOV)
2304 if (i->Ain.Alu64R.op == Aalu_MOV) {
2347 if (i->Ain.Alu64R.op == Aalu_MUL) {
2384 switch (i->Ain.Alu64R.op) {
2444 if (i->Ain.Alu64M.op == Aalu_MOV) {
2467 switch (i->Ain.Sh64.op) {
2496 if (i->Ain.Unary64.op == Aun_NOT) {
2502 if (i->Ain.Unary64.op == Aun_NEG) {
2519 switch (i->Ain.Alu32R.op) {
3050 switch (i->Ain.A87FpOp.op) {
3212 switch (i->Ain.Sse32Fx4.op) {
3241 switch (i->Ain.Sse64Fx2.op) {
3268 switch (i->Ain.Sse32FLo.op) {
3297 switch (i->Ain.Sse64FLo.op) {
3324 switch (i->Ain.SseReRg.op) {
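
The final group of hits is from the emitter, which selects encodings by switching or branching on the stored op (with Aalu_MOV and Aalu_MUL split out as special cases before the general Alu64R switch). As a byte-level illustration of the kind of distinction these branches make, here is a sketch of the Unary64 case, where NOT and NEG on a 64-bit register share opcode 0xF7 and differ only in the ModRM reg field (/2 vs /3); this is standard x86-64 encoding for the register form, not VEX's emitter code, and it ignores the extended registers and memory operands the real emitter handles.

    #include <stdint.h>

    typedef enum { Aun_NOT_s, Aun_NEG_s } UnaryOpS;

    /* Emit REX.W + F7 + ModRM for "not/neg r64", low registers (rax..rdi) only. */
    int emitUnary64_sketch ( uint8_t* p, UnaryOpS op, unsigned regEnc /* 0..7 */ )
    {
       uint8_t subopc = (op == Aun_NOT_s) ? 2 : 3;
       p[0] = 0x48;                                            /* REX.W          */
       p[1] = 0xF7;
       p[2] = (uint8_t)(0xC0 | (subopc << 3) | (regEnc & 7));  /* ModRM, mod=11  */
       return 3;                                               /* bytes emitted  */
    }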