
Lines Matching defs:mce

550 static IRExpr* expr2vbits ( struct _MCEnv* mce, IRExpr* e );
591 Shadow IRTemps are therefore allocated on demand. mce.tmpMap is a mapping from original tmps to their shadow tmps; entries start out as IRTemp_INVALID and are filled in lazily.
610 static IRTemp findShadowTmp ( MCEnv* mce, IRTemp orig )
612 tl_assert(orig < mce->n_originalTmps);
613 if (mce->tmpMap[orig] == IRTemp_INVALID) {
614 mce->tmpMap[orig]
615 = newIRTemp(mce->bb->tyenv,
616 shadowType(mce->bb->tyenv->types[orig]));
618 return mce->tmpMap[orig];
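
The on-demand allocation in findShadowTmp can be seen in isolation. Below is a minimal standalone sketch in plain C (ints standing in for IRTemps, and a simple counter standing in for newIRTemp's allocator; the names are mine, not VEX's):

    #include <assert.h>

    #define TEMP_INVALID (-1)

    typedef struct {
        int  n_originalTmps;  /* temps that existed before instrumentation */
        int  next_temp;       /* stand-in for newIRTemp's allocator        */
        int *tmpMap;          /* original temp -> shadow temp, lazily set  */
    } Env;

    /* Same contract as findShadowTmp: allocate the shadow on first use,
       then keep returning the same one. */
    static int find_shadow_tmp(Env *env, int orig)
    {
        assert(orig < env->n_originalTmps);
        if (env->tmpMap[orig] == TEMP_INVALID)
            env->tmpMap[orig] = env->next_temp++;
        return env->tmpMap[orig];
    }
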
627 static void newShadowTmp ( MCEnv* mce, IRTemp orig )
629 tl_assert(orig < mce->n_originalTmps);
630 mce->tmpMap[orig]
631 = newIRTemp(mce->bb->tyenv,
632 shadowType(mce->bb->tyenv->types[orig]));
651 static Bool isOriginalAtom ( MCEnv* mce, IRAtom* a1 )
655 if (a1->tag == Iex_RdTmp && a1->Iex.RdTmp.tmp < mce->n_originalTmps)
662 static Bool isShadowAtom ( MCEnv* mce, IRAtom* a1 )
666 if (a1->tag == Iex_RdTmp && a1->Iex.RdTmp.tmp >= mce->n_originalTmps)
748 static IRAtom* assignNew ( MCEnv* mce, IRType ty, IRExpr* e ) {
749 IRTemp t = newIRTemp(mce->bb->tyenv, ty);
750 assign(mce->bb, t, e);
761 static IRAtom* mkDifD8 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
762 tl_assert(isShadowAtom(mce,a1));
763 tl_assert(isShadowAtom(mce,a2));
764 return assignNew(mce, Ity_I8, binop(Iop_And8, a1, a2));
767 static IRAtom* mkDifD16 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
768 tl_assert(isShadowAtom(mce,a1));
769 tl_assert(isShadowAtom(mce,a2));
770 return assignNew(mce, Ity_I16, binop(Iop_And16, a1, a2));
773 static IRAtom* mkDifD32 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
774 tl_assert(isShadowAtom(mce,a1));
775 tl_assert(isShadowAtom(mce,a2));
776 return assignNew(mce, Ity_I32, binop(Iop_And32, a1, a2));
779 static IRAtom* mkDifD64 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
780 tl_assert(isShadowAtom(mce,a1));
781 tl_assert(isShadowAtom(mce,a2));
782 return assignNew(mce, Ity_I64, binop(Iop_And64, a1, a2));
785 static IRAtom* mkDifDV128 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
786 tl_assert(isShadowAtom(mce,a1));
787 tl_assert(isShadowAtom(mce,a2));
788 return assignNew(mce, Ity_V128, binop(Iop_AndV128, a1, a2));
793 static IRAtom* mkUifU8 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
794 tl_assert(isShadowAtom(mce,a1));
795 tl_assert(isShadowAtom(mce,a2));
796 return assignNew(mce, Ity_I8, binop(Iop_Or8, a1, a2));
799 static IRAtom* mkUifU16 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
800 tl_assert(isShadowAtom(mce,a1));
801 tl_assert(isShadowAtom(mce,a2));
802 return assignNew(mce, Ity_I16, binop(Iop_Or16, a1, a2));
805 static IRAtom* mkUifU32 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
806 tl_assert(isShadowAtom(mce,a1));
807 tl_assert(isShadowAtom(mce,a2));
808 return assignNew(mce, Ity_I32, binop(Iop_Or32, a1, a2));
811 static IRAtom* mkUifU64 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
812 tl_assert(isShadowAtom(mce,a1));
813 tl_assert(isShadowAtom(mce,a2));
814 return assignNew(mce, Ity_I64, binop(Iop_Or64, a1, a2));
817 static IRAtom* mkUifUV128 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
818 tl_assert(isShadowAtom(mce,a1));
819 tl_assert(isShadowAtom(mce,a2));
820 return assignNew(mce, Ity_V128, binop(Iop_OrV128, a1, a2));
823 static IRAtom* mkUifU ( MCEnv* mce, IRType vty, IRAtom* a1, IRAtom* a2 ) {
825 case Ity_I8: return mkUifU8(mce, a1, a2);
826 case Ity_I16: return mkUifU16(mce, a1, a2);
827 case Ity_I32: return mkUifU32(mce, a1, a2);
828 case Ity_I64: return mkUifU64(mce, a1, a2);
829 case Ity_V128: return mkUifUV128(mce, a1, a2);
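
DifD ("Defined if either Defined") and UifU ("Undefined if either Undefined") reduce to single bitwise operations under Memcheck's V-bit convention, where a 0 bit means defined and a 1 bit means undefined. A standalone sketch of the 32-bit cases (plain C, not VEX IR):

    #include <stdint.h>

    /* V-bits: 0 = defined, 1 = undefined. */
    static uint32_t mk_difd32(uint32_t v1, uint32_t v2) { return v1 & v2; }
    static uint32_t mk_uifu32(uint32_t v1, uint32_t v2) { return v1 | v2; }

A result bit of mk_difd32 is undefined only when it is undefined in both inputs; mk_uifu32 is the pessimistic dual used as the baseline for most binary operations.
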
838 static IRAtom* mkLeft8 ( MCEnv* mce, IRAtom* a1 ) {
839 tl_assert(isShadowAtom(mce,a1));
841 return assignNew(mce, Ity_I8,
843 assignNew(mce, Ity_I8,
848 static IRAtom* mkLeft16 ( MCEnv* mce, IRAtom* a1 ) {
849 tl_assert(isShadowAtom(mce,a1));
851 return assignNew(mce, Ity_I16,
853 assignNew(mce, Ity_I16,
858 static IRAtom* mkLeft32 ( MCEnv* mce, IRAtom* a1 ) {
859 tl_assert(isShadowAtom(mce,a1));
861 return assignNew(mce, Ity_I32,
863 assignNew(mce, Ity_I32,
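
The mkLeft helpers implement the "left" pessimisation: once some bit position is undefined, every more-significant bit of the result is treated as undefined too, computed as vbits | -vbits. A runnable scalar sketch:

    #include <stdint.h>
    #include <stdio.h>

    /* x | -x sets every bit at or above the lowest set bit, so one
       undefined bit smears leftwards through all higher positions. */
    static uint32_t mk_left32(uint32_t vbits)
    {
        return vbits | (uint32_t)(0u - vbits);
    }

    int main(void)
    {
        printf("%08x\n", (unsigned)mk_left32(0x00000010)); /* fffffff0 */
        return 0;
    }
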
873 static IRAtom* mkImproveAND8 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
875 tl_assert(isOriginalAtom(mce, data));
876 tl_assert(isShadowAtom(mce, vbits));
878 return assignNew(mce, Ity_I8, binop(Iop_Or8, data, vbits));
881 static IRAtom* mkImproveAND16 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
883 tl_assert(isOriginalAtom(mce, data));
884 tl_assert(isShadowAtom(mce, vbits));
886 return assignNew(mce, Ity_I16, binop(Iop_Or16, data, vbits));
889 static IRAtom* mkImproveAND32 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
891 tl_assert(isOriginalAtom(mce, data));
892 tl_assert(isShadowAtom(mce, vbits));
894 return assignNew(mce, Ity_I32, binop(Iop_Or32, data, vbits));
897 static IRAtom* mkImproveAND64 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
899 tl_assert(isOriginalAtom(mce, data));
900 tl_assert(isShadowAtom(mce, vbits));
902 return assignNew(mce, Ity_I64, binop(Iop_Or64, data, vbits));
905 static IRAtom* mkImproveANDV128 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
907 tl_assert(isOriginalAtom(mce, data));
908 tl_assert(isShadowAtom(mce, vbits));
910 return assignNew(mce, Ity_V128, binop(Iop_OrV128, data, vbits));
916 static IRAtom* mkImproveOR8 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
918 tl_assert(isOriginalAtom(mce, data));
919 tl_assert(isShadowAtom(mce, vbits));
922 mce, Ity_I8,
924 assignNew(mce, Ity_I8, unop(Iop_Not8, data)),
928 static IRAtom* mkImproveOR16 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
930 tl_assert(isOriginalAtom(mce, data));
931 tl_assert(isShadowAtom(mce, vbits));
934 mce, Ity_I16,
936 assignNew(mce, Ity_I16, unop(Iop_Not16, data)),
940 static IRAtom* mkImproveOR32 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
942 tl_assert(isOriginalAtom(mce, data));
943 tl_assert(isShadowAtom(mce, vbits));
946 mce, Ity_I32,
948 assignNew(mce, Ity_I32, unop(Iop_Not32, data)),
952 static IRAtom* mkImproveOR64 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
954 tl_assert(isOriginalAtom(mce, data));
955 tl_assert(isShadowAtom(mce, vbits));
958 mce, Ity_I64,
960 assignNew(mce, Ity_I64, unop(Iop_Not64, data)),
964 static IRAtom* mkImproveORV128 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
966 tl_assert(isOriginalAtom(mce, data));
967 tl_assert(isShadowAtom(mce, vbits));
970 mce, Ity_V128,
972 assignNew(mce, Ity_V128, unop(Iop_NotV128, data)),
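
The improvement terms capture the one way AND and OR can be more defined than UifU suggests: a defined 0 forces an AND result bit, and a defined 1 forces an OR result bit. A scalar sketch of both (V-bits as above; a 0 bit in the improvement term marks a forcing position):

    #include <stdint.h>

    /* data | vbits is 0 exactly where the operand is a *defined 0*;
       DifD'ing the result vbits with this clears (defines) those bits. */
    static uint32_t improve_and32(uint32_t data, uint32_t vbits)
    {
        return data | vbits;
    }

    /* ~data | vbits is 0 exactly where the operand is a *defined 1*. */
    static uint32_t improve_or32(uint32_t data, uint32_t vbits)
    {
        return ~data | vbits;
    }
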
978 static IRAtom* mkPCastTo( MCEnv* mce, IRType dst_ty, IRAtom* vbits )
984 tl_assert(isShadowAtom(mce,vbits));
985 ty = typeOfIRExpr(mce->bb->tyenv, vbits);
992 tmp1 = assignNew(mce, Ity_I1, binop(Iop_CmpNE8, vbits, mkU8(0)));
995 tmp1 = assignNew(mce, Ity_I1, binop(Iop_CmpNE16, vbits, mkU16(0)));
998 tmp1 = assignNew(mce, Ity_I1, binop(Iop_CmpNE32, vbits, mkU32(0)));
1001 tmp1 = assignNew(mce, Ity_I1, binop(Iop_CmpNE64, vbits, mkU64(0)));
1012 return assignNew(mce, Ity_I8, unop(Iop_1Sto8, tmp1));
1014 return assignNew(mce, Ity_I16, unop(Iop_1Sto16, tmp1));
1016 return assignNew(mce, Ity_I32, unop(Iop_1Sto32, tmp1));
1018 return assignNew(mce, Ity_I64, unop(Iop_1Sto64, tmp1));
1020 tmp1 = assignNew(mce, Ity_I64, unop(Iop_1Sto64, tmp1));
1021 tmp1 = assignNew(mce, Ity_V128, binop(Iop_64HLtoV128, tmp1, tmp1));
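
mkPCastTo collapses a shadow to "all defined or all undefined": compare the vbits against zero, then sign-extend the single resulting bit to the destination width (for V128, the 64-bit replica is doubled up with Iop_64HLtoV128). The scalar semantics, as a sketch:

    #include <stdint.h>

    /* PCast: all zeros if every bit is defined, else all ones,
       at the destination width. */
    static uint64_t pcast_to_64(uint64_t vbits)
    {
        return vbits == 0 ? 0 : ~(uint64_t)0;   /* CmpNE64 + 1Sto64 */
    }

    static uint8_t pcast_to_8(uint64_t vbits)
    {
        return vbits == 0 ? 0 : 0xFF;           /* CmpNE64 + 1Sto8  */
    }
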
1039 static void setHelperAnns ( MCEnv* mce, IRDirty* di ) {
1042 di->fxState[0].offset = mce->layout->offset_SP;
1043 di->fxState[0].size = mce->layout->sizeof_SP;
1045 di->fxState[1].offset = mce->layout->offset_IP;
1046 di->fxState[1].size = mce->layout->sizeof_IP;
1060 static void complainIfUndefined ( MCEnv* mce, IRAtom* atom )
1072 tl_assert(isOriginalAtom(mce, atom));
1073 vatom = expr2vbits( mce, atom );
1074 tl_assert(isShadowAtom(mce, vatom));
1077 ty = typeOfIRExpr(mce->bb->tyenv, vatom);
1082 cond = mkPCastTo( mce, Ity_I1, vatom );
1116 setHelperAnns( mce, di );
1117 stmt( mce->bb, IRStmt_Dirty(di));
1126 newShadowTmp(mce, atom->Iex.RdTmp.tmp);
1127 assign(mce->bb, findShadowTmp(mce, atom->Iex.RdTmp.tmp),
1142 static Bool isAlwaysDefd ( MCEnv* mce, Int offset, Int size )
1150 for (i = 0; i < mce->layout->n_alwaysDefd; i++) {
1151 minoffD = mce->layout->alwaysDefd[i].offset;
1152 maxoffD = minoffD + mce->layout->alwaysDefd[i].size - 1;
1174 void do_shadow_PUT ( MCEnv* mce, Int offset,
1180 tl_assert(isOriginalAtom(mce, atom));
1181 vatom = expr2vbits( mce, atom );
1184 tl_assert(isShadowAtom(mce, vatom));
1187 ty = typeOfIRExpr(mce->bb->tyenv, vatom);
1189 if (isAlwaysDefd(mce, offset, sizeofIRType(ty))) {
1192 /* complainIfUndefined(mce, atom); */
1195 stmt( mce->bb, IRStmt_Put( offset + mce->layout->total_sizeB, vatom ) );
1204 void do_shadow_PUTI ( MCEnv* mce,
1211 tl_assert(isOriginalAtom(mce,atom));
1212 vatom = expr2vbits( mce, atom );
1218 tl_assert(isOriginalAtom(mce,ix));
1219 complainIfUndefined(mce,ix);
1220 if (isAlwaysDefd(mce, descr->base, arrSize)) {
1223 /* complainIfUndefined(mce, atom); */
1228 = mkIRRegArray( descr->base + mce->layout->total_sizeB,
1230 stmt( mce->bb, IRStmt_PutI( mkIRPutI( new_descr, ix, bias, vatom ) ));
1239 IRExpr* shadow_GET ( MCEnv* mce, Int offset, IRType ty )
1243 if (isAlwaysDefd(mce, offset, sizeofIRType(ty))) {
1249 return IRExpr_Get( offset + mce->layout->total_sizeB, tyS );
1258 IRExpr* shadow_GETI ( MCEnv* mce, IRRegArray* descr, IRAtom* ix, Int bias )
1264 tl_assert(isOriginalAtom(mce,ix));
1265 complainIfUndefined(mce,ix);
1266 if (isAlwaysDefd(mce, descr->base, arrSize)) {
1273 = mkIRRegArray( descr->base + mce->layout->total_sizeB,
1289 IRAtom* mkLazy2 ( MCEnv* mce, IRType finalVty, IRAtom* va1, IRAtom* va2 )
1293 tl_assert(isShadowAtom(mce,va1));
1294 tl_assert(isShadowAtom(mce,va2));
1295 at = mkPCastTo(mce, Ity_I32, va1);
1296 at = mkUifU(mce, Ity_I32, at, mkPCastTo(mce, Ity_I32, va2));
1297 at = mkPCastTo(mce, finalVty, at);
1308 IRAtom* mkLazyN ( MCEnv* mce,
1316 tl_assert(isOriginalAtom(mce, exprvec[i]));
1326 here = mkPCastTo( mce, Ity_I32, expr2vbits(mce, exprvec[i]) );
1327 curr = mkUifU32(mce, here, curr);
1330 return mkPCastTo(mce, finalVtype, curr );
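
mkLazy2 and mkLazyN give the cheap catch-all rule for helper calls and hard ops: pessimise each argument's shadow down to I32, UifU those together, then pessimise the accumulator to the result type. A scalar sketch of the two-argument case with a 64-bit result (widths chosen for illustration):

    #include <stdint.h>

    /* If any bit of any argument is undefined, the whole result is
       treated as undefined. */
    static uint64_t lazy2_to_64(uint32_t va1, uint32_t va2)
    {
        uint32_t a = va1 ? 0xFFFFFFFFu : 0;   /* mkPCastTo(Ity_I32, va1) */
        uint32_t b = va2 ? 0xFFFFFFFFu : 0;   /* mkPCastTo(Ity_I32, va2) */
        uint32_t u = a | b;                   /* mkUifU32                */
        return u ? ~(uint64_t)0 : 0;          /* mkPCastTo(finalVty)     */
    }
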
1341 IRAtom* expensiveAdd32 ( MCEnv* mce, IRAtom* qaa, IRAtom* qbb,
1348 tl_assert(isShadowAtom(mce,qaa));
1349 tl_assert(isShadowAtom(mce,qbb));
1350 tl_assert(isOriginalAtom(mce,aa));
1351 tl_assert(isOriginalAtom(mce,bb));
1363 a_min = assignNew(mce,ty,
1365 assignNew(mce,ty, unop(opNOT, qaa))));
1368 b_min = assignNew(mce,ty,
1370 assignNew(mce,ty, unop(opNOT, qbb))));
1373 a_max = assignNew(mce,ty, binop(opOR, aa, qaa));
1376 b_max = assignNew(mce,ty, binop(opOR, bb, qbb));
1380 assignNew(mce,ty,
1382 assignNew(mce,ty, binop(opOR, qaa, qbb)),
1383 assignNew(mce,ty,
1384 binop(opXOR, assignNew(mce,ty, binop(opADD, a_min, b_min)),
1385 assignNew(mce,ty, binop(opADD, a_max, b_max))
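
expensiveAdd32 gets bit-accurate definedness for addition by bounding the carry chain: compute the sum with all undefined input bits forced to 0 (a_min + b_min) and forced to 1 (a_max + b_max); wherever the two sums agree and both input bits are defined, the result bit is provably defined. A runnable scalar equivalent:

    #include <stdint.h>
    #include <stdio.h>

    /* qaa/qbb are V-bits (0 = defined), aa/bb the actual values. */
    static uint32_t expensive_add32(uint32_t qaa, uint32_t qbb,
                                    uint32_t aa,  uint32_t bb)
    {
        uint32_t a_min = aa & ~qaa;   /* undefined bits forced to 0 */
        uint32_t b_min = bb & ~qbb;
        uint32_t a_max = aa |  qaa;   /* undefined bits forced to 1 */
        uint32_t b_max = bb |  qbb;
        return (qaa | qbb) | ((a_min + b_min) ^ (a_max + b_max));
    }

    int main(void)
    {
        /* Low nibble of bb undefined; bit 4 of the sum is still
           defined, since neither extreme sum carries out of bits
           0..3.  Prints 0000000f. */
        printf("%08x\n", (unsigned)expensive_add32(0, 0x0F, 0x10, 0x00));
        return 0;
    }
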
1399 static IRAtom* mkPCast8x16 ( MCEnv* mce, IRAtom* at )
1401 return assignNew(mce, Ity_V128, unop(Iop_CmpNEZ8x16, at));
1404 static IRAtom* mkPCast16x8 ( MCEnv* mce, IRAtom* at )
1406 return assignNew(mce, Ity_V128, unop(Iop_CmpNEZ16x8, at));
1409 static IRAtom* mkPCast32x4 ( MCEnv* mce, IRAtom* at )
1411 return assignNew(mce, Ity_V128, unop(Iop_CmpNEZ32x4, at));
1414 static IRAtom* mkPCast64x2 ( MCEnv* mce, IRAtom* at )
1416 return assignNew(mce, Ity_V128, unop(Iop_CmpNEZ64x2, at));
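
The vector variants PCast each lane independently: Iop_CmpNEZ32x4, for instance, turns each 32-bit lane into all ones if any bit in it is set. A lane-wise sketch over a plain array standing in for a V128 value:

    #include <stdint.h>

    /* Iop_CmpNEZ32x4 semantics on V-bits: a lane with any undefined
       bit becomes fully undefined; a fully defined lane stays 0. */
    static void pcast_32x4(uint32_t lanes[4])
    {
        for (int i = 0; i < 4; i++)
            lanes[i] = lanes[i] ? 0xFFFFFFFFu : 0;
    }
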
1457 IRAtom* binary32Fx4 ( MCEnv* mce, IRAtom* vatomX, IRAtom* vatomY )
1460 tl_assert(isShadowAtom(mce, vatomX));
1461 tl_assert(isShadowAtom(mce, vatomY));
1462 at = mkUifUV128(mce, vatomX, vatomY);
1463 at = assignNew(mce, Ity_V128, mkPCast32x4(mce, at));
1468 IRAtom* unary32Fx4 ( MCEnv* mce, IRAtom* vatomX )
1471 tl_assert(isShadowAtom(mce, vatomX));
1472 at = assignNew(mce, Ity_V128, mkPCast32x4(mce, vatomX));
1477 IRAtom* binary32F0x4 ( MCEnv* mce, IRAtom* vatomX, IRAtom* vatomY )
1480 tl_assert(isShadowAtom(mce, vatomX));
1481 tl_assert(isShadowAtom(mce, vatomY));
1482 at = mkUifUV128(mce, vatomX, vatomY);
1483 at = assignNew(mce, Ity_I32, unop(Iop_V128to32, at));
1484 at = mkPCastTo(mce, Ity_I32, at);
1485 at = assignNew(mce, Ity_V128, binop(Iop_SetV128lo32, vatomX, at));
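
binary32F0x4 handles SSE-style ops that touch only the low lane: UifU the whole shadows, PCast just the low 32 bits, and splice that back under vatomX's untouched upper lanes. A struct-based sketch (the V128 struct is a stand-in, not a VEX type):

    #include <stdint.h>

    typedef struct { uint32_t lane[4]; } V128;

    /* Shadow for a low-lane-only binary op (e.g. addss): upper lanes
       come straight from X's shadow, the low lane is PCast(UifU). */
    static V128 binary32f0x4_shadow(V128 vx, V128 vy)
    {
        V128 r = vx;                            /* SetV128lo32 base */
        uint32_t lo = vx.lane[0] | vy.lane[0];  /* UifU, low lane   */
        r.lane[0] = lo ? 0xFFFFFFFFu : 0;       /* PCast to I32     */
        return r;
    }
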
1490 IRAtom* unary32F0x4 ( MCEnv* mce, IRAtom* vatomX )
1493 tl_assert(isShadowAtom(mce, vatomX));
1494 at = assignNew(mce, Ity_I32, unop(Iop_V128to32, vatomX));
1495 at = mkPCastTo(mce, Ity_I32, at);
1496 at = assignNew(mce, Ity_V128, binop(Iop_SetV128lo32, vatomX, at));
1503 IRAtom* binary64Fx2 ( MCEnv* mce, IRAtom* vatomX, IRAtom* vatomY )
1506 tl_assert(isShadowAtom(mce, vatomX));
1507 tl_assert(isShadowAtom(mce, vatomY));
1508 at = mkUifUV128(mce, vatomX, vatomY);
1509 at = assignNew(mce, Ity_V128, mkPCast64x2(mce, at));
1514 IRAtom* unary64Fx2 ( MCEnv* mce, IRAtom* vatomX )
1517 tl_assert(isShadowAtom(mce, vatomX));
1518 at = assignNew(mce, Ity_V128, mkPCast64x2(mce, vatomX));
1523 IRAtom* binary64F0x2 ( MCEnv* mce, IRAtom* vatomX, IRAtom* vatomY )
1526 tl_assert(isShadowAtom(mce, vatomX));
1527 tl_assert(isShadowAtom(mce, vatomY));
1528 at = mkUifUV128(mce, vatomX, vatomY);
1529 at = assignNew(mce, Ity_I64, unop(Iop_V128to64, at));
1530 at = mkPCastTo(mce, Ity_I64, at);
1531 at = assignNew(mce, Ity_V128, binop(Iop_SetV128lo64, vatomX, at));
1536 IRAtom* unary64F0x2 ( MCEnv* mce, IRAtom* vatomX )
1539 tl_assert(isShadowAtom(mce, vatomX));
1540 at = assignNew(mce, Ity_I64, unop(Iop_V128to64, vatomX));
1541 at = mkPCastTo(mce, Ity_I64, at);
1542 at = assignNew(mce, Ity_V128, binop(Iop_SetV128lo64, vatomX, at));
1575 IRAtom* vectorNarrowV128 ( MCEnv* mce, IROp narrow_op,
1586 tl_assert(isShadowAtom(mce,vatom1));
1587 tl_assert(isShadowAtom(mce,vatom2));
1588 at1 = assignNew(mce, Ity_V128, pcast(mce, vatom1));
1589 at2 = assignNew(mce, Ity_V128, pcast(mce, vatom2));
1590 at3 = assignNew(mce, Ity_V128, binop(narrow_op, at1, at2));
1599 IRAtom* binary8Ix16 ( MCEnv* mce, IRAtom* vatom1, IRAtom* vatom2 )
1602 at = mkUifUV128(mce, vatom1, vatom2);
1603 at = mkPCast8x16(mce, at);
1608 IRAtom* binary16Ix8 ( MCEnv* mce, IRAtom* vatom1, IRAtom* vatom2 )
1611 at = mkUifUV128(mce, vatom1, vatom2);
1612 at = mkPCast16x8(mce, at);
1617 IRAtom* binary32Ix4 ( MCEnv* mce, IRAtom* vatom1, IRAtom* vatom2 )
1620 at = mkUifUV128(mce, vatom1, vatom2);
1621 at = mkPCast32x4(mce, at);
1626 IRAtom* binary64Ix2 ( MCEnv* mce, IRAtom* vatom1, IRAtom* vatom2 )
1629 at = mkUifUV128(mce, vatom1, vatom2);
1630 at = mkPCast64x2(mce, at);
1640 IRAtom* expr2vbits_Binop ( MCEnv* mce,
1649 IRAtom* vatom1 = expr2vbits( mce, atom1 );
1650 IRAtom* vatom2 = expr2vbits( mce, atom2 );
1652 tl_assert(isOriginalAtom(mce,atom1));
1653 tl_assert(isOriginalAtom(mce,atom2));
1654 tl_assert(isShadowAtom(mce,vatom1));
1655 tl_assert(isShadowAtom(mce,vatom2));
1671 complainIfUndefined(mce, atom2);
1672 return assignNew(mce, Ity_V128, binop(op, vatom1, atom2));
1685 return binary8Ix16(mce, vatom1, vatom2);
1701 return binary16Ix8(mce, vatom1, vatom2);
1711 return binary32Ix4(mce, vatom1, vatom2);
1719 return binary64Ix2(mce, vatom1, vatom2);
1724 return vectorNarrowV128(mce, op, vatom1, vatom2);
1735 return binary64Fx2(mce, vatom1, vatom2);
1746 return binary64F0x2(mce, vatom1, vatom2);
1759 return binary32Fx4(mce, vatom1, vatom2);
1770 return binary32F0x4(mce, vatom1, vatom2);
1784 return assignNew(mce, Ity_V128, binop(op, vatom1, vatom2));
1793 return mkLazy2(mce, Ity_I64, vatom1, vatom2);
1800 return mkLazy2(mce, Ity_I32, vatom1, vatom2);
1804 return mkLazy2(mce, Ity_I16, vatom1, vatom2);
1815 return mkLazy2(mce, Ity_I64, vatom1, vatom2);
1818 return mkLazy2(mce, Ity_I32, vatom1, vatom2);
1824 return mkLazy2(mce, Ity_I64, vatom1, vatom2);
1827 return assignNew(mce, Ity_I32, binop(op, vatom1, vatom2));
1829 return assignNew(mce, Ity_I64, binop(op, vatom1, vatom2));
1833 IRAtom* vLo32 = mkLeft32(mce, mkUifU32(mce, vatom1,vatom2));
1834 IRAtom* vHi32 = mkPCastTo(mce, Ity_I32, vLo32);
1835 return assignNew(mce, Ity_I64, binop(Iop_32HLto64, vHi32, vLo32));
1840 IRAtom* vLo16 = mkLeft16(mce, mkUifU16(mce, vatom1,vatom2));
1841 IRAtom* vHi16 = mkPCastTo(mce, Ity_I16, vLo16);
1842 return assignNew(mce, Ity_I32, binop(Iop_16HLto32, vHi16, vLo16));
1847 IRAtom* vLo8 = mkLeft8(mce, mkUifU8(mce, vatom1,vatom2));
1848 IRAtom* vHi8 = mkPCastTo(mce, Ity_I8, vLo8);
1849 return assignNew(mce, Ity_I16, binop(Iop_8HLto16, vHi8, vLo8));
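
For the widening multiplies the instrumentation approximates: the low half gets the Left-pessimised UifU of the operand shadows, the high half gets its PCast, and the two are glued with an HLto op. A scalar sketch of the 32x32->64 case:

    #include <stdint.h>

    static uint64_t shadow_mull32(uint32_t v1, uint32_t v2)
    {
        uint32_t vlo = v1 | v2;                /* mkUifU32  */
        vlo |= (uint32_t)(0u - vlo);           /* mkLeft32  */
        uint32_t vhi = vlo ? 0xFFFFFFFFu : 0;  /* mkPCastTo */
        return ((uint64_t)vhi << 32) | vlo;    /* 32HLto64  */
    }
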
1854 return expensiveAdd32(mce, vatom1,vatom2, atom1,atom2);
1858 return mkLeft32(mce, mkUifU32(mce, vatom1,vatom2));
1863 return mkLeft16(mce, mkUifU16(mce, vatom1,vatom2));
1867 return mkLeft8(mce, mkUifU8(mce, vatom1,vatom2));
1872 return mkPCastTo(mce, Ity_I1, mkUifU32(mce, vatom1,vatom2));
1875 return mkPCastTo(mce, Ity_I1, mkUifU16(mce, vatom1,vatom2));
1878 return mkPCastTo(mce, Ity_I1, mkUifU8(mce, vatom1,vatom2));
1883 complainIfUndefined(mce, atom2);
1884 return assignNew(mce, Ity_I32, binop(op, vatom1, atom2));
1888 complainIfUndefined(mce, atom2);
1889 return assignNew(mce, Ity_I16, binop(op, vatom1, atom2));
1893 complainIfUndefined(mce, atom2);
1894 return assignNew(mce, Ity_I8, binop(op, vatom1, atom2));
1898 complainIfUndefined(mce, atom2);
1899 return assignNew(mce, Ity_I64, binop(op, vatom1, atom2));
1936 mce,
1938 difd(mce, uifu(mce, vatom1, vatom2),
1939 difd(mce, improve(mce, atom1, vatom1),
1940 improve(mce, atom2, vatom2) ) ) );
1943 return mkUifU8(mce, vatom1, vatom2);
1945 return mkUifU16(mce, vatom1, vatom2);
1947 return mkUifU32(mce, vatom1, vatom2);
1949 return mkUifU64(mce, vatom1, vatom2);
1951 return mkUifUV128(mce, vatom1, vatom2);
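
Putting the And/Or pieces together, the shadow built at the call site above is DifD(UifU(v1,v2), DifD(improve(d1,v1), improve(d2,v2))). For 32-bit AND that is, in scalar form:

    #include <stdint.h>

    static uint32_t shadow_and32(uint32_t d1, uint32_t v1,
                                 uint32_t d2, uint32_t v2)
    {
        uint32_t uifu = v1 | v2;       /* pessimistic baseline      */
        uint32_t imp1 = d1 | v1;       /* 0 where d1 is a defined 0 */
        uint32_t imp2 = d2 | v2;
        return uifu & imp1 & imp2;     /* DifD pulls those bits low */
    }

So a defined 0 in either operand yields a defined result bit, no matter how undefined the other operand is there.
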
1961 IRExpr* expr2vbits_Unop ( MCEnv* mce, IROp op, IRAtom* atom )
1963 IRAtom* vatom = expr2vbits( mce, atom );
1964 tl_assert(isOriginalAtom(mce,atom));
1968 return unary64Fx2(mce, vatom);
1971 return unary64F0x2(mce, vatom);
1975 return unary32Fx4(mce, vatom);
1980 return unary32F0x4(mce, vatom);
1984 return assignNew(mce, Ity_V128, unop(op, vatom));
1995 return mkPCastTo(mce, Ity_I64, vatom);
1999 return mkPCastTo(mce, Ity_I32, vatom);
2005 return assignNew(mce, Ity_I64, unop(op, vatom));
2014 return assignNew(mce, Ity_I32, unop(op, vatom));
2020 return assignNew(mce, Ity_I16, unop(op, vatom));
2025 return assignNew(mce, Ity_I8, unop(op, vatom));
2028 return assignNew(mce, Ity_I1, unop(Iop_32to1, vatom));
2050 IRAtom* expr2vbits_LDle_WRK ( MCEnv* mce, IRType ty, IRAtom* addr, UInt bias )
2058 tl_assert(isOriginalAtom(mce,addr));
2062 complainIfUndefined( mce, addr );
2090 IRType tyAddr = mce->hWordTy;
2094 addrAct = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias) );
2099 datavbits = newIRTemp(mce->bb->tyenv, ty);
2103 setHelperAnns( mce, di );
2104 stmt( mce->bb, IRStmt_Dirty(di) );
2111 IRAtom* expr2vbits_LDle ( MCEnv* mce, IRType ty, IRAtom* addr, UInt bias )
2119 return expr2vbits_LDle_WRK(mce, ty, addr, bias);
2121 v64lo = expr2vbits_LDle_WRK(mce, Ity_I64, addr, bias);
2122 v64hi = expr2vbits_LDle_WRK(mce, Ity_I64, addr, bias+8);
2123 return assignNew( mce,
2133 IRAtom* expr2vbits_ITE ( MCEnv* mce,
2143 tl_assert(isOriginalAtom(mce, cond));
2144 tl_assert(isOriginalAtom(mce, iftrue));
2145 tl_assert(isOriginalAtom(mce, iffalse));
2147 vbitsC = expr2vbits(mce, cond);
2148 vbits0 = expr2vbits(mce, iffalse);
2149 vbits1 = expr2vbits(mce, iftrue);
2150 ty = typeOfIRExpr(mce->bb->tyenv, vbits0);
2153 mkUifU(mce, ty, assignNew(mce, ty, IRExpr_ITE(cond, vbits1, vbits0)),
2154 mkPCastTo(mce, ty, vbitsC) );
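
expr2vbits_ITE selects between the two branch shadows with the real condition, then UifU's in the PCast of the condition's own shadow, so an undefined condition poisons the whole result. A scalar sketch:

    #include <stdint.h>

    static uint32_t shadow_ite32(int cond, uint32_t vcond,
                                 uint32_t vtrue, uint32_t vfalse)
    {
        uint32_t picked = cond ? vtrue : vfalse;  /* ITE on shadows */
        uint32_t pc = vcond ? 0xFFFFFFFFu : 0;    /* PCast of vcond */
        return picked | pc;                       /* mkUifU         */
    }
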
2160 IRExpr* expr2vbits ( MCEnv* mce, IRExpr* e )
2165 return shadow_GET( mce, e->Iex.Get.offset, e->Iex.Get.ty );
2168 return shadow_GETI( mce, e->Iex.GetI.descr,
2172 return IRExpr_RdTmp( findShadowTmp(mce, e->Iex.RdTmp.tmp) );
2175 return definedOfType(shadowType(typeOfIRExpr(mce->bb->tyenv, e)));
2179 mce,
2185 return expr2vbits_Unop( mce, e->Iex.Unop.op, e->Iex.Unop.arg );
2188 return expr2vbits_LDle( mce, e->Iex.Load.ty,
2192 return mkLazyN( mce, e->Iex.CCall.args,
2197 return expr2vbits_ITE( mce, e->Iex.ITE.cond, e->Iex.ITE.iftrue,
2215 IRExpr* zwidenToHostWord ( MCEnv* mce, IRAtom* vatom )
2220 tl_assert(isShadowAtom(mce,vatom));
2222 ty = typeOfIRExpr(mce->bb->tyenv, vatom);
2223 tyH = mce->hWordTy;
2228 case Ity_I16: return assignNew(mce, tyH, unop(Iop_16Uto32, vatom));
2229 case Ity_I8: return assignNew(mce, tyH, unop(Iop_8Uto32, vatom));
2246 void do_shadow_STle ( MCEnv* mce,
2259 tyAddr = mce->hWordTy;
2270 tl_assert(isOriginalAtom(mce, data));
2272 vdata = expr2vbits( mce, data );
2277 tl_assert(isOriginalAtom(mce,addr));
2278 tl_assert(isShadowAtom(mce,vdata));
2280 ty = typeOfIRExpr(mce->bb->tyenv, vdata);
2284 complainIfUndefined( mce, addr );
2310 addrLo64 = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias0) );
2311 vdataLo64 = assignNew(mce, Ity_I64, unop(Iop_V128to64, vdata));
2317 addrHi64 = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias8) );
2318 vdataHi64 = assignNew(mce, Ity_I64, unop(Iop_V128HIto64, vdata));
2323 setHelperAnns( mce, diLo64 );
2324 setHelperAnns( mce, diHi64 );
2325 stmt( mce->bb, IRStmt_Dirty(diLo64) );
2326 stmt( mce->bb, IRStmt_Dirty(diHi64) );
2336 addrAct = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias) );
2350 zwidenToHostWord( mce, vdata )));
2352 setHelperAnns( mce, di );
2353 stmt( mce->bb, IRStmt_Dirty(di) );
2375 void do_shadow_Dirty ( MCEnv* mce, IRDirty* d )
2383 complainIfUndefined(mce, d->guard);
2393 here = mkPCastTo( mce, Ity_I32, expr2vbits(mce, d->args[i]) );
2394 curr = mkUifU32(mce, here, curr);
2405 if (isAlwaysDefd(mce, d->fxState[i].offset, d->fxState[i].size )) {
2424 src = assignNew( mce, tySrc,
2425 shadow_GET(mce, gOff, tySrc ) );
2426 here = mkPCastTo( mce, Ity_I32, src );
2427 curr = mkUifU32(mce, here, curr);
2444 complainIfUndefined(mce, d->mAddr);
2446 tyAddr = typeOfIRExpr(mce->bb->tyenv, d->mAddr);
2448 tl_assert(tyAddr == mce->hWordTy); /* not really right */
2458 mce, Ity_I32,
2459 expr2vbits_LDle ( mce, Ity_I32,
2462 curr = mkUifU32(mce, here, curr);
2468 mce, Ity_I32,
2469 expr2vbits_LDle ( mce, Ity_I16,
2472 curr = mkUifU32(mce, here, curr);
2484 dst = findShadowTmp(mce, d->tmp);
2485 tyDst = typeOfIRTemp(mce->bb->tyenv, d->tmp);
2486 assign( mce->bb, dst, mkPCastTo( mce, tyDst, curr) );
2495 if (isAlwaysDefd(mce, d->fxState[i].offset, d->fxState[i].size ))
2509 do_shadow_PUT( mce, gOff,
2511 mkPCastTo( mce, tyDst, curr ) );
2523 do_shadow_STle( mce, d->mAddr, d->mSize - toDo,
2525 mkPCastTo( mce, Ity_I32, curr ) );
2530 do_shadow_STle( mce, d->mAddr, d->mSize - toDo,
2532 mkPCastTo( mce, Ity_I16, curr ) );
2624 MCEnv mce;
2634 mce.bb = bb;
2635 mce.layout = layout;
2636 mce.n_originalTmps = bb->tyenv->types_used;
2637 mce.hWordTy = hWordTy;
2638 mce.tmpMap = LibVEX_Alloc(mce.n_originalTmps * sizeof(IRTemp));
2639 for (i = 0; i < mce.n_originalTmps; i++)
2640 mce.tmpMap[i] = IRTemp_INVALID;
2670 assign( bb, findShadowTmp(&mce, st->Ist.WrTmp.tmp),
2671 expr2vbits( &mce, st->Ist.WrTmp.data) );
2675 do_shadow_PUT( &mce,
2682 do_shadow_PUTI( &mce,
2690 do_shadow_STle( &mce, st->Ist.Store.addr, 0/* addr bias */,
2697 complainIfUndefined( &mce, st->Ist.Exit.guard );
2701 do_shadow_Dirty( &mce, st->Ist.Dirty.details );
2738 complainIfUndefined( &mce, bb->next );