
Lines Matching refs:dst

268     Register dst,
286 leap(dst, FieldOperand(object, offset));
289 testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
295 RecordWrite(object, dst, value, save_fp, remembered_set_action,
304 Move(dst, kZapValue, Assembler::RelocInfoNone());
327 Register dst = index;
328 leap(dst, Operand(object, index, times_pointer_size,
331 RecordWrite(object, dst, value, save_fp, remembered_set_action,
347 Register dst,
351 DCHECK(!object.is(dst));
352 DCHECK(!map.is(dst));
380 leap(dst, FieldOperand(object, HeapObject::kMapOffset));
397 RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
410 Move(dst, kZapValue, Assembler::RelocInfoNone());
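
The RecordWrite lines above (268-331, 347-410) compute the slot address into dst, check its pointer alignment, call the write-barrier stub, and then zap the clobbered registers (lines 304 and 410). A minimal sketch of the debug-only guard those zap lines presumably sit under, assuming the emit_debug_code() pattern used elsewhere in this file:

  // Hedged sketch, not the verbatim source: clobber the address and value
  // registers with kZapValue after the barrier so stale uses are caught
  // when running with debug code enabled.
  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(dst, kZapValue, Assembler::RelocInfoNone());
  }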
527 const Register dst = scratch;
528 leap(dst, FieldOperand(js_function, offset));
542 movp(arg_reg_2, dst); // rdx gets r15.
548 movp(arg_reg_2, dst); // rsi gets r15.
796 void MacroAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
799 vcvtss2sd(dst, src, src);
801 cvtss2sd(dst, src);
806 void MacroAssembler::Cvtss2sd(XMMRegister dst, const Operand& src) {
809 vcvtss2sd(dst, dst, src);
811 cvtss2sd(dst, src);
816 void MacroAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
819 vcvtsd2ss(dst, src, src);
821 cvtsd2ss(dst, src);
826 void MacroAssembler::Cvtsd2ss(XMMRegister dst, const Operand& src) {
829 vcvtsd2ss(dst, dst, src);
831 cvtsd2ss(dst, src);
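
Lines 796-831 alternate between an AVX encoding (vcvtss2sd, vcvtsd2ss) and the legacy SSE encoding; the guard lines between them do not match the refs filter and are elided. A sketch of the dispatch each of these wrappers presumably uses, assuming the CpuFeatures/CpuFeatureScope helpers that appear throughout this file:

void MacroAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vcvtss2sd(dst, src, src);  // VEX form: the first source supplies the untouched upper lanes
  } else {
    cvtss2sd(dst, src);        // legacy SSE encoding
  }
}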
836 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
839 vxorpd(dst, dst, dst);
840 vcvtlsi2sd(dst, dst, src);
842 xorpd(dst, dst);
843 cvtlsi2sd(dst, src);
848 void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
851 vxorpd(dst, dst, dst);
852 vcvtlsi2sd(dst, dst, src);
854 xorpd(dst, dst);
855 cvtlsi2sd(dst, src);
860 void MacroAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
863 vxorps(dst, dst, dst);
864 vcvtlsi2ss(dst, dst, src);
866 xorps(dst, dst);
867 cvtlsi2ss(dst, src);
872 void MacroAssembler::Cvtlsi2ss(XMMRegister dst, const Operand& src) {
875 vxorps(dst, dst, dst);
876 vcvtlsi2ss(dst, dst, src);
878 xorps(dst, dst);
879 cvtlsi2ss(dst, src);
884 void MacroAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
887 vxorps(dst, dst, dst);
888 vcvtqsi2ss(dst, dst, src);
890 xorps(dst, dst);
891 cvtqsi2ss(dst, src);
896 void MacroAssembler::Cvtqsi2ss(XMMRegister dst, const Operand& src) {
899 vxorps(dst, dst, dst);
900 vcvtqsi2ss(dst, dst, src);
902 xorps(dst, dst);
903 cvtqsi2ss(dst, src);
908 void MacroAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
911 vxorpd(dst, dst, dst);
912 vcvtqsi2sd(dst, dst, src);
914 xorpd(dst, dst);
915 cvtqsi2sd(dst, src);
920 void MacroAssembler::Cvtqsi2sd(XMMRegister dst, const Operand& src) {
923 vxorpd(dst, dst, dst);
924 vcvtqsi2sd(dst, dst, src);
926 xorpd(dst, dst);
927 cvtqsi2sd(dst, src);
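
In the Cvtlsi2sd/Cvtlsi2ss/Cvtqsi2sd/Cvtqsi2ss wrappers (836-927), dst is xor-zeroed before the conversion: cvtsi2sd/cvtsi2ss write only the low lanes of the destination, so without the xor the instruction carries a false dependency on whatever dst held before. A sketch of one wrapper with that rationale spelled out, under the same hedged CpuFeatures guard as above:

void MacroAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vxorpd(dst, dst, dst);      // break the false dependency on dst's previous value
    vcvtqsi2sd(dst, dst, src);
  } else {
    xorpd(dst, dst);            // cvtqsi2sd leaves the upper lane of dst untouched
    cvtqsi2sd(dst, src);
  }
}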
932 void MacroAssembler::Cvtqui2ss(XMMRegister dst, Register src, Register tmp) {
937 Cvtqsi2ss(dst, src);
945 Cvtqsi2ss(dst, src);
946 addss(dst, dst);
951 void MacroAssembler::Cvtqui2sd(XMMRegister dst, Register src, Register tmp) {
956 Cvtqsi2sd(dst, src);
963 Cvtqsi2sd(dst, src);
964 addsd(dst, dst);
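
Cvtqui2ss/Cvtqui2sd (932-964) convert an unsigned 64-bit value even though x64 only has signed converts: when the sign bit is clear the value is converted directly, otherwise it is halved with the dropped low bit ORed back in (so the conversion still rounds correctly), converted, and then doubled with addss/addsd. A standalone arithmetic check of that trick in plain C++, not assembler; UnsignedToDouble is a hypothetical name:

#include <cassert>
#include <cstdint>

double UnsignedToDouble(uint64_t v) {
  if (v <= static_cast<uint64_t>(INT64_MAX)) {
    // Sign bit clear: corresponds to the direct Cvtqsi2sd path.
    return static_cast<double>(static_cast<int64_t>(v));
  }
  // Sign bit set: halve, keep the dropped bit sticky so rounding is
  // preserved, convert, then double the result (addsd(dst, dst)).
  uint64_t halved = (v >> 1) | (v & 1);
  double d = static_cast<double>(static_cast<int64_t>(halved));
  return d + d;
}

int main() {
  assert(UnsignedToDouble(0) == 0.0);
  assert(UnsignedToDouble(uint64_t{1} << 63) == 9223372036854775808.0);
}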
969 void MacroAssembler::Cvtsd2si(Register dst, XMMRegister src) {
972 vcvtsd2si(dst, src);
974 cvtsd2si(dst, src);
979 void MacroAssembler::Cvttss2si(Register dst, XMMRegister src) {
982 vcvttss2si(dst, src);
984 cvttss2si(dst, src);
989 void MacroAssembler::Cvttss2si(Register dst, const Operand& src) {
992 vcvttss2si(dst, src);
994 cvttss2si(dst, src);
999 void MacroAssembler::Cvttsd2si(Register dst, XMMRegister src) {
1002 vcvttsd2si(dst, src);
1004 cvttsd2si(dst, src);
1009 void MacroAssembler::Cvttsd2si(Register dst, const Operand& src) {
1012 vcvttsd2si(dst, src);
1014 cvttsd2si(dst, src);
1019 void MacroAssembler::Cvttss2siq(Register dst, XMMRegister src) {
1022 vcvttss2siq(dst, src);
1024 cvttss2siq(dst, src);
1029 void MacroAssembler::Cvttss2siq(Register dst, const Operand& src) {
1032 vcvttss2siq(dst, src);
1034 cvttss2siq(dst, src);
1039 void MacroAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
1042 vcvttsd2siq(dst, src);
1044 cvttsd2siq(dst, src);
1049 void MacroAssembler::Cvttsd2siq(Register dst, const Operand& src) {
1052 vcvttsd2siq(dst, src);
1054 cvttsd2siq(dst, src);
1059 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
1062 movsxbq(dst, src);
1064 movzxbl(dst, src);
1066 movsxwq(dst, src);
1068 movzxwl(dst, src);
1070 movl(dst, src);
1072 movp(dst, src);
1077 void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
1080 movb(dst, src);
1082 movw(dst, src);
1084 movl(dst, src);
1091 movp(dst, src);
1096 void MacroAssembler::Set(Register dst, int64_t x) {
1098 xorl(dst, dst);
1100 movl(dst, Immediate(static_cast<uint32_t>(x)));
1102 movq(dst, Immediate(static_cast<int32_t>(x)));
1104 movq(dst, x);
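
Set(Register, int64_t) (1096-1104) picks the shortest encoding for the immediate: xorl for zero, a 32-bit movl when the value fits unsigned (the 32-bit write zero-extends), a sign-extended 32-bit movq when it fits signed, and a full 64-bit movq as the last resort. A sketch of the guards around the listed lines, assuming the is_uint32/is_int32 helpers used elsewhere in V8:

void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);                                  // shortest; clears the whole register
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));  // 32-bit write zero-extends to 64 bits
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));   // sign-extended 32-bit immediate
  } else {
    movq(dst, x);                                    // full 64-bit immediate
  }
}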
1108 void MacroAssembler::Set(const Operand& dst, intptr_t x) {
1111 movp(dst, Immediate(static_cast<int32_t>(x)));
1114 movp(dst, kScratchRegister);
1117 movp(dst, Immediate(static_cast<int32_t>(x)));
1131 void MacroAssembler::SafeMove(Register dst, Smi* src) {
1132 DCHECK(!dst.is(kScratchRegister));
1136 Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
1138 xorp(dst, kScratchRegister);
1142 movp(dst, Immediate(value ^ jit_cookie()));
1143 xorp(dst, Immediate(jit_cookie()));
1146 Move(dst, src);
1182 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
1186 xorl(dst, dst);
1188 Move(dst, source, Assembler::RelocInfoNone());
1193 void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
1195 if (!dst.is(src)) {
1196 movl(dst, src);
1198 shlp(dst, Immediate(kSmiShift));
1202 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
1204 testb(dst, Immediate(0x01));
1213 movl(Operand(dst, kSmiShift / kBitsPerByte), src);
1217 movp(dst, kScratchRegister);
1222 void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
1225 if (dst.is(src)) {
1226 addl(dst, Immediate(constant));
1228 leal(dst, Operand(src, constant));
1230 shlp(dst, Immediate(kSmiShift));
1234 void MacroAssembler::SmiToInteger32(Register dst, Register src) {
1236 if (!dst.is(src)) {
1237 movp(dst, src);
1241 shrp(dst, Immediate(kSmiShift));
1244 sarl(dst, Immediate(kSmiShift));
1249 void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
1251 movl(dst, Operand(src, kSmiShift / kBitsPerByte));
1254 movl(dst, src);
1255 sarl(dst, Immediate(kSmiShift));
1260 void MacroAssembler::SmiToInteger64(Register dst, Register src) {
1262 if (!dst.is(src)) {
1263 movp(dst, src);
1265 sarp(dst, Immediate(kSmiShift));
1268 movsxlq(dst, dst);
1273 void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
1275 movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
1278 movp(dst, src);
1279 SmiToInteger64(dst, dst);
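
Integer32ToSmi, SmiToInteger32 and SmiToInteger64 (1193-1279) tag and untag by shifting by kSmiShift; the branches that select between the two untagging variants (presumably SmiValuesAre32Bits()/SmiValuesAre31Bits()) do not match the filter. A standalone illustration of the 32-bit-payload layout, which is an assumption here, not something the listing states:

#include <cassert>
#include <cstdint>

constexpr int kSmiShift = 32;  // assumed 32-bit smi payload in the upper half

int64_t Integer32ToSmi(int32_t value) {
  // Shift the sign-extended value into the upper half: shlp(dst, Immediate(kSmiShift)).
  return static_cast<int64_t>(static_cast<uint64_t>(int64_t{value}) << kSmiShift);
}

int32_t SmiToInteger32(int64_t smi) {
  // Either an arithmetic right shift or a 32-bit load of the upper half,
  // matching the two untagging variants in the listing.
  return static_cast<int32_t>(smi >> kSmiShift);
}

int main() {
  assert(SmiToInteger32(Integer32ToSmi(-42)) == -42);
  assert(SmiToInteger32(Integer32ToSmi(123456)) == 123456);
}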
1297 void MacroAssembler::SmiCompare(Register dst, Smi* src) {
1298 AssertSmi(dst);
1299 Cmp(dst, src);
1303 void MacroAssembler::Cmp(Register dst, Smi* src) {
1304 DCHECK(!dst.is(kScratchRegister));
1306 testp(dst, dst);
1309 cmpp(dst, constant_reg);
1314 void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
1315 AssertSmi(dst);
1317 cmpp(dst, src);
1321 void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
1322 AssertSmi(dst);
1324 cmpp(dst, src);
1328 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
1329 AssertSmi(dst);
1331 cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
1334 cmpl(dst, Immediate(src));
1339 void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
1342 DCHECK(!dst.AddressUsesRegister(smi_reg));
1343 cmpp(dst, smi_reg);
1347 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
1349 cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
1352 SmiToInteger32(kScratchRegister, dst);
1358 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
1364 SmiToInteger64(dst, src);
1367 if (!dst.is(src)) {
1368 movp(dst, src);
1371 sarp(dst, Immediate(kSmiShift - power));
1373 shlp(dst, Immediate(power - kSmiShift));
1378 void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
1382 if (dst.is(src)) {
1383 shrp(dst, Immediate(power + kSmiShift));
1390 void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
1393 if (dst.is(src1) || dst.is(src2)) {
1399 movp(dst, kScratchRegister);
1401 movp(dst, src1);
1402 orp(dst, src2);
1403 JumpIfNotSmi(dst, on_not_smis, near_jump);
1508 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
1509 if (dst.is(src)) {
1510 andl(dst, Immediate(kSmiTagMask));
1512 movl(dst, Immediate(kSmiTagMask));
1513 andl(dst, src);
1518 void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
1519 if (!(src.AddressUsesRegister(dst))) {
1520 movl(dst, Immediate(kSmiTagMask));
1521 andl(dst, src);
1523 movl(dst, src);
1524 andl(dst, Immediate(kSmiTagMask));
1617 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
1619 if (!dst.is(src)) {
1620 movp(dst, src);
1623 } else if (dst.is(src)) {
1624 DCHECK(!dst.is(kScratchRegister));
1626 addp(dst, constant_reg);
1628 LoadSmiConstant(dst, constant);
1629 addp(dst, src);
1634 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1637 addl(Operand(dst, kSmiShift / kBitsPerByte),
1641 addp(dst, Immediate(constant));
1647 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant,
1652 if (!dst.is(src)) {
1653 movp(dst, src);
1655 } else if (dst.is(src)) {
1656 DCHECK(!dst.is(kScratchRegister));
1658 addp(dst, kScratchRegister);
1662 subp(dst, kScratchRegister);
1667 subp(dst, kScratchRegister);
1680 LoadSmiConstant(dst, constant);
1681 addp(dst, src);
1687 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1689 if (!dst.is(src)) {
1690 movp(dst, src);
1692 } else if (dst.is(src)) {
1693 DCHECK(!dst.is(kScratchRegister));
1695 subp(dst, constant_reg);
1698 LoadSmiConstant(dst, constant);
1701 addp(dst, src);
1704 LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
1705 addp(dst, src);
1711 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant,
1716 if (!dst.is(src)) {
1717 movp(dst, src);
1719 } else if (dst.is(src)) {
1720 DCHECK(!dst.is(kScratchRegister));
1722 subp(dst, kScratchRegister);
1726 addp(dst, kScratchRegister);
1731 addp(dst, kScratchRegister);
1745 DCHECK(!dst.is(kScratchRegister));
1746 movp(dst, src);
1748 subp(dst, kScratchRegister);
1752 LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
1753 addp(dst, src);
1760 void MacroAssembler::SmiNeg(Register dst,
1764 if (dst.is(src)) {
1765 DCHECK(!dst.is(kScratchRegister));
1767 negp(dst); // Low 32 bits are retained as zero by negation.
1769 cmpp(dst, kScratchRegister);
1773 movp(dst, src);
1774 negp(dst);
1775 cmpp(dst, src);
1784 Register dst,
1789 if (dst.is(src1)) {
1791 masm->addp(dst, src2);
1794 masm->subp(dst, src2);
1798 masm->movp(dst, src1);
1799 masm->addp(dst, src2);
1805 void MacroAssembler::SmiAdd(Register dst,
1811 DCHECK(!dst.is(src2));
1812 SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
1816 void MacroAssembler::SmiAdd(Register dst,
1822 DCHECK(!src2.AddressUsesRegister(dst));
1823 SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
1827 void MacroAssembler::SmiAdd(Register dst,
1832 if (!dst.is(src1)) {
1838 leap(dst, Operand(src1, src2, times_1, 0));
1840 addp(dst, src2);
1848 Register dst,
1853 if (dst.is(src1)) {
1855 masm->subp(dst, src2);
1858 masm->addp(dst, src2);
1862 masm->movp(dst, src1);
1863 masm->subp(dst, src2);
1869 void MacroAssembler::SmiSub(Register dst,
1875 DCHECK(!dst.is(src2));
1876 SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
1880 void MacroAssembler::SmiSub(Register dst,
1886 DCHECK(!src2.AddressUsesRegister(dst));
1887 SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
1893 Register dst,
1898 if (!dst.is(src1)) {
1899 masm->movp(dst, src1);
1901 masm->subp(dst, src2);
1906 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1907 DCHECK(!dst.is(src2));
1908 SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
1912 void MacroAssembler::SmiSub(Register dst,
1915 SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
1919 void MacroAssembler::SmiMul(Register dst,
1924 DCHECK(!dst.is(src2));
1925 DCHECK(!dst.is(kScratchRegister));
1929 if (dst.is(src1)) {
1932 SmiToInteger64(dst, src1);
1933 imulp(dst, src2);
1939 testp(dst, dst);
1942 movp(dst, kScratchRegister);
1943 xorp(dst, src2);
1952 Set(dst, 0);
1956 SmiToInteger64(dst, src1);
1957 imulp(dst, src2);
1962 testp(dst, dst);
1974 void MacroAssembler::SmiDiv(Register dst,
1981 DCHECK(!dst.is(kScratchRegister));
2030 if (!dst.is(src1) && src1.is(rax)) {
2033 Integer32ToSmi(dst, rax);
2037 void MacroAssembler::SmiMod(Register dst,
2042 DCHECK(!dst.is(kScratchRegister));
2089 Integer32ToSmi(dst, rdx);
2093 void MacroAssembler::SmiNot(Register dst, Register src) {
2094 DCHECK(!dst.is(kScratchRegister));
2104 if (dst.is(src)) {
2105 xorp(dst, kScratchRegister);
2107 leap(dst, Operand(src, kScratchRegister, times_1, 0));
2109 notp(dst);
2113 void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
2114 DCHECK(!dst.is(src2));
2115 if (!dst.is(src1)) {
2116 movp(dst, src1);
2118 andp(dst, src2);
2122 void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
2124 Set(dst, 0);
2125 } else if (dst.is(src)) {
2126 DCHECK(!dst.is(kScratchRegister));
2128 andp(dst, constant_reg);
2130 LoadSmiConstant(dst, constant);
2131 andp(dst, src);
2136 void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
2137 if (!dst.is(src1)) {
2139 movp(dst, src1);
2141 orp(dst, src2);
2145 void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
2146 if (dst.is(src)) {
2147 DCHECK(!dst.is(kScratchRegister));
2149 orp(dst, constant_reg);
2151 LoadSmiConstant(dst, constant);
2152 orp(dst, src);
2157 void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
2158 if (!dst.is(src1)) {
2160 movp(dst, src1);
2162 xorp(dst, src2);
2166 void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
2167 if (dst.is(src)) {
2168 DCHECK(!dst.is(kScratchRegister));
2170 xorp(dst, constant_reg);
2172 LoadSmiConstant(dst, constant);
2173 xorp(dst, src);
2178 void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
2183 if (dst.is(src)) {
2184 sarp(dst, Immediate(shift_value + kSmiShift));
2185 shlp(dst, Immediate(kSmiShift));
2193 void MacroAssembler::SmiShiftLeftConstant(Register dst,
2199 if (!dst.is(src)) {
2200 movp(dst, src);
2204 shlq(dst, Immediate(shift_value & 0x1f));
2208 if (dst.is(src)) {
2211 SmiToInteger32(dst, src);
2212 shll(dst, Immediate(shift_value));
2213 JumpIfNotValidSmiValue(dst, on_not_smi_result, near_jump);
2214 Integer32ToSmi(dst, dst);
2221 Register dst, Register src, int shift_value,
2224 if (dst.is(src)) {
2232 movp(dst, src);
2233 shrp(dst, Immediate(shift_value + kSmiShift));
2234 shlp(dst, Immediate(kSmiShift));
2237 SmiToInteger32(dst, src);
2238 shrp(dst, Immediate(shift_value));
2239 JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
2240 Integer32ToSmi(dst, dst);
2246 void MacroAssembler::SmiShiftLeft(Register dst,
2252 DCHECK(!dst.is(rcx));
2253 if (!dst.is(src1)) {
2254 movp(dst, src1);
2260 shlq_cl(dst);
2263 DCHECK(!dst.is(kScratchRegister));
2266 DCHECK(!dst.is(src2));
2267 DCHECK(!dst.is(rcx));
2272 if (dst.is(src1)) {
2276 SmiToInteger32(dst, src1);
2278 shll_cl(dst);
2279 JumpIfValidSmiValue(dst, &valid_result, Label::kNear);
2280 // As src1 or src2 could not be dst, we do not need to restore them for
2281 // clobbering dst.
2291 Integer32ToSmi(dst, dst);
2297 void MacroAssembler::SmiShiftLogicalRight(Register dst,
2302 DCHECK(!dst.is(kScratchRegister));
2305 DCHECK(!dst.is(src2));
2306 DCHECK(!dst.is(rcx));
2310 if (dst.is(src1)) {
2314 SmiToInteger32(dst, src1);
2316 shrl_cl(dst);
2317 JumpIfUIntValidSmiValue(dst, &valid_result, Label::kNear);
2318 // As src1 or src2 could not be dst, we do not need to restore them for
2319 // clobbering dst.
2329 Integer32ToSmi(dst, dst);
2334 void MacroAssembler::SmiShiftArithmeticRight(Register dst,
2337 DCHECK(!dst.is(kScratchRegister));
2340 DCHECK(!dst.is(rcx));
2343 if (!dst.is(src1)) {
2344 movp(dst, src1);
2346 SmiToInteger32(dst, dst);
2347 sarl_cl(dst);
2348 Integer32ToSmi(dst, dst);
2352 void MacroAssembler::SelectNonSmi(Register dst,
2357 DCHECK(!dst.is(kScratchRegister));
2360 DCHECK(!dst.is(src1));
2361 DCHECK(!dst.is(src2));
2380 movp(dst, src1);
2381 xorp(dst, src2);
2382 andp(dst, kScratchRegister);
2383 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
2384 xorp(dst, src1);
2385 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
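
SelectNonSmi (2352-2385) picks whichever of src1/src2 is not a smi without branching, using kScratchRegister as a mask that is all ones exactly when src1 is a smi. A standalone arithmetic check of the xor/and/xor selection; the boolean parameter stands in for the mask setup that precedes the listed lines:

#include <cassert>
#include <cstdint>

uint64_t SelectNonSmi(uint64_t src1, uint64_t src2, bool src1_is_smi) {
  uint64_t mask = src1_is_smi ? ~uint64_t{0} : 0;  // kScratchRegister after the setup
  uint64_t dst = src1 ^ src2;                      // movp(dst, src1); xorp(dst, src2)
  dst &= mask;                                     // andp(dst, kScratchRegister)
  dst ^= src1;                                     // src2 if src1 is a smi, else src1
  return dst;
}

int main() {
  assert(SelectNonSmi(0x10, 0x21, true) == 0x21);   // src1 is the smi -> pick src2
  assert(SelectNonSmi(0x11, 0x20, false) == 0x11);  // src1 is the non-smi -> pick src1
}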
2389 SmiIndex MacroAssembler::SmiToIndex(Register dst,
2396 if (!dst.is(src)) {
2397 movp(dst, src);
2400 sarp(dst, Immediate(kSmiShift - shift));
2402 shlp(dst, Immediate(shift - kSmiShift));
2404 return SmiIndex(dst, times_1);
2408 if (!dst.is(src)) {
2409 movp(dst, src);
2413 movsxlq(dst, dst);
2415 sarq(dst, Immediate(kSmiShift));
2416 return SmiIndex(dst, times_1);
2418 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
2423 SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
2429 if (!dst.is(src)) {
2430 movp(dst, src);
2432 negp(dst);
2434 sarp(dst, Immediate(kSmiShift - shift));
2436 shlp(dst, Immediate(shift - kSmiShift));
2438 return SmiIndex(dst, times_1);
2442 if (!dst.is(src)) {
2443 movp(dst, src);
2445 negq(dst);
2447 sarq(dst, Immediate(kSmiShift));
2448 return SmiIndex(dst, times_1);
2450 return SmiIndex(dst, static_cast<ScaleFactor>(shift - 1));
2455 void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
2458 addl(dst, Operand(src, kSmiShift / kBitsPerByte));
2462 addl(dst, kScratchRegister);
2500 void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
2501 DCHECK(!dst.is(scratch));
2505 Pop(dst);
2506 shrp(dst, Immediate(kSmiShift));
2508 shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
2509 orp(dst, scratch);
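
PopRegisterAsTwoSmis (2500-2509) rebuilds a pointer-sized word from two smis pushed by the matching Push helper: each popped smi is untagged, the dst half is shifted back into the upper bits, and the two halves are ORed together. A standalone version of that bit arithmetic, again assuming the kSmiShift == 32 layout and that the Push side splits the word into 32-bit halves:

#include <cassert>
#include <cstdint>

uint64_t CombineTwoSmis(uint64_t high_half_smi, uint64_t low_half_smi) {
  uint64_t dst = high_half_smi >> 32;     // shrp(dst, Immediate(kSmiShift))
  uint64_t scratch = low_half_smi >> 32;  // same untagging for the other half
  dst <<= 64 - 32;                        // shlp(dst, kPointerSize * kBitsPerByte - kSmiShift)
  return dst | scratch;                   // orp(dst, scratch)
}

int main() {
  uint64_t value = 0x1122334455667788ull;
  uint64_t high = (value >> 32) << 32;            // smi carrying the high half
  uint64_t low = (value & 0xFFFFFFFFull) << 32;   // smi carrying the low half
  assert(CombineTwoSmis(high, low) == value);
}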
2626 void MacroAssembler::Move(Register dst, Register src) {
2627 if (!dst.is(src)) {
2628 movp(dst, src);
2633 void MacroAssembler::Move(Register dst, Handle<Object> source) {
2636 Move(dst, Smi::cast(*source));
2638 MoveHeapObject(dst, source);
2643 void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
2646 Move(dst, Smi::cast(*source));
2649 movp(dst, kScratchRegister);
2654 void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
2656 Xorpd(dst, dst);
2661 Pcmpeqd(dst, dst);
2664 Movq(dst, kScratchRegister);
2670 void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
2672 Xorpd(dst, dst);
2679 Pcmpeqd(dst, dst);
2681 Pcmpeqd(dst, dst);
2682 Psllq(dst, ntz);
2684 Pcmpeqd(dst, dst);
2685 Psrlq(dst, nlz);
2690 Move(dst, lower);
2693 Movq(dst, kScratchRegister);
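
Move(XMMRegister, uint64_t) (2670-2693) avoids a general 64-bit load for easy constants: zero uses Xorpd, all-ones uses Pcmpeqd, and a constant whose set bits form a contiguous run reaching either end of the word is built by setting all bits and shifting by the trailing- or leading-zero count. A standalone check of those two shift identities; ntz/nlz here are just example counts:

#include <cassert>
#include <cstdint>

int main() {
  uint64_t all_ones = ~uint64_t{0};  // what Pcmpeqd(dst, dst) produces

  // Ones running from the top of the word down to bit ntz:
  // all-ones shifted left by ntz (Pcmpeqd + Psllq(dst, ntz)).
  assert((all_ones << 12) == 0xFFFFFFFFFFFFF000ull);

  // Ones running from bit 0 upward: all-ones shifted right by the
  // leading-zero count (Pcmpeqd + Psrlq(dst, nlz)).
  assert((all_ones >> 40) == 0x0000000000FFFFFFull);
}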
2700 void MacroAssembler::Movaps(XMMRegister dst, XMMRegister src) {
2703 vmovaps(dst, src);
2705 movaps(dst, src);
2709 void MacroAssembler::Movups(XMMRegister dst, XMMRegister src) {
2712 vmovups(dst, src);
2714 movups(dst, src);
2718 void MacroAssembler::Movups(XMMRegister dst, const Operand& src) {
2721 vmovups(dst, src);
2723 movups(dst, src);
2727 void MacroAssembler::Movups(const Operand& dst, XMMRegister src) {
2730 vmovups(dst, src);
2732 movups(dst, src);
2736 void MacroAssembler::Movapd(XMMRegister dst, XMMRegister src) {
2739 vmovapd(dst, src);
2741 movapd(dst, src);
2745 void MacroAssembler::Movupd(XMMRegister dst, const Operand& src) {
2748 vmovupd(dst, src);
2750 movupd(dst, src);
2754 void MacroAssembler::Movupd(const Operand& dst, XMMRegister src) {
2757 vmovupd(dst, src);
2759 movupd(dst, src);
2763 void MacroAssembler::Movsd(XMMRegister dst, XMMRegister src) {
2766 vmovsd(dst, dst, src);
2768 movsd(dst, src);
2773 void MacroAssembler::Movsd(XMMRegister dst, const Operand& src) {
2776 vmovsd(dst, src);
2778 movsd(dst, src);
2783 void MacroAssembler::Movsd(const Operand& dst, XMMRegister src) {
2786 vmovsd(dst, src);
2788 movsd(dst, src);
2793 void MacroAssembler::Movss(XMMRegister dst, XMMRegister src) {
2796 vmovss(dst, dst, src);
2798 movss(dst, src);
2803 void MacroAssembler::Movss(XMMRegister dst, const Operand& src) {
2806 vmovss(dst, src);
2808 movss(dst, src);
2813 void MacroAssembler::Movss(const Operand& dst, XMMRegister src) {
2816 vmovss(dst, src);
2818 movss(dst, src);
2823 void MacroAssembler::Movd(XMMRegister dst, Register src) {
2826 vmovd(dst, src);
2828 movd(dst, src);
2833 void MacroAssembler::Movd(XMMRegister dst, const Operand& src) {
2836 vmovd(dst, src);
2838 movd(dst, src);
2843 void MacroAssembler::Movd(Register dst, XMMRegister src) {
2846 vmovd(dst, src);
2848 movd(dst, src);
2853 void MacroAssembler::Movq(XMMRegister dst, Register src) {
2856 vmovq(dst, src);
2858 movq(dst, src);
2863 void MacroAssembler::Movq(Register dst, XMMRegister src) {
2866 vmovq(dst, src);
2868 movq(dst, src);
2872 void MacroAssembler::Movmskps(Register dst, XMMRegister src) {
2875 vmovmskps(dst, src);
2877 movmskps(dst, src);
2881 void MacroAssembler::Movmskpd(Register dst, XMMRegister src) {
2884 vmovmskpd(dst, src);
2886 movmskpd(dst, src);
2890 void MacroAssembler::Xorps(XMMRegister dst, XMMRegister src) {
2893 vxorps(dst, dst, src);
2895 xorps(dst, src);
2899 void MacroAssembler::Xorps(XMMRegister dst, const Operand& src) {
2902 vxorps(dst, dst, src);
2904 xorps(dst, src);
2908 void MacroAssembler::Roundss(XMMRegister dst, XMMRegister src,
2912 vroundss(dst, dst, src, mode);
2914 roundss(dst, src, mode);
2919 void MacroAssembler::Roundsd(XMMRegister dst, XMMRegister src,
2923 vroundsd(dst, dst, src, mode);
2925 roundsd(dst, src, mode);
2930 void MacroAssembler::Sqrtsd(XMMRegister dst, XMMRegister src) {
2933 vsqrtsd(dst, dst, src);
2935 sqrtsd(dst, src);
2940 void MacroAssembler::Sqrtsd(XMMRegister dst, const Operand& src) {
2943 vsqrtsd(dst, dst, src);
2945 sqrtsd(dst, src);
2991 void MacroAssembler::Absps(XMMRegister dst) {
2992 Andps(dst,
2996 void MacroAssembler::Negps(XMMRegister dst) {
2997 Xorps(dst,
3001 void MacroAssembler::Abspd(XMMRegister dst) {
3002 Andps(dst,
3006 void MacroAssembler::Negpd(XMMRegister dst) {
3007 Xorps(dst,
3011 void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
3014 Cmp(dst, Smi::cast(*source));
3017 cmpp(dst, kScratchRegister);
3022 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
3025 Cmp(dst, Smi::cast(*source));
3028 cmpp(dst, kScratchRegister);
3051 void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
3052 if (dst.is(rax)) {
3056 Move(dst, cell, RelocInfo::CELL);
3057 movp(dst, Operand(dst, 0));
3156 void MacroAssembler::Pop(Register dst) {
3158 popq(dst);
3161 DCHECK(dst.code() != rbp.code());
3162 movp(dst, Operand(rsp, 0));
3168 void MacroAssembler::Pop(const Operand& dst) {
3170 popq(dst);
3172 Register scratch = dst.AddressUsesRegister(kScratchRegister)
3175 movp(dst, scratch);
3185 void MacroAssembler::PopQuad(const Operand& dst) {
3187 popq(dst);
3190 movp(dst, kScratchRegister);
3195 void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst,
3202 movsxlq(dst, FieldOperand(base, offset));
3204 movp(dst, FieldOperand(base, offset));
3205 SmiToInteger32(dst, dst);
3310 void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
3312 Movd(dst, src);
3317 pextrd(dst, src, imm8);
3321 movq(dst, src);
3322 shrq(dst, Immediate(32));
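
Pextrd (3310-3322) has a fallback when SSE4.1 is unavailable: lane 0 is read with a plain Movd, and lane 1 is read by moving the low 64 bits to a general register and shifting right by 32. A standalone equivalent of that lane-1 path; ExtractLane1 is a hypothetical name:

#include <cassert>
#include <cstdint>

uint32_t ExtractLane1(uint64_t low64_of_xmm) {
  return static_cast<uint32_t>(low64_of_xmm >> 32);  // movq(dst, src); shrq(dst, Immediate(32))
}

int main() {
  assert(ExtractLane1(0xAABBCCDD11223344ull) == 0xAABBCCDDu);
}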
3326 void MacroAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
3329 pinsrd(dst, src, imm8);
3334 punpckldq(dst, kScratchDoubleReg);
3337 Movss(dst, kScratchDoubleReg);
3342 void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
3346 pinsrd(dst, src, imm8);
3351 punpckldq(dst, kScratchDoubleReg);
3354 Movss(dst, kScratchDoubleReg);
3359 void MacroAssembler::Lzcntl(Register dst, Register src) {
3362 lzcntl(dst, src);
3366 bsrl(dst, src);
3368 Set(dst, 63); // 63^31 == 32
3370 xorl(dst, Immediate(31)); // for x in [0..31], 31^x == 31 - x
3374 void MacroAssembler::Lzcntl(Register dst, const Operand& src) {
3377 lzcntl(dst, src);
3381 bsrl(dst, src);
3383 Set(dst, 63); // 63^31 == 32
3385 xorl(dst, Immediate(31)); // for x in [0..31], 31^x == 31 - x
3389 void MacroAssembler::Lzcntq(Register dst, Register src) {
3392 lzcntq(dst, src);
3396 bsrq(dst, src);
3398 Set(dst, 127); // 127^63 == 64
3400 xorl(dst, Immediate(63)); // for x in [0..63], 63^x == 63 - x
3404 void MacroAssembler::Lzcntq(Register dst, const Operand& src) {
3407 lzcntq(dst, src);
3411 bsrq(dst, src);
3413 Set(dst, 127); // 127^63 == 64
3415 xorl(dst, Immediate(63)); // for x in [0..63], 63^x == 63 - x
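
The Lzcntl/Lzcntq wrappers (3359-3415) fall back to bsr when LZCNT is unavailable. bsr yields the index of the highest set bit and is unusable for zero input, so that case sets dst to 63 (127 for the 64-bit variant) instead; the final xor then turns the bit index into the zero count, since 31 ^ x == 31 - x for x in [0..31] and 63 ^ 31 == 32, which is what lzcnt returns for zero. A standalone check of that arithmetic for the 32-bit case, using the GCC/Clang __builtin_clz builtin as a stand-in for bsr:

#include <cassert>
#include <cstdint>

int Lzcnt32Fallback(uint32_t v) {
  int dst = 63;                    // zero input: Set(dst, 63), since 63 ^ 31 == 32
  if (v != 0) {
    dst = 31 - __builtin_clz(v);   // bsrl: index of the highest set bit
  }
  return dst ^ 31;                 // xorl(dst, Immediate(31))
}

int main() {
  assert(Lzcnt32Fallback(0) == 32);
  assert(Lzcnt32Fallback(1) == 31);
  assert(Lzcnt32Fallback(0x80000000u) == 0);
  assert(Lzcnt32Fallback(0x00010000u) == 15);
}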
3419 void MacroAssembler::Tzcntq(Register dst, Register src) {
3422 tzcntq(dst, src);
3426 bsfq(dst, src);
3429 Set(dst, 64);
3434 void MacroAssembler::Tzcntq(Register dst, const Operand& src) {
3437 tzcntq(dst, src);
3441 bsfq(dst, src);
3444 Set(dst, 64);
3449 void MacroAssembler::Tzcntl(Register dst, Register src) {
3452 tzcntl(dst, src);
3456 bsfl(dst, src);
3458 Set(dst, 32); // The result of tzcnt is 32 if src = 0.
3463 void MacroAssembler::Tzcntl(Register dst, const Operand& src) {
3466 tzcntl(dst, src);
3470 bsfl(dst, src);
3472 Set(dst, 32); // The result of tzcnt is 32 if src = 0.
3477 void MacroAssembler::Popcntl(Register dst, Register src) {
3480 popcntl(dst, src);
3487 void MacroAssembler::Popcntl(Register dst, const Operand& src) {
3490 popcntl(dst, src);
3497 void MacroAssembler::Popcntq(Register dst, Register src) {
3500 popcntq(dst, src);
3507 void MacroAssembler::Popcntq(Register dst, const Operand& src) {
3510 popcntq(dst, src);
3589 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
3591 movp(SafepointRegisterSlot(dst), imm);
3595 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
3596 movp(SafepointRegisterSlot(dst), src);
3600 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
3601 movp(dst, SafepointRegisterSlot(src));
3721 void MacroAssembler::LoadUint32(XMMRegister dst,
3727 Cvtqsi2sd(dst, src);
3785 Label* minus_zero, Label::Distance dst) {
3789 j(not_equal, lost_precision, dst);
3790 j(parity_even, is_nan, dst); // NaN.
3802 j(not_zero, minus_zero, dst);
3814 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
3815 movl(dst, FieldOperand(map, Map::kBitField3Offset));
3816 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
3820 void MacroAssembler::EnumLength(Register dst, Register map) {
3822 movl(dst, FieldOperand(map, Map::kBitField3Offset));
3823 andl(dst, Immediate(Map::EnumLengthBits::kMask));
3824 Integer32ToSmi(dst, dst);
3828 void MacroAssembler::LoadAccessor(Register dst, Register holder,
3831 movp(dst, FieldOperand(holder, HeapObject::kMapOffset));
3832 LoadInstanceDescriptors(dst, dst);
3833 movp(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
3836 movp(dst, FieldOperand(dst, offset));
4909 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
4912 movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4914 movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4920 movp(dst, rsi);
4928 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
4941 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
4942 movp(dst, NativeContextOperand());
4943 movp(dst, ContextOperand(dst, index));