Lines matching refs:VIXL_ASSERT (each entry gives the source line number followed by the matching line):
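
For context, VIXL_ASSERT is VIXL's debug-only assertion: each line below checks an operand or encoder invariant that is enforced in debug builds and compiled away in release builds. A minimal sketch of the conventional pattern follows; the actual macro is defined in VIXL's globals header, and the VIXL_DEBUG guard and plain assert() fallback shown here are assumptions for illustration, not the upstream definition.

    #include <cassert>

    // Hedged sketch only: a debug-only assertion macro in the style VIXL uses.
    #ifdef VIXL_DEBUG
    #define VIXL_ASSERT(condition) assert(condition)  // aborts on failure in debug builds
    #else
    #define VIXL_ASSERT(condition) ((void)0)          // compiled out otherwise
    #endif
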
45 VIXL_ASSERT((deletion_policy == kManuallyDeleted) || (literal_pool_ != NULL));
61 VIXL_ASSERT((offset >= 0) && (offset <= GetBuffer()->GetCursorOffset()));
62 VIXL_ASSERT(offset % kInstructionSize == 0);
111 VIXL_ASSERT(!literal->IsPlaced());
120 VIXL_ASSERT(ldr->IsLoadLiteral());
123 VIXL_ASSERT(imm19 <= 0);
142 VIXL_ASSERT(literal->GetSize() == kQRegSizeInBytes);
152 VIXL_ASSERT(IsWordAligned(GetCursorOffset()));
159 VIXL_ASSERT((literal->GetOffset() - GetCursorOffset()) <= 0);
181 VIXL_ASSERT(xn.Is64Bits());
187 VIXL_ASSERT(xn.Is64Bits());
193 VIXL_ASSERT(xn.Is64Bits());
208 VIXL_ASSERT(Instruction::IsValidImmPCOffset(UncondBranchType, offset));
215 VIXL_ASSERT(Instruction::IsValidImmPCOffset(CondBranchType, offset));
225 VIXL_ASSERT(Instruction::IsValidImmPCOffset(UncondBranchType, offset));
237 VIXL_ASSERT(Instruction::IsValidImmPCOffset(CompareBranchType, offset));
249 VIXL_ASSERT(Instruction::IsValidImmPCOffset(CompareBranchType, offset));
258 VIXL_ASSERT(vd.Is16B() || vd.Is8B());
259 VIXL_ASSERT(vn.Is16B());
260 VIXL_ASSERT(AreSameFormat(vd, vm));
277 VIXL_ASSERT(AreSameFormat(vn, vn2));
278 VIXL_ASSERT(AreConsecutive(vn, vn2));
289 VIXL_ASSERT(AreSameFormat(vn, vn2, vn3));
290 VIXL_ASSERT(AreConsecutive(vn, vn2, vn3));
302 VIXL_ASSERT(AreSameFormat(vn, vn2, vn3, vn4));
303 VIXL_ASSERT(AreConsecutive(vn, vn2, vn3, vn4));
320 VIXL_ASSERT(AreSameFormat(vn, vn2));
321 VIXL_ASSERT(AreConsecutive(vn, vn2));
332 VIXL_ASSERT(AreSameFormat(vn, vn2, vn3));
333 VIXL_ASSERT(AreConsecutive(vn, vn2, vn3));
345 VIXL_ASSERT(AreSameFormat(vn, vn2, vn3, vn4));
346 VIXL_ASSERT(AreConsecutive(vn, vn2, vn3, vn4));
352 VIXL_ASSERT(rt.Is64Bits() || (rt.Is32Bits() && (bit_pos < kWRegSize)));
359 VIXL_ASSERT(Instruction::IsValidImmPCOffset(TestBranchType, offset));
365 VIXL_ASSERT(rt.Is64Bits() || (rt.Is32Bits() && (bit_pos < kWRegSize)));
372 VIXL_ASSERT(Instruction::IsValidImmPCOffset(TestBranchType, offset));
378 VIXL_ASSERT(xd.Is64Bits());
389 VIXL_ASSERT(xd.Is64Bits());
395 VIXL_ASSERT(AllowPageOffsetDependentCode());
557 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
558 VIXL_ASSERT(rd.GetSizeInBits() == rm.GetSizeInBits());
566 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
567 VIXL_ASSERT(rd.GetSizeInBits() == rm.GetSizeInBits());
575 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
576 VIXL_ASSERT(rd.GetSizeInBits() == rm.GetSizeInBits());
584 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
585 VIXL_ASSERT(rd.GetSizeInBits() == rm.GetSizeInBits());
595 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
606 VIXL_ASSERT(rd.Is64Bits() || rn.Is32Bits());
617 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
628 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
629 VIXL_ASSERT(rd.GetSizeInBits() == rm.GetSizeInBits());
669 VIXL_ASSERT((cond != al) && (cond != nv));
676 VIXL_ASSERT((cond != al) && (cond != nv));
683 VIXL_ASSERT((cond != al) && (cond != nv));
689 VIXL_ASSERT((cond != al) && (cond != nv));
695 VIXL_ASSERT((cond != al) && (cond != nv));
705 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
706 VIXL_ASSERT(rd.GetSizeInBits() == rm.GetSizeInBits());
739 VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && wm.Is32Bits());
747 VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && wm.Is32Bits());
755 VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && wm.Is32Bits());
763 VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && xm.Is64Bits());
771 VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && wm.Is32Bits());
779 VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && wm.Is32Bits());
787 VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && wm.Is32Bits());
795 VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && xm.Is64Bits());
803 VIXL_ASSERT(AreSameSizeAndType(rd, rn, rm));
819 VIXL_ASSERT(AreSameSizeAndType(rd, rn, rm));
836 VIXL_ASSERT(xd.Is64Bits() && xa.Is64Bits());
837 VIXL_ASSERT(wn.Is32Bits() && wm.Is32Bits());
846 VIXL_ASSERT(xd.Is64Bits() && xa.Is64Bits());
847 VIXL_ASSERT(wn.Is32Bits() && wm.Is32Bits());
856 VIXL_ASSERT(xd.Is64Bits() && xa.Is64Bits());
857 VIXL_ASSERT(wn.Is32Bits() && wm.Is32Bits());
866 VIXL_ASSERT(xd.Is64Bits() && xa.Is64Bits());
867 VIXL_ASSERT(wn.Is32Bits() && wm.Is32Bits());
875 VIXL_ASSERT(xd.Is64Bits());
876 VIXL_ASSERT(wn.Is32Bits() && wm.Is32Bits());
884 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
885 VIXL_ASSERT(rd.GetSizeInBits() == rm.GetSizeInBits());
893 VIXL_ASSERT(xd.Is64Bits() && xn.Is64Bits() && xm.Is64Bits());
901 VIXL_ASSERT(xd.Is64Bits() && xn.Is64Bits() && xm.Is64Bits());
909 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
910 VIXL_ASSERT(rd.GetSizeInBits() == rm.GetSizeInBits());
926 VIXL_ASSERT(xd.Is64Bits());
963 VIXL_ASSERT(xt.Is64Bits() && xt2.Is64Bits());
973 VIXL_ASSERT(((op & LoadStorePairLBit) == 0) || !rt.Is(rt2));
974 VIXL_ASSERT(AreSameSizeAndType(rt, rt2));
975 VIXL_ASSERT(IsImmLSPair(addr.GetOffset(), CalcLSPairDataSize(op)));
985 VIXL_ASSERT(addr.GetOffset() != 0);
989 VIXL_ASSERT(addr.IsPostIndex());
1015 VIXL_ASSERT(!rt.Is(rt2));
1016 VIXL_ASSERT(AreSameSizeAndType(rt, rt2));
1017 VIXL_ASSERT(addr.IsImmediateOffset());
1021 VIXL_ASSERT(IsImmLSPair(addr.GetOffset(), size));
1032 VIXL_ASSERT(option != RequireUnscaledOffset);
1033 VIXL_ASSERT(option != PreferUnscaledOffset);
1041 VIXL_ASSERT(option != RequireUnscaledOffset);
1042 VIXL_ASSERT(option != PreferUnscaledOffset);
1050 VIXL_ASSERT(option != RequireUnscaledOffset);
1051 VIXL_ASSERT(option != PreferUnscaledOffset);
1059 VIXL_ASSERT(option != RequireUnscaledOffset);
1060 VIXL_ASSERT(option != PreferUnscaledOffset);
1068 VIXL_ASSERT(option != RequireUnscaledOffset);
1069 VIXL_ASSERT(option != PreferUnscaledOffset);
1077 VIXL_ASSERT(option != RequireUnscaledOffset);
1078 VIXL_ASSERT(option != PreferUnscaledOffset);
1086 VIXL_ASSERT(option != RequireUnscaledOffset);
1087 VIXL_ASSERT(option != PreferUnscaledOffset);
1095 VIXL_ASSERT(option != RequireUnscaledOffset);
1096 VIXL_ASSERT(option != PreferUnscaledOffset);
1104 VIXL_ASSERT(xt.Is64Bits());
1105 VIXL_ASSERT(option != RequireUnscaledOffset);
1106 VIXL_ASSERT(option != PreferUnscaledOffset);
1114 VIXL_ASSERT(option != RequireScaledOffset);
1115 VIXL_ASSERT(option != PreferScaledOffset);
1123 VIXL_ASSERT(option != RequireScaledOffset);
1124 VIXL_ASSERT(option != PreferScaledOffset);
1132 VIXL_ASSERT(option != RequireScaledOffset);
1133 VIXL_ASSERT(option != PreferScaledOffset);
1141 VIXL_ASSERT(option != RequireScaledOffset);
1142 VIXL_ASSERT(option != PreferScaledOffset);
1150 VIXL_ASSERT(option != RequireScaledOffset);
1151 VIXL_ASSERT(option != PreferScaledOffset);
1159 VIXL_ASSERT(option != RequireScaledOffset);
1160 VIXL_ASSERT(option != PreferScaledOffset);
1168 VIXL_ASSERT(option != RequireScaledOffset);
1169 VIXL_ASSERT(option != PreferScaledOffset);
1177 VIXL_ASSERT(option != RequireScaledOffset);
1178 VIXL_ASSERT(option != PreferScaledOffset);
1186 VIXL_ASSERT(xt.Is64Bits());
1187 VIXL_ASSERT(option != RequireScaledOffset);
1188 VIXL_ASSERT(option != PreferScaledOffset);
1194 VIXL_ASSERT(xt.Is64Bits());
1195 VIXL_ASSERT(literal->GetSize() == kWRegSizeInBytes);
1201 VIXL_ASSERT(literal->GetSize() == static_cast<size_t>(rt.GetSizeInBytes()));
1226 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1234 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1242 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1249 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1255 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1261 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1271 VIXL_ASSERT(rt.GetSizeInBits() == rt2.GetSizeInBits());
1272 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1281 VIXL_ASSERT(rt.GetSizeInBits() == rt2.GetSizeInBits());
1282 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1291 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1299 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1307 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1314 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1320 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1326 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1336 VIXL_ASSERT(rt.GetSizeInBits() == rt2.GetSizeInBits());
1337 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1346 VIXL_ASSERT(rt.GetSizeInBits() == rt2.GetSizeInBits());
1347 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1354 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1360 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1366 VIXL_ASSERT(dst.IsImmediateOffset() && (dst.GetOffset() == 0));
1373 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1379 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1385 VIXL_ASSERT(src.IsImmediateOffset() && (src.GetOffset() == 0));
1394 VIXL_ASSERT(option != RequireUnscaledOffset);
1395 VIXL_ASSERT(option != PreferUnscaledOffset);
1403 VIXL_ASSERT(option != RequireScaledOffset);
1404 VIXL_ASSERT(option != PreferScaledOffset);
1415 VIXL_ASSERT(xt.Is64Bits());
1421 VIXL_ASSERT(xt.Is64Bits());
1427 VIXL_ASSERT((op == CVAC) || (op == CVAU) || (op == CIVAC) || (op == ZVA));
1433 VIXL_ASSERT(op == IVAU);
1459 VIXL_ASSERT(addr.IsImmediateOffset() && (addr.GetOffset() == 0));
1472 VIXL_ASSERT(addr.GetOffset() == 0);
1524 VIXL_ASSERT(!addr.GetRegisterOffset().Is(NoReg) ||
1536 VIXL_ASSERT(vt.IsVector() || vt.Is1D());
1558 VIXL_ASSERT(AreSameFormat(vt, vt2));
1559 VIXL_ASSERT(AreConsecutive(vt, vt2));
1569 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3));
1570 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3));
1581 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3, vt4));
1582 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3, vt4));
1591 VIXL_ASSERT(AreSameFormat(vt, vt2));
1592 VIXL_ASSERT(AreConsecutive(vt, vt2));
1602 VIXL_ASSERT(AreSameFormat(vt, vt2));
1603 VIXL_ASSERT(AreConsecutive(vt, vt2));
1612 VIXL_ASSERT(AreSameFormat(vt, vt2));
1613 VIXL_ASSERT(AreConsecutive(vt, vt2));
1623 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3));
1624 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3));
1635 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3));
1636 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3));
1646 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3));
1647 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3));
1658 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3, vt4));
1659 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3, vt4));
1671 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3, vt4));
1672 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3, vt4));
1683 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3, vt4));
1684 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3, vt4));
1698 VIXL_ASSERT(AreSameFormat(vt, vt2));
1699 VIXL_ASSERT(AreConsecutive(vt, vt2));
1709 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3));
1710 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3));
1721 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3, vt4));
1722 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3, vt4));
1731 VIXL_ASSERT(AreSameFormat(vt, vt2));
1732 VIXL_ASSERT(AreConsecutive(vt, vt2));
1742 VIXL_ASSERT(AreSameFormat(vt, vt2));
1743 VIXL_ASSERT(AreConsecutive(vt, vt2));
1753 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3));
1754 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3));
1765 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3));
1766 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3));
1777 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3, vt4));
1778 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3, vt4));
1790 VIXL_ASSERT(AreSameFormat(vt, vt2, vt3, vt4));
1791 VIXL_ASSERT(AreConsecutive(vt, vt2, vt3, vt4));
1805 VIXL_ASSERT(lane < (kQRegSizeInBytes / lane_size));
1828 VIXL_ASSERT(lane_size == 8);
1855 VIXL_ASSERT(AreSameFormat(vn, vm));
1856 VIXL_ASSERT((vn.Is1H() && vd.Is1S()) || (vn.Is1S() && vd.Is1D()) ||
1875 VIXL_ASSERT(AreSameFormat(vd, vn));
1876 VIXL_ASSERT((vm.Is8B() && vd.Is8H()) || (vm.Is4H() && vd.Is4S()) ||
1887 VIXL_ASSERT(AreSameFormat(vm, vn));
1888 VIXL_ASSERT((vd.Is8B() && vn.Is8H()) || (vd.Is4H() && vn.Is4S()) ||
1940 VIXL_ASSERT(AS); \
1962 VIXL_ASSERT(AS); \
1971 VIXL_ASSERT(vm.IsD());
1979 VIXL_ASSERT(vm.IsQ());
1987 VIXL_ASSERT(vm.IsD());
1995 VIXL_ASSERT(vm.IsQ());
2003 VIXL_ASSERT(vm.IsD());
2011 VIXL_ASSERT(vm.IsQ());
2019 VIXL_ASSERT(vm.IsD());
2027 VIXL_ASSERT(vm.IsQ());
2050 VIXL_ASSERT(xt.Is64Bits());
2056 VIXL_ASSERT(xt.Is64Bits());
2081 VIXL_ASSERT(vd.Is1D());
2084 VIXL_ASSERT(vd.Is2D());
2095 VIXL_ASSERT(vd.Is1S());
2098 VIXL_ASSERT(vd.Is2S() | vd.Is4S());
2108 VIXL_ASSERT(vn.Is1S() || vn.Is1D());
2109 VIXL_ASSERT(rd.GetSizeInBits() == vn.GetSizeInBits());
2116 VIXL_ASSERT(vd.Is1S() || vd.Is1D());
2117 VIXL_ASSERT(vd.GetSizeInBits() == rn.GetSizeInBits());
2124 VIXL_ASSERT(vd.Is1S() || vd.Is1D());
2125 VIXL_ASSERT(vd.IsSameFormat(vn));
2131 VIXL_ASSERT((index == 1) && vd.Is1D() && rn.IsX());
2138 VIXL_ASSERT((index == 1) && vn.Is1D() && rd.IsX());
2179 VIXL_ASSERT(AreSameSizeAndType(vd, vn, vm));
2192 VIXL_ASSERT(value == 0.0);
2193 VIXL_ASSERT(vn.Is1S() || vn.Is1D());
2202 VIXL_ASSERT(vn.Is1S() || vn.Is1D());
2203 VIXL_ASSERT(vn.IsSameSizeAndType(vm));
2234 VIXL_ASSERT(vn.Is1S() || vn.Is1D());
2235 VIXL_ASSERT(vn.IsSameSizeAndType(vm));
2260 VIXL_ASSERT(vd.Is1S() || vd.Is1D());
2261 VIXL_ASSERT(AreSameFormat(vd, vn, vm));
2277 VIXL_ASSERT((vd.Is1S() && vn.Is1S()) || (vd.Is1D() && vn.Is1D()));
2287 VIXL_ASSERT(vn.Is1S() || vn.Is1H());
2290 VIXL_ASSERT(vn.Is1D() || vn.Is1H());
2293 VIXL_ASSERT(vd.Is1H());
2294 VIXL_ASSERT(vn.Is1D() || vn.Is1S());
2302 VIXL_ASSERT((vd.Is4S() && vn.Is4H()) || (vd.Is2D() && vn.Is2S()));
2309 VIXL_ASSERT((vd.Is4S() && vn.Is8H()) || (vd.Is2D() && vn.Is4S()));
2316 VIXL_ASSERT((vn.Is4S() && vd.Is4H()) || (vn.Is2D() && vd.Is2S()));
2323 VIXL_ASSERT((vn.Is4S() && vd.Is8H()) || (vn.Is2D() && vd.Is4S()));
2332 VIXL_ASSERT(vd.Is1S() && vn.Is1D());
2335 VIXL_ASSERT(vd.Is2S() && vn.Is2D());
2342 VIXL_ASSERT(vd.Is4S() && vn.Is2D());
2370 VIXL_ASSERT(vn.Is1S() || vn.Is1D());
2371 VIXL_ASSERT((fbits >= 0) && (fbits <= rd.GetSizeInBits()));
2382 VIXL_ASSERT(fbits >= 0);
2386 VIXL_ASSERT(vd.Is1D() || vd.Is1S() || vd.Is2D() || vd.Is2S() || vd.Is4S());
2393 VIXL_ASSERT(vn.Is1S() || vn.Is1D());
2394 VIXL_ASSERT((fbits >= 0) && (fbits <= rd.GetSizeInBits()));
2405 VIXL_ASSERT(fbits >= 0);
2409 VIXL_ASSERT(vd.Is1D() || vd.Is1S() || vd.Is2D() || vd.Is2S() || vd.Is4S());
2415 VIXL_ASSERT(fbits >= 0);
2419 VIXL_ASSERT(vd.Is1D() || vd.Is1S() || vd.Is2D() || vd.Is2S() || vd.Is4S());
2425 VIXL_ASSERT(fbits >= 0);
2429 VIXL_ASSERT(vd.Is1D() || vd.Is1S() || vd.Is2D() || vd.Is2S() || vd.Is4S());
2436 VIXL_ASSERT(vd.Is1S() || vd.Is1D());
2437 VIXL_ASSERT(fbits >= 0);
2448 VIXL_ASSERT(vd.Is1S() || vd.Is1D());
2449 VIXL_ASSERT(fbits >= 0);
2463 VIXL_ASSERT(AreSameFormat(vd, vn, vm));
2464 VIXL_ASSERT(vd.IsVector() || !vd.IsQ());
2482 VIXL_ASSERT(AreSameFormat(vd, vn, vm));
2508 VIXL_ASSERT(vd.Is1S() || vd.Is1D()); \
2511 VIXL_ASSERT(vd.Is2S() || vd.Is2D() || vd.Is4S()); \
2523 VIXL_ASSERT(AreSameFormat(vd, vn));
2532 VIXL_ASSERT(AreSameFormat(vd, vn));
2533 VIXL_ASSERT(value == 0);
2549 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
2555 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
2561 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
2567 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
2573 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
2579 VIXL_ASSERT((vd.Is8H() && vn.Is8B() && shift == 8) ||
2589 VIXL_ASSERT((vd.Is8H() && vn.Is16B() && shift == 8) ||
2600 VIXL_ASSERT(AreSameFormat(vd, vn));
2601 VIXL_ASSERT(value == 0.0);
2606 VIXL_ASSERT(vd.Is1S() || vd.Is1D());
2609 VIXL_ASSERT(vd.Is2S() || vd.Is2D() || vd.Is4S());
2642 VIXL_ASSERT(vd.IsScalar());
2643 VIXL_ASSERT(AreSameFormat(vd, vn));
2644 VIXL_ASSERT(vd.Is1S() || vd.Is1D());
2710 VIXL_ASSERT(AS); \
2751 VIXL_ASSERT(vd.Is1S() || vd.Is1D()); \
2754 VIXL_ASSERT(vd.IsVector()); \
2755 VIXL_ASSERT(vd.Is2S() || vd.Is2D() || vd.Is4S()); \
2765 VIXL_ASSERT((vd.Is1D() && vn.Is2D()));
2771 VIXL_ASSERT((vd.Is1S() && vn.Is2S()) || (vd.Is1D() && vn.Is2D()));
2777 VIXL_ASSERT((vd.Is1S() && vn.Is2S()) || (vd.Is1D() && vn.Is2D()));
2783 VIXL_ASSERT((vd.Is1S() && vn.Is2S()) || (vd.Is1D() && vn.Is2D()));
2789 VIXL_ASSERT((vd.Is1S() && vn.Is2S()) || (vd.Is1D() && vn.Is2D()));
2795 VIXL_ASSERT((vd.Is1S() && vn.Is2S()) || (vd.Is1D() && vn.Is2D()));
2806 VIXL_ASSERT(AreSameFormat(vd, vn));
2810 VIXL_ASSERT(vd.IsQ());
2825 VIXL_ASSERT((shift == LSL) || (shift == MSL));
2827 VIXL_ASSERT(shift_amount == 0);
2831 VIXL_ASSERT((byte == 0) || (byte == 0xff));
2840 VIXL_ASSERT(IsUint8(imm));
2846 VIXL_ASSERT(IsUint8(imm));
2856 VIXL_ASSERT(AreSameFormat(vd, vn));
2860 VIXL_ASSERT(vd.IsQ());
2870 VIXL_ASSERT((shift == LSL) || (shift == MSL));
2884 VIXL_ASSERT(AreSameFormat(vd, vn));
2885 VIXL_ASSERT((vd.Is2S() && vm.Is1S()) || (vd.Is4S() && vm.Is1S()) ||
2888 VIXL_ASSERT((vm.Is1S() && (vm_index < 4)) || (vm.Is1D() && (vm_index < 2)));
2906 VIXL_ASSERT(AreSameFormat(vd, vn));
2907 VIXL_ASSERT((vd.Is4H() && vm.Is1H()) || (vd.Is8H() && vm.Is1H()) ||
2910 VIXL_ASSERT((vm.Is1H() && (vm.GetCode() < 16) && (vm_index < 8)) ||
2931 VIXL_ASSERT((vd.Is4S() && vn.Is4H() && vm.Is1H()) ||
2938 VIXL_ASSERT((vm.Is1H() && (vm.GetCode() < 16) && (vm_index < 8)) ||
2969 VIXL_ASSERT(AS); \
3024 VIXL_ASSERT(AS); \
3042 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3053 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3068 VIXL_ASSERT((vd.Is1B() && vn.Is1H()) || (vd.Is1H() && vn.Is1S()) ||
3073 VIXL_ASSERT((vd.Is8B() && vn.Is8H()) || (vd.Is4H() && vn.Is4S()) ||
3083 VIXL_ASSERT(vd.IsVector() && vd.IsD());
3089 VIXL_ASSERT(vd.IsVector() && vd.IsQ());
3095 VIXL_ASSERT(vd.IsScalar() || vd.IsD());
3101 VIXL_ASSERT(vd.IsVector() && vd.IsQ());
3107 VIXL_ASSERT(vd.IsScalar() || vd.IsD());
3113 VIXL_ASSERT(vd.IsVector() && vd.IsQ());
3119 VIXL_ASSERT(vd.IsScalar() || vd.IsD());
3125 VIXL_ASSERT(vd.IsVector() && vd.IsQ());
3132 VIXL_ASSERT(AreSameFormat(vd, vn));
3133 VIXL_ASSERT(vd.Is8B() || vd.Is16B());
3139 VIXL_ASSERT(AreSameFormat(vd, vn));
3140 VIXL_ASSERT(vd.Is8B() || vd.Is16B());
3149 VIXL_ASSERT(AreSameFormat(vd, vn, vm));
3150 VIXL_ASSERT(vd.Is8B() || vd.Is16B());
3151 VIXL_ASSERT((0 <= index) && (index < vd.GetLanes()));
3174 VIXL_ASSERT(lane_size == 8);
3183 VIXL_ASSERT(!vd.Is1D());
3193 VIXL_ASSERT(vd.IsScalar());
3199 VIXL_ASSERT(!vd.Is1D());
3200 VIXL_ASSERT(vd.Is2D() == rn.IsX());
3210 VIXL_ASSERT(AreSameFormat(vd, vn));
3226 VIXL_ASSERT(lane_size == 8);
3231 VIXL_ASSERT(
3234 VIXL_ASSERT(
3258 VIXL_ASSERT(rn.IsW());
3262 VIXL_ASSERT(rn.IsW());
3266 VIXL_ASSERT(rn.IsW());
3269 VIXL_ASSERT(lane_size == 8);
3270 VIXL_ASSERT(rn.IsX());
3275 VIXL_ASSERT(
3296 VIXL_ASSERT(rd.IsW());
3300 VIXL_ASSERT(rd.IsW());
3304 VIXL_ASSERT(rd.IsW());
3307 VIXL_ASSERT(lane_size == 8);
3308 VIXL_ASSERT(rd.IsX());
3314 VIXL_ASSERT(
3322 VIXL_ASSERT(vn.GetSizeInBytes() >= 4);
3333 VIXL_ASSERT(lane_size != 8);
3342 VIXL_ASSERT(lane_size == 4);
3343 VIXL_ASSERT(rd.IsX());
3348 VIXL_ASSERT(
3356 VIXL_ASSERT(AreSameFormat(vd, vn));
3357 VIXL_ASSERT(!vd.Is1D() && !vd.Is2D());
3363 VIXL_ASSERT(AreSameFormat(vd, vn));
3364 VIXL_ASSERT(!vd.Is1D() && !vd.Is2D());
3370 VIXL_ASSERT(AreSameFormat(vd, vn));
3371 VIXL_ASSERT(vd.Is8B() || vd.Is16B());
3377 VIXL_ASSERT(AreSameFormat(vd, vn));
3378 VIXL_ASSERT(vd.Is8B() || vd.Is16B());
3384 VIXL_ASSERT(AreSameFormat(vd, vn));
3385 VIXL_ASSERT(vd.Is8B() || vd.Is16B() || vd.Is4H() || vd.Is8H());
3391 VIXL_ASSERT(AreSameFormat(vd, vn));
3392 VIXL_ASSERT(!vd.Is1D() && !vd.Is2D());
3398 VIXL_ASSERT(AreSameFormat(vd, vn));
3399 VIXL_ASSERT(vd.Is2S() || vd.Is4S());
3405 VIXL_ASSERT(AreSameFormat(vd, vn));
3406 VIXL_ASSERT(vd.Is2S() || vd.Is4S());
3414 VIXL_ASSERT((op == NEON_SADDLP) || (op == NEON_UADDLP) ||
3417 VIXL_ASSERT((vn.Is8B() && vd.Is4H()) || (vn.Is4H() && vd.Is2S()) ||
3447 VIXL_ASSERT((vn.Is8B() && vd.Is1H()) || (vn.Is16B() && vd.Is1H()) ||
3467 VIXL_ASSERT((vn.Is8B() && vd.Is1B()) || (vn.Is16B() && vd.Is1B()) ||
3492 VIXL_ASSERT(AS); \
3503 VIXL_ASSERT(AreSameFormat(vd, vn, vm));
3504 VIXL_ASSERT(!vd.Is1D());
3555 VIXL_ASSERT(AreSameFormat(vd, vn));
3573 VIXL_ASSERT((shift >= 0) && (shift < laneSizeInBits));
3583 VIXL_ASSERT((shift >= 1) && (shift <= laneSizeInBits));
3593 VIXL_ASSERT((shift >= 0) && (shift < laneSizeInBits));
3596 VIXL_ASSERT((vn.Is8B() && vd.Is8H()) || (vn.Is4H() && vd.Is4S()) ||
3611 VIXL_ASSERT((shift >= 1) && (shift <= laneSizeInBits));
3615 VIXL_ASSERT((vd.Is1B() && vn.Is1H()) || (vd.Is1H() && vn.Is1S()) ||
3620 VIXL_ASSERT((vd.Is8B() && vn.Is8H()) || (vd.Is4H() && vn.Is4S()) ||
3631 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3637 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3658 VIXL_ASSERT(vn.IsD());
3664 VIXL_ASSERT(vn.IsQ());
3680 VIXL_ASSERT(vn.IsD());
3686 VIXL_ASSERT(vn.IsQ());
3702 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3708 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3714 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3720 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3726 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3732 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3738 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3744 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3750 VIXL_ASSERT(vd.IsVector() || vd.Is1D());
3756 VIXL_ASSERT(vn.IsVector() && vd.IsD());
3762 VIXL_ASSERT(vn.IsVector() && vd.IsQ());
3768 VIXL_ASSERT(vn.IsVector() && vd.IsD());
3774 VIXL_ASSERT(vn.IsVector() && vd.IsQ());
3780 VIXL_ASSERT(vd.IsD() || (vn.IsScalar() && vd.IsScalar()));
3786 VIXL_ASSERT(vn.IsVector() && vd.IsQ());
3792 VIXL_ASSERT(vd.IsD() || (vn.IsScalar() && vd.IsScalar()));
3798 VIXL_ASSERT(vn.IsVector() && vd.IsQ());
3804 VIXL_ASSERT(vd.IsD() || (vn.IsScalar() && vd.IsScalar()));
3810 VIXL_ASSERT(vn.IsVector() && vd.IsQ());
3816 VIXL_ASSERT(vd.IsD() || (vn.IsScalar() && vd.IsScalar()));
3822 VIXL_ASSERT(vn.IsVector() && vd.IsQ());
3828 VIXL_ASSERT(vd.IsD() || (vn.IsScalar() && vd.IsScalar()));
3834 VIXL_ASSERT(vn.IsVector() && vd.IsQ());
3840 VIXL_ASSERT(vd.IsD() || (vn.IsScalar() && vd.IsScalar()));
3846 VIXL_ASSERT(vn.IsVector() && vd.IsQ());
3856 VIXL_ASSERT(IsImmFP32(imm));
3874 VIXL_ASSERT(IsImmFP64(imm));
3901 VIXL_ASSERT(((imm >> kWRegSize) == 0) ||
3908 VIXL_ASSERT((shift == 0) || (shift == 16) || (shift == 32) ||
3910 VIXL_ASSERT(rd.Is64Bits() || (shift == 0) || (shift == 16));
3922 VIXL_ASSERT(rd.Is64Bits());
3926 VIXL_ASSERT(rd.Is64Bits());
3932 VIXL_ASSERT(IsUint16(imm));
3944 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
3947 VIXL_ASSERT(IsImmAddSub(immediate));
3952 VIXL_ASSERT(operand.GetRegister().GetSizeInBits() == rd.GetSizeInBits());
3953 VIXL_ASSERT(operand.GetShift() != ROR);
3963 VIXL_ASSERT(!(rd.IsSP() && (S == SetFlags)));
3973 VIXL_ASSERT(operand.IsExtendedRegister());
3984 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
3985 VIXL_ASSERT(rd.GetSizeInBits() == operand.GetRegister().GetSizeInBits());
3986 VIXL_ASSERT(operand.IsShiftedRegister() && (operand.GetShiftAmount() == 0));
3992 VIXL_ASSERT(IsUint16(code));
3998 VIXL_ASSERT(IsUint16(code));
4012 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
4017 VIXL_ASSERT(immediate != 0);
4018 VIXL_ASSERT(immediate != -1);
4019 VIXL_ASSERT(rd.Is64Bits() || IsUint32(immediate));
4036 VIXL_ASSERT(operand.IsShiftedRegister());
4037 VIXL_ASSERT(operand.GetRegister().GetSizeInBits() == rd.GetSizeInBits());
4066 VIXL_ASSERT(IsImmConditionalCompare(immediate));
4070 VIXL_ASSERT(operand.IsShiftedRegister() && (operand.GetShiftAmount() == 0));
4080 VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());
4088 VIXL_ASSERT(vd.Is1H() || vd.Is1S() || vd.Is1D());
4098 VIXL_ASSERT(vd.Is1S() || vd.Is1D());
4099 VIXL_ASSERT(AreSameSizeAndType(vd, vn, vm, va));
4108 VIXL_ASSERT(vd.Is8B() || vd.Is16B() || vd.Is4H() || vd.Is8H() || vd.Is2S() ||
4110 VIXL_ASSERT((left_shift == 0) || (left_shift == 8) || (left_shift == 16) ||
4112 VIXL_ASSERT(IsUint8(imm8));
4116 VIXL_ASSERT(op == NEONModifiedImmediate_MOVI);
4125 VIXL_ASSERT((left_shift == 0) || (left_shift == 8));
4141 VIXL_ASSERT(vd.Is2S() || vd.Is4S());
4142 VIXL_ASSERT((shift_amount == 8) || (shift_amount == 16));
4143 VIXL_ASSERT(IsUint8(imm8));
4181 VIXL_ASSERT(rd.GetSizeInBits() >= rn.GetSizeInBits());
4204 VIXL_ASSERT(rn.GetSizeInBits() == kXRegSize);
4224 VIXL_ASSERT(operand.IsShiftedRegister());
4225 VIXL_ASSERT(rn.Is64Bits() ||
4276 VIXL_ASSERT((option != RequireUnscaledOffset) &&
4291 VIXL_ASSERT((shift_amount == 0) || (shift_amount == access_size));
4321 VIXL_ASSERT(addr.IsRegisterOffset() || addr.IsImmediateOffset());
4389 VIXL_ASSERT(access_size <= kQRegSizeInBytesLog2);
4396 VIXL_ASSERT(access_size <= kQRegSizeInBytesLog2);
4415 VIXL_ASSERT((reg_size == kXRegSize) || (reg_size == kWRegSize));
4432 VIXL_ASSERT((width == kWRegSize) || (width == kXRegSize));
4627 VIXL_ASSERT(rt.IsValid());
4631 VIXL_ASSERT(rt.IsVRegister());
4642 VIXL_ASSERT(rt.IsQ());
4650 VIXL_ASSERT(rt.IsValid());
4654 VIXL_ASSERT(rt.IsVRegister());
4665 VIXL_ASSERT(rt.IsQ());
4674 VIXL_ASSERT(AreSameSizeAndType(rt, rt2));
4679 VIXL_ASSERT(rt.IsVRegister());
4686 VIXL_ASSERT(rt.IsQ());
4695 VIXL_ASSERT((STP_w | LoadStorePairLBit) == LDP_w);
4703 VIXL_ASSERT(AreSameSizeAndType(rt, rt2));
4708 VIXL_ASSERT(rt.IsVRegister());
4715 VIXL_ASSERT(rt.IsQ());
4724 VIXL_ASSERT((STNP_w | LoadStorePairNonTemporalLBit) == LDNP_w);
4734 VIXL_ASSERT(rt.IsVRegister());
4741 VIXL_ASSERT(rt.IsQ());
4772 VIXL_ASSERT(!regs[i].IsValid());
4779 VIXL_ASSERT(number_of_valid_regs >= number_of_unique_regs);
4780 VIXL_ASSERT(number_of_valid_fpregs >= number_of_unique_fpregs);
4795 VIXL_ASSERT(reg1.IsValid());
4812 VIXL_ASSERT(reg1.IsValid());
4825 VIXL_ASSERT(reg1.IsValid());
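
The matches above reduce to a few recurring operand checks: register operands must agree in width (e.g. lines 557-558), NEON register lists must share a format and use consecutive registers (e.g. lines 277-278), the addressing mode and offset must be legal for the instruction (e.g. lines 973-975 and 1021), and immediates must fit their encoding fields (e.g. line 3947). The sketch below shows that pattern in isolation; EmitLogicalShifted and this Register type are illustrative stand-ins, not VIXL's actual emitter or register class.

    // Hedged sketch: an emitter validates operands with VIXL_ASSERT before
    // encoding, mirroring the width checks listed above (e.g. lines 557-558).
    struct Register {
      int size_in_bits;
      int GetSizeInBits() const { return size_in_bits; }
    };

    void EmitLogicalShifted(const Register& rd,
                            const Register& rn,
                            const Register& rm) {
      VIXL_ASSERT(rd.GetSizeInBits() == rn.GetSizeInBits());  // rd and rn agree
      VIXL_ASSERT(rd.GetSizeInBits() == rm.GetSizeInBits());  // rd and rm agree
      // ... compute the instruction encoding and emit it ...
    }

In a debug build, calling such an emitter with a 64-bit destination and a 32-bit source trips the assertion immediately instead of silently emitting a mis-encoded instruction; in a release build the checks cost nothing.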