Home | History | Annotate | Download | only in x86_64

Lines Matching refs:X86_64Assembler

39 void X86_64Assembler::call(CpuRegister reg) {
47 void X86_64Assembler::call(const Address& address) {
55 void X86_64Assembler::call(Label* label) {
62 void X86_64Assembler::pushq(CpuRegister reg) {
69 void X86_64Assembler::pushq(const Address& address) {
77 void X86_64Assembler::pushq(const Immediate& imm) {
90 void X86_64Assembler::popq(CpuRegister reg) {
97 void X86_64Assembler::popq(const Address& address) {
105 void X86_64Assembler::movq(CpuRegister dst, const Immediate& imm) {
121 void X86_64Assembler::movl(CpuRegister dst, const Immediate& imm) {
129 void X86_64Assembler::movq(CpuRegister dst, CpuRegister src) {
138 void X86_64Assembler::movl(CpuRegister dst, CpuRegister src) {
146 void X86_64Assembler::movq(CpuRegister dst, const Address& src) {
154 void X86_64Assembler::movl(CpuRegister dst, const Address& src) {
162 void X86_64Assembler::movq(const Address& dst, CpuRegister src) {
170 void X86_64Assembler::movl(const Address& dst, CpuRegister src) {
177 void X86_64Assembler::movl(const Address& dst, const Immediate& imm) {
185 void X86_64Assembler::movzxb(CpuRegister dst, CpuRegister src) {
194 void X86_64Assembler::movzxb(CpuRegister dst, const Address& src) {
203 void X86_64Assembler::movsxb(CpuRegister dst, CpuRegister src) {
212 void X86_64Assembler::movsxb(CpuRegister dst, const Address& src) {
221 void X86_64Assembler::movb(CpuRegister /*dst*/, const Address& /*src*/) {
226 void X86_64Assembler::movb(const Address& dst, CpuRegister src) {
234 void X86_64Assembler::movb(const Address& dst, const Immediate& imm) {
243 void X86_64Assembler::movzxw(CpuRegister dst, CpuRegister src) {
252 void X86_64Assembler::movzxw(CpuRegister dst, const Address& src) {
261 void X86_64Assembler::movsxw(CpuRegister dst, CpuRegister src) {
270 void X86_64Assembler::movsxw(CpuRegister dst, const Address& src) {
279 void X86_64Assembler::movw(CpuRegister /*dst*/, const Address& /*src*/) {
284 void X86_64Assembler::movw(const Address& dst, CpuRegister src) {
293 void X86_64Assembler::leaq(CpuRegister dst, const Address& src) {
301 void X86_64Assembler::movss(XmmRegister dst, const Address& src) {
311 void X86_64Assembler::movss(const Address& dst, XmmRegister src) {
321 void X86_64Assembler::movss(XmmRegister dst, XmmRegister src) {
331 void X86_64Assembler::movd(XmmRegister dst, CpuRegister src) {
341 void X86_64Assembler::movd(CpuRegister dst, XmmRegister src) {
351 void X86_64Assembler::addss(XmmRegister dst, XmmRegister src) {
361 void X86_64Assembler::addss(XmmRegister dst, const Address& src) {
371 void X86_64Assembler::subss(XmmRegister dst, XmmRegister src) {
381 void X86_64Assembler::subss(XmmRegister dst, const Address& src) {
391 void X86_64Assembler::mulss(XmmRegister dst, XmmRegister src) {
401 void X86_64Assembler::mulss(XmmRegister dst, const Address& src) {
411 void X86_64Assembler::divss(XmmRegister dst, XmmRegister src) {
421 void X86_64Assembler::divss(XmmRegister dst, const Address& src) {
431 void X86_64Assembler::flds(const Address& src) {
438 void X86_64Assembler::fstps(const Address& dst) {
445 void X86_64Assembler::movsd(XmmRegister dst, const Address& src) {
455 void X86_64Assembler::movsd(const Address& dst, XmmRegister src) {
465 void X86_64Assembler::movsd(XmmRegister dst, XmmRegister src) {
475 void X86_64Assembler::addsd(XmmRegister dst, XmmRegister src) {
485 void X86_64Assembler::addsd(XmmRegister dst, const Address& src) {
495 void X86_64Assembler::subsd(XmmRegister dst, XmmRegister src) {
505 void X86_64Assembler::subsd(XmmRegister dst, const Address& src) {
515 void X86_64Assembler::mulsd(XmmRegister dst, XmmRegister src) {
525 void X86_64Assembler::mulsd(XmmRegister dst, const Address& src) {
535 void X86_64Assembler::divsd(XmmRegister dst, XmmRegister src) {
545 void X86_64Assembler::divsd(XmmRegister dst, const Address& src) {
555 void X86_64Assembler::cvtsi2ss(XmmRegister dst, CpuRegister src) {
565 void X86_64Assembler::cvtsi2sd(XmmRegister dst, CpuRegister src) {
575 void X86_64Assembler::cvtss2si(CpuRegister dst, XmmRegister src) {
585 void X86_64Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
595 void X86_64Assembler::cvtsd2si(CpuRegister dst, XmmRegister src) {
605 void X86_64Assembler::cvttss2si(CpuRegister dst, XmmRegister src) {
615 void X86_64Assembler::cvttsd2si(CpuRegister dst, XmmRegister src) {
625 void X86_64Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
635 void X86_64Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
645 void X86_64Assembler::comiss(XmmRegister a, XmmRegister b) {
654 void X86_64Assembler::comisd(XmmRegister a, XmmRegister b) {
664 void X86_64Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
674 void X86_64Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
684 void X86_64Assembler::xorpd(XmmRegister dst, const Address& src) {
694 void X86_64Assembler::xorpd(XmmRegister dst, XmmRegister src) {
704 void X86_64Assembler::xorps(XmmRegister dst, const Address& src) {
713 void X86_64Assembler::xorps(XmmRegister dst, XmmRegister src) {
722 void X86_64Assembler::andpd(XmmRegister dst, const Address& src) {
732 void X86_64Assembler::fldl(const Address& src) {
739 void X86_64Assembler::fstpl(const Address& dst) {
746 void X86_64Assembler::fnstcw(const Address& dst) {
753 void X86_64Assembler::fldcw(const Address& src) {
760 void X86_64Assembler::fistpl(const Address& dst) {
767 void X86_64Assembler::fistps(const Address& dst) {
774 void X86_64Assembler::fildl(const Address& src) {
781 void X86_64Assembler::fincstp() {
788 void X86_64Assembler::ffree(const Immediate& index) {
796 void X86_64Assembler::fsin() {
803 void X86_64Assembler::fcos() {
810 void X86_64Assembler::fptan() {
817 void X86_64Assembler::xchgl(CpuRegister dst, CpuRegister src) {
825 void X86_64Assembler::xchgq(CpuRegister dst, CpuRegister src) {
833 void X86_64Assembler::xchgl(CpuRegister reg, const Address& address) {
841 void X86_64Assembler::cmpl(CpuRegister reg, const Immediate& imm) {
848 void X86_64Assembler::cmpl(CpuRegister reg0, CpuRegister reg1) {
856 void X86_64Assembler::cmpl(CpuRegister reg, const Address& address) {
864 void X86_64Assembler::cmpq(CpuRegister reg0, CpuRegister reg1) {
872 void X86_64Assembler::cmpq(CpuRegister reg, const Immediate& imm) {
880 void X86_64Assembler::cmpq(CpuRegister reg, const Address& address) {
888 void X86_64Assembler::addl(CpuRegister dst, CpuRegister src) {
896 void X86_64Assembler::addl(CpuRegister reg, const Address& address) {
904 void X86_64Assembler::cmpl(const Address& address, CpuRegister reg) {
912 void X86_64Assembler::cmpl(const Address& address, const Immediate& imm) {
919 void X86_64Assembler::testl(CpuRegister reg1, CpuRegister reg2) {
927 void X86_64Assembler::testl(CpuRegister reg, const Immediate& immediate) {
953 void X86_64Assembler::testq(CpuRegister reg, const Address& address) {
961 void X86_64Assembler::andl(CpuRegister dst, CpuRegister src) {
969 void X86_64Assembler::andl(CpuRegister dst, const Immediate& imm) {
976 void X86_64Assembler::andq(CpuRegister reg, const Immediate& imm) {
984 void X86_64Assembler::orl(CpuRegister dst, CpuRegister src) {
992 void X86_64Assembler::orl(CpuRegister dst, const Immediate& imm) {
999 void X86_64Assembler::xorl(CpuRegister dst, CpuRegister src) {
1007 void X86_64Assembler::xorq(CpuRegister dst, CpuRegister src) {
1015 void X86_64Assembler::xorq(CpuRegister dst, const Immediate& imm) {
1023 void X86_64Assembler::rex(bool force, bool w, Register* r, Register* x, Register* b) {
1050 void X86_64Assembler::rex_reg_mem(bool force, bool w, Register* dst, const Address& mem) {
1075 void X86_64Assembler::addl(CpuRegister reg, const Immediate& imm) {
1082 void X86_64Assembler::addq(CpuRegister reg, const Immediate& imm) {
1090 void X86_64Assembler::addq(CpuRegister dst, const Address& address) {
1098 void X86_64Assembler::addq(CpuRegister dst, CpuRegister src) {
1107 void X86_64Assembler::addl(const Address& address, CpuRegister reg) {
1115 void X86_64Assembler::addl(const Address& address, const Immediate& imm) {
1122 void X86_64Assembler::subl(CpuRegister dst, CpuRegister src) {
1130 void X86_64Assembler::subl(CpuRegister reg, const Immediate& imm) {
1137 void X86_64Assembler::subq(CpuRegister reg, const Immediate& imm) {
1145 void X86_64Assembler::subq(CpuRegister dst, CpuRegister src) {
1153 void X86_64Assembler::subq(CpuRegister reg, const Address& address) {
1161 void X86_64Assembler::subl(CpuRegister reg, const Address& address) {
1169 void X86_64Assembler::cdq() {
1175 void X86_64Assembler::idivl(CpuRegister reg) {
1183 void X86_64Assembler::imull(CpuRegister dst, CpuRegister src) {
1192 void X86_64Assembler::imull(CpuRegister reg, const Immediate& imm) {
1201 void X86_64Assembler::imull(CpuRegister reg, const Address& address) {
1210 void X86_64Assembler::imull(CpuRegister reg) {
1218 void X86_64Assembler::imull(const Address& address) {
1226 void X86_64Assembler::mull(CpuRegister reg) {
1234 void X86_64Assembler::mull(const Address& address) {
1243 void X86_64Assembler::shll(CpuRegister reg, const Immediate& imm) {
1248 void X86_64Assembler::shll(CpuRegister operand, CpuRegister shifter) {
1253 void X86_64Assembler::shrl(CpuRegister reg, const Immediate& imm) {
1258 void X86_64Assembler::shrq(CpuRegister reg, const Immediate& imm) {
1263 void X86_64Assembler::shrl(CpuRegister operand, CpuRegister shifter) {
1268 void X86_64Assembler::sarl(CpuRegister reg, const Immediate& imm) {
1273 void X86_64Assembler::sarl(CpuRegister operand, CpuRegister shifter) {
1278 void X86_64Assembler::negl(CpuRegister reg) {
1286 void X86_64Assembler::notl(CpuRegister reg) {
1294 void X86_64Assembler::enter(const Immediate& imm) {
1304 void X86_64Assembler::leave() {
1310 void X86_64Assembler::ret() {
1316 void X86_64Assembler::ret(const Immediate& imm) {
1326 void X86_64Assembler::nop() {
1332 void X86_64Assembler::int3() {
1338 void X86_64Assembler::hlt() {
1344 void X86_64Assembler::j(Condition condition, Label* label) {
1367 void X86_64Assembler::jmp(CpuRegister reg) {
1374 void X86_64Assembler::jmp(const Address& address) {
1381 void X86_64Assembler::jmp(Label* label) {
1402 X86_64Assembler* X86_64Assembler::lock() {
1409 void X86_64Assembler::cmpxchgl(const Address& address, CpuRegister reg) {
1416 void X86_64Assembler::mfence() {
1424 X86_64Assembler* X86_64Assembler::gs() {
1432 void X86_64Assembler::AddImmediate(CpuRegister reg, const Immediate& imm) {
1444 void X86_64Assembler::setcc(Condition condition, CpuRegister dst) {
1456 void X86_64Assembler::LoadDoubleConstant(XmmRegister dst, double value) {
1466 void X86_64Assembler::FloatNegate(XmmRegister f) {
1478 void X86_64Assembler::DoubleNegate(XmmRegister d) {
1488 void X86_64Assembler::DoubleAbs(XmmRegister reg) {
1498 void X86_64Assembler::Align(int alignment, int offset) {
1507 void X86_64Assembler::Bind(Label* label) {
1520 void X86_64Assembler::EmitOperand(uint8_t reg_or_opcode, const Operand& operand) {
1535 void X86_64Assembler::EmitImmediate(const Immediate& imm) {
1544 void X86_64Assembler::EmitComplex(uint8_t reg_or_opcode,
1566 void X86_64Assembler::EmitLabel(Label* label, int instruction_size) {
1577 void X86_64Assembler::EmitLabelLink(Label* label) {
1585 void X86_64Assembler::EmitGenericShift(bool wide,
1605 void X86_64Assembler::EmitGenericShift(int reg_or_opcode,
1614 void X86_64Assembler::EmitOptionalRex(bool force, bool w, bool r, bool x, bool b) {
1638 void X86_64Assembler::EmitOptionalRex32(CpuRegister reg) {
1642 void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, CpuRegister src) {
1646 void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, XmmRegister src) {
1650 void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, XmmRegister src) {
1654 void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, CpuRegister src) {
1658 void X86_64Assembler::EmitOptionalRex32(const Operand& operand) {
1665 void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, const Operand& operand) {
1675 void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, const Operand& operand) {
1685 void X86_64Assembler::EmitRex64(CpuRegister reg) {
1689 void X86_64Assembler::EmitRex64(CpuRegister dst, CpuRegister src) {
1693 void X86_64Assembler::EmitRex64(CpuRegister dst, const Operand& operand) {
1703 void X86_64Assembler::EmitOptionalByteRegNormalizingRex32(CpuRegister dst, CpuRegister src) {
1707 void X86_64Assembler::EmitOptionalByteRegNormalizingRex32(CpuRegister dst, const Operand& operand) {
1719 void X86_64Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
1771 void X86_64Assembler::RemoveFrame(size_t frame_size,
1796 void X86_64Assembler::IncreaseFrameSize(size_t adjust) {
1801 void X86_64Assembler::DecreaseFrameSize(size_t adjust) {
1806 void X86_64Assembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
1839 void X86_64Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
1845 void X86_64Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {  // NOTE: signature truncated in extraction — verify against source line 1845
1851 void X86_64Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
1856 void X86_64Assembler::StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,
1861 void X86_64Assembler::StoreStackOffsetToThread64(ThreadOffset<8> thr_offs,
1870 void X86_64Assembler::StoreStackPointerToThread64(ThreadOffset<8> thr_offs) {
1874 void X86_64Assembler::StoreSpanning(FrameOffset /*dst*/, ManagedRegister /*src*/,
1879 void X86_64Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
1911 void X86_64Assembler::LoadFromThread64(ManagedRegister mdest, ThreadOffset<8> src, size_t size) {
1937 void X86_64Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
1943 void X86_64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base,
1950 void X86_64Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
1957 void X86_64Assembler::LoadRawPtrFromThread64(ManagedRegister mdest, ThreadOffset<8> offs) {
1963 void X86_64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
1974 void X86_64Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
1985 void X86_64Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
2011 void X86_64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
2019 void X86_64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
2028 void X86_64Assembler::CopyRawPtrToThread64(ThreadOffset<8> thr_offs,
2037 void X86_64Assembler::Copy(FrameOffset dest, FrameOffset src,
2052 void X86_64Assembler::Copy(FrameOffset /*dst*/, ManagedRegister /*src_base*/, Offset /*src_offset*/,
2057 void X86_64Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
2065 void X86_64Assembler::Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset,
2074 void X86_64Assembler::Copy(ManagedRegister dest, Offset dest_offset,
2083 void X86_64Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
2093 void X86_64Assembler::MemoryBarrier(ManagedRegister) {
2099 void X86_64Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
2127 void X86_64Assembler::CreateHandleScopeEntry(FrameOffset out_off,
2147 void X86_64Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
2163 void X86_64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
2167 void X86_64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
2171 void X86_64Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister) {
2178 void X86_64Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
2184 void X86_64Assembler::CallFromThread64(ThreadOffset<8> offset, ManagedRegister /*mscratch*/) {
2188 void X86_64Assembler::GetCurrentThread(ManagedRegister tr) {
2192 void X86_64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister mscratch) {
2207 void X86_64Assembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
2215 X86_64Assembler* sp_asm = down_cast<X86_64Assembler*>(sasm);