/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_x86_64.h"

#include "base/casts.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "memory_region.h"
#include "thread.h"

namespace art {
namespace x86_64 {

std::ostream& operator<<(std::ostream& os, const CpuRegister& reg) {
  return os << reg.AsRegister();
}

std::ostream& operator<<(std::ostream& os, const XmmRegister& reg) {
  return os << reg.AsFloatRegister();
}

std::ostream& operator<<(std::ostream& os, const X87Register& reg) {
  return os << "ST" << static_cast<int>(reg);
}

void X86_64Assembler::call(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xFF);
  EmitRegisterOperand(2, reg.LowBits());
}
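// For reference, the bytes this produces (hand-checked against the Intel
// manual; illustrative only): call %rax -> FF D0, while call %r10 needs a
// REX.B prefix to reach the upper register file -> 41 FF D2.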


void X86_64Assembler::call(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xFF);
  EmitOperand(2, address);
}


void X86_64Assembler::call(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE8);
  static const int kSize = 5;
  EmitLabel(label, kSize);
}

void X86_64Assembler::pushq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0x50 + reg.LowBits());
}


void X86_64Assembler::pushq(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xFF);
  EmitOperand(6, address);
}


void X86_64Assembler::pushq(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // pushq only supports 32b immediate.
  if (imm.is_int8()) {
    EmitUint8(0x6A);
    EmitUint8(imm.value() & 0xFF);
  } else {
    EmitUint8(0x68);
    EmitImmediate(imm);
  }
}
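// Illustrative encodings: pushq $0x12 -> 6A 12 (sign-extended imm8) and
// pushq $0x1234 -> 68 34 12 00 00 (imm32). In 64-bit mode either form still
// occupies a full 8-byte stack slot.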


void X86_64Assembler::popq(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0x58 + reg.LowBits());
}


void X86_64Assembler::popq(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0x8F);
  EmitOperand(0, address);
}


void X86_64Assembler::movq(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (imm.is_int32()) {
    // 32 bit. Note: sign-extends.
    EmitRex64(dst);
    EmitUint8(0xC7);
    EmitRegisterOperand(0, dst.LowBits());
    EmitInt32(static_cast<int32_t>(imm.value()));
  } else {
    EmitRex64(dst);
    EmitUint8(0xB8 + dst.LowBits());
    EmitInt64(imm.value());
  }
}
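// The two paths trade size for range (bytes below are illustrative):
//   movq $-1, %rax          -> 48 C7 C0 FF FF FF FF            (7 bytes, imm32 sign-extended)
//   movq $0x123456789, %rax -> 48 B8 89 67 45 23 01 00 00 00   (10-byte movabs)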


void X86_64Assembler::movl(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // B8+rd only takes an imm32; a 64-bit value would mis-encode.
  EmitOptionalRex32(dst);
  EmitUint8(0xB8 + dst.LowBits());
  EmitImmediate(imm);
}


void X86_64Assembler::movq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // 0x89 is movq r/m64 <- r64, with op1 in r/m and op2 in reg: so reverse EmitRex64.
  EmitRex64(src, dst);
  EmitUint8(0x89);
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}
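// Worked example of the reversed REX (illustrative): movq %rax, %r8 emits
// REX.W+B = 49, opcode 89, ModRM C0 (reg = rax, r/m = r8) -> 49 89 C0.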


void X86_64Assembler::movl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x8B);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


void X86_64Assembler::movq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x8B);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::movl(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x8B);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::movq(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(src, dst);
  EmitUint8(0x89);
  EmitOperand(src.LowBits(), dst);
}


void X86_64Assembler::movl(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x89);
  EmitOperand(src.LowBits(), dst);
}

void X86_64Assembler::movl(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}

void X86_64Assembler::movzxb(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


void X86_64Assembler::movzxb(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::movsxb(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


void X86_64Assembler::movsxb(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::movb(CpuRegister /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxb or movsxb instead.";
}


void X86_64Assembler::movb(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalByteRegNormalizingRex32(src, dst);
  EmitUint8(0x88);
  EmitOperand(src.LowBits(), dst);
}

void X86_64Assembler::movb(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);  // The address may use R8-R15 as base/index, so emit REX.XB if needed.
  EmitUint8(0xC6);
  EmitOperand(Register::RAX, dst);  // /0 opcode extension (RAX == 0).
  CHECK(imm.is_int8());
  EmitUint8(imm.value() & 0xFF);
}


void X86_64Assembler::movzxw(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


void X86_64Assembler::movzxw(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::movsxw(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


void X86_64Assembler::movsxw(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::movw(CpuRegister /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxw or movsxw instead.";
}


void X86_64Assembler::movw(const Address& dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitOptionalRex32(src, dst);
  EmitUint8(0x89);
  EmitOperand(src.LowBits(), dst);
}


void X86_64Assembler::leaq(CpuRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x8D);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::movss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::movss(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src.LowBits(), dst);
}

void X86_64Assembler::movss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(src, dst);  // Opcode 0x11 is MR-encoded: src is in ModRM.reg, dst in ModRM.rm.
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src.LowBits(), dst);
}


void X86_64Assembler::movd(XmmRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x6E);
  EmitOperand(dst.LowBits(), Operand(src));
}


void X86_64Assembler::movd(CpuRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(src.LowBits(), Operand(dst));
}


void X86_64Assembler::addss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::addss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::subss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::subss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::mulss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::mulss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::divss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::divss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::flds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(0, src);
}


void X86_64Assembler::fstps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(3, dst);
}


void X86_64Assembler::movsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::movsd(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(src, dst);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src.LowBits(), dst);
}

void X86_64Assembler::movsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(src, dst);  // Opcode 0x11 is MR-encoded: src is in ModRM.reg, dst in ModRM.rm.
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src.LowBits(), dst);
}


void X86_64Assembler::addsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::addsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::subsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::subsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::mulsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::mulsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::divsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::divsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::cvtsi2ss(XmmRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), Operand(src));
}


void X86_64Assembler::cvtsi2sd(XmmRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst.LowBits(), Operand(src));
}


void X86_64Assembler::cvtss2si(CpuRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::cvtsd2si(CpuRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::cvttss2si(CpuRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::cvttsd2si(CpuRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xE6);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}

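// A note on the recurring prefix bytes in the SSE helpers above (standard
// x86 encoding, stated here for orientation): F3 0F selects the scalar-single
// (ss) form, F2 0F the scalar-double (sd) form, and 66 0F the packed-double
// form. A REX byte, when needed, must sit between the legacy prefix and the
// 0F escape, which is why EmitOptionalRex32 runs after EmitUint8(0xF3/0xF2).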

void X86_64Assembler::comiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a.LowBits(), b);
}


void X86_64Assembler::comisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(a, b);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a.LowBits(), b);
}


void X86_64Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::xorpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::xorpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::xorps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::xorps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst.LowBits(), src);
}


void X86_64Assembler::andpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst.LowBits(), src);
}


void X86_64Assembler::fldl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(0, src);
}


void X86_64Assembler::fstpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(3, dst);
}


void X86_64Assembler::fnstcw(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(7, dst);
}


void X86_64Assembler::fldcw(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(5, src);
}


void X86_64Assembler::fistpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(7, dst);
}


void X86_64Assembler::fistps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(3, dst);
}


void X86_64Assembler::fildl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(5, src);
}


void X86_64Assembler::fincstp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF7);
}


void X86_64Assembler::ffree(const Immediate& index) {
  CHECK_LT(index.value(), 7);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitUint8(0xC0 + index.value());
}


void X86_64Assembler::fsin() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFE);
}


void X86_64Assembler::fcos() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFF);
}


void X86_64Assembler::fptan() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF2);
}


void X86_64Assembler::xchgl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x87);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


void X86_64Assembler::xchgq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x87);
  EmitOperand(dst.LowBits(), Operand(src));
}


void X86_64Assembler::xchgl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x87);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::cmpl(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitComplex(7, Operand(reg), imm);
}


void X86_64Assembler::cmpl(CpuRegister reg0, CpuRegister reg1) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg0, reg1);
  EmitUint8(0x3B);
  EmitOperand(reg0.LowBits(), Operand(reg1));
}


void X86_64Assembler::cmpl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x3B);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::cmpq(CpuRegister reg0, CpuRegister reg1) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg0, reg1);
  EmitUint8(0x3B);
  EmitOperand(reg0.LowBits(), Operand(reg1));
}


void X86_64Assembler::cmpq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // cmpq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(7, Operand(reg), imm);
}

void X86_64Assembler::cmpq(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);  // reg is in ModRM.reg (REX.R); the address supplies REX.XB.
  EmitUint8(0x3B);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::addl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x03);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}


void X86_64Assembler::addl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x03);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::cmpl(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x39);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::cmpl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitComplex(7, address, imm);
}


void X86_64Assembler::testl(CpuRegister reg1, CpuRegister reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg1, reg2);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1.LowBits(), reg2.LowBits());
}

void X86_64Assembler::testl(CpuRegister reg, const Immediate& immediate) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // For registers that have a byte variant (RAX, RBX, RCX, and RDX) we only
  // test the low byte of the register to keep the encoding short.
  if (immediate.is_uint8() && reg.AsRegister() < 4) {
    // Use zero-extended 8-bit immediate.
    if (reg.AsRegister() == RAX) {
      EmitUint8(0xA8);
    } else {
      EmitUint8(0xF6);
      EmitUint8(0xC0 + reg.AsRegister());
    }
    EmitUint8(immediate.value() & 0xFF);
  } else if (reg.AsRegister() == RAX) {
    // Use short form if the destination is RAX.
    EmitUint8(0xA9);
    EmitImmediate(immediate);
  } else {
    EmitOptionalRex32(reg);
    EmitUint8(0xF7);
    EmitOperand(0, Operand(reg));
    EmitImmediate(immediate);
  }
}
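// Illustrative encodings for the three paths (bytes hand-assembled):
//   testl $0x10, %eax   -> A8 10                (test %al, imm8)
//   testl $0x10, %ecx   -> F6 C1 10             (test %cl, imm8)
//   testl $0x1000, %eax -> A9 00 10 00 00       (RAX short form, imm32)
//   testl $0x1000, %ecx -> F7 C1 00 10 00 00    (generic F7 /0 form)
// The byte forms are sound only because the immediate is zero-extended
// (is_uint8), so no set bit can fall outside the tested byte.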


void X86_64Assembler::testq(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);  // reg is in ModRM.reg (REX.R); the address supplies REX.XB.
  EmitUint8(0x85);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::andl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x23);
  EmitOperand(dst.LowBits(), Operand(src));
}


void X86_64Assembler::andl(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitComplex(4, Operand(dst), imm);
}


void X86_64Assembler::andq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // andq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(4, Operand(reg), imm);
}


void X86_64Assembler::orl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0B);
  EmitOperand(dst.LowBits(), Operand(src));
}


void X86_64Assembler::orl(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst);
  EmitComplex(1, Operand(dst), imm);
}


void X86_64Assembler::xorl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x33);
  EmitOperand(dst.LowBits(), Operand(src));
}


void X86_64Assembler::xorq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x33);
  EmitOperand(dst.LowBits(), Operand(src));
}


void X86_64Assembler::xorq(CpuRegister dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // xorq only supports 32b immediate.
  EmitRex64(dst);
  EmitComplex(6, Operand(dst), imm);
}

#if 0
// Dead code, kept for reference; the live REX helpers are further below.
void X86_64Assembler::rex(bool force, bool w, Register* r, Register* x, Register* b) {
  // REX.WRXB
  // W - 64-bit operand
  // R - MODRM.reg
  // X - SIB.index
  // B - MODRM.rm/SIB.base
  uint8_t rex = force ? 0x40 : 0;
  if (w) {
    rex |= 0x48;  // REX.W000
  }
  if (r != nullptr && *r >= Register::R8 && *r < Register::kNumberOfCpuRegisters) {
    rex |= 0x44;  // REX.0R00
    *r = static_cast<Register>(*r - 8);
  }
  if (x != nullptr && *x >= Register::R8 && *x < Register::kNumberOfCpuRegisters) {
    rex |= 0x42;  // REX.00X0
    *x = static_cast<Register>(*x - 8);
  }
  if (b != nullptr && *b >= Register::R8 && *b < Register::kNumberOfCpuRegisters) {
    rex |= 0x41;  // REX.000B
    *b = static_cast<Register>(*b - 8);
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}

void X86_64Assembler::rex_reg_mem(bool force, bool w, Register* dst, const Address& mem) {
  // REX.WRXB
  // W - 64-bit operand
  // R - MODRM.reg
  // X - SIB.index
  // B - MODRM.rm/SIB.base
  uint8_t rex = mem.rex();  // mem is a reference, not a pointer.
  if (force) {
    rex |= 0x40;  // REX.0000
  }
  if (w) {
    rex |= 0x48;  // REX.W000
  }
  if (dst != nullptr && *dst >= Register::R8 && *dst < Register::kNumberOfCpuRegisters) {
    rex |= 0x44;  // REX.0R00
    *dst = static_cast<Register>(*dst - 8);
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}

void rex_mem_reg(bool force, bool w, Address* mem, Register* src);
#endif

void X86_64Assembler::addl(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitComplex(0, Operand(reg), imm);
}


void X86_64Assembler::addq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // addq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(0, Operand(reg), imm);
}

void X86_64Assembler::addq(CpuRegister dst, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, address);  // dst is in ModRM.reg (REX.R); the address supplies REX.XB.
  EmitUint8(0x03);
  EmitOperand(dst.LowBits(), address);
}


void X86_64Assembler::addq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // 0x01 is addq r/m64 <- r/m64 + r64, with op1 in r/m and op2 in reg: so reverse EmitRex64.
  EmitRex64(src, dst);
  EmitUint8(0x01);
  EmitRegisterOperand(src.LowBits(), dst.LowBits());
}


void X86_64Assembler::addl(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x01);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::addl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitComplex(0, address, imm);
}


void X86_64Assembler::subl(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x2B);
  EmitOperand(dst.LowBits(), Operand(src));
}


void X86_64Assembler::subl(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitComplex(5, Operand(reg), imm);
}


void X86_64Assembler::subq(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // subq only supports 32b immediate.
  EmitRex64(reg);
  EmitComplex(5, Operand(reg), imm);
}


void X86_64Assembler::subq(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(dst, src);
  EmitUint8(0x2B);
  EmitRegisterOperand(dst.LowBits(), src.LowBits());
}

void X86_64Assembler::subq(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitRex64(reg, address);  // reg is in ModRM.reg (REX.R); the address supplies REX.XB.
  EmitUint8(0x2B);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::subl(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x2B);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::cdq() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x99);
}


void X86_64Assembler::idivl(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitUint8(0xF8 | reg.LowBits());
}


void X86_64Assembler::imull(CpuRegister dst, CpuRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(dst, src);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(dst.LowBits(), Operand(src));
}

void X86_64Assembler::imull(CpuRegister reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int32());  // Opcode 0x69 only takes a 32b immediate.
  EmitOptionalRex32(reg);
  EmitUint8(0x69);
  EmitOperand(reg.LowBits(), Operand(reg));
  EmitImmediate(imm);
}


void X86_64Assembler::imull(CpuRegister reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(reg.LowBits(), address);
}


void X86_64Assembler::imull(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));
}


void X86_64Assembler::imull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xF7);
  EmitOperand(5, address);
}


void X86_64Assembler::mull(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitOperand(4, Operand(reg));
}


void X86_64Assembler::mull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xF7);
  EmitOperand(4, address);
}


void X86_64Assembler::shll(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(false, 4, reg, imm);
}


void X86_64Assembler::shll(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(4, operand, shifter);
}


void X86_64Assembler::shrl(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(false, 5, reg, imm);
}


void X86_64Assembler::shrq(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(true, 5, reg, imm);
}


void X86_64Assembler::shrl(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(5, operand, shifter);
}


void X86_64Assembler::sarl(CpuRegister reg, const Immediate& imm) {
  EmitGenericShift(false, 7, reg, imm);
}


void X86_64Assembler::sarl(CpuRegister operand, CpuRegister shifter) {
  EmitGenericShift(7, operand, shifter);
}


void X86_64Assembler::negl(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));
}


void X86_64Assembler::notl(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xF7);
  EmitUint8(0xD0 | reg.LowBits());
}


void X86_64Assembler::enter(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC8);
  CHECK(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
  EmitUint8(0x00);
}


void X86_64Assembler::leave() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC9);
}


void X86_64Assembler::ret() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC3);
}


void X86_64Assembler::ret(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC2);
  CHECK(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}


void X86_64Assembler::nop() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x90);
}


void X86_64Assembler::int3() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xCC);
}


void X86_64Assembler::hlt() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF4);
}


void X86_64Assembler::j(Condition condition, Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;
    static const int kLongSize = 6;
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);
    if (IsInt(8, offset - kShortSize)) {
      EmitUint8(0x70 + condition);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0x0F);
      EmitUint8(0x80 + condition);
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0x0F);
    EmitUint8(0x80 + condition);
    EmitLabelLink(label);
  }
}
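// Size note (standard x86 branch encodings): a backward conditional branch
// whose displacement fits in a signed byte uses the 2-byte short form
// (70+cc rel8, e.g. je -> 74 xx); anything else, including every branch to a
// still-unbound label, uses the 6-byte near form (0F 80+cc rel32,
// e.g. je -> 0F 84 xx xx xx xx).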


void X86_64Assembler::jmp(CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg);
  EmitUint8(0xFF);
  EmitRegisterOperand(4, reg.LowBits());
}

void X86_64Assembler::jmp(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(address);
  EmitUint8(0xFF);
  EmitOperand(4, address);
}

void X86_64Assembler::jmp(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;
    static const int kLongSize = 5;
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);
    if (IsInt(8, offset - kShortSize)) {
      EmitUint8(0xEB);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0xE9);
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0xE9);
    EmitLabelLink(label);
  }
}


X86_64Assembler* X86_64Assembler::lock() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF0);
  return this;
}

void X86_64Assembler::cmpxchgl(const Address& address, CpuRegister reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOptionalRex32(reg, address);  // Needed when reg or the address uses R8-R15.
  EmitUint8(0x0F);
  EmitUint8(0xB1);
  EmitOperand(reg.LowBits(), address);
}

void X86_64Assembler::mfence() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAE);
  EmitUint8(0xF0);
}


X86_64Assembler* X86_64Assembler::gs() {
  // TODO: gs is a prefix and not an instruction
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x65);
  return this;
}

void X86_64Assembler::AddImmediate(CpuRegister reg, const Immediate& imm) {
  int value = imm.value();
  if (value != 0) {
    if (value > 0) {
      addl(reg, imm);
    } else {
      subl(reg, Immediate(-value));  // Subtract the magnitude; subtracting the negative value would add it.
    }
  }
}


void X86_64Assembler::setcc(Condition condition, CpuRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // RSP, RBP, RDI, RSI need rex prefix (else the pattern encodes ah/bh/ch/dh).
  if (dst.NeedsRex() || dst.AsRegister() > 3) {
    EmitOptionalRex(true, false, false, false, dst.NeedsRex());
  }
  EmitUint8(0x0F);
  EmitUint8(0x90 + condition);
  EmitUint8(0xC0 + dst.LowBits());
}

void X86_64Assembler::LoadDoubleConstant(XmmRegister dst, double value) {
  // TODO: Need to have a code constants table.
  int64_t constant = bit_cast<int64_t, double>(value);
  // In 64-bit mode a pushq of a 32-bit immediate fills a whole 8-byte stack
  // slot, so two pushes cannot splice the halves of the double together;
  // store the two halves into one 8-byte scratch slot instead.
  subq(CpuRegister(RSP), Immediate(8));
  movl(Address(CpuRegister(RSP), 0), Immediate(Low32Bits(constant)));
  movl(Address(CpuRegister(RSP), 4), Immediate(High32Bits(constant)));
  movsd(dst, Address(CpuRegister(RSP), 0));
  addq(CpuRegister(RSP), Immediate(8));
}


void X86_64Assembler::FloatNegate(XmmRegister f) {
  static const struct {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_negate_constant __attribute__((aligned(16))) =
      { 0x80000000, 0x00000000, 0x80000000, 0x00000000 };
  xorps(f, Address::Absolute(reinterpret_cast<uword>(&float_negate_constant)));
}


void X86_64Assembler::DoubleNegate(XmmRegister d) {
  static const struct {
    uint64_t a;
    uint64_t b;
  } double_negate_constant __attribute__((aligned(16))) =
      {0x8000000000000000LL, 0x8000000000000000LL};
  xorpd(d, Address::Absolute(reinterpret_cast<uword>(&double_negate_constant)));
}


void X86_64Assembler::DoubleAbs(XmmRegister reg) {
  static const struct {
    uint64_t a;
    uint64_t b;
  } double_abs_constant __attribute__((aligned(16))) =
      {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
  andpd(reg, Address::Absolute(reinterpret_cast<uword>(&double_abs_constant)));
}


void X86_64Assembler::Align(int alignment, int offset) {
  CHECK(IsPowerOfTwo(alignment));
  // Emit nop instructions until the real position is aligned.
  while (((offset + buffer_.GetPosition()) & (alignment-1)) != 0) {
    nop();
  }
}


void X86_64Assembler::Bind(Label* label) {
  int bound = buffer_.Size();
  CHECK(!label->IsBound());  // Labels can only be bound once.
  while (label->IsLinked()) {
    int position = label->LinkPosition();
    int next = buffer_.Load<int32_t>(position);
    buffer_.Store<int32_t>(position, bound - (position + 4));
    label->position_ = next;
  }
  label->BindTo(bound);
}
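// How the fixup chain works (a sketch of the existing mechanism, not new
// behavior): while a label is unbound, the 4-byte displacement slot of each
// branch to it doubles as a linked-list node holding the position of the
// previous unresolved branch. Bind() walks that chain from the most recent
// use, overwriting each slot with the real rel32 displacement,
// bound - (position + 4), where the +4 skips the displacement itself.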


void X86_64Assembler::EmitOperand(uint8_t reg_or_opcode, const Operand& operand) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);
  const int length = operand.length_;
  CHECK_GT(length, 0);
  // Emit the ModRM byte updated with the given reg value.
  CHECK_EQ(operand.encoding_[0] & 0x38, 0);
  EmitUint8(operand.encoding_[0] + (reg_or_opcode << 3));
  // Emit the rest of the encoded operand.
  for (int i = 1; i < length; i++) {
    EmitUint8(operand.encoding_[i]);
  }
}
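// ModRM layout for reference: bits 7-6 mod, 5-3 reg (or an opcode extension),
// 2-0 r/m. The Operand arrives with the reg field zeroed (hence the check on
// encoding_[0] & 0x38) and reg_or_opcode << 3 drops the caller's value in.
// E.g. Operand(RCX) encodes as C1 (mod=11, r/m=1); with reg_or_opcode 5 the
// emitted byte becomes E9.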


void X86_64Assembler::EmitImmediate(const Immediate& imm) {
  if (imm.is_int32()) {
    EmitInt32(static_cast<int32_t>(imm.value()));
  } else {
    EmitInt64(imm.value());
  }
}


void X86_64Assembler::EmitComplex(uint8_t reg_or_opcode,
                                  const Operand& operand,
                                  const Immediate& immediate) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);
  if (immediate.is_int8()) {
    // Use sign-extended 8-bit immediate.
    EmitUint8(0x83);
    EmitOperand(reg_or_opcode, operand);
    EmitUint8(immediate.value() & 0xFF);
  } else if (operand.IsRegister(CpuRegister(RAX))) {
    // Use short form if the destination is RAX.
    EmitUint8(0x05 + (reg_or_opcode << 3));
    EmitImmediate(immediate);
  } else {
    EmitUint8(0x81);
    EmitOperand(reg_or_opcode, operand);
    EmitImmediate(immediate);
  }
}
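// Worked examples (illustrative; reg_or_opcode 0 = add, 7 = cmp):
//   addl $1, %ecx      -> 83 C1 01             (sign-extended imm8 form)
//   addl $0x1000, %eax -> 05 00 10 00 00       (RAX short form)
//   cmpl $0x1000, %ecx -> 81 F9 00 10 00 00    (generic imm32 form)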


void X86_64Assembler::EmitLabel(Label* label, int instruction_size) {
  if (label->IsBound()) {
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);
    EmitInt32(offset - instruction_size);
  } else {
    EmitLabelLink(label);
  }
}


void X86_64Assembler::EmitLabelLink(Label* label) {
  CHECK(!label->IsBound());
  int position = buffer_.Size();
  EmitInt32(label->position_);
  label->LinkTo(position);
}


void X86_64Assembler::EmitGenericShift(bool wide,
                                       int reg_or_opcode,
                                       CpuRegister reg,
                                       const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int8());
  if (wide) {
    EmitRex64(reg);
  }
  if (imm.value() == 1) {
    EmitUint8(0xD1);
    EmitOperand(reg_or_opcode, Operand(reg));
  } else {
    EmitUint8(0xC1);
    EmitOperand(reg_or_opcode, Operand(reg));
    EmitUint8(imm.value() & 0xFF);
  }
}


void X86_64Assembler::EmitGenericShift(int reg_or_opcode,
                                       CpuRegister operand,
                                       CpuRegister shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK_EQ(shifter.AsRegister(), RCX);
  EmitUint8(0xD3);
  EmitOperand(reg_or_opcode, Operand(operand));
}
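// Shift encodings produced above (illustrative; /4 = shl, /5 = shr, /7 = sar):
//   shll $1, %eax  -> D1 E0        (dedicated shift-by-one opcode)
//   shll $5, %eax  -> C1 E0 05     (shift by imm8)
//   shll %cl, %eax -> D3 E0        (shift by CL, the only legal count register)
//   shrq $5, %rax  -> 48 C1 E8 05  (wide variant adds REX.W)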

void X86_64Assembler::EmitOptionalRex(bool force, bool w, bool r, bool x, bool b) {
  // REX.WRXB
  // W - 64-bit operand
  // R - MODRM.reg
  // X - SIB.index
  // B - MODRM.rm/SIB.base
  uint8_t rex = force ? 0x40 : 0;
  if (w) {
    rex |= 0x48;  // REX.W000
  }
  if (r) {
    rex |= 0x44;  // REX.0R00
  }
  if (x) {
    rex |= 0x42;  // REX.00X0
  }
  if (b) {
    rex |= 0x41;  // REX.000B
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}
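// The REX byte is 0100WRXB. A few examples of what the flags combine to:
// 0x48 (W: 64-bit operand), 0x41 (B: extended r/m or base), 0x49 (W+B),
// 0x4D (W+R+B). Instructions that address only the low eight registers with
// 32-bit operands emit no REX byte at all, which is why it is "optional".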
   1637 
   1638 void X86_64Assembler::EmitOptionalRex32(CpuRegister reg) {
   1639   EmitOptionalRex(false, false, false, false, reg.NeedsRex());
   1640 }
   1641 
   1642 void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, CpuRegister src) {
   1643   EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
   1644 }
   1645 
   1646 void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, XmmRegister src) {
   1647   EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
   1648 }
   1649 
   1650 void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, XmmRegister src) {
   1651   EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
   1652 }
   1653 
   1654 void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, CpuRegister src) {
   1655   EmitOptionalRex(false, false, dst.NeedsRex(), false, src.NeedsRex());
   1656 }
   1657 
   1658 void X86_64Assembler::EmitOptionalRex32(const Operand& operand) {
   1659   uint8_t rex = operand.rex();
   1660   if (rex != 0) {
   1661     EmitUint8(rex);
   1662   }
   1663 }
   1664 
   1665 void X86_64Assembler::EmitOptionalRex32(CpuRegister dst, const Operand& operand) {
   1666   uint8_t rex = operand.rex();
   1667   if (dst.NeedsRex()) {
   1668     rex |= 0x44;  // REX.0R00
   1669   }
   1670   if (rex != 0) {
   1671     EmitUint8(rex);
   1672   }
   1673 }
   1674 
   1675 void X86_64Assembler::EmitOptionalRex32(XmmRegister dst, const Operand& operand) {
   1676   uint8_t rex = operand.rex();
   1677   if (dst.NeedsRex()) {
   1678     rex |= 0x44;  // REX.0R00
   1679   }
   1680   if (rex != 0) {
   1681     EmitUint8(rex);
   1682   }
   1683 }
   1684 
   1685 void X86_64Assembler::EmitRex64(CpuRegister reg) {
   1686   EmitOptionalRex(false, true, false, false, reg.NeedsRex());
   1687 }
   1688 
   1689 void X86_64Assembler::EmitRex64(CpuRegister dst, CpuRegister src) {
   1690   EmitOptionalRex(false, true, dst.NeedsRex(), false, src.NeedsRex());
   1691 }
   1692 
   1693 void X86_64Assembler::EmitRex64(CpuRegister dst, const Operand& operand) {
   1694   uint8_t rex = 0x48 | operand.rex();  // REX.W000
   1695   if (dst.NeedsRex()) {
   1696     rex |= 0x44;  // REX.0R00
   1697   }
   1698   if (rex != 0) {
   1699     EmitUint8(rex);
   1700   }
   1701 }
   1702 
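// Force a REX prefix (at least 0x40) for byte-register operands so that the
// register encodings 4-7 select SPL/BPL/SIL/DIL rather than the legacy
// AH/CH/DH/BH.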
void X86_64Assembler::EmitOptionalByteRegNormalizingRex32(CpuRegister dst, CpuRegister src) {
  EmitOptionalRex(true, false, dst.NeedsRex(), false, src.NeedsRex());
}

void X86_64Assembler::EmitOptionalByteRegNormalizingRex32(CpuRegister dst, const Operand& operand) {
  uint8_t rex = 0x40 | operand.rex();  // REX.0000
  if (dst.NeedsRex()) {
    rex |= 0x44;  // REX.0R00
  }
  if (rex != 0) {
    EmitUint8(rex);
  }
}

constexpr size_t kFramePointerSize = 8;

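// Frame layout created by BuildFrame (high to low addresses):
//   return address (pushed by the caller's call)
//   spilled callee-save GPRs
//   spilled callee-save XMMs
//   ... remainder of the frame ...
//   StackReference<ArtMethod> (4 bytes) at [RSP]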
void X86_64Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                                 const std::vector<ManagedRegister>& spill_regs,
                                 const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  int gpr_count = 0;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsCpuRegister()) {
      pushq(spill.AsCpuRegister());
      gpr_count++;
    }
  }
  // The return address is already on the stack; allocate the rest of the
  // frame, which includes the method slot at [RSP].
  int64_t rest_of_frame = static_cast<int64_t>(frame_size)
                          - (gpr_count * kFramePointerSize)
                          - kFramePointerSize /*return address*/;
  subq(CpuRegister(RSP), Immediate(rest_of_frame));
  // Spill XMMs just below the GPR spill area.
  int64_t offset = rest_of_frame;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsXmmRegister()) {
      offset -= sizeof(double);
      movsd(Address(CpuRegister(RSP), offset), spill.AsXmmRegister());
    }
  }

  DCHECK_EQ(4U, sizeof(StackReference<mirror::ArtMethod>));

  movl(Address(CpuRegister(RSP), 0), method_reg.AsX86_64().AsCpuRegister());

  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ManagedRegisterSpill spill = entry_spills.at(i);
    if (spill.AsX86_64().IsCpuRegister()) {
      if (spill.getSize() == 8) {
        movq(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()),
             spill.AsX86_64().AsCpuRegister());
      } else {
        CHECK_EQ(spill.getSize(), 4);
        movl(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()),
             spill.AsX86_64().AsCpuRegister());
      }
    } else {
      if (spill.getSize() == 8) {
        movsd(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()),
              spill.AsX86_64().AsXmmRegister());
      } else {
        CHECK_EQ(spill.getSize(), 4);
        movss(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()),
              spill.AsX86_64().AsXmmRegister());
      }
    }
  }
}

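// Tear down the frame built by BuildFrame: reload the spilled XMMs, release
// the rest of the frame, pop the callee-save GPRs, and return.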
void X86_64Assembler::RemoveFrame(size_t frame_size,
                                  const std::vector<ManagedRegister>& spill_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  int gpr_count = 0;
  // Unspill XMMs.
  int64_t offset = static_cast<int64_t>(frame_size)
                   - (spill_regs.size() * kFramePointerSize)
                   - 2 * kFramePointerSize;
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsXmmRegister()) {
      offset += sizeof(double);
      movsd(spill.AsXmmRegister(), Address(CpuRegister(RSP), offset));
    } else {
      gpr_count++;
    }
  }
  addq(CpuRegister(RSP),
       Immediate(static_cast<int64_t>(frame_size) - (gpr_count * kFramePointerSize) - kFramePointerSize));
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    x86_64::X86_64ManagedRegister spill = spill_regs.at(i).AsX86_64();
    if (spill.IsCpuRegister()) {
      popq(spill.AsCpuRegister());
    }
  }
  ret();
}

void X86_64Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
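  // Implemented as addq of a negative immediate, which is equivalent to subq.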
  addq(CpuRegister(RSP), Immediate(-static_cast<int64_t>(adjust)));
}

void X86_64Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addq(CpuRegister(RSP), Immediate(adjust));
}

void X86_64Assembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  X86_64ManagedRegister src = msrc.AsX86_64();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    if (size == 4) {
      movl(Address(CpuRegister(RSP), offs), src.AsCpuRegister());
    } else {
      CHECK_EQ(8u, size);
      movq(Address(CpuRegister(RSP), offs), src.AsCpuRegister());
    }
  } else if (src.IsRegisterPair()) {
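    // Note: register pairs are a 32-bit x86 concept and are not expected to
    // reach this path on x86-64.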
    CHECK_EQ(0u, size);
    movq(Address(CpuRegister(RSP), offs), src.AsRegisterPairLow());
    movq(Address(CpuRegister(RSP), FrameOffset(offs.Int32Value() + 4)),
         src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    if (size == 4) {
      fstps(Address(CpuRegister(RSP), offs));
    } else {
      fstpl(Address(CpuRegister(RSP), offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      movss(Address(CpuRegister(RSP), offs), src.AsXmmRegister());
    } else {
      movsd(Address(CpuRegister(RSP), offs), src.AsXmmRegister());
    }
  }
}

void X86_64Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  X86_64ManagedRegister src = msrc.AsX86_64();
  CHECK(src.IsCpuRegister());
  movl(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
}

void X86_64Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86_64ManagedRegister src = msrc.AsX86_64();
  CHECK(src.IsCpuRegister());
  movq(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
}

void X86_64Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                            ManagedRegister) {
  movl(Address(CpuRegister(RSP), dest), Immediate(imm));  // TODO(64) movq?
}

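// Thread-local accesses below go through the GS segment: gs()-> emits the
// segment-override prefix, and Address::Absolute(offset, true) addresses the
// given offset relative to the segment base rather than RIP.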
void X86_64Assembler::StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,
                                               ManagedRegister) {
  gs()->movl(Address::Absolute(dest, true), Immediate(imm));  // TODO(64) movq?
}

void X86_64Assembler::StoreStackOffsetToThread64(ThreadOffset<8> thr_offs,
                                                 FrameOffset fr_offs,
                                                 ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), fr_offs));
  gs()->movq(Address::Absolute(thr_offs, true), scratch.AsCpuRegister());
}

void X86_64Assembler::StoreStackPointerToThread64(ThreadOffset<8> thr_offs) {
  gs()->movq(Address::Absolute(thr_offs, true), CpuRegister(RSP));
}

void X86_64Assembler::StoreSpanning(FrameOffset /*dst*/, ManagedRegister /*src*/,
                                    FrameOffset /*in_off*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL);  // This case only currently exists for ARM.
}

void X86_64Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    if (size == 4) {
      movl(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
    } else {
      CHECK_EQ(8u, size);
      movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(0u, size);
    movq(dest.AsRegisterPairLow(), Address(CpuRegister(RSP), src));
    movq(dest.AsRegisterPairHigh(),
         Address(CpuRegister(RSP), FrameOffset(src.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      flds(Address(CpuRegister(RSP), src));
    } else {
      fldl(Address(CpuRegister(RSP), src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      movss(dest.AsXmmRegister(), Address(CpuRegister(RSP), src));
    } else {
      movsd(dest.AsXmmRegister(), Address(CpuRegister(RSP), src));
    }
  }
}

void X86_64Assembler::LoadFromThread64(ManagedRegister mdest, ThreadOffset<8> src, size_t size) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    gs()->movl(dest.AsCpuRegister(), Address::Absolute(src, true));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    gs()->movq(dest.AsRegisterPairLow(), Address::Absolute(src, true));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      gs()->flds(Address::Absolute(src, true));
    } else {
      gs()->fldl(Address::Absolute(src, true));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      gs()->movss(dest.AsXmmRegister(), Address::Absolute(src, true));
    } else {
      gs()->movsd(dest.AsXmmRegister(), Address::Absolute(src, true));
    }
  }
}

void X86_64Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  CHECK(dest.IsCpuRegister());
  movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
}

void X86_64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base,
                              MemberOffset offs) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  CHECK(dest.IsCpuRegister() && base.AsX86_64().IsCpuRegister());
  movq(dest.AsCpuRegister(), Address(base.AsX86_64().AsCpuRegister(), offs));
}

void X86_64Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
                                 Offset offs) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  CHECK(dest.IsCpuRegister() && base.AsX86_64().IsCpuRegister());
  movq(dest.AsCpuRegister(), Address(base.AsX86_64().AsCpuRegister(), offs));
}

void X86_64Assembler::LoadRawPtrFromThread64(ManagedRegister mdest, ThreadOffset<8> offs) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  CHECK(dest.IsCpuRegister());
  gs()->movq(dest.AsCpuRegister(), Address::Absolute(offs, true));
}

void X86_64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
  X86_64ManagedRegister reg = mreg.AsX86_64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    movsxb(reg.AsCpuRegister(), reg.AsCpuRegister());
  } else {
    movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86_64Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  X86_64ManagedRegister reg = mreg.AsX86_64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    movzxb(reg.AsCpuRegister(), reg.AsCpuRegister());
  } else {
    movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86_64Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  X86_64ManagedRegister dest = mdest.AsX86_64();
  X86_64ManagedRegister src = msrc.AsX86_64();
  if (!dest.Equals(src)) {
    if (dest.IsCpuRegister() && src.IsCpuRegister()) {
      movq(dest.AsCpuRegister(), src.AsCpuRegister());
    } else if (src.IsX87Register() && dest.IsXmmRegister()) {
      // Pass via the stack and pop the X87 register.
      subq(CpuRegister(RSP), Immediate(16));
      if (size == 4) {
        CHECK_EQ(src.AsX87Register(), ST0);
        fstps(Address(CpuRegister(RSP), 0));
        movss(dest.AsXmmRegister(), Address(CpuRegister(RSP), 0));
      } else {
        CHECK_EQ(src.AsX87Register(), ST0);
        fstpl(Address(CpuRegister(RSP), 0));
        movsd(dest.AsXmmRegister(), Address(CpuRegister(RSP), 0));
      }
      addq(CpuRegister(RSP), Immediate(16));
    } else {
      // TODO: x87, SSE
      UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
    }
  }
}

void X86_64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                              ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  movl(scratch.AsCpuRegister(), Address(CpuRegister(RSP), src));
  movl(Address(CpuRegister(RSP), dest), scratch.AsCpuRegister());
}

void X86_64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
                                             ThreadOffset<8> thr_offs,
                                             ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  gs()->movq(scratch.AsCpuRegister(), Address::Absolute(thr_offs, true));
  Store(fr_offs, scratch, 8);
}

void X86_64Assembler::CopyRawPtrToThread64(ThreadOffset<8> thr_offs,
                                           FrameOffset fr_offs,
                                           ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  Load(scratch, fr_offs, 8);
  gs()->movq(Address::Absolute(thr_offs, true), scratch.AsCpuRegister());
}

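// Copy between two frame slots through a scratch register; an 8-byte copy is
// done as two 4-byte halves.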
void X86_64Assembler::Copy(FrameOffset dest, FrameOffset src,
                           ManagedRegister mscratch,
                           size_t size) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  if (scratch.IsCpuRegister() && size == 8) {
    Load(scratch, src, 4);
    Store(dest, scratch, 4);
    Load(scratch, FrameOffset(src.Int32Value() + 4), 4);
    Store(FrameOffset(dest.Int32Value() + 4), scratch, 4);
  } else {
    Load(scratch, src, size);
    Store(dest, scratch, size);
  }
}

void X86_64Assembler::Copy(FrameOffset /*dst*/, ManagedRegister /*src_base*/, Offset /*src_offset*/,
                           ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

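// With no scratch register available, bounce the value through the stack with
// a push/pop pair.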
void X86_64Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
                           ManagedRegister scratch, size_t size) {
  CHECK(scratch.IsNoRegister());
  CHECK_EQ(size, 4u);
  pushq(Address(CpuRegister(RSP), src));
  popq(Address(dest_base.AsX86_64().AsCpuRegister(), dest_offset));
}

void X86_64Assembler::Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset,
                           ManagedRegister mscratch, size_t size) {
  CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
  CHECK_EQ(size, 4u);
  movq(scratch, Address(CpuRegister(RSP), src_base));
  movq(scratch, Address(scratch, src_offset));
  movq(Address(CpuRegister(RSP), dest), scratch);
}

void X86_64Assembler::Copy(ManagedRegister dest, Offset dest_offset,
                           ManagedRegister src, Offset src_offset,
                           ManagedRegister scratch, size_t size) {
  CHECK_EQ(size, 4u);
  CHECK(scratch.IsNoRegister());
  pushq(Address(src.AsX86_64().AsCpuRegister(), src_offset));
  popq(Address(dest.AsX86_64().AsCpuRegister(), dest_offset));
}

void X86_64Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
                           ManagedRegister mscratch, size_t size) {
  CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
  CHECK_EQ(size, 4u);
  CHECK_EQ(dest.Int32Value(), src.Int32Value());
  movq(scratch, Address(CpuRegister(RSP), src));
  pushq(Address(scratch, src_offset));
  popq(Address(scratch, dest_offset));
}

void X86_64Assembler::MemoryBarrier(ManagedRegister) {
#if ANDROID_SMP != 0
  mfence();
#endif
}

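// Create a handle scope entry for the reference at handle_scope_offset:
// out_reg receives the address of the handle scope slot, or null when the
// reference is null and nulls are allowed.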
void X86_64Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                             FrameOffset handle_scope_offset,
                                             ManagedRegister min_reg, bool null_allowed) {
  X86_64ManagedRegister out_reg = mout_reg.AsX86_64();
  X86_64ManagedRegister in_reg = min_reg.AsX86_64();
  if (in_reg.IsNoRegister()) {  // TODO(64): && null_allowed
    // Use out_reg as indicator of NULL.
    in_reg = out_reg;
    // TODO: movzwl
    movl(in_reg.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
  }
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    if (!out_reg.Equals(in_reg)) {
      xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    j(kZero, &null_arg);
    leaq(out_reg.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
    Bind(&null_arg);
  } else {
    leaq(out_reg.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
  }
}

void X86_64Assembler::CreateHandleScopeEntry(FrameOffset out_off,
                                             FrameOffset handle_scope_offset,
                                             ManagedRegister mscratch,
                                             bool null_allowed) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  CHECK(scratch.IsCpuRegister());
  if (null_allowed) {
    Label null_arg;
    movl(scratch.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
    testl(scratch.AsCpuRegister(), scratch.AsCpuRegister());
    j(kZero, &null_arg);
    leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
    Bind(&null_arg);
  } else {
    leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), handle_scope_offset));
  }
  Store(out_off, scratch, 8);
}

// Given a handle scope entry, load the associated reference.
void X86_64Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
                                                   ManagedRegister min_reg) {
  X86_64ManagedRegister out_reg = mout_reg.AsX86_64();
  X86_64ManagedRegister in_reg = min_reg.AsX86_64();
  CHECK(out_reg.IsCpuRegister());
  CHECK(in_reg.IsCpuRegister());
  Label null_arg;
  if (!out_reg.Equals(in_reg)) {
    xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
  }
  testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
  j(kZero, &null_arg);
  movq(out_reg.AsCpuRegister(), Address(in_reg.AsCpuRegister(), 0));
  Bind(&null_arg);
}

void X86_64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void X86_64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void X86_64Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister) {
  X86_64ManagedRegister base = mbase.AsX86_64();
  CHECK(base.IsCpuRegister());
  call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call.
}

void X86_64Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
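  // Note: a 32-bit load; the slot holds a 4-byte StackReference (see the
  // DCHECK in BuildFrame).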
  movl(scratch, Address(CpuRegister(RSP), base));
  call(Address(scratch, offset));
}

void X86_64Assembler::CallFromThread64(ThreadOffset<8> offset, ManagedRegister /*mscratch*/) {
  gs()->call(Address::Absolute(offset, true));
}

void X86_64Assembler::GetCurrentThread(ManagedRegister tr) {
  gs()->movq(tr.AsX86_64().AsCpuRegister(), Address::Absolute(Thread::SelfOffset<8>(), true));
}

void X86_64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister mscratch) {
  X86_64ManagedRegister scratch = mscratch.AsX86_64();
  gs()->movq(scratch.AsCpuRegister(), Address::Absolute(Thread::SelfOffset<8>(), true));
  movq(Address(CpuRegister(RSP), offset), scratch.AsCpuRegister());
}

// Slow path entered when Thread::Current()->_exception is non-null.
class X86_64ExceptionSlowPath FINAL : public SlowPath {
 public:
  explicit X86_64ExceptionSlowPath(size_t stack_adjust) : stack_adjust_(stack_adjust) {}
  virtual void Emit(Assembler* sp_asm) OVERRIDE;
 private:
  const size_t stack_adjust_;
};

void X86_64Assembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
  X86_64ExceptionSlowPath* slow = new X86_64ExceptionSlowPath(stack_adjust);
  buffer_.EnqueueSlowPath(slow);
  gs()->cmpl(Address::Absolute(Thread::ExceptionOffset<8>(), true), Immediate(0));
  j(kNotEqual, slow->Entry());
}

void X86_64ExceptionSlowPath::Emit(Assembler* sasm) {
  X86_64Assembler* sp_asm = down_cast<X86_64Assembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  // Note: the return value is dead.
  if (stack_adjust_ != 0) {  // Fix up the frame.
    __ DecreaseFrameSize(stack_adjust_);
  }
  // Pass the exception as an argument in RDI.
  __ gs()->movq(CpuRegister(RDI), Address::Absolute(Thread::ExceptionOffset<8>(), true));
  __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(8, pDeliverException), true));
  // This call should never return.
  __ int3();
#undef __
}

}  // namespace x86_64
}  // namespace art