Home | History | Annotate | Download | only in x86
      1 /*
      2  * Copyright (C) 2011 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #include "assembler_x86.h"
     18 
     19 #include "base/casts.h"
     20 #include "entrypoints/quick/quick_entrypoints.h"
     21 #include "memory_region.h"
     22 #include "thread.h"
     23 
     24 namespace art {
     25 namespace x86 {
     26 
// Debug/disassembly helpers: print SSE and x87 registers by numeric index
// (e.g. "XMM3", "ST0").
std::ostream& operator<<(std::ostream& os, const XmmRegister& reg) {
  return os << "XMM" << static_cast<int>(reg);
}

std::ostream& operator<<(std::ostream& os, const X87Register& reg) {
  return os << "ST" << static_cast<int>(reg);
}
     34 
// call reg: FF /2 — indirect near call through a register.
void X86Assembler::call(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(2, reg);
}


// call m32: FF /2 — indirect near call through a memory operand.
void X86Assembler::call(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(2, address);
}


// call rel32: E8 — direct relative call to a bound or unbound label.
void X86Assembler::call(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE8);
  // kSize is the total instruction length (1 opcode + 4 displacement bytes);
  // EmitLabel needs it to compute the rel32 from the instruction's end.
  static const int kSize = 5;
  EmitLabel(label, kSize);
}


// call rel32 to an external (runtime) address. The fixed-up displacement is
// emitted directly; the DCHECK pins the expected 5-byte encoding length.
void X86Assembler::call(const ExternalLabel& label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  intptr_t call_start = buffer_.GetPosition();
  EmitUint8(0xE8);
  EmitInt32(label.address());
  static const intptr_t kCallExternalLabelSize = 5;
  DCHECK_EQ((buffer_.GetPosition() - call_start), kCallExternalLabelSize);
}
     65 
     66 
// push r32: 50+rd — register encoded in the opcode byte itself.
void X86Assembler::pushl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x50 + reg);
}


// push m32: FF /6.
void X86Assembler::pushl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(6, address);
}


// push imm: uses the short sign-extended imm8 form (6A) when the value fits,
// otherwise the imm32 form (68).
void X86Assembler::pushl(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (imm.is_int8()) {
    EmitUint8(0x6A);
    EmitUint8(imm.value() & 0xFF);
  } else {
    EmitUint8(0x68);
    EmitImmediate(imm);
  }
}


// pop r32: 58+rd.
void X86Assembler::popl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x58 + reg);
}


// pop m32: 8F /0.
void X86Assembler::popl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8F);
  EmitOperand(0, address);
}
    103 
    104 
// mov r32, imm32: B8+rd id — register in the opcode byte, 32-bit immediate.
void X86Assembler::movl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xB8 + dst);
  EmitImmediate(imm);
}


// mov r32, r32: 89 /r (store form, so ModRM operands are (src, dst)).
void X86Assembler::movl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitRegisterOperand(src, dst);
}


// mov r32, m32: 8B /r.
void X86Assembler::movl(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8B);
  EmitOperand(dst, src);
}


// mov m32, r32: 89 /r.
void X86Assembler::movl(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitOperand(src, dst);
}


// mov m32, imm32: C7 /0 id.
void X86Assembler::movl(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}

// mov m32, label-address: C7 /0 with the label's address as the immediate.
// Instruction length passed to EmitLabel is 1 (opcode) + dst.length_
// (ModRM/SIB/disp bytes) + 4 (imm32) = dst.length_ + 5.
void X86Assembler::movl(const Address& dst, Label* lbl) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitLabel(lbl, dst.length_ + 5);
}
    146 
// movzx r32, r8: 0F B6 /r — zero-extend a byte register.
void X86Assembler::movzxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitRegisterOperand(dst, src);
}


// movzx r32, m8: 0F B6 /r.
void X86Assembler::movzxb(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitOperand(dst, src);
}


// movsx r32, r8: 0F BE /r — sign-extend a byte register.
void X86Assembler::movsxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitRegisterOperand(dst, src);
}


// movsx r32, m8: 0F BE /r.
void X86Assembler::movsxb(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitOperand(dst, src);
}


// Byte loads must state an extension explicitly; a plain 8-bit load into a
// 32-bit register would leave the upper bits stale.
void X86Assembler::movb(Register /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxb or movsxb instead.";
}


// mov m8, r8: 88 /r.
void X86Assembler::movb(const Address& dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x88);
  EmitOperand(src, dst);
}


// mov m8, imm8: C6 /0 ib. EAX (value 0) is passed purely to supply the /0
// opcode-extension field of the ModRM byte; it is not a real register operand.
void X86Assembler::movb(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC6);
  EmitOperand(EAX, dst);
  CHECK(imm.is_int8());
  EmitUint8(imm.value() & 0xFF);
}
    198 
    199 
// movzx r32, r16: 0F B7 /r — zero-extend a 16-bit register.
void X86Assembler::movzxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitRegisterOperand(dst, src);
}


// movzx r32, m16: 0F B7 /r.
void X86Assembler::movzxw(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitOperand(dst, src);
}


// movsx r32, r16: 0F BF /r — sign-extend a 16-bit register.
void X86Assembler::movsxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitRegisterOperand(dst, src);
}


// movsx r32, m16: 0F BF /r.
void X86Assembler::movsxw(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitOperand(dst, src);
}


// 16-bit loads must state an extension explicitly (see movb above).
void X86Assembler::movw(Register /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxw or movsxw instead.";
}


// mov m16, r16: 66-prefixed 89 /r (operand-size override selects 16-bit).
void X86Assembler::movw(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x89);
  EmitOperand(src, dst);
}
    243 
    244 
// lea r32, m: 8D /r — computes the effective address without a memory access.
void X86Assembler::leal(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8D);
  EmitOperand(dst, src);
}


// cmovcc r32, r32: 0F 40+cc /r — condition code folded into the second
// opcode byte.
void X86Assembler::cmovl(Condition condition, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x40 + condition);
  EmitRegisterOperand(dst, src);
}


// setcc r/m8: 0F 90+cc /0 — writes only the low byte of dst.
void X86Assembler::setb(Condition condition, Register dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x90 + condition);
  EmitOperand(0, Operand(dst));
}
    266 
    267 
// movss xmm, m32: F3 0F 10 /r.
void X86Assembler::movss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}


// movss m32, xmm: F3 0F 11 /r (store form).
void X86Assembler::movss(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}


// movss xmm, xmm: encoded with the store form (0F 11), so the ModRM operands
// are deliberately swapped — (src, dst) — yielding the same data movement.
void X86Assembler::movss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}


// movd xmm, r32: 66 0F 6E /r.
void X86Assembler::movd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x6E);
  EmitOperand(dst, Operand(src));
}


// movd r32, xmm: 66 0F 7E /r (store form; xmm goes in the reg field).
void X86Assembler::movd(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(src, Operand(dst));
}
    311 
    312 
// Scalar single-precision arithmetic: F3 0F <op> /r, where <op> is
// 58=add, 5C=sub, 59=mul, 5E=div. Each has a register and a memory form.

void X86Assembler::addss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}


void X86Assembler::addss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}


void X86Assembler::subss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}


void X86Assembler::subss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}


void X86Assembler::mulss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}


void X86Assembler::mulss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}


void X86Assembler::divss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}


void X86Assembler::divss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
    383 
    384 
// fld m32fp: D9 /0 — push a single-precision value onto the x87 stack.
void X86Assembler::flds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(0, src);
}


// fstp m32fp: D9 /3 — store ST0 as single precision and pop.
void X86Assembler::fstps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(3, dst);
}
    397 
    398 
// movsd xmm, m64: F2 0F 10 /r.
void X86Assembler::movsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}


// movsd m64, xmm: F2 0F 11 /r (store form).
void X86Assembler::movsd(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}


// movsd xmm, xmm: encoded with the store form (0F 11), so the ModRM operands
// are deliberately swapped — (src, dst) — yielding the same data movement.
void X86Assembler::movsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}
    424 
    425 
// Scalar double-precision arithmetic: F2 0F <op> /r, where <op> is
// 58=add, 5C=sub, 59=mul, 5E=div. Each has a register and a memory form.

void X86Assembler::addsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}


void X86Assembler::addsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}


void X86Assembler::subsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}


void X86Assembler::subsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}


void X86Assembler::mulsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}


void X86Assembler::mulsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}


void X86Assembler::divsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}


void X86Assembler::divsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
    496 
    497 
// Scalar conversions. Prefix selects the FP width (F3=single, F2=double);
// opcode selects the operation: 2A=int->fp, 2D=fp->int (rounded per MXCSR),
// 2C=fp->int (truncating), 5A=fp<->fp width change, E6=packed dq->pd.

// cvtsi2ss xmm, r32: F3 0F 2A /r.
void X86Assembler::cvtsi2ss(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}


// cvtsi2sd xmm, r32: F2 0F 2A /r.
void X86Assembler::cvtsi2sd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}


// cvtss2si r32, xmm: F3 0F 2D /r (rounds per current MXCSR mode).
void X86Assembler::cvtss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}


// cvtss2sd xmm, xmm: F3 0F 5A /r — widen single to double.
void X86Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}


// cvtsd2si r32, xmm: F2 0F 2D /r (rounds per current MXCSR mode).
void X86Assembler::cvtsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}


// cvttss2si r32, xmm: F3 0F 2C /r — truncating conversion.
void X86Assembler::cvttss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}


// cvttsd2si r32, xmm: F2 0F 2C /r — truncating conversion.
void X86Assembler::cvttsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}


// cvtsd2ss xmm, xmm: F2 0F 5A /r — narrow double to single.
void X86Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}


// cvtdq2pd xmm, xmm: F3 0F E6 /r — two packed int32 -> two doubles.
void X86Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xE6);
  EmitXmmRegisterOperand(dst, src);
}
    577 
    578 
// comiss xmm, xmm: 0F 2F /r — ordered single compare, sets EFLAGS.
void X86Assembler::comiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}


// comisd xmm, xmm: 66 0F 2F /r — ordered double compare, sets EFLAGS.
void X86Assembler::comisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}


// sqrtsd xmm, xmm: F2 0F 51 /r.
void X86Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}


// sqrtss xmm, xmm: F3 0F 51 /r.
void X86Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}
    612 
    613 
// xorpd xmm, m128: 66 0F 57 /r — packed-double bitwise XOR (used e.g. for
// sign-bit flips via a constant mask).
void X86Assembler::xorpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}


// xorpd xmm, xmm: 66 0F 57 /r.
void X86Assembler::xorpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}


// xorps xmm, m128: 0F 57 /r — packed-single bitwise XOR.
void X86Assembler::xorps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}


// xorps xmm, xmm: 0F 57 /r.
void X86Assembler::xorps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}


// andpd xmm, m128: 66 0F 54 /r — packed-double bitwise AND.
void X86Assembler::andpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}
    654 }
    655 
    656 
// fld m64fp: DD /0 — push a double onto the x87 stack.
void X86Assembler::fldl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(0, src);
}


// fstp m64fp: DD /3 — store ST0 as double and pop.
void X86Assembler::fstpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(3, dst);
}


// fnstcw m16: D9 /7 — store the x87 control word (no pending-exception check).
void X86Assembler::fnstcw(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(7, dst);
}


// fldcw m16: D9 /5 — load the x87 control word.
void X86Assembler::fldcw(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(5, src);
}


// fistp m64int: DF /7 — store ST0 as a 64-bit integer and pop.
void X86Assembler::fistpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(7, dst);
}


// fistp m32int: DB /3 — store ST0 as a 32-bit integer and pop.
void X86Assembler::fistps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(3, dst);
}


// fild m64int: DF /5 — push a 64-bit integer onto the x87 stack.
void X86Assembler::fildl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(5, src);
}
    703 }
    704 
    705 
// fincstp: D9 F7 — increment the x87 stack-top pointer.
void X86Assembler::fincstp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF7);
}


// ffree ST(i): DD C0+i — mark an x87 register as empty.
// NOTE(review): CHECK_LT(…, 7) permits only ST0..ST6, excluding ST7 even
// though the encoding supports it — presumably intentional; confirm.
void X86Assembler::ffree(const Immediate& index) {
  CHECK_LT(index.value(), 7);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitUint8(0xC0 + index.value());
}


// fsin: D9 FE — ST0 = sin(ST0).
void X86Assembler::fsin() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFE);
}


// fcos: D9 FF — ST0 = cos(ST0).
void X86Assembler::fcos() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFF);
}


// fptan: D9 F2 — ST0 = tan(ST0), then pushes 1.0.
void X86Assembler::fptan() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF2);
}
    740 
    741 
// xchg r32, r32: 87 /r.
void X86Assembler::xchgl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x87);
  EmitRegisterOperand(dst, src);
}

// xchg r32, m32: 87 /r. With a memory operand this form carries an implicit
// LOCK on x86.
void X86Assembler::xchgl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x87);
  EmitOperand(reg, address);
}
    753 
    754 
// cmp r32, imm: EmitComplex with /7 (the CMP opcode extension) chooses
// between the imm8 and imm32 forms.
void X86Assembler::cmpl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(7, Operand(reg), imm);
}


// cmp r32, r32: 3B /r.
void X86Assembler::cmpl(Register reg0, Register reg1) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x3B);
  EmitOperand(reg0, Operand(reg1));
}


// cmp r32, m32: 3B /r.
void X86Assembler::cmpl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x3B);
  EmitOperand(reg, address);
}
    773 
    774 
// add r32, r32: 03 /r.
void X86Assembler::addl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x03);
  EmitRegisterOperand(dst, src);
}


// add r32, m32: 03 /r.
void X86Assembler::addl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x03);
  EmitOperand(reg, address);
}
    787 
    788 
// cmp m32, r32: 39 /r.
void X86Assembler::cmpl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x39);
  EmitOperand(reg, address);
}


// cmp m32, imm: EmitComplex with /7 (CMP) picks the imm8/imm32 form.
void X86Assembler::cmpl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(7, address, imm);
}
    800 
    801 
// test r32, r32: 85 /r.
void X86Assembler::testl(Register reg1, Register reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1, reg2);
}


// test m32, r32: 85 /r.
void X86Assembler::testl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x85);
  EmitOperand(reg, address);
}


// test r32, imm — chooses the shortest encoding:
//   imm fits in a byte and reg has an 8-bit alias -> test r8, imm8
//   (A8 for AL, else F6 /0); EAX with a wide imm -> A9 id; otherwise F7 /0 id.
void X86Assembler::testl(Register reg, const Immediate& immediate) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // For registers that have a byte variant (EAX, EBX, ECX, and EDX)
  // we only test the byte register to keep the encoding short.
  if (immediate.is_uint8() && reg < 4) {
    // Use zero-extended 8-bit immediate.
    if (reg == EAX) {
      EmitUint8(0xA8);
    } else {
      EmitUint8(0xF6);
      EmitUint8(0xC0 + reg);
    }
    EmitUint8(immediate.value() & 0xFF);
  } else if (reg == EAX) {
    // Use short form if the destination is EAX.
    EmitUint8(0xA9);
    EmitImmediate(immediate);
  } else {
    EmitUint8(0xF7);
    EmitOperand(0, Operand(reg));
    EmitImmediate(immediate);
  }
}
    839 
    840 
// and r32, r32: 23 /r.
void X86Assembler::andl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x23);
  EmitOperand(dst, Operand(src));
}


// and r32, imm: EmitComplex /4 (AND) picks the imm8/imm32 form.
void X86Assembler::andl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(4, Operand(dst), imm);
}


// or r32, r32: 0B /r.
void X86Assembler::orl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0B);
  EmitOperand(dst, Operand(src));
}


// or r32, imm: EmitComplex /1 (OR).
void X86Assembler::orl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(1, Operand(dst), imm);
}


// xor r32, r32: 33 /r.
void X86Assembler::xorl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x33);
  EmitOperand(dst, Operand(src));
}

// xor r32, imm: EmitComplex /6 (XOR).
void X86Assembler::xorl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(6, Operand(dst), imm);
}
    877 
// add r32, imm: EmitComplex /0 (ADD) picks the imm8/imm32 form.
void X86Assembler::addl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(0, Operand(reg), imm);
}


// add m32, r32: 01 /r.
void X86Assembler::addl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x01);
  EmitOperand(reg, address);
}


// add m32, imm: EmitComplex /0 (ADD).
void X86Assembler::addl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(0, address, imm);
}
    895 
    896 
// adc r32, imm: EmitComplex /2 (ADC — add with carry).
void X86Assembler::adcl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(2, Operand(reg), imm);
}


// adc r32, r32: 13 /r.
void X86Assembler::adcl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x13);
  EmitOperand(dst, Operand(src));
}


// adc r32, m32: 13 /r.
void X86Assembler::adcl(Register dst, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x13);
  EmitOperand(dst, address);
}
    915 
    916 
// sub r32, r32: 2B /r.
void X86Assembler::subl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x2B);
  EmitOperand(dst, Operand(src));
}


// sub r32, imm: EmitComplex /5 (SUB) picks the imm8/imm32 form.
void X86Assembler::subl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(5, Operand(reg), imm);
}


// sub r32, m32: 2B /r.
void X86Assembler::subl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x2B);
  EmitOperand(reg, address);
}
    935 
    936 
// cdq: 99 — sign-extend EAX into EDX:EAX (precedes idiv).
void X86Assembler::cdq() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x99);
}


// idiv reg: F7 /7 — the 0xF8 | reg byte is the ModRM with mod=11, /7, rm=reg.
void X86Assembler::idivl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xF8 | reg);
}
    948 
    949 
// imul r32, r32: 0F AF /r — two-operand signed multiply.
void X86Assembler::imull(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(dst, Operand(src));
}


// imul r32, r32, imm32: 69 /r id — here dst and src are the same register
// (reg = reg * imm). TODO(review): could emit the shorter 6B /r ib form when
// imm fits in a signed byte.
void X86Assembler::imull(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x69);
  EmitOperand(reg, Operand(reg));
  EmitImmediate(imm);
}


// imul r32, m32: 0F AF /r.
void X86Assembler::imull(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(reg, address);
}


// imul reg: F7 /5 — one-operand signed multiply, EDX:EAX = EAX * reg.
void X86Assembler::imull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));
}


// imul m32: F7 /5.
void X86Assembler::imull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, address);
}


// mul reg: F7 /4 — unsigned multiply, EDX:EAX = EAX * reg.
void X86Assembler::mull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, Operand(reg));
}


// mul m32: F7 /4.
void X86Assembler::mull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, address);
}
    999 }
   1000 
   1001 
   1002 void X86Assembler::sbbl(Register dst, Register src) {
   1003   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
   1004   EmitUint8(0x1B);
   1005   EmitOperand(dst, Operand(src));
   1006 }
   1007 
   1008 
// sbb reg, imm: subtract immediate with borrow (group 81/83, /3 extension).
void X86Assembler::sbbl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(3, Operand(reg), imm);
}
   1013 
   1014 
// sbb dst, m32: subtract memory operand with borrow (opcode 1B /r).
void X86Assembler::sbbl(Register dst, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x1B);
  EmitOperand(dst, address);
}
   1020 
   1021 
// inc reg, single-byte form 40+r (valid on x86-32 only; REX prefix on x86-64).
void X86Assembler::incl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x40 + reg);
}
   1026 
   1027 
// inc m32 (opcode FF /0).
void X86Assembler::incl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(0, address);
}
   1033 
   1034 
// dec reg, single-byte form 48+r (x86-32 only).
void X86Assembler::decl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x48 + reg);
}
   1039 
   1040 
// dec m32 (opcode FF /1).
void X86Assembler::decl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(1, address);
}
   1046 
   1047 
// shl reg, imm8 (shift-group extension /4).
void X86Assembler::shll(Register reg, const Immediate& imm) {
  EmitGenericShift(4, reg, imm);
}
   1051 
   1052 
// shl operand, CL (shift-group extension /4; shifter must be ECX).
void X86Assembler::shll(Register operand, Register shifter) {
  EmitGenericShift(4, operand, shifter);
}
   1056 
   1057 
// shr reg, imm8: logical right shift (shift-group extension /5).
void X86Assembler::shrl(Register reg, const Immediate& imm) {
  EmitGenericShift(5, reg, imm);
}
   1061 
   1062 
// shr operand, CL (shift-group extension /5; shifter must be ECX).
void X86Assembler::shrl(Register operand, Register shifter) {
  EmitGenericShift(5, operand, shifter);
}
   1066 
   1067 
// sar reg, imm8: arithmetic right shift (shift-group extension /7).
void X86Assembler::sarl(Register reg, const Immediate& imm) {
  EmitGenericShift(7, reg, imm);
}
   1071 
   1072 
// sar operand, CL (shift-group extension /7; shifter must be ECX).
void X86Assembler::sarl(Register operand, Register shifter) {
  EmitGenericShift(7, operand, shifter);
}
   1076 
   1077 
// shld dst, src, CL (opcode 0F A5): shift dst left, filling low bits from src.
// ModRM reg field carries src and the r/m field carries dst, hence the
// operand order below.
void X86Assembler::shld(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA5);
  EmitRegisterOperand(src, dst);
}
   1084 
   1085 
// neg reg: two's-complement negation (opcode F7 /3).
void X86Assembler::negl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));
}
   1091 
   1092 
// not reg: one's-complement negation (opcode F7 /2; 0xD0 | reg encodes
// ModRM with mod=11, reg-extension=2).
void X86Assembler::notl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xD0 | reg);
}
   1098 
   1099 
// enter imm16, 0: create stack frame of imm bytes with nesting level 0
// (opcode C8 iw ib).
void X86Assembler::enter(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC8);
  CHECK(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
  EmitUint8(0x00);  // Nesting level is always zero.
}
   1108 
   1109 
// leave: tear down stack frame (opcode C9).
void X86Assembler::leave() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC9);
}
   1114 
   1115 
// ret: near return (opcode C3).
void X86Assembler::ret() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC3);
}
   1120 
   1121 
// ret imm16: near return popping imm bytes of arguments (opcode C2 iw).
void X86Assembler::ret(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC2);
  CHECK(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}
   1129 
   1130 
   1131 
// nop (opcode 90).
void X86Assembler::nop() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x90);
}
   1136 
   1137 
// int3: breakpoint trap (opcode CC).
void X86Assembler::int3() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xCC);
}
   1142 
   1143 
// hlt: halt the processor (opcode F4, privileged).
void X86Assembler::hlt() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF4);
}
   1148 
   1149 
// Conditional jump to |label|. For bound (backward) labels, picks the short
// rel8 form (70+cc) when the displacement fits in 8 bits, otherwise the long
// rel32 form (0F 80+cc). Unbound labels always get the long form so the
// 32-bit displacement slot can be patched when the label is bound.
void X86Assembler::j(Condition condition, Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;
    static const int kLongSize = 6;
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);  // Bound labels are always behind us.
    if (IsInt(8, offset - kShortSize)) {
      EmitUint8(0x70 + condition);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0x0F);
      EmitUint8(0x80 + condition);
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0x0F);
    EmitUint8(0x80 + condition);
    EmitLabelLink(label);
  }
}
   1171 
   1172 
// jmp reg: indirect jump through a register (opcode FF /4).
void X86Assembler::jmp(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(4, reg);
}
   1178 
// jmp m32: indirect jump through memory (opcode FF /4).
void X86Assembler::jmp(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(4, address);
}
   1184 
// Unconditional jump to |label|. Bound (backward) labels use the short rel8
// form (EB) when possible, else the rel32 form (E9). Unbound labels always
// use the rel32 form so the displacement can be patched at bind time.
void X86Assembler::jmp(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;
    static const int kLongSize = 5;
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);  // Bound labels are always behind us.
    if (IsInt(8, offset - kShortSize)) {
      EmitUint8(0xEB);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0xE9);
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0xE9);
    EmitLabelLink(label);
  }
}
   1204 
   1205 
// Emits the LOCK prefix (F0) and returns |this| so the locked instruction
// can be chained, e.g. lock()->cmpxchgl(...).
X86Assembler* X86Assembler::lock() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF0);
  return this;
}
   1211 
   1212 
// cmpxchg m32, reg (opcode 0F B1): compare EAX with [address] and exchange
// with reg on match. Usually preceded by lock().
void X86Assembler::cmpxchgl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB1);
  EmitOperand(reg, address);
}
   1219 
// mfence: full memory barrier (opcode 0F AE F0).
void X86Assembler::mfence() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAE);
  EmitUint8(0xF0);
}
   1226 
// Emits the FS segment-override prefix (64) and returns |this| for chaining,
// e.g. fs()->movl(...).
X86Assembler* X86Assembler::fs() {
  // TODO: fs is a prefix and not an instruction
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x64);
  return this;
}
   1233 
// Emits the GS segment-override prefix (65) and returns |this| for chaining.
X86Assembler* X86Assembler::gs() {
  // TODO: gs is a prefix and not an instruction
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x65);
  return this;
}
   1240 
   1241 void X86Assembler::AddImmediate(Register reg, const Immediate& imm) {
   1242   int value = imm.value();
   1243   if (value > 0) {
   1244     if (value == 1) {
   1245       incl(reg);
   1246     } else if (value != 0) {
   1247       addl(reg, imm);
   1248     }
   1249   } else if (value < 0) {
   1250     value = -value;
   1251     if (value == 1) {
   1252       decl(reg);
   1253     } else if (value != 0) {
   1254       subl(reg, Immediate(value));
   1255     }
   1256   }
   1257 }
   1258 
   1259 
// Loads the 64-bit double |value| into |dst| by pushing its two 32-bit
// halves onto the stack, loading with movsd, and popping the slots again.
void X86Assembler::LoadDoubleConstant(XmmRegister dst, double value) {
  // TODO: Need to have a code constants table.
  int64_t constant = bit_cast<int64_t, double>(value);
  pushl(Immediate(High32Bits(constant)));  // High word first: stack grows down.
  pushl(Immediate(Low32Bits(constant)));
  movsd(dst, Address(ESP, 0));
  addl(ESP, Immediate(2 * kWordSize));  // Discard the two temporary slots.
}
   1268 
   1269 
// Negates the single-precision value in |f| by xor-ing its sign bit with a
// 16-byte-aligned constant (xorps requires aligned memory operands).
void X86Assembler::FloatNegate(XmmRegister f) {
  static const struct {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_negate_constant __attribute__((aligned(16))) =
      { 0x80000000, 0x00000000, 0x80000000, 0x00000000 };
  xorps(f, Address::Absolute(reinterpret_cast<uword>(&float_negate_constant)));
}
   1280 
   1281 
// Negates the double-precision value in |d| by xor-ing its sign bit with a
// 16-byte-aligned constant (xorpd requires aligned memory operands).
void X86Assembler::DoubleNegate(XmmRegister d) {
  static const struct {
    uint64_t a;
    uint64_t b;
  } double_negate_constant __attribute__((aligned(16))) =
      {0x8000000000000000LL, 0x8000000000000000LL};
  xorpd(d, Address::Absolute(reinterpret_cast<uword>(&double_negate_constant)));
}
   1290 
   1291 
// Computes |reg| = fabs(reg) by masking off the sign bit with an aligned
// all-but-sign constant (andpd requires aligned memory operands).
void X86Assembler::DoubleAbs(XmmRegister reg) {
  static const struct {
    uint64_t a;
    uint64_t b;
  } double_abs_constant __attribute__((aligned(16))) =
      {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
  andpd(reg, Address::Absolute(reinterpret_cast<uword>(&double_abs_constant)));
}
   1300 
   1301 
   1302 void X86Assembler::Align(int alignment, int offset) {
   1303   CHECK(IsPowerOfTwo(alignment));
   1304   // Emit nop instruction until the real position is aligned.
   1305   while (((offset + buffer_.GetPosition()) & (alignment-1)) != 0) {
   1306     nop();
   1307   }
   1308 }
   1309 
   1310 
// Binds |label| to the current buffer position and back-patches every
// forward reference in the link chain. Each linked site holds the position
// of the next link in its 32-bit displacement slot; patching replaces it
// with the final rel32 displacement (relative to the end of the 4-byte slot).
void X86Assembler::Bind(Label* label) {
  int bound = buffer_.Size();
  CHECK(!label->IsBound());  // Labels can only be bound once.
  while (label->IsLinked()) {
    int position = label->LinkPosition();
    int next = buffer_.Load<int32_t>(position);
    buffer_.Store<int32_t>(position, bound - (position + 4));
    label->position_ = next;
  }
  label->BindTo(bound);
}
   1322 
   1323 
// Emits the ModRM/SIB/displacement bytes of |operand|, merging
// |reg_or_opcode| (a register number or an opcode-extension digit, 0-7)
// into bits 3-5 of the ModRM byte.
void X86Assembler::EmitOperand(int reg_or_opcode, const Operand& operand) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);
  const int length = operand.length_;
  CHECK_GT(length, 0);
  // Emit the ModRM byte updated with the given reg value.
  CHECK_EQ(operand.encoding_[0] & 0x38, 0);  // reg field must start empty.
  EmitUint8(operand.encoding_[0] + (reg_or_opcode << 3));
  // Emit the rest of the encoded operand.
  for (int i = 1; i < length; i++) {
    EmitUint8(operand.encoding_[i]);
  }
}
   1337 
   1338 
// Emits |imm| as a little-endian 32-bit immediate.
void X86Assembler::EmitImmediate(const Immediate& imm) {
  EmitInt32(imm.value());
}
   1342 
   1343 
// Emits an ALU-group instruction (add/or/adc/sbb/and/sub/xor/cmp selected by
// |reg_or_opcode|) with an immediate, choosing the shortest encoding:
//   83 /digit ib  - sign-extended 8-bit immediate,
//   05+digit<<3   - short accumulator form when the operand is EAX,
//   81 /digit id  - general 32-bit immediate form.
void X86Assembler::EmitComplex(int reg_or_opcode,
                               const Operand& operand,
                               const Immediate& immediate) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);
  if (immediate.is_int8()) {
    // Use sign-extended 8-bit immediate.
    EmitUint8(0x83);
    EmitOperand(reg_or_opcode, operand);
    EmitUint8(immediate.value() & 0xFF);
  } else if (operand.IsRegister(EAX)) {
    // Use short form if the destination is eax.
    EmitUint8(0x05 + (reg_or_opcode << 3));
    EmitImmediate(immediate);
  } else {
    EmitUint8(0x81);
    EmitOperand(reg_or_opcode, operand);
    EmitImmediate(immediate);
  }
}
   1364 
   1365 
// Emits the rel32 displacement for a bound (backward) label, adjusted by
// |instruction_size| so it is relative to the end of the instruction; links
// the label for later patching when unbound.
void X86Assembler::EmitLabel(Label* label, int instruction_size) {
  if (label->IsBound()) {
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);  // Bound labels are behind us.
    EmitInt32(offset - instruction_size);
  } else {
    EmitLabelLink(label);
  }
}
   1375 
   1376 
// Appends this site to the label's link chain: the 32-bit slot temporarily
// stores the previous chain head and is overwritten with the real
// displacement in Bind().
void X86Assembler::EmitLabelLink(Label* label) {
  CHECK(!label->IsBound());
  int position = buffer_.Size();
  EmitInt32(label->position_);
  label->LinkTo(position);
}
   1383 
   1384 
// Emits a shift/rotate of |reg| by an immediate. Uses the dedicated
// shift-by-1 form (D1 /digit) when the count is 1, otherwise C1 /digit ib.
void X86Assembler::EmitGenericShift(int reg_or_opcode,
                                    Register reg,
                                    const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int8());  // Shift counts are at most 8 bits.
  if (imm.value() == 1) {
    EmitUint8(0xD1);
    EmitOperand(reg_or_opcode, Operand(reg));
  } else {
    EmitUint8(0xC1);
    EmitOperand(reg_or_opcode, Operand(reg));
    EmitUint8(imm.value() & 0xFF);
  }
}
   1399 
   1400 
// Emits a shift/rotate of |operand| by CL (D3 /digit). The shifter must be
// ECX because x86 hard-wires variable shift counts to CL.
void X86Assembler::EmitGenericShift(int reg_or_opcode,
                                    Register operand,
                                    Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK_EQ(shifter, ECX);
  EmitUint8(0xD3);
  EmitOperand(reg_or_opcode, Operand(operand));
}
   1409 
   1410 constexpr size_t kFramePointerSize = 4;
   1411 
// Builds a managed-code stack frame: pushes callee-save registers (in
// reverse order), drops ESP to make room for the rest of the frame, pushes
// the ArtMethod* so it sits at the bottom, then writes the register
// entry spills into their out-of-frame argument slots.
void X86Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                              const std::vector<ManagedRegister>& spill_regs,
                              const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    pushl(spill_regs.at(i).AsX86().AsCpuRegister());
  }
  // return address then method on stack
  addl(ESP, Immediate(-frame_size + (spill_regs.size() * kFramePointerSize) +
                      sizeof(StackReference<mirror::ArtMethod>) /*method*/ +
                      kFramePointerSize /*return address*/));
  pushl(method_reg.AsX86().AsCpuRegister());
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    // Spill slots live just above this frame (caller's argument area).
    movl(Address(ESP, frame_size + sizeof(StackReference<mirror::ArtMethod>) +
                 (i * kFramePointerSize)),
         entry_spills.at(i).AsX86().AsCpuRegister());
  }
}
   1430 
// Tears down the frame built by BuildFrame: releases the locals/method area,
// pops callee-save registers (in push order reversed by the stack), and
// returns.
void X86Assembler::RemoveFrame(size_t frame_size,
                            const std::vector<ManagedRegister>& spill_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  addl(ESP, Immediate(frame_size - (spill_regs.size() * kFramePointerSize) -
                      sizeof(StackReference<mirror::ArtMethod>)));
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    popl(spill_regs.at(i).AsX86().AsCpuRegister());
  }
  ret();
}
   1441 
// Grows the frame by |adjust| bytes (ESP moves down).
void X86Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addl(ESP, Immediate(-adjust));
}
   1446 
// Shrinks the frame by |adjust| bytes (ESP moves up).
void X86Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addl(ESP, Immediate(adjust));
}
   1451 
// Stores |msrc| to the stack slot at |offs|, dispatching on the register
// kind: CPU register (4 bytes), register pair (8 bytes, low word first),
// X87 (fstps/fstpl pops the FP stack), or XMM (movss/movsd).
void X86Assembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  X86ManagedRegister src = msrc.AsX86();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    movl(Address(ESP, offs), src.AsCpuRegister());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    movl(Address(ESP, offs), src.AsRegisterPairLow());
    movl(Address(ESP, FrameOffset(offs.Int32Value()+4)),
         src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    if (size == 4) {
      fstps(Address(ESP, offs));
    } else {
      fstpl(Address(ESP, offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      movss(Address(ESP, offs), src.AsXmmRegister());
    } else {
      movsd(Address(ESP, offs), src.AsXmmRegister());
    }
  }
}
   1479 
// Stores a heap reference (held in a CPU register) to the stack slot |dest|.
void X86Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  movl(Address(ESP, dest), src.AsCpuRegister());
}
   1485 
// Stores a raw pointer (held in a CPU register) to the stack slot |dest|.
void X86Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  movl(Address(ESP, dest), src.AsCpuRegister());
}
   1491 
// Stores the 32-bit immediate |imm| to the stack slot |dest|; the scratch
// register is unused on x86 (mov m32, imm32 exists).
void X86Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                         ManagedRegister) {
  movl(Address(ESP, dest), Immediate(imm));
}
   1496 
// Stores |imm| into the current Thread object at |dest| via the FS segment;
// the scratch register is unused.
void X86Assembler::StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm,
                                          ManagedRegister) {
  fs()->movl(Address::Absolute(dest), Immediate(imm));
}
   1501 
// Computes the address ESP + fr_offs into |mscratch| and stores it into the
// current Thread object at |thr_offs| (FS-relative).
void X86Assembler::StoreStackOffsetToThread32(ThreadOffset<4> thr_offs,
                                            FrameOffset fr_offs,
                                            ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  leal(scratch.AsCpuRegister(), Address(ESP, fr_offs));
  fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
}
   1510 
// Stores the current ESP into the Thread object at |thr_offs| (FS-relative).
void X86Assembler::StoreStackPointerToThread32(ThreadOffset<4> thr_offs) {
  fs()->movl(Address::Absolute(thr_offs), ESP);
}
   1514 
// Not needed on x86; aborts if reached.
void X86Assembler::StoreSpanning(FrameOffset /*dst*/, ManagedRegister /*src*/,
                                 FrameOffset /*in_off*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL);  // this case only currently exists for ARM
}
   1519 
// Loads |size| bytes from the stack slot |src| into |mdest|, dispatching on
// register kind: CPU register (4), register pair (8, low word first), X87
// (flds/fldl pushes onto the FP stack), or XMM (movss/movsd).
void X86Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    movl(dest.AsCpuRegister(), Address(ESP, src));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    movl(dest.AsRegisterPairLow(), Address(ESP, src));
    movl(dest.AsRegisterPairHigh(), Address(ESP, FrameOffset(src.Int32Value()+4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      flds(Address(ESP, src));
    } else {
      fldl(Address(ESP, src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      movss(dest.AsXmmRegister(), Address(ESP, src));
    } else {
      movsd(dest.AsXmmRegister(), Address(ESP, src));
    }
  }
}
   1546 
// Loads |size| bytes from the current Thread object at |src| (FS-relative)
// into |mdest|; same register-kind dispatch as Load().
void X86Assembler::LoadFromThread32(ManagedRegister mdest, ThreadOffset<4> src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    fs()->movl(dest.AsCpuRegister(), Address::Absolute(src));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    fs()->movl(dest.AsRegisterPairLow(), Address::Absolute(src));
    fs()->movl(dest.AsRegisterPairHigh(), Address::Absolute(ThreadOffset<4>(src.Int32Value()+4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      fs()->flds(Address::Absolute(src));
    } else {
      fs()->fldl(Address::Absolute(src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      fs()->movss(dest.AsXmmRegister(), Address::Absolute(src));
    } else {
      fs()->movsd(dest.AsXmmRegister(), Address::Absolute(src));
    }
  }
}
   1573 
// Loads a heap reference from the stack slot |src| into a CPU register.
void X86Assembler::LoadRef(ManagedRegister mdest, FrameOffset  src) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  movl(dest.AsCpuRegister(), Address(ESP, src));
}
   1579 
   1580 void X86Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base,
   1581                            MemberOffset offs) {
   1582   X86ManagedRegister dest = mdest.AsX86();
   1583   CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
   1584   movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
   1585   if (kPoisonHeapReferences) {
   1586     negl(dest.AsCpuRegister());
   1587   }
   1588 }
   1589 
   1590 void X86Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
   1591                               Offset offs) {
   1592   X86ManagedRegister dest = mdest.AsX86();
   1593   CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
   1594   movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
   1595 }
   1596 
// Loads a raw pointer from the current Thread object at |offs|
// (FS-relative) into a CPU register.
void X86Assembler::LoadRawPtrFromThread32(ManagedRegister mdest,
                                        ThreadOffset<4> offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs));
}
   1603 
// Sign-extends the low |size| bytes (1 or 2) of |mreg| to 32 bits in place.
// The byte form requires a byte-addressable register (EAX-EDX).
void X86Assembler::SignExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    movsxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}
   1614 
// Zero-extends the low |size| bytes (1 or 2) of |mreg| to 32 bits in place.
// The byte form requires a byte-addressable register (EAX-EDX).
void X86Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    movzxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}
   1625 
// Register-to-register move. Supports CPU->CPU directly and X87->XMM by
// spilling through a temporary stack slot (only ST0 is handled); all other
// combinations abort. No-op when source and destination are the same.
void X86Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (!dest.Equals(src)) {
    if (dest.IsCpuRegister() && src.IsCpuRegister()) {
      movl(dest.AsCpuRegister(), src.AsCpuRegister());
    } else if (src.IsX87Register() && dest.IsXmmRegister()) {
      // Pass via stack and pop X87 register
      subl(ESP, Immediate(16));
      if (size == 4) {
        CHECK_EQ(src.AsX87Register(), ST0);
        fstps(Address(ESP, 0));
        movss(dest.AsXmmRegister(), Address(ESP, 0));
      } else {
        CHECK_EQ(src.AsX87Register(), ST0);
        fstpl(Address(ESP, 0));
        movsd(dest.AsXmmRegister(), Address(ESP, 0));
      }
      addl(ESP, Immediate(16));
    } else {
      // TODO: x87, SSE
      UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
    }
  }
}
   1651 
// Copies a reference between two stack slots via |mscratch|.
void X86Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                           ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  movl(scratch.AsCpuRegister(), Address(ESP, src));
  movl(Address(ESP, dest), scratch.AsCpuRegister());
}
   1659 
// Copies a raw pointer from the Thread object (FS-relative |thr_offs|) to
// the stack slot |fr_offs| via |mscratch|.
void X86Assembler::CopyRawPtrFromThread32(FrameOffset fr_offs,
                                        ThreadOffset<4> thr_offs,
                                        ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  fs()->movl(scratch.AsCpuRegister(), Address::Absolute(thr_offs));
  Store(fr_offs, scratch, 4);
}
   1668 
// Copies a raw pointer from the stack slot |fr_offs| to the Thread object
// (FS-relative |thr_offs|) via |mscratch|.
void X86Assembler::CopyRawPtrToThread32(ThreadOffset<4> thr_offs,
                                      FrameOffset fr_offs,
                                      ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  Load(scratch, fr_offs, 4);
  fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
}
   1677 
// Copies |size| bytes between stack slots via |mscratch|. An 8-byte copy
// through a single CPU register is split into two 4-byte transfers.
void X86Assembler::Copy(FrameOffset dest, FrameOffset src,
                        ManagedRegister mscratch,
                        size_t size) {
  X86ManagedRegister scratch = mscratch.AsX86();
  if (scratch.IsCpuRegister() && size == 8) {
    Load(scratch, src, 4);
    Store(dest, scratch, 4);
    Load(scratch, FrameOffset(src.Int32Value() + 4), 4);
    Store(FrameOffset(dest.Int32Value() + 4), scratch, 4);
  } else {
    Load(scratch, src, size);
    Store(dest, scratch, size);
  }
}
   1692 
// Unsupported variant on x86; aborts if reached.
void X86Assembler::Copy(FrameOffset /*dst*/, ManagedRegister /*src_base*/, Offset /*src_offset*/,
                        ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}
   1697 
// Copies 4 bytes from stack slot |src| to [dest_base + dest_offset] using
// push/pop, so no scratch register is needed.
void X86Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
                        ManagedRegister scratch, size_t size) {
  CHECK(scratch.IsNoRegister());
  CHECK_EQ(size, 4u);
  pushl(Address(ESP, src));
  popl(Address(dest_base.AsX86().AsCpuRegister(), dest_offset));
}
   1705 
// Copies 4 bytes from [*[ESP+src_base] + src_offset] to the stack slot
// |dest|, dereferencing the base pointer stored on the stack.
void X86Assembler::Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  movl(scratch, Address(ESP, src_base));
  movl(scratch, Address(scratch, src_offset));
  movl(Address(ESP, dest), scratch);
}
   1714 
// Copies 4 bytes from [src + src_offset] to [dest + dest_offset] using
// push/pop, so no scratch register is needed.
void X86Assembler::Copy(ManagedRegister dest, Offset dest_offset,
                        ManagedRegister src, Offset src_offset,
                        ManagedRegister scratch, size_t size) {
  CHECK_EQ(size, 4u);
  CHECK(scratch.IsNoRegister());
  pushl(Address(src.AsX86().AsCpuRegister(), src_offset));
  popl(Address(dest.AsX86().AsCpuRegister(), dest_offset));
}
   1723 
// Copies 4 bytes within the object whose pointer is stored at stack slot
// |src| (== |dest|), from +src_offset to +dest_offset, via push/pop.
void X86Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  CHECK_EQ(dest.Int32Value(), src.Int32Value());  // Same base object.
  movl(scratch, Address(ESP, src));
  pushl(Address(scratch, src_offset));
  popl(Address(scratch, dest_offset));
}
   1733 
// Full memory barrier; compiled out on uniprocessor (non-SMP) builds.
void X86Assembler::MemoryBarrier(ManagedRegister) {
#if ANDROID_SMP != 0
  mfence();
#endif
}
   1739 
// Materializes a handle-scope entry pointer in |mout_reg|: the address of
// the slot at |handle_scope_offset|, or null when |null_allowed| and the
// reference in |min_reg| is null.
void X86Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                   FrameOffset handle_scope_offset,
                                   ManagedRegister min_reg, bool null_allowed) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    // Pre-zero the output so a null input yields a null handle.
    if (!out_reg.Equals(in_reg)) {
      xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    j(kZero, &null_arg);
    leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset));
    Bind(&null_arg);
  } else {
    leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset));
  }
}
   1761 
// Like the register variant, but writes the handle-scope entry pointer (or
// null) to the stack slot |out_off|, computing it in |mscratch|.
void X86Assembler::CreateHandleScopeEntry(FrameOffset out_off,
                                   FrameOffset handle_scope_offset,
                                   ManagedRegister mscratch,
                                   bool null_allowed) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  if (null_allowed) {
    Label null_arg;
    // Load the stored reference; if it is null, keep null as the result.
    movl(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset));
    testl(scratch.AsCpuRegister(), scratch.AsCpuRegister());
    j(kZero, &null_arg);
    leal(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset));
    Bind(&null_arg);
  } else {
    leal(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset));
  }
  Store(out_off, scratch, 4);
}
   1780 
   1781 // Given a handle scope entry, load the associated reference.
// Given a handle scope entry, load the associated reference.
// A null entry produces a null reference in |mout_reg|.
void X86Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
                                         ManagedRegister min_reg) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(out_reg.IsCpuRegister());
  CHECK(in_reg.IsCpuRegister());
  Label null_arg;
  // Pre-zero the output so a null entry yields a null reference.
  if (!out_reg.Equals(in_reg)) {
    xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
  }
  testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
  j(kZero, &null_arg);
  movl(out_reg.AsCpuRegister(), Address(in_reg.AsCpuRegister(), 0));
  Bind(&null_arg);
}
   1797 
// Reference-verification hook; intentionally a no-op for now.
void X86Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
   1801 
// Reference-verification hook (stack-slot variant); intentionally a no-op.
void X86Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
   1805 
// Indirect call through [base + offset]; the scratch register is unused.
void X86Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call
}
   1812 
// Loads a base pointer from the stack slot |base| into |mscratch| and calls
// indirectly through [scratch + offset].
void X86Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  movl(scratch, Address(ESP, base));
  call(Address(scratch, offset));
}
   1818 
// Calls an entrypoint stored in the current Thread object (FS-relative).
void X86Assembler::CallFromThread32(ThreadOffset<4> offset, ManagedRegister /*mscratch*/) {
  fs()->call(Address::Absolute(offset));
}
   1822 
// Loads the current Thread* (from FS-relative self offset) into |tr|.
void X86Assembler::GetCurrentThread(ManagedRegister tr) {
  fs()->movl(tr.AsX86().AsCpuRegister(),
             Address::Absolute(Thread::SelfOffset<4>()));
}
   1827 
// Stores the current Thread* to the stack slot |offset| via |mscratch|.
void X86Assembler::GetCurrentThread(FrameOffset offset,
                                    ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  fs()->movl(scratch.AsCpuRegister(), Address::Absolute(Thread::SelfOffset<4>()));
  movl(Address(ESP, offset), scratch.AsCpuRegister());
}
   1834 
// Emits an exception check: compares the thread's pending-exception slot
// against null and branches to a queued slow path when one is set.
// NOTE(review): |slow| is allocated with new and ownership appears to pass
// to the buffer's slow-path queue — confirm EnqueueSlowPath takes ownership.
void X86Assembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
  X86ExceptionSlowPath* slow = new X86ExceptionSlowPath(stack_adjust);
  buffer_.EnqueueSlowPath(slow);
  fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<4>()), Immediate(0));
  j(kNotEqual, slow->Entry());
}
   1841 
// Slow path for ExceptionPoll: restores the frame size if it was adjusted,
// loads the pending exception into EAX, and tail-calls the quick
// pDeliverException entrypoint, which does not return.
void X86ExceptionSlowPath::Emit(Assembler *sasm) {
  X86Assembler* sp_asm = down_cast<X86Assembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  // Note: the return value is dead
  if (stack_adjust_ != 0) {  // Fix up the frame.
    __ DecreaseFrameSize(stack_adjust_);
  }
  // Pass exception as argument in EAX
  __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<4>()));
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(4, pDeliverException)));
  // this call should never return
  __ int3();
#undef __
}
   1857 
   1858 }  // namespace x86
   1859 }  // namespace art
   1860