// art/compiler/utils/x86/assembler_x86.cc (code-viewer navigation residue removed).
      1 /*
      2  * Copyright (C) 2011 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #include "assembler_x86.h"
     18 
     19 #include "base/casts.h"
     20 #include "entrypoints/quick/quick_entrypoints.h"
     21 #include "memory_region.h"
     22 #include "thread.h"
     23 
     24 namespace art {
     25 namespace x86 {
     26 
     27 std::ostream& operator<<(std::ostream& os, const XmmRegister& reg) {
     28   return os << "XMM" << static_cast<int>(reg);
     29 }
     30 
     31 std::ostream& operator<<(std::ostream& os, const X87Register& reg) {
     32   return os << "ST" << static_cast<int>(reg);
     33 }
     34 
// call reg — FF /2: indirect near call through a register.
void X86Assembler::call(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(2, reg);
}


// call [mem] — FF /2: indirect near call through memory.
void X86Assembler::call(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(2, address);
}


// call label — E8 rel32: direct near call, displacement relative to the
// end of the instruction.
void X86Assembler::call(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE8);
  static const int kSize = 5;  // 1 opcode byte + 4 displacement bytes.
  // Offset by one because we already have emitted the opcode.
  EmitLabel(label, kSize - 1);
}


// call external — E8 followed by the target's address as the 32-bit field.
// NOTE(review): this stores label.address() directly rather than a
// pc-relative delta; presumably the buffer is patched/relocated before
// execution — confirm against the users of ExternalLabel.
void X86Assembler::call(const ExternalLabel& label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  intptr_t call_start = buffer_.GetPosition();
  EmitUint8(0xE8);
  EmitInt32(label.address());
  static const intptr_t kCallExternalLabelSize = 5;
  DCHECK_EQ((buffer_.GetPosition() - call_start), kCallExternalLabelSize);
}
     66 
     67 
// push reg — 50+r: single-byte push of a 32-bit register.
void X86Assembler::pushl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x50 + reg);
}


// push [mem] — FF /6.
void X86Assembler::pushl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(6, address);
}


// push imm — 6A ib (sign-extended 8-bit) when it fits, else 68 id (32-bit).
void X86Assembler::pushl(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (imm.is_int8()) {
    EmitUint8(0x6A);
    EmitUint8(imm.value() & 0xFF);
  } else {
    EmitUint8(0x68);
    EmitImmediate(imm);
  }
}


// pop reg — 58+r.
void X86Assembler::popl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x58 + reg);
}


// pop [mem] — 8F /0.
void X86Assembler::popl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8F);
  EmitOperand(0, address);
}
    104 
    105 
// mov reg, imm32 — B8+r id: move immediate into register.
void X86Assembler::movl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xB8 + dst);
  EmitImmediate(imm);
}


// mov dst, src (reg-reg) — 89 /r with src in the reg field, dst in r/m.
void X86Assembler::movl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitRegisterOperand(src, dst);
}


// mov reg, [mem] — 8B /r: load.
void X86Assembler::movl(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8B);
  EmitOperand(dst, src);
}


// mov [mem], reg — 89 /r: store.
void X86Assembler::movl(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitOperand(src, dst);
}


// mov [mem], imm32 — C7 /0 id.
void X86Assembler::movl(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}

// mov [mem], label — C7 /0 with the label's address as the imm32 field.
// dst.length_ + 5 covers opcode (1) + ModRM/SIB/disp (dst.length_) + imm (4).
// NOTE(review): call() passes size-minus-already-emitted to EmitLabel while
// this passes the full instruction size — confirm EmitLabel's contract.
void X86Assembler::movl(const Address& dst, Label* lbl) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitLabel(lbl, dst.length_ + 5);
}
    147 
// movnti [mem], reg — 0F C3 /r: non-temporal (cache-bypassing) store.
void X86Assembler::movntl(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC3);
  EmitOperand(src, dst);
}

// bswap reg — 0F C8+r: byte-swap a 32-bit register.
void X86Assembler::bswapl(Register dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC8 + dst);
}

// bsf dst, src — 0F BC /r: bit-scan forward (index of lowest set bit).
void X86Assembler::bsfl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBC);
  EmitRegisterOperand(dst, src);
}

// bsf dst, [mem].
void X86Assembler::bsfl(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBC);
  EmitOperand(dst, src);
}

// bsr dst, src — 0F BD /r: bit-scan reverse (index of highest set bit).
void X86Assembler::bsrl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBD);
  EmitRegisterOperand(dst, src);
}

// bsr dst, [mem].
void X86Assembler::bsrl(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBD);
  EmitOperand(dst, src);
}

// popcnt dst, src — F3 0F B8 /r: population count (requires POPCNT support).
void X86Assembler::popcntl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xB8);
  EmitRegisterOperand(dst, src);
}

// popcnt dst, [mem].
void X86Assembler::popcntl(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xB8);
  EmitOperand(dst, src);
}
    204 
// movzx dst, src8 — 0F B6 /r: zero-extend byte register into 32-bit register.
void X86Assembler::movzxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitRegisterOperand(dst, src);
}


// movzx dst, byte [mem] — 0F B6 /r.
void X86Assembler::movzxb(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitOperand(dst, src);
}


// movsx dst, src8 — 0F BE /r: sign-extend byte register into 32-bit register.
void X86Assembler::movsxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitRegisterOperand(dst, src);
}


// movsx dst, byte [mem] — 0F BE /r.
void X86Assembler::movsxb(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitOperand(dst, src);
}


// Byte loads must state their extension explicitly; this overload is a trap.
void X86Assembler::movb(Register /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxb or movsxb instead.";
}


// mov byte [mem], src8 — 88 /r.
void X86Assembler::movb(const Address& dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x88);
  EmitOperand(src, dst);
}


// mov byte [mem], imm8 — C6 /0 ib (EAX == 0 supplies the /0 digit).
void X86Assembler::movb(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC6);
  EmitOperand(EAX, dst);
  CHECK(imm.is_int8());
  EmitUint8(imm.value() & 0xFF);
}
    256 
    257 
// movzx dst, src16 — 0F B7 /r: zero-extend 16-bit register.
void X86Assembler::movzxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitRegisterOperand(dst, src);
}


// movzx dst, word [mem] — 0F B7 /r.
void X86Assembler::movzxw(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitOperand(dst, src);
}


// movsx dst, src16 — 0F BF /r: sign-extend 16-bit register.
void X86Assembler::movsxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitRegisterOperand(dst, src);
}


// movsx dst, word [mem] — 0F BF /r.
void X86Assembler::movsxw(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitOperand(dst, src);
}


// Word loads must state their extension explicitly; this overload is a trap.
void X86Assembler::movw(Register /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxw or movsxw instead.";
}


// mov word [mem], src — 66 89 /r (operand-size prefix selects 16 bits).
void X86Assembler::movw(const Address& dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x89);
  EmitOperand(src, dst);
}


// mov word [mem], imm16 — 66 C7 /0 iw; the immediate is emitted as exactly
// two bytes, little-endian, to match the 16-bit operand size.
void X86Assembler::movw(const Address& dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  CHECK(imm.is_uint16() || imm.is_int16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8(imm.value() >> 8);
}
    312 
    313 
// lea dst, [mem] — 8D /r: compute effective address without touching memory.
void X86Assembler::leal(Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8D);
  EmitOperand(dst, src);
}


// cmovcc dst, src — 0F 40+cc /r: conditional move, register source.
void X86Assembler::cmovl(Condition condition, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x40 + condition);
  EmitRegisterOperand(dst, src);
}


// cmovcc dst, [mem] — 0F 40+cc /r, memory source.
void X86Assembler::cmovl(Condition condition, Register dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x40 + condition);
  EmitOperand(dst, src);
}


// setcc dst — 0F 90+cc /0: set the low byte of dst to 0/1 from the condition.
// NOTE(review): only EAX..EBX have byte-register encodings in the r/m slot;
// presumably callers guarantee that — confirm.
void X86Assembler::setb(Condition condition, Register dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x90 + condition);
  EmitOperand(0, Operand(dst));
}
    343 
    344 
// movaps dst, src — 0F 28 /r: aligned 128-bit move between XMM registers.
void X86Assembler::movaps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x28);
  EmitXmmRegisterOperand(dst, src);
}


// movss dst, [mem] — F3 0F 10 /r: scalar single-precision load.
void X86Assembler::movss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}


// movss [mem], src — F3 0F 11 /r: scalar single-precision store.
void X86Assembler::movss(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}


// movss dst, src (reg-reg) — deliberately uses the store form (F3 0F 11)
// with the operands swapped; it encodes the same move as the load form.
void X86Assembler::movss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}


// movd xmm, reg — 66 0F 6E /r: move 32 bits from GPR to XMM.
void X86Assembler::movd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x6E);
  EmitOperand(dst, Operand(src));
}


// movd reg, xmm — 66 0F 7E /r: move 32 bits from XMM to GPR.
void X86Assembler::movd(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(src, Operand(dst));
}
    396 
    397 
// addss dst, src — F3 0F 58 /r: scalar single-precision add.
void X86Assembler::addss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}


// addss dst, [mem].
void X86Assembler::addss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}


// subss dst, src — F3 0F 5C /r: scalar single-precision subtract.
void X86Assembler::subss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}


// subss dst, [mem].
void X86Assembler::subss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}


// mulss dst, src — F3 0F 59 /r: scalar single-precision multiply.
void X86Assembler::mulss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}


// mulss dst, [mem].
void X86Assembler::mulss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}


// divss dst, src — F3 0F 5E /r: scalar single-precision divide.
void X86Assembler::divss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}


// divss dst, [mem].
void X86Assembler::divss(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
    468 
    469 
// fld dword [mem] — D9 /0: push single-precision value onto the x87 stack.
void X86Assembler::flds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(0, src);
}


// fst dword [mem] — D9 /2: store ST0 as single precision (no pop).
void X86Assembler::fsts(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(2, dst);
}


// fstp dword [mem] — D9 /3: store ST0 as single precision and pop.
void X86Assembler::fstps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(3, dst);
}
    489 
    490 
// movsd dst, [mem] — F2 0F 10 /r: scalar double-precision load.
void X86Assembler::movsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}


// movsd [mem], src — F2 0F 11 /r: scalar double-precision store.
void X86Assembler::movsd(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}


// movsd dst, src (reg-reg) — uses the store form (F2 0F 11) with swapped
// operands, the same equivalent-encoding trick as reg-reg movss above.
void X86Assembler::movsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}


// movhpd dst, [mem] — 66 0F 16 /r: load 64 bits into the high half of dst.
void X86Assembler::movhpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x16);
  EmitOperand(dst, src);
}


// movhpd [mem], src — 66 0F 17 /r: store the high 64 bits of src.
void X86Assembler::movhpd(const Address& dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x17);
  EmitOperand(src, dst);
}
    534 
    535 
// psrldq reg, imm8 — 66 0F 73 /3 ib: shift the whole 128-bit register right
// by imm bytes.
void X86Assembler::psrldq(XmmRegister reg, const Immediate& shift_count) {
  DCHECK(shift_count.is_uint8());

  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x73);
  EmitXmmRegisterOperand(3, reg);
  EmitUint8(shift_count.value());
}


// psrlq reg, imm8 — 66 0F 73 /2 ib: logical right shift of each 64-bit lane
// by imm bits.
void X86Assembler::psrlq(XmmRegister reg, const Immediate& shift_count) {
  DCHECK(shift_count.is_uint8());

  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x73);
  EmitXmmRegisterOperand(2, reg);
  EmitUint8(shift_count.value());
}


// punpckldq dst, src — 66 0F 62 /r: interleave the low 32-bit elements.
void X86Assembler::punpckldq(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x62);
  EmitXmmRegisterOperand(dst, src);
}
    567 
    568 
// addsd dst, src — F2 0F 58 /r: scalar double-precision add.
void X86Assembler::addsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}


// addsd dst, [mem].
void X86Assembler::addsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}


// subsd dst, src — F2 0F 5C /r: scalar double-precision subtract.
void X86Assembler::subsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}


// subsd dst, [mem].
void X86Assembler::subsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}


// mulsd dst, src — F2 0F 59 /r: scalar double-precision multiply.
void X86Assembler::mulsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}


// mulsd dst, [mem].
void X86Assembler::mulsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}


// divsd dst, src — F2 0F 5E /r: scalar double-precision divide.
void X86Assembler::divsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}


// divsd dst, [mem].
void X86Assembler::divsd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
    639 
    640 
// cvtsi2ss xmm, r32 — F3 0F 2A /r: int32 -> float.
void X86Assembler::cvtsi2ss(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}


// cvtsi2sd xmm, r32 — F2 0F 2A /r: int32 -> double.
void X86Assembler::cvtsi2sd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}


// cvtss2si r32, xmm — F3 0F 2D /r: float -> int32, current rounding mode
// (see cvttss2si below for the truncating variant).
void X86Assembler::cvtss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}


// cvtss2sd xmm, xmm — F3 0F 5A /r: float -> double.
void X86Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}


// cvtsd2si r32, xmm — F2 0F 2D /r: double -> int32, current rounding mode.
void X86Assembler::cvtsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}


// cvttss2si r32, xmm — F3 0F 2C /r: float -> int32 with truncation.
void X86Assembler::cvttss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}


// cvttsd2si r32, xmm — F2 0F 2C /r: double -> int32 with truncation.
void X86Assembler::cvttsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}


// cvtsd2ss xmm, xmm — F2 0F 5A /r: double -> float.
void X86Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}


// cvtdq2pd xmm, xmm — F3 0F E6 /r: packed int32 -> packed double.
void X86Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xE6);
  EmitXmmRegisterOperand(dst, src);
}
    720 
    721 
// comiss a, b — 0F 2F /r: ordered single-precision compare, sets EFLAGS.
void X86Assembler::comiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}


// comisd a, b — 66 0F 2F /r: ordered double-precision compare.
void X86Assembler::comisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}


// ucomiss a, b — 0F 2E /r: unordered single-precision compare.
void X86Assembler::ucomiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitXmmRegisterOperand(a, b);
}


// ucomiss a, [mem].
void X86Assembler::ucomiss(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitOperand(a, b);
}


// ucomisd a, b — 66 0F 2E /r: unordered double-precision compare.
void X86Assembler::ucomisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitXmmRegisterOperand(a, b);
}


// ucomisd a, [mem].
void X86Assembler::ucomisd(XmmRegister a, const Address& b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitOperand(a, b);
}
    771 
    772 
// roundsd dst, src, imm — 66 0F 3A 0B /r ib (SSE4.1); imm selects the
// rounding mode.
void X86Assembler::roundsd(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x0B);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(imm.value());
}


// roundss dst, src, imm — 66 0F 3A 0A /r ib (SSE4.1).
void X86Assembler::roundss(XmmRegister dst, XmmRegister src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x0A);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(imm.value());
}


// sqrtsd dst, src — F2 0F 51 /r: scalar double-precision square root.
void X86Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}


// sqrtss dst, src — F3 0F 51 /r: scalar single-precision square root.
void X86Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}
    811 
    812 
// xorpd dst, [mem] — 66 0F 57 /r: packed-double bitwise xor.
void X86Assembler::xorpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}


// xorpd dst, src.
void X86Assembler::xorpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}


// andps dst, src — 0F 54 /r: packed-single bitwise and.
void X86Assembler::andps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst, src);
}


// andpd dst, src — 66 0F 54 /r: packed-double bitwise and.
void X86Assembler::andpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst, src);
}


// orpd dst, src — 66 0F 56 /r: packed-double bitwise or.
void X86Assembler::orpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst, src);
}


// xorps dst, [mem] — 0F 57 /r: packed-single bitwise xor.
void X86Assembler::xorps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}


// orps dst, src — 0F 56 /r: packed-single bitwise or.
void X86Assembler::orps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst, src);
}


// xorps dst, src.
void X86Assembler::xorps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}


// andps dst, [mem].
void X86Assembler::andps(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}


// andpd dst, [mem].
void X86Assembler::andpd(XmmRegister dst, const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}
    896 
    897 
// fld qword [mem] — DD /0: push double-precision value onto the x87 stack.
void X86Assembler::fldl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(0, src);
}


// fst qword [mem] — DD /2: store ST0 as double precision (no pop).
void X86Assembler::fstl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(2, dst);
}


// fstp qword [mem] — DD /3: store ST0 as double precision and pop.
void X86Assembler::fstpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(3, dst);
}


// fstsw — 9B DF E0: wait prefix + fnstsw ax, i.e. store the x87 status
// word into AX after checking for pending FP exceptions.
void X86Assembler::fstsw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x9B);
  EmitUint8(0xDF);
  EmitUint8(0xE0);
}
    925 
    926 
// fnstcw word [mem] — D9 /7: store the x87 control word (no wait prefix).
void X86Assembler::fnstcw(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(7, dst);
}


// fldcw word [mem] — D9 /5: load the x87 control word.
void X86Assembler::fldcw(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(5, src);
}


// fistpl — DF /7: FISTP m64int; pops ST0 to a 64-bit integer (note this is
// the wide form, unlike AT&T's 32-bit "fistpl" mnemonic).
void X86Assembler::fistpl(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(7, dst);
}


// fistps — DB /3: FISTP m32int; pops ST0 to a 32-bit integer.
void X86Assembler::fistps(const Address& dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(3, dst);
}


// fildl — DF /5: FILD m64int; push a 64-bit integer onto the x87 stack.
void X86Assembler::fildl(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(5, src);
}


// filds — DB /0: FILD m32int; push a 32-bit integer onto the x87 stack.
void X86Assembler::filds(const Address& src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(0, src);
}
    967 
    968 
// fincstp — D9 F7: increment the x87 stack-top pointer.
void X86Assembler::fincstp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF7);
}


// ffree st(i) — DD C0+i: mark an x87 register as empty.
// NOTE(review): CHECK_LT(…, 7) rejects ST7 although DD C7 is encodable —
// confirm whether excluding ST7 is intentional or an off-by-one.
void X86Assembler::ffree(const Immediate& index) {
  CHECK_LT(index.value(), 7);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitUint8(0xC0 + index.value());
}


// fsin — D9 FE: ST0 = sin(ST0).
void X86Assembler::fsin() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFE);
}


// fcos — D9 FF: ST0 = cos(ST0).
void X86Assembler::fcos() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFF);
}


// fptan — D9 F2: ST0 = tan(ST0), then pushes 1.0.
void X86Assembler::fptan() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF2);
}


// fucompp — DA E9: unordered compare ST0 with ST1, pop both.
void X86Assembler::fucompp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDA);
  EmitUint8(0xE9);
}


// fprem — D9 F8: partial remainder of ST0 / ST1.
void X86Assembler::fprem() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF8);
}
   1017 
   1018 
// Exchange two 32-bit registers (XCHG r32, r/m32: 87 /r).
void X86Assembler::xchgl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x87);
  EmitRegisterOperand(dst, src);
}
   1024 
   1025 
// Exchange a 32-bit register with memory (XCHG r32, m32: 87 /r).
// Note: memory-form XCHG carries an implicit LOCK on x86.
void X86Assembler::xchgl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x87);
  EmitOperand(reg, address);
}
   1031 
   1032 
   1033 void X86Assembler::cmpw(const Address& address, const Immediate& imm) {
   1034   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
   1035   EmitUint8(0x66);
   1036   EmitComplex(7, address, imm);
   1037 }
   1038 
   1039 
// Compare register with immediate (group-1 /7 via EmitComplex).
void X86Assembler::cmpl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(7, Operand(reg), imm);
}
   1044 
   1045 
// Compare two 32-bit registers (CMP r32, r/m32: 3B /r).
void X86Assembler::cmpl(Register reg0, Register reg1) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x3B);
  EmitOperand(reg0, Operand(reg1));
}
   1051 
   1052 
// Compare register with memory (CMP r32, m32: 3B /r).
void X86Assembler::cmpl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x3B);
  EmitOperand(reg, address);
}
   1058 
   1059 
// dst += src (ADD r32, r/m32: 03 /r).
void X86Assembler::addl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x03);
  EmitRegisterOperand(dst, src);
}
   1065 
   1066 
// reg += [address] (ADD r32, m32: 03 /r).
void X86Assembler::addl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x03);
  EmitOperand(reg, address);
}
   1072 
   1073 
// Compare memory with register (CMP m32, r32: 39 /r).
void X86Assembler::cmpl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x39);
  EmitOperand(reg, address);
}
   1079 
   1080 
// Compare memory with immediate (group-1 /7 via EmitComplex).
void X86Assembler::cmpl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(7, address, imm);
}
   1085 
   1086 
// Set flags from reg1 & reg2 without storing (TEST r/m32, r32: 85 /r).
void X86Assembler::testl(Register reg1, Register reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1, reg2);
}
   1092 
   1093 
// Set flags from reg & [address] without storing (TEST m32, r32: 85 /r).
void X86Assembler::testl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x85);
  EmitOperand(reg, address);
}
   1099 
   1100 
// Set flags from reg & immediate, choosing the shortest encoding:
//   TEST AL, imm8 (A8 ib) / TEST r8, imm8 (F6 /0 ib) when the immediate fits
//   in a byte and the register has a low-byte form; TEST EAX, imm32 (A9 id);
//   otherwise TEST r/m32, imm32 (F7 /0 id).
// Caution: the byte forms test only the low 8 bits of the register, which is
// equivalent only because the immediate is zero-extended uint8.
void X86Assembler::testl(Register reg, const Immediate& immediate) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // For registers that have a byte variant (EAX, EBX, ECX, and EDX)
  // we only test the byte register to keep the encoding short.
  if (immediate.is_uint8() && reg < 4) {
    // Use zero-extended 8-bit immediate.
    if (reg == EAX) {
      EmitUint8(0xA8);
    } else {
      // ModRM 11 000 reg selects the byte register (AL/CL/DL/BL).
      EmitUint8(0xF6);
      EmitUint8(0xC0 + reg);
    }
    EmitUint8(immediate.value() & 0xFF);
  } else if (reg == EAX) {
    // Use short form if the destination is EAX.
    EmitUint8(0xA9);
    EmitImmediate(immediate);
  } else {
    EmitUint8(0xF7);
    EmitOperand(0, Operand(reg));
    EmitImmediate(immediate);
  }
}
   1124 
   1125 
// dst &= src (AND r32, r/m32: 23 /r).
void X86Assembler::andl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x23);
  EmitOperand(dst, Operand(src));
}
   1131 
   1132 
// reg &= [address] (AND r32, m32: 23 /r).
void X86Assembler::andl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x23);
  EmitOperand(reg, address);
}
   1138 
   1139 
// dst &= imm (group-1 /4 via EmitComplex).
void X86Assembler::andl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(4, Operand(dst), imm);
}
   1144 
   1145 
// dst |= src (OR r32, r/m32: 0B /r).
void X86Assembler::orl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0B);
  EmitOperand(dst, Operand(src));
}
   1151 
   1152 
// reg |= [address] (OR r32, m32: 0B /r).
void X86Assembler::orl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0B);
  EmitOperand(reg, address);
}
   1158 
   1159 
// dst |= imm (group-1 /1 via EmitComplex).
void X86Assembler::orl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(1, Operand(dst), imm);
}
   1164 
   1165 
// dst ^= src (XOR r32, r/m32: 33 /r).
void X86Assembler::xorl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x33);
  EmitOperand(dst, Operand(src));
}
   1171 
   1172 
// reg ^= [address] (XOR r32, m32: 33 /r).
void X86Assembler::xorl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x33);
  EmitOperand(reg, address);
}
   1178 
   1179 
// dst ^= imm (group-1 /6 via EmitComplex).
void X86Assembler::xorl(Register dst, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(6, Operand(dst), imm);
}
   1184 
   1185 
// reg += imm (group-1 /0 via EmitComplex).
void X86Assembler::addl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(0, Operand(reg), imm);
}
   1190 
   1191 
// [address] += reg (ADD m32, r32: 01 /r).
void X86Assembler::addl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x01);
  EmitOperand(reg, address);
}
   1197 
   1198 
// [address] += imm (group-1 /0 via EmitComplex).
void X86Assembler::addl(const Address& address, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(0, address, imm);
}
   1203 
   1204 
// reg += imm + CF (group-1 /2 via EmitComplex).
void X86Assembler::adcl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(2, Operand(reg), imm);
}
   1209 
   1210 
// dst += src + CF (ADC r32, r/m32: 13 /r).
void X86Assembler::adcl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x13);
  EmitOperand(dst, Operand(src));
}
   1216 
   1217 
// dst += [address] + CF (ADC r32, m32: 13 /r).
void X86Assembler::adcl(Register dst, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x13);
  EmitOperand(dst, address);
}
   1223 
   1224 
// dst -= src (SUB r32, r/m32: 2B /r).
void X86Assembler::subl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x2B);
  EmitOperand(dst, Operand(src));
}
   1230 
   1231 
// reg -= imm (group-1 /5 via EmitComplex).
void X86Assembler::subl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(5, Operand(reg), imm);
}
   1236 
   1237 
// reg -= [address] (SUB r32, m32: 2B /r).
void X86Assembler::subl(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x2B);
  EmitOperand(reg, address);
}
   1243 
   1244 
// [address] -= reg (SUB m32, r32: 29 /r).
void X86Assembler::subl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x29);
  EmitOperand(reg, address);
}
   1250 
   1251 
// Sign-extend EAX into EDX:EAX (CDQ: 99), typically before idivl.
void X86Assembler::cdq() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x99);
}
   1256 
   1257 
// Signed divide EDX:EAX by reg; quotient in EAX, remainder in EDX
// (IDIV r/m32: F7 /7; 0xF8 | reg is the ModRM byte 11 111 reg).
void X86Assembler::idivl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xF8 | reg);
}
   1263 
   1264 
// dst *= src, truncated to 32 bits (IMUL r32, r/m32: 0F AF /r).
void X86Assembler::imull(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(dst, Operand(src));
}
   1271 
   1272 
// dst = src * imm (three-operand IMUL). Uses the short form with a
// sign-extended 8-bit immediate (6B /r ib) when possible, else 69 /r id.
void X86Assembler::imull(Register dst, Register src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // See whether imm can be represented as a sign-extended 8bit value.
  int32_t v32 = static_cast<int32_t>(imm.value());
  if (IsInt<8>(v32)) {
    // Sign-extension works.
    EmitUint8(0x6B);
    EmitOperand(dst, Operand(src));
    EmitUint8(static_cast<uint8_t>(v32 & 0xFF));
  } else {
    // Not representable, use full immediate.
    EmitUint8(0x69);
    EmitOperand(dst, Operand(src));
    EmitImmediate(imm);
  }
}
   1289 
   1290 
// reg *= imm, delegating to the three-operand form with dst == src.
void X86Assembler::imull(Register reg, const Immediate& imm) {
  imull(reg, reg, imm);
}
   1294 
   1295 
// reg *= [address] (IMUL r32, m32: 0F AF /r).
void X86Assembler::imull(Register reg, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(reg, address);
}
   1302 
   1303 
// Signed multiply EAX by reg into EDX:EAX (one-operand IMUL: F7 /5).
void X86Assembler::imull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));
}
   1309 
   1310 
// Signed multiply EAX by [address] into EDX:EAX (one-operand IMUL: F7 /5).
void X86Assembler::imull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, address);
}
   1316 
   1317 
// Unsigned multiply EAX by reg into EDX:EAX (MUL r/m32: F7 /4).
void X86Assembler::mull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, Operand(reg));
}
   1323 
   1324 
// Unsigned multiply EAX by [address] into EDX:EAX (MUL m32: F7 /4).
void X86Assembler::mull(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, address);
}
   1330 
   1331 
// dst -= src + CF (SBB r32, r/m32: 1B /r).
void X86Assembler::sbbl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x1B);
  EmitOperand(dst, Operand(src));
}
   1337 
   1338 
// reg -= imm + CF (group-1 /3 via EmitComplex).
void X86Assembler::sbbl(Register reg, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(3, Operand(reg), imm);
}
   1343 
   1344 
// dst -= [address] + CF (SBB r32, m32: 1B /r).
void X86Assembler::sbbl(Register dst, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x1B);
  EmitOperand(dst, address);
}
   1350 
   1351 
// [address] -= src + CF (SBB m32, r32: 19 /r).
void X86Assembler::sbbl(const Address& address, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x19);
  EmitOperand(src, address);
}
   1357 
   1358 
// reg += 1 using the one-byte 32-bit-mode encoding (INC r32: 40+rd).
void X86Assembler::incl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x40 + reg);
}
   1363 
   1364 
// [address] += 1 (INC m32: FF /0).
void X86Assembler::incl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(0, address);
}
   1370 
   1371 
// reg -= 1 using the one-byte 32-bit-mode encoding (DEC r32: 48+rd).
void X86Assembler::decl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x48 + reg);
}
   1376 
   1377 
// [address] -= 1 (DEC m32: FF /1).
void X86Assembler::decl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(1, address);
}
   1383 
   1384 
// Shift register left by an immediate (group-2 /4).
void X86Assembler::shll(Register reg, const Immediate& imm) {
  EmitGenericShift(4, Operand(reg), imm);
}
   1388 
   1389 
// Shift register left by CL (group-2 /4; shifter must be ECX).
void X86Assembler::shll(Register operand, Register shifter) {
  EmitGenericShift(4, Operand(operand), shifter);
}
   1393 
   1394 
// Shift memory operand left by an immediate (group-2 /4).
void X86Assembler::shll(const Address& address, const Immediate& imm) {
  EmitGenericShift(4, address, imm);
}
   1398 
   1399 
// Shift memory operand left by CL (group-2 /4; shifter must be ECX).
void X86Assembler::shll(const Address& address, Register shifter) {
  EmitGenericShift(4, address, shifter);
}
   1403 
   1404 
// Logical shift register right by an immediate (group-2 /5).
void X86Assembler::shrl(Register reg, const Immediate& imm) {
  EmitGenericShift(5, Operand(reg), imm);
}
   1408 
   1409 
// Logical shift register right by CL (group-2 /5; shifter must be ECX).
void X86Assembler::shrl(Register operand, Register shifter) {
  EmitGenericShift(5, Operand(operand), shifter);
}
   1413 
   1414 
// Logical shift memory operand right by an immediate (group-2 /5).
void X86Assembler::shrl(const Address& address, const Immediate& imm) {
  EmitGenericShift(5, address, imm);
}
   1418 
   1419 
// Logical shift memory operand right by CL (group-2 /5; shifter must be ECX).
void X86Assembler::shrl(const Address& address, Register shifter) {
  EmitGenericShift(5, address, shifter);
}
   1423 
   1424 
// Arithmetic shift register right by an immediate (group-2 /7).
void X86Assembler::sarl(Register reg, const Immediate& imm) {
  EmitGenericShift(7, Operand(reg), imm);
}
   1428 
   1429 
// Arithmetic shift register right by CL (group-2 /7; shifter must be ECX).
void X86Assembler::sarl(Register operand, Register shifter) {
  EmitGenericShift(7, Operand(operand), shifter);
}
   1433 
   1434 
// Arithmetic shift memory operand right by an immediate (group-2 /7).
void X86Assembler::sarl(const Address& address, const Immediate& imm) {
  EmitGenericShift(7, address, imm);
}
   1438 
   1439 
// Arithmetic shift memory operand right by CL (group-2 /7; shifter must be ECX).
void X86Assembler::sarl(const Address& address, Register shifter) {
  EmitGenericShift(7, address, shifter);
}
   1443 
   1444 
// Double-precision shift left by CL: dst receives bits shifted in from src
// (SHLD r/m32, r32, CL: 0F A5 /r). Note operand order in ModRM is src, dst.
void X86Assembler::shld(Register dst, Register src, Register shifter) {
  DCHECK_EQ(ECX, shifter);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA5);
  EmitRegisterOperand(src, dst);
}
   1452 
   1453 
// Double-precision shift left by imm8 (SHLD r/m32, r32, imm8: 0F A4 /r ib).
void X86Assembler::shld(Register dst, Register src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA4);
  EmitRegisterOperand(src, dst);
  EmitUint8(imm.value() & 0xFF);
}
   1461 
   1462 
// Double-precision shift right by CL: dst receives bits shifted in from src
// (SHRD r/m32, r32, CL: 0F AD /r). Note operand order in ModRM is src, dst.
void X86Assembler::shrd(Register dst, Register src, Register shifter) {
  DCHECK_EQ(ECX, shifter);
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAD);
  EmitRegisterOperand(src, dst);
}
   1470 
   1471 
// Double-precision shift right by imm8 (SHRD r/m32, r32, imm8: 0F AC /r ib).
void X86Assembler::shrd(Register dst, Register src, const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAC);
  EmitRegisterOperand(src, dst);
  EmitUint8(imm.value() & 0xFF);
}
   1479 
   1480 
// Rotate register left by an immediate (group-2 /0).
void X86Assembler::roll(Register reg, const Immediate& imm) {
  EmitGenericShift(0, Operand(reg), imm);
}
   1484 
   1485 
// Rotate register left by CL (group-2 /0; shifter must be ECX).
void X86Assembler::roll(Register operand, Register shifter) {
  EmitGenericShift(0, Operand(operand), shifter);
}
   1489 
   1490 
// Rotate register right by an immediate (group-2 /1).
void X86Assembler::rorl(Register reg, const Immediate& imm) {
  EmitGenericShift(1, Operand(reg), imm);
}
   1494 
   1495 
// Rotate register right by CL (group-2 /1; shifter must be ECX).
void X86Assembler::rorl(Register operand, Register shifter) {
  EmitGenericShift(1, Operand(operand), shifter);
}
   1499 
   1500 
// Two's-complement negate (NEG r/m32: F7 /3).
void X86Assembler::negl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));
}
   1506 
   1507 
// Bitwise complement (NOT r/m32: F7 /2; 0xD0 | reg is the ModRM byte 11 010 reg).
void X86Assembler::notl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xD0 | reg);
}
   1513 
   1514 
// Create a stack frame (ENTER imm16, imm8: C8 iw ib).
// The frame size is the 16-bit immediate; the nesting level is fixed at 0.
void X86Assembler::enter(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC8);
  CHECK(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
  EmitUint8(0x00);
}
   1523 
   1524 
// Tear down the current stack frame (LEAVE: C9).
void X86Assembler::leave() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC9);
}
   1529 
   1530 
// Near return (RET: C3).
void X86Assembler::ret() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC3);
}
   1535 
   1536 
// Near return, popping imm16 bytes of arguments (RET imm16: C2 iw).
void X86Assembler::ret(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC2);
  CHECK(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}
   1544 
   1545 
   1546 
// One-byte no-op (NOP: 90).
void X86Assembler::nop() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x90);
}
   1551 
   1552 
// Breakpoint trap (INT3: CC).
void X86Assembler::int3() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xCC);
}
   1557 
   1558 
// Halt the processor (HLT: F4; privileged — faults in user mode).
void X86Assembler::hlt() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF4);
}
   1563 
   1564 
// Conditional jump to `label`. For a bound (backward) label, picks the short
// rel8 form (70+cc) when the displacement fits, else the long rel32 form
// (0F 80+cc). An unbound label always gets the long form so the 32-bit link
// slot can later be patched by Bind().
void X86Assembler::j(Condition condition, Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;
    static const int kLongSize = 6;
    // Displacements are relative to the end of the instruction, hence the
    // kShortSize/kLongSize adjustment below.
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);
    if (IsInt<8>(offset - kShortSize)) {
      EmitUint8(0x70 + condition);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0x0F);
      EmitUint8(0x80 + condition);
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0x0F);
    EmitUint8(0x80 + condition);
    EmitLabelLink(label);
  }
}
   1586 
   1587 
// Conditional jump to a NearLabel: always the 2-byte rel8 form (70+cc).
// The caller guarantees the target is within int8 range; this is CHECKed for
// bound labels and verified at Bind() time for forward references.
void X86Assembler::j(Condition condition, NearLabel* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);
    CHECK(IsInt<8>(offset - kShortSize));
    EmitUint8(0x70 + condition);
    EmitUint8((offset - kShortSize) & 0xFF);
  } else {
    EmitUint8(0x70 + condition);
    EmitLabelLink(label);
  }
}
   1602 
   1603 
// Jump if ECX is zero (JECXZ rel8: E3 cb). Only a short form exists, so the
// target must be within int8 range (NearLabel).
void X86Assembler::jecxz(NearLabel* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);
    CHECK(IsInt<8>(offset - kShortSize));
    EmitUint8(0xE3);
    EmitUint8((offset - kShortSize) & 0xFF);
  } else {
    EmitUint8(0xE3);
    EmitLabelLink(label);
  }
}
   1618 
   1619 
// Indirect jump through a register (JMP r/m32: FF /4).
void X86Assembler::jmp(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(4, reg);
}
   1625 
// Indirect jump through memory (JMP m32: FF /4).
void X86Assembler::jmp(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(4, address);
}
   1631 
// Unconditional jump to `label`. Bound (backward) targets use the short rel8
// form (EB) when in range, else rel32 (E9); unbound targets always use the
// rel32 form so Bind() can patch the 32-bit link slot.
void X86Assembler::jmp(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;
    static const int kLongSize = 5;
    // Displacements are relative to the end of the instruction.
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);
    if (IsInt<8>(offset - kShortSize)) {
      EmitUint8(0xEB);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0xE9);
      EmitInt32(offset - kLongSize);
    }
  } else {
    EmitUint8(0xE9);
    EmitLabelLink(label);
  }
}
   1651 
   1652 
// Unconditional short jump to a NearLabel (JMP rel8: EB cb). The target must
// be within int8 range.
void X86Assembler::jmp(NearLabel* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);
    CHECK(IsInt<8>(offset - kShortSize));
    EmitUint8(0xEB);
    EmitUint8((offset - kShortSize) & 0xFF);
  } else {
    EmitUint8(0xEB);
    EmitLabelLink(label);
  }
}
   1667 
   1668 
// REPNE SCASW (66 F2 AF): scan words at ES:[EDI] for AX while ECX != 0.
void X86Assembler::repne_scasw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0xF2);
  EmitUint8(0xAF);
}
   1675 
   1676 
// REPE CMPSW (66 F3 A7): compare word strings at DS:[ESI] and ES:[EDI].
void X86Assembler::repe_cmpsw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0xF3);
  EmitUint8(0xA7);
}
   1683 
   1684 
// REPE CMPSD (F3 A7): compare dword strings at DS:[ESI] and ES:[EDI].
void X86Assembler::repe_cmpsl() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0xA7);
}
   1690 
   1691 
// REP MOVSW (66 F3 A5): copy ECX words from DS:[ESI] to ES:[EDI].
void X86Assembler::rep_movsw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0xF3);
  EmitUint8(0xA5);
}
   1698 
   1699 
// Emit the LOCK prefix (F0) and return `this` so the locked instruction can
// be chained, e.g. assembler->lock()->cmpxchgl(...).
X86Assembler* X86Assembler::lock() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF0);
  return this;
}
   1705 
   1706 
// Compare-and-exchange: if EAX == [address], store reg there, else load EAX
// (CMPXCHG r/m32, r32: 0F B1 /r). Pair with lock() for atomicity.
void X86Assembler::cmpxchgl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB1);
  EmitOperand(reg, address);
}
   1713 
   1714 
// 64-bit compare-and-exchange of EDX:EAX with [address], replacement in
// ECX:EBX (CMPXCHG8B m64: 0F C7 /1). Pair with lock() for atomicity.
void X86Assembler::cmpxchg8b(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC7);
  EmitOperand(1, address);
}
   1721 
   1722 
// Full memory fence (MFENCE: 0F AE F0).
void X86Assembler::mfence() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAE);
  EmitUint8(0xF0);
}
   1729 
// Emit the FS segment-override prefix (64) and return `this` for chaining.
X86Assembler* X86Assembler::fs() {
  // TODO: fs is a prefix and not an instruction
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x64);
  return this;
}
   1736 
// Emit the GS segment-override prefix (65) and return `this` for chaining.
X86Assembler* X86Assembler::gs() {
  // TODO: gs is a prefix and not an instruction
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x65);
  return this;
}
   1743 
   1744 void X86Assembler::AddImmediate(Register reg, const Immediate& imm) {
   1745   int value = imm.value();
   1746   if (value > 0) {
   1747     if (value == 1) {
   1748       incl(reg);
   1749     } else if (value != 0) {
   1750       addl(reg, imm);
   1751     }
   1752   } else if (value < 0) {
   1753     value = -value;
   1754     if (value == 1) {
   1755       decl(reg);
   1756     } else if (value != 0) {
   1757       subl(reg, Immediate(value));
   1758     }
   1759   }
   1760 }
   1761 
   1762 
// Load a 64-bit constant into an XMM register by pushing it on the stack
// (high word first), loading with movsd, then popping the two slots.
void X86Assembler::LoadLongConstant(XmmRegister dst, int64_t value) {
  // TODO: Need to have a code constants table.
  pushl(Immediate(High32Bits(value)));
  pushl(Immediate(Low32Bits(value)));
  movsd(dst, Address(ESP, 0));
  addl(ESP, Immediate(2 * sizeof(int32_t)));
}
   1770 
   1771 
// Load a double constant into an XMM register via its bit pattern.
void X86Assembler::LoadDoubleConstant(XmmRegister dst, double value) {
  // TODO: Need to have a code constants table.
  int64_t constant = bit_cast<int64_t, double>(value);
  LoadLongConstant(dst, constant);
}
   1777 
   1778 
// Pad with NOPs until (offset + current position) is a multiple of
// `alignment` (which must be a power of two).
void X86Assembler::Align(int alignment, int offset) {
  CHECK(IsPowerOfTwo(alignment));
  // Emit nop instruction until the real position is aligned.
  while (((offset + buffer_.GetPosition()) & (alignment-1)) != 0) {
    nop();
  }
}
   1786 
   1787 
// Bind `label` to the current buffer position and back-patch every linked
// 32-bit displacement slot. The chain is threaded through the slots
// themselves: each holds the position of the next link until patched.
void X86Assembler::Bind(Label* label) {
  int bound = buffer_.Size();
  CHECK(!label->IsBound());  // Labels can only be bound once.
  while (label->IsLinked()) {
    int position = label->LinkPosition();
    int next = buffer_.Load<int32_t>(position);
    // Displacement is relative to the end of the 4-byte slot.
    buffer_.Store<int32_t>(position, bound - (position + 4));
    label->position_ = next;
  }
  label->BindTo(bound);
}
   1799 
   1800 
// Bind a NearLabel and back-patch its chain of 8-bit displacement slots.
// Because only one byte is available per link, the chain stores deltas to the
// previous link (see EmitLabelLink(NearLabel*)) rather than absolute positions.
void X86Assembler::Bind(NearLabel* label) {
  int bound = buffer_.Size();
  CHECK(!label->IsBound());  // Labels can only be bound once.
  while (label->IsLinked()) {
    int position = label->LinkPosition();
    uint8_t delta = buffer_.Load<uint8_t>(position);
    // Displacement is relative to the end of the 1-byte slot.
    int offset = bound - (position + 1);
    CHECK(IsInt<8>(offset));
    buffer_.Store<int8_t>(position, offset);
    // A zero delta marks the end of the chain.
    label->position_ = delta != 0u ? label->position_ - delta : 0;
  }
  label->BindTo(bound);
}
   1814 
   1815 
// Emit a pre-encoded operand (ModRM byte, optional SIB, optional
// displacement), OR-ing `reg_or_opcode` into the reg field (bits 5:3) of the
// ModRM byte, followed by any pending fixup for the operand.
void X86Assembler::EmitOperand(int reg_or_opcode, const Operand& operand) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);
  const int length = operand.length_;
  CHECK_GT(length, 0);
  // Emit the ModRM byte updated with the given reg value.
  CHECK_EQ(operand.encoding_[0] & 0x38, 0);
  EmitUint8(operand.encoding_[0] + (reg_or_opcode << 3));
  // Emit the rest of the encoded operand.
  for (int i = 1; i < length; i++) {
    EmitUint8(operand.encoding_[i]);
  }
  AssemblerFixup* fixup = operand.GetFixup();
  if (fixup != nullptr) {
    EmitFixup(fixup);
  }
}
   1833 
   1834 
// Emit an immediate as a little-endian 32-bit value.
void X86Assembler::EmitImmediate(const Immediate& imm) {
  EmitInt32(imm.value());
}
   1838 
   1839 
// Emit a group-1 ALU instruction (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP, selected by
// `reg_or_opcode`) with an immediate operand, choosing the shortest encoding:
//   83 /op ib  — sign-extended imm8;
//   op*8 + 05  — EAX short form with imm32;
//   81 /op id  — general r/m32 with imm32.
void X86Assembler::EmitComplex(int reg_or_opcode,
                               const Operand& operand,
                               const Immediate& immediate) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);
  if (immediate.is_int8()) {
    // Use sign-extended 8-bit immediate.
    EmitUint8(0x83);
    EmitOperand(reg_or_opcode, operand);
    EmitUint8(immediate.value() & 0xFF);
  } else if (operand.IsRegister(EAX)) {
    // Use short form if the destination is eax.
    EmitUint8(0x05 + (reg_or_opcode << 3));
    EmitImmediate(immediate);
  } else {
    EmitUint8(0x81);
    EmitOperand(reg_or_opcode, operand);
    EmitImmediate(immediate);
  }
}
   1860 
   1861 
// Emit a 32-bit displacement to `label`: the real offset if bound (relative
// to the end of an instruction of `instruction_size` bytes), or a link slot
// to be patched at Bind() time.
void X86Assembler::EmitLabel(Label* label, int instruction_size) {
  if (label->IsBound()) {
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);
    EmitInt32(offset - instruction_size);
  } else {
    EmitLabelLink(label);
  }
}
   1871 
   1872 
// Append this site to an unbound label's link chain: store the previous link
// position in the 32-bit slot and record this slot as the new chain head.
void X86Assembler::EmitLabelLink(Label* label) {
  CHECK(!label->IsBound());
  int position = buffer_.Size();
  EmitInt32(label->position_);
  label->LinkTo(position);
}
   1879 
   1880 
// Append this site to an unbound NearLabel's link chain. Only one byte is
// available, so the slot stores the delta back to the previous link (0 marks
// the end of the chain); Bind(NearLabel*) walks the deltas when patching.
void X86Assembler::EmitLabelLink(NearLabel* label) {
  CHECK(!label->IsBound());
  int position = buffer_.Size();
  if (label->IsLinked()) {
    // Save the delta in the byte that we have to play with.
    uint32_t delta = position - label->LinkPosition();
    CHECK(IsUint<8>(delta));
    EmitUint8(delta & 0xFF);
  } else {
    EmitUint8(0);
  }
  label->LinkTo(position);
}
   1894 
   1895 
// Emit a group-2 shift/rotate (sub-opcode in `reg_or_opcode`) by an immediate
// count: D1 /op for a count of 1, else C1 /op ib.
void X86Assembler::EmitGenericShift(int reg_or_opcode,
                                    const Operand& operand,
                                    const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int8());
  if (imm.value() == 1) {
    EmitUint8(0xD1);
    EmitOperand(reg_or_opcode, operand);
  } else {
    EmitUint8(0xC1);
    EmitOperand(reg_or_opcode, operand);
    EmitUint8(imm.value() & 0xFF);
  }
}
   1910 
   1911 
// Emit a group-2 shift/rotate by CL (D3 /op). The shift count register is
// architecturally fixed to CL, hence the ECX check.
void X86Assembler::EmitGenericShift(int reg_or_opcode,
                                    const Operand& operand,
                                    Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK_EQ(shifter, ECX);
  EmitUint8(0xD3);
  EmitOperand(reg_or_opcode, operand);
}
   1920 
// Map an x86 core register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86Core(static_cast<int>(reg));
}
   1924 
// Size of a stack slot / saved register on x86-32.
constexpr size_t kFramePointerSize = 4;
   1926 
// Build a managed-code frame: push callee-save registers (in reverse order),
// reserve the remaining frame space, push the ArtMethod* pointer, then spill
// incoming arguments to their frame slots. CFI is updated in lockstep so
// unwinding works at every point. Must be the first thing emitted.
void X86Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                              const std::vector<ManagedRegister>& spill_regs,
                              const ManagedRegisterEntrySpills& entry_spills) {
  DCHECK_EQ(buffer_.Size(), 0U);  // Nothing emitted yet.
  cfi_.SetCurrentCFAOffset(4);  // Return address on stack.
  CHECK_ALIGNED(frame_size, kStackAlignment);
  int gpr_count = 0;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    Register spill = spill_regs.at(i).AsX86().AsCpuRegister();
    pushl(spill);
    gpr_count++;
    cfi_.AdjustCFAOffset(kFramePointerSize);
    cfi_.RelOffset(DWARFReg(spill), 0);
  }

  // return address then method on stack.
  int32_t adjust = frame_size - gpr_count * kFramePointerSize -
      kFramePointerSize /*method*/ -
      kFramePointerSize /*return address*/;
  // addl of a negative value decrements ESP (reserves the space).
  addl(ESP, Immediate(-adjust));
  cfi_.AdjustCFAOffset(adjust);
  pushl(method_reg.AsX86().AsCpuRegister());
  cfi_.AdjustCFAOffset(kFramePointerSize);
  DCHECK_EQ(static_cast<size_t>(cfi_.GetCurrentCFAOffset()), frame_size);

  // Spill entry arguments to their assigned frame slots (offsets are relative
  // to the caller's frame, hence the frame_size bias).
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ManagedRegisterSpill spill = entry_spills.at(i);
    if (spill.AsX86().IsCpuRegister()) {
      int offset = frame_size + spill.getSpillOffset();
      movl(Address(ESP, offset), spill.AsX86().AsCpuRegister());
    } else {
      DCHECK(spill.AsX86().IsXmmRegister());
      if (spill.getSize() == 8) {
        movsd(Address(ESP, frame_size + spill.getSpillOffset()), spill.AsX86().AsXmmRegister());
      } else {
        CHECK_EQ(spill.getSize(), 4);
        movss(Address(ESP, frame_size + spill.getSpillOffset()), spill.AsX86().AsXmmRegister());
      }
    }
  }
}
   1968 
// Tear down the frame built by BuildFrame: release the local area (including
// the ArtMethod* slot), pop the callee-saves, and return. CFI state is saved
// and restored around the epilogue so code emitted afterwards still unwinds.
void X86Assembler::RemoveFrame(size_t frame_size, const std::vector<ManagedRegister>& spill_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi_.RememberState();
  // -kFramePointerSize for ArtMethod*.
  int adjust = frame_size - spill_regs.size() * kFramePointerSize - kFramePointerSize;
  addl(ESP, Immediate(adjust));
  cfi_.AdjustCFAOffset(-adjust);
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    Register spill = spill_regs.at(i).AsX86().AsCpuRegister();
    popl(spill);
    cfi_.AdjustCFAOffset(-static_cast<int>(kFramePointerSize));
    cfi_.Restore(DWARFReg(spill));
  }
  ret();
  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}
   1987 
// Grows the current frame by |adjust| bytes and records the CFA change.
void X86Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  // The stack grows downwards, so growing the frame subtracts from ESP.
  addl(ESP, Immediate(-adjust));
  cfi_.AdjustCFAOffset(adjust);
}
   1993 
// Shrinks the current frame by |adjust| bytes and records the CFA change.
void X86Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addl(ESP, Immediate(adjust));
  cfi_.AdjustCFAOffset(-adjust);
}
   1999 
   2000 void X86Assembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
   2001   X86ManagedRegister src = msrc.AsX86();
   2002   if (src.IsNoRegister()) {
   2003     CHECK_EQ(0u, size);
   2004   } else if (src.IsCpuRegister()) {
   2005     CHECK_EQ(4u, size);
   2006     movl(Address(ESP, offs), src.AsCpuRegister());
   2007   } else if (src.IsRegisterPair()) {
   2008     CHECK_EQ(8u, size);
   2009     movl(Address(ESP, offs), src.AsRegisterPairLow());
   2010     movl(Address(ESP, FrameOffset(offs.Int32Value()+4)),
   2011          src.AsRegisterPairHigh());
   2012   } else if (src.IsX87Register()) {
   2013     if (size == 4) {
   2014       fstps(Address(ESP, offs));
   2015     } else {
   2016       fstpl(Address(ESP, offs));
   2017     }
   2018   } else {
   2019     CHECK(src.IsXmmRegister());
   2020     if (size == 4) {
   2021       movss(Address(ESP, offs), src.AsXmmRegister());
   2022     } else {
   2023       movsd(Address(ESP, offs), src.AsXmmRegister());
   2024     }
   2025   }
   2026 }
   2027 
   2028 void X86Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
   2029   X86ManagedRegister src = msrc.AsX86();
   2030   CHECK(src.IsCpuRegister());
   2031   movl(Address(ESP, dest), src.AsCpuRegister());
   2032 }
   2033 
   2034 void X86Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
   2035   X86ManagedRegister src = msrc.AsX86();
   2036   CHECK(src.IsCpuRegister());
   2037   movl(Address(ESP, dest), src.AsCpuRegister());
   2038 }
   2039 
// Stores the 32-bit immediate |imm| into the stack slot at |dest|.  The
// scratch register is unused: x86 can store an immediate directly to memory.
void X86Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                         ManagedRegister) {
  movl(Address(ESP, dest), Immediate(imm));
}
   2044 
// Stores the 32-bit immediate |imm| into a thread-local slot (fs-segment
// relative on x86).  The scratch register is unused.
void X86Assembler::StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm,
                                          ManagedRegister) {
  fs()->movl(Address::Absolute(dest), Immediate(imm));
}
   2049 
   2050 void X86Assembler::StoreStackOffsetToThread32(ThreadOffset<4> thr_offs,
   2051                                             FrameOffset fr_offs,
   2052                                             ManagedRegister mscratch) {
   2053   X86ManagedRegister scratch = mscratch.AsX86();
   2054   CHECK(scratch.IsCpuRegister());
   2055   leal(scratch.AsCpuRegister(), Address(ESP, fr_offs));
   2056   fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
   2057 }
   2058 
// Saves the current stack pointer into the thread-local slot at |thr_offs|
// (fs-segment relative on x86).
void X86Assembler::StoreStackPointerToThread32(ThreadOffset<4> thr_offs) {
  fs()->movl(Address::Absolute(thr_offs), ESP);
}
   2062 
// Store spanning a register/stack boundary; deliberately unimplemented on
// x86 (see the note below).
void X86Assembler::StoreSpanning(FrameOffset /*dst*/, ManagedRegister /*src*/,
                                 FrameOffset /*in_off*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL);  // this case only currently exists for ARM
}
   2067 
   2068 void X86Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
   2069   X86ManagedRegister dest = mdest.AsX86();
   2070   if (dest.IsNoRegister()) {
   2071     CHECK_EQ(0u, size);
   2072   } else if (dest.IsCpuRegister()) {
   2073     CHECK_EQ(4u, size);
   2074     movl(dest.AsCpuRegister(), Address(ESP, src));
   2075   } else if (dest.IsRegisterPair()) {
   2076     CHECK_EQ(8u, size);
   2077     movl(dest.AsRegisterPairLow(), Address(ESP, src));
   2078     movl(dest.AsRegisterPairHigh(), Address(ESP, FrameOffset(src.Int32Value()+4)));
   2079   } else if (dest.IsX87Register()) {
   2080     if (size == 4) {
   2081       flds(Address(ESP, src));
   2082     } else {
   2083       fldl(Address(ESP, src));
   2084     }
   2085   } else {
   2086     CHECK(dest.IsXmmRegister());
   2087     if (size == 4) {
   2088       movss(dest.AsXmmRegister(), Address(ESP, src));
   2089     } else {
   2090       movsd(dest.AsXmmRegister(), Address(ESP, src));
   2091     }
   2092   }
   2093 }
   2094 
   2095 void X86Assembler::LoadFromThread32(ManagedRegister mdest, ThreadOffset<4> src, size_t size) {
   2096   X86ManagedRegister dest = mdest.AsX86();
   2097   if (dest.IsNoRegister()) {
   2098     CHECK_EQ(0u, size);
   2099   } else if (dest.IsCpuRegister()) {
   2100     CHECK_EQ(4u, size);
   2101     fs()->movl(dest.AsCpuRegister(), Address::Absolute(src));
   2102   } else if (dest.IsRegisterPair()) {
   2103     CHECK_EQ(8u, size);
   2104     fs()->movl(dest.AsRegisterPairLow(), Address::Absolute(src));
   2105     fs()->movl(dest.AsRegisterPairHigh(), Address::Absolute(ThreadOffset<4>(src.Int32Value()+4)));
   2106   } else if (dest.IsX87Register()) {
   2107     if (size == 4) {
   2108       fs()->flds(Address::Absolute(src));
   2109     } else {
   2110       fs()->fldl(Address::Absolute(src));
   2111     }
   2112   } else {
   2113     CHECK(dest.IsXmmRegister());
   2114     if (size == 4) {
   2115       fs()->movss(dest.AsXmmRegister(), Address::Absolute(src));
   2116     } else {
   2117       fs()->movsd(dest.AsXmmRegister(), Address::Absolute(src));
   2118     }
   2119   }
   2120 }
   2121 
   2122 void X86Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
   2123   X86ManagedRegister dest = mdest.AsX86();
   2124   CHECK(dest.IsCpuRegister());
   2125   movl(dest.AsCpuRegister(), Address(ESP, src));
   2126 }
   2127 
   2128 void X86Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
   2129                            bool unpoison_reference) {
   2130   X86ManagedRegister dest = mdest.AsX86();
   2131   CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
   2132   movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
   2133   if (unpoison_reference) {
   2134     MaybeUnpoisonHeapReference(dest.AsCpuRegister());
   2135   }
   2136 }
   2137 
   2138 void X86Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
   2139                               Offset offs) {
   2140   X86ManagedRegister dest = mdest.AsX86();
   2141   CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
   2142   movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
   2143 }
   2144 
   2145 void X86Assembler::LoadRawPtrFromThread32(ManagedRegister mdest,
   2146                                         ThreadOffset<4> offs) {
   2147   X86ManagedRegister dest = mdest.AsX86();
   2148   CHECK(dest.IsCpuRegister());
   2149   fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs));
   2150 }
   2151 
   2152 void X86Assembler::SignExtend(ManagedRegister mreg, size_t size) {
   2153   X86ManagedRegister reg = mreg.AsX86();
   2154   CHECK(size == 1 || size == 2) << size;
   2155   CHECK(reg.IsCpuRegister()) << reg;
   2156   if (size == 1) {
   2157     movsxb(reg.AsCpuRegister(), reg.AsByteRegister());
   2158   } else {
   2159     movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
   2160   }
   2161 }
   2162 
   2163 void X86Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
   2164   X86ManagedRegister reg = mreg.AsX86();
   2165   CHECK(size == 1 || size == 2) << size;
   2166   CHECK(reg.IsCpuRegister()) << reg;
   2167   if (size == 1) {
   2168     movzxb(reg.AsCpuRegister(), reg.AsByteRegister());
   2169   } else {
   2170     movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
   2171   }
   2172 }
   2173 
   2174 void X86Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
   2175   X86ManagedRegister dest = mdest.AsX86();
   2176   X86ManagedRegister src = msrc.AsX86();
   2177   if (!dest.Equals(src)) {
   2178     if (dest.IsCpuRegister() && src.IsCpuRegister()) {
   2179       movl(dest.AsCpuRegister(), src.AsCpuRegister());
   2180     } else if (src.IsX87Register() && dest.IsXmmRegister()) {
   2181       // Pass via stack and pop X87 register
   2182       subl(ESP, Immediate(16));
   2183       if (size == 4) {
   2184         CHECK_EQ(src.AsX87Register(), ST0);
   2185         fstps(Address(ESP, 0));
   2186         movss(dest.AsXmmRegister(), Address(ESP, 0));
   2187       } else {
   2188         CHECK_EQ(src.AsX87Register(), ST0);
   2189         fstpl(Address(ESP, 0));
   2190         movsd(dest.AsXmmRegister(), Address(ESP, 0));
   2191       }
   2192       addl(ESP, Immediate(16));
   2193     } else {
   2194       // TODO: x87, SSE
   2195       UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
   2196     }
   2197   }
   2198 }
   2199 
   2200 void X86Assembler::CopyRef(FrameOffset dest, FrameOffset src,
   2201                            ManagedRegister mscratch) {
   2202   X86ManagedRegister scratch = mscratch.AsX86();
   2203   CHECK(scratch.IsCpuRegister());
   2204   movl(scratch.AsCpuRegister(), Address(ESP, src));
   2205   movl(Address(ESP, dest), scratch.AsCpuRegister());
   2206 }
   2207 
   2208 void X86Assembler::CopyRawPtrFromThread32(FrameOffset fr_offs,
   2209                                         ThreadOffset<4> thr_offs,
   2210                                         ManagedRegister mscratch) {
   2211   X86ManagedRegister scratch = mscratch.AsX86();
   2212   CHECK(scratch.IsCpuRegister());
   2213   fs()->movl(scratch.AsCpuRegister(), Address::Absolute(thr_offs));
   2214   Store(fr_offs, scratch, 4);
   2215 }
   2216 
   2217 void X86Assembler::CopyRawPtrToThread32(ThreadOffset<4> thr_offs,
   2218                                       FrameOffset fr_offs,
   2219                                       ManagedRegister mscratch) {
   2220   X86ManagedRegister scratch = mscratch.AsX86();
   2221   CHECK(scratch.IsCpuRegister());
   2222   Load(scratch, fr_offs, 4);
   2223   fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
   2224 }
   2225 
   2226 void X86Assembler::Copy(FrameOffset dest, FrameOffset src,
   2227                         ManagedRegister mscratch,
   2228                         size_t size) {
   2229   X86ManagedRegister scratch = mscratch.AsX86();
   2230   if (scratch.IsCpuRegister() && size == 8) {
   2231     Load(scratch, src, 4);
   2232     Store(dest, scratch, 4);
   2233     Load(scratch, FrameOffset(src.Int32Value() + 4), 4);
   2234     Store(FrameOffset(dest.Int32Value() + 4), scratch, 4);
   2235   } else {
   2236     Load(scratch, src, size);
   2237     Store(dest, scratch, size);
   2238   }
   2239 }
   2240 
// Frame <- [register-base + offset] copy: not needed on x86, hence
// unimplemented.
void X86Assembler::Copy(FrameOffset /*dst*/, ManagedRegister /*src_base*/, Offset /*src_offset*/,
                        ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}
   2245 
   2246 void X86Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
   2247                         ManagedRegister scratch, size_t size) {
   2248   CHECK(scratch.IsNoRegister());
   2249   CHECK_EQ(size, 4u);
   2250   pushl(Address(ESP, src));
   2251   popl(Address(dest_base.AsX86().AsCpuRegister(), dest_offset));
   2252 }
   2253 
   2254 void X86Assembler::Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset,
   2255                         ManagedRegister mscratch, size_t size) {
   2256   Register scratch = mscratch.AsX86().AsCpuRegister();
   2257   CHECK_EQ(size, 4u);
   2258   movl(scratch, Address(ESP, src_base));
   2259   movl(scratch, Address(scratch, src_offset));
   2260   movl(Address(ESP, dest), scratch);
   2261 }
   2262 
   2263 void X86Assembler::Copy(ManagedRegister dest, Offset dest_offset,
   2264                         ManagedRegister src, Offset src_offset,
   2265                         ManagedRegister scratch, size_t size) {
   2266   CHECK_EQ(size, 4u);
   2267   CHECK(scratch.IsNoRegister());
   2268   pushl(Address(src.AsX86().AsCpuRegister(), src_offset));
   2269   popl(Address(dest.AsX86().AsCpuRegister(), dest_offset));
   2270 }
   2271 
   2272 void X86Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
   2273                         ManagedRegister mscratch, size_t size) {
   2274   Register scratch = mscratch.AsX86().AsCpuRegister();
   2275   CHECK_EQ(size, 4u);
   2276   CHECK_EQ(dest.Int32Value(), src.Int32Value());
   2277   movl(scratch, Address(ESP, src));
   2278   pushl(Address(scratch, src_offset));
   2279   popl(Address(scratch, dest_offset));
   2280 }
   2281 
// Emits a full memory fence; the scratch register parameter is unused on x86.
void X86Assembler::MemoryBarrier(ManagedRegister) {
  mfence();
}
   2285 
   2286 void X86Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
   2287                                    FrameOffset handle_scope_offset,
   2288                                    ManagedRegister min_reg, bool null_allowed) {
   2289   X86ManagedRegister out_reg = mout_reg.AsX86();
   2290   X86ManagedRegister in_reg = min_reg.AsX86();
   2291   CHECK(in_reg.IsCpuRegister());
   2292   CHECK(out_reg.IsCpuRegister());
   2293   VerifyObject(in_reg, null_allowed);
   2294   if (null_allowed) {
   2295     Label null_arg;
   2296     if (!out_reg.Equals(in_reg)) {
   2297       xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
   2298     }
   2299     testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
   2300     j(kZero, &null_arg);
   2301     leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset));
   2302     Bind(&null_arg);
   2303   } else {
   2304     leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset));
   2305   }
   2306 }
   2307 
   2308 void X86Assembler::CreateHandleScopeEntry(FrameOffset out_off,
   2309                                    FrameOffset handle_scope_offset,
   2310                                    ManagedRegister mscratch,
   2311                                    bool null_allowed) {
   2312   X86ManagedRegister scratch = mscratch.AsX86();
   2313   CHECK(scratch.IsCpuRegister());
   2314   if (null_allowed) {
   2315     Label null_arg;
   2316     movl(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset));
   2317     testl(scratch.AsCpuRegister(), scratch.AsCpuRegister());
   2318     j(kZero, &null_arg);
   2319     leal(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset));
   2320     Bind(&null_arg);
   2321   } else {
   2322     leal(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset));
   2323   }
   2324   Store(out_off, scratch, 4);
   2325 }
   2326 
   2327 // Given a handle scope entry, load the associated reference.
   2328 void X86Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
   2329                                          ManagedRegister min_reg) {
   2330   X86ManagedRegister out_reg = mout_reg.AsX86();
   2331   X86ManagedRegister in_reg = min_reg.AsX86();
   2332   CHECK(out_reg.IsCpuRegister());
   2333   CHECK(in_reg.IsCpuRegister());
   2334   Label null_arg;
   2335   if (!out_reg.Equals(in_reg)) {
   2336     xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
   2337   }
   2338   testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
   2339   j(kZero, &null_arg);
   2340   movl(out_reg.AsCpuRegister(), Address(in_reg.AsCpuRegister(), 0));
   2341   Bind(&null_arg);
   2342 }
   2343 
// Reference-validation hook for a register-held object; currently a no-op.
void X86Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
   2347 
// Reference-validation hook for a stack-held object; currently a no-op.
void X86Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
   2351 
   2352 void X86Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister) {
   2353   X86ManagedRegister base = mbase.AsX86();
   2354   CHECK(base.IsCpuRegister());
   2355   call(Address(base.AsCpuRegister(), offset.Int32Value()));
   2356   // TODO: place reference map on call
   2357 }
   2358 
   2359 void X86Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
   2360   Register scratch = mscratch.AsX86().AsCpuRegister();
   2361   movl(scratch, Address(ESP, base));
   2362   call(Address(scratch, offset));
   2363 }
   2364 
// Calls through a function pointer stored at a thread-local offset
// (fs-segment relative); no scratch register is needed.
void X86Assembler::CallFromThread32(ThreadOffset<4> offset, ManagedRegister /*mscratch*/) {
  fs()->call(Address::Absolute(offset));
}
   2368 
// Loads the current Thread* from the fs-relative self slot into |tr|.
void X86Assembler::GetCurrentThread(ManagedRegister tr) {
  fs()->movl(tr.AsX86().AsCpuRegister(),
             Address::Absolute(Thread::SelfOffset<4>()));
}
   2373 
   2374 void X86Assembler::GetCurrentThread(FrameOffset offset,
   2375                                     ManagedRegister mscratch) {
   2376   X86ManagedRegister scratch = mscratch.AsX86();
   2377   fs()->movl(scratch.AsCpuRegister(), Address::Absolute(Thread::SelfOffset<4>()));
   2378   movl(Address(ESP, offset), scratch.AsCpuRegister());
   2379 }
   2380 
// Emits a pending-exception check: compares the thread-local exception slot
// against null and branches to a slow path (emitted later) when one is set.
void X86Assembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
  // The slow path is arena-allocated and registered with the buffer so it is
  // emitted after the main code; |stack_adjust| lets it unwind any extra
  // frame adjustment before delivering the exception.
  X86ExceptionSlowPath* slow = new (GetArena()) X86ExceptionSlowPath(stack_adjust);
  buffer_.EnqueueSlowPath(slow);
  fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<4>()), Immediate(0));
  j(kNotEqual, slow->Entry());
}
   2387 
// Slow-path code for ExceptionPoll: undoes any extra frame adjustment, loads
// the pending exception into EAX, and calls the deliver-exception
// entrypoint, which is not expected to return.
void X86ExceptionSlowPath::Emit(Assembler *sasm) {
  X86Assembler* sp_asm = down_cast<X86Assembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  // Note: the return value is dead
  if (stack_adjust_ != 0) {  // Fix up the frame.
    __ DecreaseFrameSize(stack_adjust_);
  }
  // Pass exception as argument in EAX
  __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<4>()));
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(4, pDeliverException)));
  // this call should never return
  __ int3();
#undef __
}
   2403 
   2404 void X86Assembler::AddConstantArea() {
   2405   ArrayRef<const int32_t> area = constant_area_.GetBuffer();
   2406   // Generate the data for the literal area.
   2407   for (size_t i = 0, e = area.size(); i < e; i++) {
   2408     AssemblerBuffer::EnsureCapacity ensured(&buffer_);
   2409     EmitInt32(area[i]);
   2410   }
   2411 }
   2412 
   2413 size_t ConstantArea::AppendInt32(int32_t v) {
   2414   size_t result = buffer_.size() * elem_size_;
   2415   buffer_.push_back(v);
   2416   return result;
   2417 }
   2418 
   2419 size_t ConstantArea::AddInt32(int32_t v) {
   2420   for (size_t i = 0, e = buffer_.size(); i < e; i++) {
   2421     if (v == buffer_[i]) {
   2422       return i * elem_size_;
   2423     }
   2424   }
   2425 
   2426   // Didn't match anything.
   2427   return AppendInt32(v);
   2428 }
   2429 
   2430 size_t ConstantArea::AddInt64(int64_t v) {
   2431   int32_t v_low = Low32Bits(v);
   2432   int32_t v_high = High32Bits(v);
   2433   if (buffer_.size() > 1) {
   2434     // Ensure we don't pass the end of the buffer.
   2435     for (size_t i = 0, e = buffer_.size() - 1; i < e; i++) {
   2436       if (v_low == buffer_[i] && v_high == buffer_[i + 1]) {
   2437         return i * elem_size_;
   2438       }
   2439     }
   2440   }
   2441 
   2442   // Didn't match anything.
   2443   size_t result = buffer_.size() * elem_size_;
   2444   buffer_.push_back(v_low);
   2445   buffer_.push_back(v_high);
   2446   return result;
   2447 }
   2448 
// Adds a double literal by reusing the 64-bit integer path on its bit
// pattern; returns the byte offset of the literal in the pool.
size_t ConstantArea::AddDouble(double v) {
  // Treat the value as a 64-bit integer value.
  return AddInt64(bit_cast<int64_t, double>(v));
}
   2453 
// Adds a float literal by reusing the 32-bit integer path on its bit
// pattern; returns the byte offset of the literal in the pool.
size_t ConstantArea::AddFloat(float v) {
  // Treat the value as a 32-bit integer value.
  return AddInt32(bit_cast<int32_t, float>(v));
}
   2458 
   2459 }  // namespace x86
   2460 }  // namespace art
   2461