// (code-browser navigation header removed — not part of the source file)
      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #include "assembler_arm32.h"
     18 
     19 #include "base/logging.h"
     20 #include "entrypoints/quick/quick_entrypoints.h"
     21 #include "offsets.h"
     22 #include "thread.h"
     23 #include "utils.h"
     24 
     25 namespace art {
     26 namespace arm {
     27 
// ----------------------------------------------------------------------------
// Data-processing instructions (ARM "type 01" encodings).  Each wrapper
// forwards to EmitType01 with the opcode and the S ("set condition flags")
// bit.  For the test/compare ops (tst/teq/cmp/cmn) the destination field is
// unused, and for the move ops (mov/mvn) the first-operand field is unused;
// R0 is passed as a placeholder in those slots.
// ----------------------------------------------------------------------------

void Arm32Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                        Condition cond) {
  EmitType01(cond, so.type(), AND, 0, rn, rd, so);
}


void Arm32Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                       Condition cond) {
  EmitType01(cond, so.type(), EOR, 0, rn, rd, so);
}


void Arm32Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                       Condition cond) {
  EmitType01(cond, so.type(), SUB, 0, rn, rd, so);
}

void Arm32Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                       Condition cond) {
  EmitType01(cond, so.type(), RSB, 0, rn, rd, so);
}

void Arm32Assembler::rsbs(Register rd, Register rn, const ShifterOperand& so,
                        Condition cond) {
  EmitType01(cond, so.type(), RSB, 1, rn, rd, so);
}


void Arm32Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                       Condition cond) {
  EmitType01(cond, so.type(), ADD, 0, rn, rd, so);
}


void Arm32Assembler::adds(Register rd, Register rn, const ShifterOperand& so,
                        Condition cond) {
  EmitType01(cond, so.type(), ADD, 1, rn, rd, so);
}


void Arm32Assembler::subs(Register rd, Register rn, const ShifterOperand& so,
                        Condition cond) {
  EmitType01(cond, so.type(), SUB, 1, rn, rd, so);
}


void Arm32Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                       Condition cond) {
  EmitType01(cond, so.type(), ADC, 0, rn, rd, so);
}


void Arm32Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                       Condition cond) {
  EmitType01(cond, so.type(), SBC, 0, rn, rd, so);
}


void Arm32Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                       Condition cond) {
  EmitType01(cond, so.type(), RSC, 0, rn, rd, so);
}


void Arm32Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitType01(cond, so.type(), TST, 1, rn, R0, so);
}


void Arm32Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitType01(cond, so.type(), TEQ, 1, rn, R0, so);
}


void Arm32Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), CMP, 1, rn, R0, so);
}


void Arm32Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), CMN, 1, rn, R0, so);
}


void Arm32Assembler::orr(Register rd, Register rn,
                    const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), ORR, 0, rn, rd, so);
}


void Arm32Assembler::orrs(Register rd, Register rn,
                        const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), ORR, 1, rn, rd, so);
}


void Arm32Assembler::mov(Register rd, const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), MOV, 0, R0, rd, so);
}


void Arm32Assembler::movs(Register rd, const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), MOV, 1, R0, rd, so);
}


void Arm32Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                       Condition cond) {
  EmitType01(cond, so.type(), BIC, 0, rn, rd, so);
}


void Arm32Assembler::mvn(Register rd, const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), MVN, 0, R0, rd, so);
}


void Arm32Assembler::mvns(Register rd, const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), MVN, 1, R0, rd, so);
}
    150 
    151 
// Multiply instructions.  The assembler-level operand order does not match
// the encoding field order of the multiply instruction class (see the
// per-wrapper comments and EmitMulOp below, whose field layout differs
// from the data-processing instructions).

void Arm32Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
  // Assembler registers rd, rn, rm are encoded as rn, rm, rs.
  EmitMulOp(cond, 0, R0, rd, rn, rm);
}


void Arm32Assembler::mla(Register rd, Register rn, Register rm, Register ra,
                         Condition cond) {
  // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
  EmitMulOp(cond, B21, ra, rd, rn, rm);
}


void Arm32Assembler::mls(Register rd, Register rn, Register rm, Register ra,
                         Condition cond) {
  // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
  EmitMulOp(cond, B22 | B21, ra, rd, rn, rm);
}


void Arm32Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
                           Register rm, Condition cond) {
  // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
  EmitMulOp(cond, B23, rd_lo, rd_hi, rn, rm);
}
    177 
    178 
    179 void Arm32Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
    180   CHECK_NE(rd, kNoRegister);
    181   CHECK_NE(rn, kNoRegister);
    182   CHECK_NE(rm, kNoRegister);
    183   CHECK_NE(cond, kNoCondition);
    184   int32_t encoding = B26 | B25 | B24 | B20 |
    185       B15 | B14 | B13 | B12 |
    186       (static_cast<int32_t>(cond) << kConditionShift) |
    187       (static_cast<int32_t>(rn) << 0) |
    188       (static_cast<int32_t>(rd) << 16) |
    189       (static_cast<int32_t>(rm) << 8) |
    190       B4;
    191   Emit(encoding);
    192 }
    193 
    194 
    195 void Arm32Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
    196   CHECK_NE(rd, kNoRegister);
    197   CHECK_NE(rn, kNoRegister);
    198   CHECK_NE(rm, kNoRegister);
    199   CHECK_NE(cond, kNoCondition);
    200   int32_t encoding = B26 | B25 | B24 | B21 | B20 |
    201       B15 | B14 | B13 | B12 |
    202       (static_cast<int32_t>(cond) << kConditionShift) |
    203       (static_cast<int32_t>(rn) << 0) |
    204       (static_cast<int32_t>(rd) << 16) |
    205       (static_cast<int32_t>(rm) << 8) |
    206       B4;
    207   Emit(encoding);
    208 }
    209 
    210 
// Single-register memory operations.  The word and byte forms
// (LDR/STR/LDRB/STRB) use addressing mode 2 via EmitMemOp; the halfword,
// signed, and doubleword forms use addressing mode 3 via
// EmitMemOpAddressMode3, where the fixed bits passed (L, H, B4..B7) select
// the variant.

void Arm32Assembler::ldr(Register rd, const Address& ad, Condition cond) {
  EmitMemOp(cond, true, false, rd, ad);
}


void Arm32Assembler::str(Register rd, const Address& ad, Condition cond) {
  EmitMemOp(cond, false, false, rd, ad);
}


void Arm32Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  EmitMemOp(cond, true, true, rd, ad);
}


void Arm32Assembler::strb(Register rd, const Address& ad, Condition cond) {
  EmitMemOp(cond, false, true, rd, ad);
}


void Arm32Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  EmitMemOpAddressMode3(cond, L | B7 | H | B4, rd, ad);
}


void Arm32Assembler::strh(Register rd, const Address& ad, Condition cond) {
  EmitMemOpAddressMode3(cond, B7 | H | B4, rd, ad);
}


void Arm32Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  EmitMemOpAddressMode3(cond, L | B7 | B6 | B4, rd, ad);
}


void Arm32Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  EmitMemOpAddressMode3(cond, L | B7 | B6 | H | B4, rd, ad);
}


void Arm32Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  CHECK_EQ(rd % 2, 0);  // LDRD requires an even-numbered first register.
  EmitMemOpAddressMode3(cond, B7 | B6 | B4, rd, ad);
}


void Arm32Assembler::strd(Register rd, const Address& ad, Condition cond) {
  CHECK_EQ(rd % 2, 0);  // STRD requires an even-numbered first register.
  EmitMemOpAddressMode3(cond, B7 | B6 | B5 | B4, rd, ad);
}
    261 
    262 
// Load-multiple: loads the registers in `regs` (a bitmask) from memory at
// `base`, per the block addressing mode `am` (increment/decrement,
// before/after, writeback).
void Arm32Assembler::ldm(BlockAddressMode am,
                       Register base,
                       RegList regs,
                       Condition cond) {
  EmitMultiMemOp(cond, am, true, base, regs);
}


// Store-multiple counterpart of ldm (same operands, store direction).
void Arm32Assembler::stm(BlockAddressMode am,
                       Register base,
                       RegList regs,
                       Condition cond) {
  EmitMultiMemOp(cond, am, false, base, regs);
}
    277 
    278 
void Arm32Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
  // VMOV.F32 sd, sm (register-to-register move).
  EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
}


void Arm32Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
  // VMOV.F64 dd, dm (register-to-register move).
  EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
}


// Tries to emit VMOV.F32 sd, #s_imm.  Only floats representable as a VFP
// "modified immediate" can be encoded: the low 19 mantissa bits must be
// zero and the exponent must match the restricted pattern checked below,
// leaving 8 significant bits (sign, 3 exponent bits, 4 mantissa bits) that
// are packed into imm8 and split across the encoding's imm4H/imm4L fields.
// Returns false (emitting nothing) when s_imm does not qualify.
bool Arm32Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}


// Double-precision analogue of vmovs(SRegister, float, Condition): emits
// VMOV.F64 dd, #d_imm when the value fits the VFP modified-immediate form
// (low 48 mantissa bits zero, restricted exponent pattern), otherwise
// emits nothing and returns false.
bool Arm32Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
    317 
    318 
// ----------------------------------------------------------------------------
// VFP data-processing instructions.  Each wrapper passes the fixed opcode
// bits of its instruction; EmitVFPsss/EmitVFPddd/EmitVFPsd/EmitVFPds add
// the condition and register fields.  Two-operand forms (abs/neg/sqrt/
// compare/convert) fill the unused source slot with S0/D0.
// ----------------------------------------------------------------------------

void Arm32Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B21 | B20, sd, sn, sm);  // VADD.F32
}


void Arm32Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B21 | B20, dd, dn, dm);  // VADD.F64
}


void Arm32Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);  // VSUB.F32
}


void Arm32Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);  // VSUB.F64
}


void Arm32Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B21, sd, sn, sm);  // VMUL.F32
}


void Arm32Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B21, dd, dn, dm);  // VMUL.F64
}


void Arm32Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, 0, sd, sn, sm);  // VMLA.F32 (multiply-accumulate)
}


void Arm32Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);  // VMLA.F64
}


void Arm32Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);  // VMLS.F32 (multiply-subtract)
}


void Arm32Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);  // VMLS.F64
}


void Arm32Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);  // VDIV.F32
}


void Arm32Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);  // VDIV.F64
}


void Arm32Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);  // VABS.F32
}


void Arm32Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);  // VABS.F64
}


void Arm32Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);  // VNEG.F32
}


void Arm32Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);  // VNEG.F64
}


void Arm32Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);  // VSQRT.F32
}

void Arm32Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);  // VSQRT.F64
}


// Conversions: the suffix order is <to><from>, e.g. vcvtsd = to single,
// from double; i/u denote signed/unsigned 32-bit integer operands held in
// VFP registers.
void Arm32Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
}


void Arm32Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
}


void Arm32Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
}


void Arm32Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
}


void Arm32Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
}


void Arm32Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
}


void Arm32Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
}


void Arm32Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
}


void Arm32Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
}


void Arm32Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
}


void Arm32Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);  // VCMP.F32
}


void Arm32Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);  // VCMP.F64
}


void Arm32Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);  // VCMP.F32 sd, #0.0
}


void Arm32Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);  // VCMP.F64 dd, #0.0
}
    488 
void Arm32Assembler::b(Label* label, Condition cond) {
  EmitBranch(cond, label, false);  // B<cond> label
}


void Arm32Assembler::bl(Label* label, Condition cond) {
  EmitBranch(cond, label, true);   // BL<cond> label
}


// Emits "tst pc, #0" — the encoding reserved by tst()/teq() above — as an
// exception-handler marker word, then a branch to `label` that is itself
// jumped over via the local label `l`, so normal execution falls through
// unaffected.
void Arm32Assembler::MarkExceptionHandler(Label* label) {
  EmitType01(AL, 1, TST, 1, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);
  EmitBranch(AL, label, false);
  Bind(&l);
}
    506 
    507 
// Appends one 32-bit instruction word to the assembler buffer, growing it
// if needed.
void Arm32Assembler::Emit(int32_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int32_t>(value);
}
    512 
    513 
// Emits a data-processing ("type 01") instruction:
// cond | type | opcode | S | rn | rd | shifter-operand.
// `set_cc` is the S bit (1 = update condition flags); the shifter operand
// supplies the low 12 bits via its ARM encoding.
void Arm32Assembler::EmitType01(Condition cond,
                                int type,
                                Opcode opcode,
                                int set_cc,
                                Register rn,
                                Register rd,
                                const ShifterOperand& so) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     type << kTypeShift |
                     static_cast<int32_t>(opcode) << kOpcodeShift |
                     set_cc << kSShift |
                     static_cast<int32_t>(rn) << kRnShift |
                     static_cast<int32_t>(rd) << kRdShift |
                     so.encodingArm();
  Emit(encoding);
}


// Emits a branch ("type 5") instruction: B when link is false, BL when
// true.  The byte offset is folded into the low bits of the encoding by
// EncodeBranchOffset.
void Arm32Assembler::EmitType5(Condition cond, int offset, bool link) {
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     5 << kTypeShift |
                     (link ? 1 : 0) << kLinkShift;
  Emit(Arm32Assembler::EncodeBranchOffset(offset, encoding));
}
    541 
    542 
    543 void Arm32Assembler::EmitMemOp(Condition cond,
    544                                bool load,
    545                                bool byte,
    546                                Register rd,
    547                                const Address& ad) {
    548   CHECK_NE(rd, kNoRegister);
    549   CHECK_NE(cond, kNoCondition);
    550   const Address& addr = static_cast<const Address&>(ad);
    551 
    552   int32_t encoding = 0;
    553   if (!ad.IsImmediate() && ad.GetRegisterOffset() == PC) {
    554     // PC relative LDR(literal)
    555     int32_t offset = ad.GetOffset();
    556     int32_t u = B23;
    557     if (offset < 0) {
    558       offset = -offset;
    559       u = 0;
    560     }
    561     CHECK_LT(offset, (1 << 12));
    562     encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    563          B26 | B24 | u | B20 |
    564          (load ? L : 0) |
    565          (byte ? B : 0) |
    566          (static_cast<int32_t>(rd) << kRdShift) |
    567          0xf << 16 |
    568          (offset & 0xfff);
    569 
    570   } else {
    571     encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    572         B26 |
    573         (load ? L : 0) |
    574         (byte ? B : 0) |
    575         (static_cast<int32_t>(rd) << kRdShift) |
    576         addr.encodingArm();
    577   }
    578   Emit(encoding);
    579 }
    580 
    581 
    582 void Arm32Assembler::EmitMemOpAddressMode3(Condition cond,
    583                                            int32_t mode,
    584                                            Register rd,
    585                                            const Address& ad) {
    586   CHECK_NE(rd, kNoRegister);
    587   CHECK_NE(cond, kNoCondition);
    588   const Address& addr = static_cast<const Address&>(ad);
    589   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    590                      B22  |
    591                      mode |
    592                      (static_cast<int32_t>(rd) << kRdShift) |
    593                      addr.encoding3();
    594   Emit(encoding);
    595 }
    596 
    597 
// Emits a load/store-multiple (LDM/STM) instruction: the block addressing
// mode supplies the P/U/W bits, `load` selects LDM vs STM, and `regs` is
// the 16-bit register-list bitmask occupying the low halfword.
void Arm32Assembler::EmitMultiMemOp(Condition cond,
                                    BlockAddressMode am,
                                    bool load,
                                    Register base,
                                    RegList regs) {
  CHECK_NE(base, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 |
                     am |
                     (load ? L : 0) |
                     (static_cast<int32_t>(base) << kRnShift) |
                     regs;
  Emit(encoding);
}
    613 
    614 
// Emits a shift-by-immediate as a MOV with a shifted register operand:
// rd = rm <shift> #imm.  The shift amount comes from the immediate
// shifter operand; `opcode` is the shift kind (LSL/LSR/ASR/ROR).
void Arm32Assembler::EmitShiftImmediate(Condition cond,
                                        Shift opcode,
                                        Register rd,
                                        Register rm,
                                        const ShifterOperand& so) {
  CHECK_NE(cond, kNoCondition);
  CHECK(so.IsImmediate());
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     static_cast<int32_t>(MOV) << kOpcodeShift |
                     static_cast<int32_t>(rd) << kRdShift |
                     so.encodingArm() << kShiftImmShift |
                     static_cast<int32_t>(opcode) << kShiftShift |
                     static_cast<int32_t>(rm);
  Emit(encoding);
}


// Emits a shift-by-register as a MOV with a register-shifted register
// operand: rd = rm <shift> rs.  B4 marks the register-shift form.
void Arm32Assembler::EmitShiftRegister(Condition cond,
                                       Shift opcode,
                                       Register rd,
                                       Register rm,
                                       const ShifterOperand& so) {
  CHECK_NE(cond, kNoCondition);
  CHECK(so.IsRegister());
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     static_cast<int32_t>(MOV) << kOpcodeShift |
                     static_cast<int32_t>(rd) << kRdShift |
                     so.encodingArm() << kShiftRegisterShift |
                     static_cast<int32_t>(opcode) << kShiftShift |
                     B4 |
                     static_cast<int32_t>(rm);
  Emit(encoding);
}
    648 
    649 
// Emits a branch (B/BL) to `label`.  When the label is bound the
// pc-relative byte offset is known and emitted directly.  When unbound,
// unresolved branch sites form a linked list threaded through the offset
// fields of the emitted instructions: the label's previous head is written
// into this branch, and the label's head becomes this instruction's
// position (patched later when the label is bound).
void Arm32Assembler::EmitBranch(Condition cond, Label* label, bool link) {
  if (label->IsBound()) {
    EmitType5(cond, label->Position() - buffer_.Size(), link);
  } else {
    int position = buffer_.Size();
    // Use the offset field of the branch instruction for linking the sites.
    EmitType5(cond, label->position_, link);
    label->LinkTo(position);
  }
}
    660 
    661 
    662 void Arm32Assembler::clz(Register rd, Register rm, Condition cond) {
    663   CHECK_NE(rd, kNoRegister);
    664   CHECK_NE(rm, kNoRegister);
    665   CHECK_NE(cond, kNoCondition);
    666   CHECK_NE(rd, PC);
    667   CHECK_NE(rm, PC);
    668   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    669                      B24 | B22 | B21 | (0xf << 16) |
    670                      (static_cast<int32_t>(rd) << kRdShift) |
    671                      (0xf << 8) | B4 | static_cast<int32_t>(rm);
    672   Emit(encoding);
    673 }
    674 
    675 
// MOVW rd, #imm16: writes the 16-bit immediate into rd's low halfword and
// zeroes the high halfword.  The immediate is split into imm4 (bits 15:12,
// placed at bit 16) and imm12 (bits 11:0).
void Arm32Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     B25 | B24 | ((imm16 >> 12) << 16) |
                     static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
  Emit(encoding);
}


// MOVT rd, #imm16: writes the 16-bit immediate into rd's high halfword,
// leaving the low halfword unchanged (B22 distinguishes MOVT from MOVW).
void Arm32Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     B25 | B24 | B22 | ((imm16 >> 12) << 16) |
                     static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
  Emit(encoding);
}
    692 
    693 
// Emits a multiply-family instruction.  `opcode` carries the variant bits
// (e.g. B21 accumulate, B22|B21 subtract, B23 long multiply); B7 | B4 are
// the fixed bits that mark the multiply encoding space.  The rd/rn/rm/rs
// parameters name encoding FIELDS, not assembler operands — the public
// mul/mla/mls/umull wrappers pass their operands pre-shuffled to match.
void Arm32Assembler::EmitMulOp(Condition cond, int32_t opcode,
                               Register rd, Register rn,
                               Register rm, Register rs) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(rs, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = opcode |
      (static_cast<int32_t>(cond) << kConditionShift) |
      (static_cast<int32_t>(rn) << kRnShift) |
      (static_cast<int32_t>(rd) << kRdShift) |
      (static_cast<int32_t>(rs) << kRsShift) |
      B7 | B4 |
      (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
    711 
// LDREX rt, [rn]: load-exclusive word, marking [rn] for exclusive access
// by a following STREX.
void Arm32Assembler::ldrex(Register rt, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 |
                     B23 |
                     L   |
                     (static_cast<int32_t>(rn) << kLdExRnShift) |
                     (static_cast<int32_t>(rt) << kLdExRtShift) |
                     B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
  Emit(encoding);
}


// STREX rd, rt, [rn]: store-exclusive word; rd receives the success status
// (0 = stored, 1 = exclusive monitor lost).
void Arm32Assembler::strex(Register rd,
                           Register rt,
                           Register rn,
                           Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 |
                     B23 |
                     (static_cast<int32_t>(rn) << kStrExRnShift) |
                     (static_cast<int32_t>(rd) << kStrExRdShift) |
                     B11 | B10 | B9 | B8 | B7 | B4 |
                     (static_cast<int32_t>(rt) << kStrExRtShift);
  Emit(encoding);
}
    744 
    745 
// CLREX: clears the local exclusive-access monitor.  Uses the 0xF
// "special" (unconditional) condition field — the instruction has no
// conditional form on ARM, hence the CHECK below.
void Arm32Assembler::clrex(Condition cond) {
  CHECK_EQ(cond, AL);   // This cannot be conditional on ARM.
  int32_t encoding = (kSpecialCondition << kConditionShift) |
                     B26 | B24 | B22 | B21 | B20 | (0xff << 12) | B4 | 0xf;
  Emit(encoding);
}


// NOP (hint encoding); conditional forms are allowed.
void Arm32Assembler::nop(Condition cond) {
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B25 | B24 | B21 | (0xf << 12);
  Emit(encoding);
}
    760 
    761 
    762 void Arm32Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
    763   CHECK_NE(sn, kNoSRegister);
    764   CHECK_NE(rt, kNoRegister);
    765   CHECK_NE(rt, SP);
    766   CHECK_NE(rt, PC);
    767   CHECK_NE(cond, kNoCondition);
    768   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    769                      B27 | B26 | B25 |
    770                      ((static_cast<int32_t>(sn) >> 1)*B16) |
    771                      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
    772                      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
    773   Emit(encoding);
    774 }
    775 
    776 
    777 void Arm32Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
    778   CHECK_NE(sn, kNoSRegister);
    779   CHECK_NE(rt, kNoRegister);
    780   CHECK_NE(rt, SP);
    781   CHECK_NE(rt, PC);
    782   CHECK_NE(cond, kNoCondition);
    783   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    784                      B27 | B26 | B25 | B20 |
    785                      ((static_cast<int32_t>(sn) >> 1)*B16) |
    786                      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
    787                      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
    788   Emit(encoding);
    789 }
    790 
    791 
    792 void Arm32Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
    793                              Condition cond) {
    794   CHECK_NE(sm, kNoSRegister);
    795   CHECK_NE(sm, S31);
    796   CHECK_NE(rt, kNoRegister);
    797   CHECK_NE(rt, SP);
    798   CHECK_NE(rt, PC);
    799   CHECK_NE(rt2, kNoRegister);
    800   CHECK_NE(rt2, SP);
    801   CHECK_NE(rt2, PC);
    802   CHECK_NE(cond, kNoCondition);
    803   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    804                      B27 | B26 | B22 |
    805                      (static_cast<int32_t>(rt2)*B16) |
    806                      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
    807                      ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
    808                      (static_cast<int32_t>(sm) >> 1);
    809   Emit(encoding);
    810 }
    811 
    812 
// VMOV rt, rt2, sm, sm+1: copy the consecutive single-precision pair
// starting at sm into two distinct core registers.
void Arm32Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                             Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);  // sm+1 must also exist.
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Two writes to the same register are unpredictable.
  CHECK_NE(cond, kNoCondition);
  // B20 selects the to-core-registers direction (cf. vmovsrr).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}
    833 
    834 
// VMOV dm, rt, rt2: copy two core registers into double-precision dm.
void Arm32Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                             Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(cond, kNoCondition);
  // B8 marks the double-precision form of the two-register transfer.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |  // M: high bit of dm.
                     (static_cast<int32_t>(dm) & 0xf);            // Vm: low 4 bits of dm.
  Emit(encoding);
}
    853 
    854 
// VMOV rt, rt2, dm: copy double-precision dm into two distinct core registers.
void Arm32Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                             Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Two writes to the same register are unpredictable.
  CHECK_NE(cond, kNoCondition);
  // B20 selects the to-core-registers direction (cf. vmovdrr).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
    874 
    875 
    876 void Arm32Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
    877   const Address& addr = static_cast<const Address&>(ad);
    878   CHECK_NE(sd, kNoSRegister);
    879   CHECK_NE(cond, kNoCondition);
    880   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    881                      B27 | B26 | B24 | B20 |
    882                      ((static_cast<int32_t>(sd) & 1)*B22) |
    883                      ((static_cast<int32_t>(sd) >> 1)*B12) |
    884                      B11 | B9 | addr.vencoding();
    885   Emit(encoding);
    886 }
    887 
    888 
    889 void Arm32Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
    890   const Address& addr = static_cast<const Address&>(ad);
    891   CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
    892   CHECK_NE(sd, kNoSRegister);
    893   CHECK_NE(cond, kNoCondition);
    894   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    895                      B27 | B26 | B24 |
    896                      ((static_cast<int32_t>(sd) & 1)*B22) |
    897                      ((static_cast<int32_t>(sd) >> 1)*B12) |
    898                      B11 | B9 | addr.vencoding();
    899   Emit(encoding);
    900 }
    901 
    902 
    903 void Arm32Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
    904   const Address& addr = static_cast<const Address&>(ad);
    905   CHECK_NE(dd, kNoDRegister);
    906   CHECK_NE(cond, kNoCondition);
    907   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    908                      B27 | B26 | B24 | B20 |
    909                      ((static_cast<int32_t>(dd) >> 4)*B22) |
    910                      ((static_cast<int32_t>(dd) & 0xf)*B12) |
    911                      B11 | B9 | B8 | addr.vencoding();
    912   Emit(encoding);
    913 }
    914 
    915 
    916 void Arm32Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
    917   const Address& addr = static_cast<const Address&>(ad);
    918   CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
    919   CHECK_NE(dd, kNoDRegister);
    920   CHECK_NE(cond, kNoCondition);
    921   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    922                      B27 | B26 | B24 |
    923                      ((static_cast<int32_t>(dd) >> 4)*B22) |
    924                      ((static_cast<int32_t>(dd) & 0xf)*B12) |
    925                      B11 | B9 | B8 | addr.vencoding();
    926   Emit(encoding);
    927 }
    928 
    929 
    930 void Arm32Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
    931   EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
    932 }
    933 
    934 
    935 void Arm32Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
    936   EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
    937 }
    938 
    939 
    940 void Arm32Assembler::vpops(SRegister reg, int nregs, Condition cond) {
    941   EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
    942 }
    943 
    944 
    945 void Arm32Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
    946   EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
    947 }
    948 
    949 
// Emits a vpush/vpop of `nregs` consecutive S- or D-registers starting at
// `reg`, using SP as the base register. `push` selects store (pre-decrement)
// vs load (post-increment); `dbl` selects double- vs single-precision.
void Arm32Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CHECK_NE(cond, kNoCondition);
  CHECK_GT(nregs, 0);
  uint32_t D;   // Extension bit of the first register.
  uint32_t Vd;  // 4-bit Vd field.
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 0b1111;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 0b1111;
  }
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                    B11 | B9 |
        (dbl ? B8 : 0) |                          // B8 set for double-precision.
        (push ? B24 : (B23 | B20)) |              // push: pre-decrement store; pop: post-increment load.
        static_cast<int32_t>(cond) << kConditionShift |
        nregs << (dbl ? 1 : 0) |                  // imm8 counts words: 2 per D-register.
        D << 22 |
        Vd << 12;
  Emit(encoding);
}
    974 
    975 
// Emits a three-operand single-precision VFP data-processing instruction
// (sd = sn <op> sm), where `opcode` supplies the operation-specific bits.
void Arm32Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  // Each S register splits into a 4-bit Vx field (upper bits) plus one
  // extension bit (D/N/M, the low bit), placed as shown below.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}
    992 
    993 
// Emits a three-operand double-precision VFP data-processing instruction
// (dd = dn <op> dm); B8 marks the double-precision form.
void Arm32Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  // Each D register splits into a 4-bit Vx field (low bits) plus one
  // extension bit (D/N/M, the high bit), placed as shown below.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}
   1010 
   1011 
// Emits a two-operand VFP instruction with a single-precision destination
// and a double-precision source (sd <- op(dm)), e.g. conversions.
void Arm32Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                               SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |   // D: low bit of sd.
                     ((static_cast<int32_t>(sd) >> 1)*B12) |  // Vd: upper 4 bits of sd.
                     ((static_cast<int32_t>(dm) >> 4)*B5) |   // M: high bit of dm.
                     (static_cast<int32_t>(dm) & 0xf);        // Vm: low 4 bits of dm.
  Emit(encoding);
}
   1025 
   1026 
// Emits a two-operand VFP instruction with a double-precision destination
// and a single-precision source (dd <- op(sm)), e.g. conversions.
void Arm32Assembler::EmitVFPds(Condition cond, int32_t opcode,
                             DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |  // D: high bit of dd.
                     ((static_cast<int32_t>(dd) & 0xf)*B12) | // Vd: low 4 bits of dd.
                     ((static_cast<int32_t>(sm) & 1)*B5) |    // M: low bit of sm.
                     (static_cast<int32_t>(sm) >> 1);         // Vm: upper 4 bits of sm.
  Emit(encoding);
}
   1040 
   1041 
   1042 void Arm32Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
   1043                          bool setcc, Condition cond) {
   1044   CHECK_NE(shift_imm, 0u);  // Do not use Lsl if no shift is wanted.
   1045   if (setcc) {
   1046     movs(rd, ShifterOperand(rm, LSL, shift_imm), cond);
   1047   } else {
   1048     mov(rd, ShifterOperand(rm, LSL, shift_imm), cond);
   1049   }
   1050 }
   1051 
   1052 
   1053 void Arm32Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
   1054                          bool setcc, Condition cond) {
   1055   CHECK_NE(shift_imm, 0u);  // Do not use Lsr if no shift is wanted.
   1056   if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
   1057   if (setcc) {
   1058     movs(rd, ShifterOperand(rm, LSR, shift_imm), cond);
   1059   } else {
   1060     mov(rd, ShifterOperand(rm, LSR, shift_imm), cond);
   1061   }
   1062 }
   1063 
   1064 
   1065 void Arm32Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
   1066                          bool setcc, Condition cond) {
   1067   CHECK_NE(shift_imm, 0u);  // Do not use Asr if no shift is wanted.
   1068   if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
   1069   if (setcc) {
   1070     movs(rd, ShifterOperand(rm, ASR, shift_imm), cond);
   1071   } else {
   1072     mov(rd, ShifterOperand(rm, ASR, shift_imm), cond);
   1073   }
   1074 }
   1075 
   1076 
   1077 void Arm32Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
   1078                          bool setcc, Condition cond) {
   1079   CHECK_NE(shift_imm, 0u);  // Use Rrx instruction.
   1080   if (setcc) {
   1081     movs(rd, ShifterOperand(rm, ROR, shift_imm), cond);
   1082   } else {
   1083     mov(rd, ShifterOperand(rm, ROR, shift_imm), cond);
   1084   }
   1085 }
   1086 
   1087 void Arm32Assembler::Rrx(Register rd, Register rm, bool setcc, Condition cond) {
   1088   if (setcc) {
   1089     movs(rd, ShifterOperand(rm, ROR, 0), cond);
   1090   } else {
   1091     mov(rd, ShifterOperand(rm, ROR, 0), cond);
   1092   }
   1093 }
   1094 
   1095 
   1096 void Arm32Assembler::Lsl(Register rd, Register rm, Register rn,
   1097                          bool setcc, Condition cond) {
   1098   if (setcc) {
   1099     movs(rd, ShifterOperand(rm, LSL, rn), cond);
   1100   } else {
   1101     mov(rd, ShifterOperand(rm, LSL, rn), cond);
   1102   }
   1103 }
   1104 
   1105 
   1106 void Arm32Assembler::Lsr(Register rd, Register rm, Register rn,
   1107                          bool setcc, Condition cond) {
   1108   if (setcc) {
   1109     movs(rd, ShifterOperand(rm, LSR, rn), cond);
   1110   } else {
   1111     mov(rd, ShifterOperand(rm, LSR, rn), cond);
   1112   }
   1113 }
   1114 
   1115 
   1116 void Arm32Assembler::Asr(Register rd, Register rm, Register rn,
   1117                          bool setcc, Condition cond) {
   1118   if (setcc) {
   1119     movs(rd, ShifterOperand(rm, ASR, rn), cond);
   1120   } else {
   1121     mov(rd, ShifterOperand(rm, ASR, rn), cond);
   1122   }
   1123 }
   1124 
   1125 
   1126 void Arm32Assembler::Ror(Register rd, Register rm, Register rn,
   1127                          bool setcc, Condition cond) {
   1128   if (setcc) {
   1129     movs(rd, ShifterOperand(rm, ROR, rn), cond);
   1130   } else {
   1131     mov(rd, ShifterOperand(rm, ROR, rn), cond);
   1132   }
   1133 }
   1134 
// VMRS APSR_nzcv, FPSCR: copy the VFP comparison flags into the APSR.
// Encoding PC (0b1111) in the Rt field selects the APSR_nzcv destination.
void Arm32Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
      (static_cast<int32_t>(PC)*B12) |
      B11 | B9 | B4;
  Emit(encoding);
}
   1143 
   1144 
// Supervisor call with a 24-bit immediate; always emitted unconditionally (AL).
void Arm32Assembler::svc(uint32_t imm24) {
  CHECK(IsUint(24, imm24)) << imm24;  // Immediate must fit in 24 bits.
  int32_t encoding = (AL << kConditionShift) | B27 | B26 | B25 | B24 | imm24;
  Emit(encoding);
}
   1150 
   1151 
// Breakpoint instruction. The 16-bit immediate is split into a 12-bit field
// at bits 19:8 and a 4-bit field at bits 3:0, per the BKPT encoding.
void Arm32Assembler::bkpt(uint16_t imm16) {
  int32_t encoding = (AL << kConditionShift) | B24 | B21 |
                     ((imm16 >> 4) << 8) | B6 | B5 | B4 | (imm16 & 0xf);
  Emit(encoding);
}
   1157 
   1158 
// BLX rm: branch with link and exchange to the address in rm.
void Arm32Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // Bits 19:8 are all-ones (SBO) in this encoding; B5 distinguishes BLX from BX.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 | B21 | (0xfff << 8) | B5 | B4 |
                     (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
   1167 
   1168 
// BX rm: branch and exchange to the address in rm (no link).
void Arm32Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  // Same shape as blx but without B5 (the link bit of this encoding family).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 | B21 | (0xfff << 8) | B4 |
                     (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}
   1177 
   1178 
   1179 void Arm32Assembler::Push(Register rd, Condition cond) {
   1180   str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
   1181 }
   1182 
   1183 
   1184 void Arm32Assembler::Pop(Register rd, Condition cond) {
   1185   ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
   1186 }
   1187 
   1188 
   1189 void Arm32Assembler::PushList(RegList regs, Condition cond) {
   1190   stm(DB_W, SP, regs, cond);
   1191 }
   1192 
   1193 
   1194 void Arm32Assembler::PopList(RegList regs, Condition cond) {
   1195   ldm(IA_W, SP, regs, cond);
   1196 }
   1197 
   1198 
   1199 void Arm32Assembler::Mov(Register rd, Register rm, Condition cond) {
   1200   if (rd != rm) {
   1201     mov(rd, ShifterOperand(rm), cond);
   1202   }
   1203 }
   1204 
   1205 
// Binds `label` to the current buffer position and back-patches every
// branch previously linked to it.
void Arm32Assembler::Bind(Label* label) {
  CHECK(!label->IsBound());
  int bound_pc = buffer_.Size();
  // Each linked branch stores the position of the next branch in the chain
  // inside its own offset field; walk the chain, patching as we go.
  while (label->IsLinked()) {
    int32_t position = label->Position();
    int32_t next = buffer_.Load<int32_t>(position);
    int32_t encoded = Arm32Assembler::EncodeBranchOffset(bound_pc - position, next);
    buffer_.Store<int32_t>(position, encoded);
    label->position_ = Arm32Assembler::DecodeBranchOffset(next);
  }
  label->BindTo(bound_pc);
}
   1218 
   1219 
// Writes the byte `offset` into the offset field of branch instruction
// `inst` and returns the patched instruction word.
int32_t Arm32Assembler::EncodeBranchOffset(int offset, int32_t inst) {
  // The offset is off by 8 due to the way the ARM CPUs read PC.
  offset -= 8;
  CHECK_ALIGNED(offset, 4);
  CHECK(IsInt(POPCOUNT(kBranchOffsetMask), offset)) << offset;

  // Properly preserve only the bits supported in the instruction.
  offset >>= 2;  // Branch offsets are encoded in words.
  offset &= kBranchOffsetMask;
  return (inst & ~kBranchOffsetMask) | offset;
}
   1231 
   1232 
// Inverse of EncodeBranchOffset: recovers the byte offset stored in `inst`.
int Arm32Assembler::DecodeBranchOffset(int32_t inst) {
  // Sign-extend, left-shift by 2, then add 8.
  return ((((inst & kBranchOffsetMask) << 8) >> 6) + 8);
}
   1237 
   1238 
   1239 void Arm32Assembler::AddConstant(Register rd, int32_t value, Condition cond) {
   1240   AddConstant(rd, rd, value, cond);
   1241 }
   1242 
   1243 
   1244 void Arm32Assembler::AddConstant(Register rd, Register rn, int32_t value,
   1245                                  Condition cond) {
   1246   if (value == 0) {
   1247     if (rd != rn) {
   1248       mov(rd, ShifterOperand(rn), cond);
   1249     }
   1250     return;
   1251   }
   1252   // We prefer to select the shorter code sequence rather than selecting add for
   1253   // positive values and sub for negatives ones, which would slightly improve
   1254   // the readability of generated code for some constants.
   1255   ShifterOperand shifter_op;
   1256   if (ShifterOperand::CanHoldArm(value, &shifter_op)) {
   1257     add(rd, rn, shifter_op, cond);
   1258   } else if (ShifterOperand::CanHoldArm(-value, &shifter_op)) {
   1259     sub(rd, rn, shifter_op, cond);
   1260   } else {
   1261     CHECK(rn != IP);
   1262     if (ShifterOperand::CanHoldArm(~value, &shifter_op)) {
   1263       mvn(IP, shifter_op, cond);
   1264       add(rd, rn, ShifterOperand(IP), cond);
   1265     } else if (ShifterOperand::CanHoldArm(~(-value), &shifter_op)) {
   1266       mvn(IP, shifter_op, cond);
   1267       sub(rd, rn, ShifterOperand(IP), cond);
   1268     } else {
   1269       movw(IP, Low16Bits(value), cond);
   1270       uint16_t value_high = High16Bits(value);
   1271       if (value_high != 0) {
   1272         movt(IP, value_high, cond);
   1273       }
   1274       add(rd, rn, ShifterOperand(IP), cond);
   1275     }
   1276   }
   1277 }
   1278 
   1279 
   1280 void Arm32Assembler::AddConstantSetFlags(Register rd, Register rn, int32_t value,
   1281                                          Condition cond) {
   1282   ShifterOperand shifter_op;
   1283   if (ShifterOperand::CanHoldArm(value, &shifter_op)) {
   1284     adds(rd, rn, shifter_op, cond);
   1285   } else if (ShifterOperand::CanHoldArm(-value, &shifter_op)) {
   1286     subs(rd, rn, shifter_op, cond);
   1287   } else {
   1288     CHECK(rn != IP);
   1289     if (ShifterOperand::CanHoldArm(~value, &shifter_op)) {
   1290       mvn(IP, shifter_op, cond);
   1291       adds(rd, rn, ShifterOperand(IP), cond);
   1292     } else if (ShifterOperand::CanHoldArm(~(-value), &shifter_op)) {
   1293       mvn(IP, shifter_op, cond);
   1294       subs(rd, rn, ShifterOperand(IP), cond);
   1295     } else {
   1296       movw(IP, Low16Bits(value), cond);
   1297       uint16_t value_high = High16Bits(value);
   1298       if (value_high != 0) {
   1299         movt(IP, value_high, cond);
   1300       }
   1301       adds(rd, rn, ShifterOperand(IP), cond);
   1302     }
   1303   }
   1304 }
   1305 
   1306 
   1307 void Arm32Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
   1308   ShifterOperand shifter_op;
   1309   if (ShifterOperand::CanHoldArm(value, &shifter_op)) {
   1310     mov(rd, shifter_op, cond);
   1311   } else if (ShifterOperand::CanHoldArm(~value, &shifter_op)) {
   1312     mvn(rd, shifter_op, cond);
   1313   } else {
   1314     movw(rd, Low16Bits(value), cond);
   1315     uint16_t value_high = High16Bits(value);
   1316     if (value_high != 0) {
   1317       movt(rd, value_high, cond);
   1318     }
   1319   }
   1320 }
   1321 
   1322 
   1323 // Implementation note: this method must emit at most one instruction when
   1324 // Address::CanHoldLoadOffsetArm.
   1325 void Arm32Assembler::LoadFromOffset(LoadOperandType type,
   1326                                     Register reg,
   1327                                     Register base,
   1328                                     int32_t offset,
   1329                                     Condition cond) {
   1330   if (!Address::CanHoldLoadOffsetArm(type, offset)) {
   1331     CHECK(base != IP);
   1332     LoadImmediate(IP, offset, cond);
   1333     add(IP, IP, ShifterOperand(base), cond);
   1334     base = IP;
   1335     offset = 0;
   1336   }
   1337   CHECK(Address::CanHoldLoadOffsetArm(type, offset));
   1338   switch (type) {
   1339     case kLoadSignedByte:
   1340       ldrsb(reg, Address(base, offset), cond);
   1341       break;
   1342     case kLoadUnsignedByte:
   1343       ldrb(reg, Address(base, offset), cond);
   1344       break;
   1345     case kLoadSignedHalfword:
   1346       ldrsh(reg, Address(base, offset), cond);
   1347       break;
   1348     case kLoadUnsignedHalfword:
   1349       ldrh(reg, Address(base, offset), cond);
   1350       break;
   1351     case kLoadWord:
   1352       ldr(reg, Address(base, offset), cond);
   1353       break;
   1354     case kLoadWordPair:
   1355       ldrd(reg, Address(base, offset), cond);
   1356       break;
   1357     default:
   1358       LOG(FATAL) << "UNREACHABLE";
   1359   }
   1360 }
   1361 
   1362 
   1363 // Implementation note: this method must emit at most one instruction when
   1364 // Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
   1365 void Arm32Assembler::LoadSFromOffset(SRegister reg,
   1366                                      Register base,
   1367                                      int32_t offset,
   1368                                      Condition cond) {
   1369   if (!Address::CanHoldLoadOffsetArm(kLoadSWord, offset)) {
   1370     CHECK_NE(base, IP);
   1371     LoadImmediate(IP, offset, cond);
   1372     add(IP, IP, ShifterOperand(base), cond);
   1373     base = IP;
   1374     offset = 0;
   1375   }
   1376   CHECK(Address::CanHoldLoadOffsetArm(kLoadSWord, offset));
   1377   vldrs(reg, Address(base, offset), cond);
   1378 }
   1379 
   1380 
   1381 // Implementation note: this method must emit at most one instruction when
   1382 // Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
   1383 void Arm32Assembler::LoadDFromOffset(DRegister reg,
   1384                                      Register base,
   1385                                      int32_t offset,
   1386                                      Condition cond) {
   1387   if (!Address::CanHoldLoadOffsetArm(kLoadDWord, offset)) {
   1388     CHECK_NE(base, IP);
   1389     LoadImmediate(IP, offset, cond);
   1390     add(IP, IP, ShifterOperand(base), cond);
   1391     base = IP;
   1392     offset = 0;
   1393   }
   1394   CHECK(Address::CanHoldLoadOffsetArm(kLoadDWord, offset));
   1395   vldrd(reg, Address(base, offset), cond);
   1396 }
   1397 
   1398 
   1399 // Implementation note: this method must emit at most one instruction when
   1400 // Address::CanHoldStoreOffsetArm.
   1401 void Arm32Assembler::StoreToOffset(StoreOperandType type,
   1402                                    Register reg,
   1403                                    Register base,
   1404                                    int32_t offset,
   1405                                    Condition cond) {
   1406   if (!Address::CanHoldStoreOffsetArm(type, offset)) {
   1407     CHECK(reg != IP);
   1408     CHECK(base != IP);
   1409     LoadImmediate(IP, offset, cond);
   1410     add(IP, IP, ShifterOperand(base), cond);
   1411     base = IP;
   1412     offset = 0;
   1413   }
   1414   CHECK(Address::CanHoldStoreOffsetArm(type, offset));
   1415   switch (type) {
   1416     case kStoreByte:
   1417       strb(reg, Address(base, offset), cond);
   1418       break;
   1419     case kStoreHalfword:
   1420       strh(reg, Address(base, offset), cond);
   1421       break;
   1422     case kStoreWord:
   1423       str(reg, Address(base, offset), cond);
   1424       break;
   1425     case kStoreWordPair:
   1426       strd(reg, Address(base, offset), cond);
   1427       break;
   1428     default:
   1429       LOG(FATAL) << "UNREACHABLE";
   1430   }
   1431 }
   1432 
   1433 
   1434 // Implementation note: this method must emit at most one instruction when
   1435 // Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreToOffset.
   1436 void Arm32Assembler::StoreSToOffset(SRegister reg,
   1437                                     Register base,
   1438                                     int32_t offset,
   1439                                     Condition cond) {
   1440   if (!Address::CanHoldStoreOffsetArm(kStoreSWord, offset)) {
   1441     CHECK_NE(base, IP);
   1442     LoadImmediate(IP, offset, cond);
   1443     add(IP, IP, ShifterOperand(base), cond);
   1444     base = IP;
   1445     offset = 0;
   1446   }
   1447   CHECK(Address::CanHoldStoreOffsetArm(kStoreSWord, offset));
   1448   vstrs(reg, Address(base, offset), cond);
   1449 }
   1450 
   1451 
   1452 // Implementation note: this method must emit at most one instruction when
   1453 // Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreSToOffset.
   1454 void Arm32Assembler::StoreDToOffset(DRegister reg,
   1455                                     Register base,
   1456                                     int32_t offset,
   1457                                     Condition cond) {
   1458   if (!Address::CanHoldStoreOffsetArm(kStoreDWord, offset)) {
   1459     CHECK_NE(base, IP);
   1460     LoadImmediate(IP, offset, cond);
   1461     add(IP, IP, ShifterOperand(base), cond);
   1462     base = IP;
   1463     offset = 0;
   1464   }
   1465   CHECK(Address::CanHoldStoreOffsetArm(kStoreDWord, offset));
   1466   vstrd(reg, Address(base, offset), cond);
   1467 }
   1468 
   1469 
// Emits a data memory barrier on SMP builds; a no-op on uniprocessor builds.
// The scratch register is unused here but the interface pins it to R12.
void Arm32Assembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
#if ANDROID_SMP != 0
  int32_t encoding = 0xf57ff05f;  // dmb sy (full-system barrier) — fixed encoding.
  Emit(encoding);
#endif
}
   1477 
   1478 
   1479 void Arm32Assembler::cbz(Register rn, Label* target) {
   1480   LOG(FATAL) << "cbz is not supported on ARM32";
   1481 }
   1482 
   1483 
   1484 void Arm32Assembler::cbnz(Register rn, Label* target) {
   1485   LOG(FATAL) << "cbnz is not supported on ARM32";
   1486 }
   1487 
   1488 
   1489 void Arm32Assembler::CompareAndBranchIfZero(Register r, Label* label) {
   1490   cmp(r, ShifterOperand(0));
   1491   b(label, EQ);
   1492 }
   1493 
   1494 
   1495 void Arm32Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
   1496   cmp(r, ShifterOperand(0));
   1497   b(label, NE);
   1498 }
   1499 
   1500 
   1501 }  // namespace arm
   1502 }  // namespace art
   1503