      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #include "assembler_arm32.h"
     18 
     19 #include "base/bit_utils.h"
     20 #include "base/logging.h"
     21 #include "entrypoints/quick/quick_entrypoints.h"
     22 #include "offsets.h"
     23 #include "thread.h"
     24 
     25 namespace art {
     26 namespace arm {
     27 
     28 bool Arm32Assembler::ShifterOperandCanHoldArm32(uint32_t immediate, ShifterOperand* shifter_op) {
     29   // Avoid the more expensive test for frequent small immediate values.
     30   if (immediate < (1 << kImmed8Bits)) {
     31     shifter_op->type_ = ShifterOperand::kImmediate;
     32     shifter_op->is_rotate_ = true;
     33     shifter_op->rotate_ = 0;
     34     shifter_op->immed_ = immediate;
     35     return true;
     36   }
     37   // Note that immediate must be unsigned for the test to work correctly.
      38   for (int rot = 1; rot < 16; rot++) {  // rot == 0 was handled above; avoids a shift by 32.
     39     uint32_t imm8 = (immediate << 2*rot) | (immediate >> (32 - 2*rot));
     40     if (imm8 < (1 << kImmed8Bits)) {
     41       shifter_op->type_ = ShifterOperand::kImmediate;
     42       shifter_op->is_rotate_ = true;
     43       shifter_op->rotate_ = rot;
     44       shifter_op->immed_ = imm8;
     45       return true;
     46     }
     47   }
     48   return false;
     49 }
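// Note on the encoding above: an ARM data-processing ("modified") immediate is an
// 8-bit value rotated right by an even amount (twice the 4-bit rotate field).  For
// example, 0xFF000000 is encodable as immed_ = 0xFF with rotate_ = 4 (0xFF rotated
// right by 8 bits), whereas 0x101 is not encodable because its set bits never fit
// inside a single 8-bit window under an even rotation, so the loop returns false.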
     50 
     51 bool Arm32Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
     52                                            Register rn ATTRIBUTE_UNUSED,
     53                                            Opcode opcode ATTRIBUTE_UNUSED,
     54                                            uint32_t immediate,
     55                                            ShifterOperand* shifter_op) {
     56   return ShifterOperandCanHoldArm32(immediate, shifter_op);
     57 }
     58 
     59 void Arm32Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
     60                         Condition cond) {
     61   EmitType01(cond, so.type(), AND, 0, rn, rd, so);
     62 }
     63 
     64 
     65 void Arm32Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
     66                        Condition cond) {
     67   EmitType01(cond, so.type(), EOR, 0, rn, rd, so);
     68 }
     69 
     70 
     71 void Arm32Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
     72                        Condition cond) {
     73   EmitType01(cond, so.type(), SUB, 0, rn, rd, so);
     74 }
     75 
     76 void Arm32Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
     77                        Condition cond) {
     78   EmitType01(cond, so.type(), RSB, 0, rn, rd, so);
     79 }
     80 
     81 void Arm32Assembler::rsbs(Register rd, Register rn, const ShifterOperand& so,
     82                         Condition cond) {
     83   EmitType01(cond, so.type(), RSB, 1, rn, rd, so);
     84 }
     85 
     86 
     87 void Arm32Assembler::add(Register rd, Register rn, const ShifterOperand& so,
     88                        Condition cond) {
     89   EmitType01(cond, so.type(), ADD, 0, rn, rd, so);
     90 }
     91 
     92 
     93 void Arm32Assembler::adds(Register rd, Register rn, const ShifterOperand& so,
     94                         Condition cond) {
     95   EmitType01(cond, so.type(), ADD, 1, rn, rd, so);
     96 }
     97 
     98 
     99 void Arm32Assembler::subs(Register rd, Register rn, const ShifterOperand& so,
    100                         Condition cond) {
    101   EmitType01(cond, so.type(), SUB, 1, rn, rd, so);
    102 }
    103 
    104 
    105 void Arm32Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
    106                        Condition cond) {
    107   EmitType01(cond, so.type(), ADC, 0, rn, rd, so);
    108 }
    109 
    110 
    111 void Arm32Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
    112                        Condition cond) {
    113   EmitType01(cond, so.type(), SBC, 0, rn, rd, so);
    114 }
    115 
    116 
    117 void Arm32Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
    118                        Condition cond) {
    119   EmitType01(cond, so.type(), RSC, 0, rn, rd, so);
    120 }
    121 
    122 
    123 void Arm32Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
    124   CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
    125   EmitType01(cond, so.type(), TST, 1, rn, R0, so);
    126 }
    127 
    128 
    129 void Arm32Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
    130   CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
    131   EmitType01(cond, so.type(), TEQ, 1, rn, R0, so);
    132 }
    133 
    134 
    135 void Arm32Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
    136   EmitType01(cond, so.type(), CMP, 1, rn, R0, so);
    137 }
    138 
    139 
    140 void Arm32Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
    141   EmitType01(cond, so.type(), CMN, 1, rn, R0, so);
    142 }
    143 
    144 
    145 void Arm32Assembler::orr(Register rd, Register rn,
    146                     const ShifterOperand& so, Condition cond) {
    147   EmitType01(cond, so.type(), ORR, 0, rn, rd, so);
    148 }
    149 
    150 
    151 void Arm32Assembler::orrs(Register rd, Register rn,
    152                         const ShifterOperand& so, Condition cond) {
    153   EmitType01(cond, so.type(), ORR, 1, rn, rd, so);
    154 }
    155 
    156 
    157 void Arm32Assembler::mov(Register rd, const ShifterOperand& so, Condition cond) {
    158   EmitType01(cond, so.type(), MOV, 0, R0, rd, so);
    159 }
    160 
    161 
    162 void Arm32Assembler::movs(Register rd, const ShifterOperand& so, Condition cond) {
    163   EmitType01(cond, so.type(), MOV, 1, R0, rd, so);
    164 }
    165 
    166 
    167 void Arm32Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
    168                        Condition cond) {
    169   EmitType01(cond, so.type(), BIC, 0, rn, rd, so);
    170 }
    171 
    172 
    173 void Arm32Assembler::mvn(Register rd, const ShifterOperand& so, Condition cond) {
    174   EmitType01(cond, so.type(), MVN, 0, R0, rd, so);
    175 }
    176 
    177 
    178 void Arm32Assembler::mvns(Register rd, const ShifterOperand& so, Condition cond) {
    179   EmitType01(cond, so.type(), MVN, 1, R0, rd, so);
    180 }
    181 
    182 
    183 void Arm32Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
    184   // Assembler registers rd, rn, rm are encoded as rn, rm, rs.
    185   EmitMulOp(cond, 0, R0, rd, rn, rm);
    186 }
    187 
    188 
    189 void Arm32Assembler::mla(Register rd, Register rn, Register rm, Register ra,
    190                          Condition cond) {
    191   // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
    192   EmitMulOp(cond, B21, ra, rd, rn, rm);
    193 }
    194 
    195 
    196 void Arm32Assembler::mls(Register rd, Register rn, Register rm, Register ra,
    197                          Condition cond) {
    198   // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
    199   EmitMulOp(cond, B22 | B21, ra, rd, rn, rm);
    200 }
    201 
    202 
    203 void Arm32Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
    204                            Register rm, Condition cond) {
    205   // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
    206   EmitMulOp(cond, B23, rd_lo, rd_hi, rn, rm);
    207 }
    208 
    209 
    210 void Arm32Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
    211   CHECK_NE(rd, kNoRegister);
    212   CHECK_NE(rn, kNoRegister);
    213   CHECK_NE(rm, kNoRegister);
    214   CHECK_NE(cond, kNoCondition);
    215   int32_t encoding = B26 | B25 | B24 | B20 |
    216       B15 | B14 | B13 | B12 |
    217       (static_cast<int32_t>(cond) << kConditionShift) |
    218       (static_cast<int32_t>(rn) << 0) |
    219       (static_cast<int32_t>(rd) << 16) |
    220       (static_cast<int32_t>(rm) << 8) |
    221       B4;
    222   Emit(encoding);
    223 }
    224 
    225 
    226 void Arm32Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
    227   CHECK_NE(rd, kNoRegister);
    228   CHECK_NE(rn, kNoRegister);
    229   CHECK_NE(rm, kNoRegister);
    230   CHECK_NE(cond, kNoCondition);
    231   int32_t encoding = B26 | B25 | B24 | B21 | B20 |
    232       B15 | B14 | B13 | B12 |
    233       (static_cast<int32_t>(cond) << kConditionShift) |
    234       (static_cast<int32_t>(rn) << 0) |
    235       (static_cast<int32_t>(rd) << 16) |
    236       (static_cast<int32_t>(rm) << 8) |
    237       B4;
    238   Emit(encoding);
    239 }
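// Note: in ARM (A32) state, SDIV/UDIV are an optional ARMv7 extension (only guaranteed
// from ARMv7VE / ARMv8 onwards), so callers are presumably expected to check the
// target's instruction set features before emitting these encodings.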
    240 
    241 
    242 void Arm32Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
    243   CHECK_NE(rd, kNoRegister);
    244   CHECK_NE(rn, kNoRegister);
    245   CHECK_NE(cond, kNoCondition);
    246   CHECK_LE(lsb, 31U);
    247   CHECK(1U <= width && width <= 32U) << width;
    248   uint32_t widthminus1 = width - 1;
    249 
    250   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    251       B26 | B25 | B24 | B23 | B21 |
    252       (widthminus1 << 16) |
    253       (static_cast<uint32_t>(rd) << 12) |
    254       (lsb << 7) |
    255       B6 | B4 |
    256       static_cast<uint32_t>(rn);
    257   Emit(encoding);
    258 }
    259 
    260 
    261 void Arm32Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
    262   CHECK_NE(rd, kNoRegister);
    263   CHECK_NE(rn, kNoRegister);
    264   CHECK_NE(cond, kNoCondition);
    265   CHECK_LE(lsb, 31U);
    266   CHECK(1U <= width && width <= 32U) << width;
    267   uint32_t widthminus1 = width - 1;
    268 
    269   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    270       B26 | B25 | B24 | B23 | B22 | B21 |
    271       (widthminus1 << 16) |
    272       (static_cast<uint32_t>(rd) << 12) |
    273       (lsb << 7) |
    274       B6 | B4 |
    275       static_cast<uint32_t>(rn);
    276   Emit(encoding);
    277 }
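// Example: ubfx(R0, R1, 8, 8) extracts bits 8..15 of R1 into the low byte of R0 and
// zero-extends the result; sbfx with the same arguments sign-extends from bit 15 instead.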
    278 
    279 
    280 void Arm32Assembler::ldr(Register rd, const Address& ad, Condition cond) {
    281   EmitMemOp(cond, true, false, rd, ad);
    282 }
    283 
    284 
    285 void Arm32Assembler::str(Register rd, const Address& ad, Condition cond) {
    286   EmitMemOp(cond, false, false, rd, ad);
    287 }
    288 
    289 
    290 void Arm32Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
    291   EmitMemOp(cond, true, true, rd, ad);
    292 }
    293 
    294 
    295 void Arm32Assembler::strb(Register rd, const Address& ad, Condition cond) {
    296   EmitMemOp(cond, false, true, rd, ad);
    297 }
    298 
    299 
    300 void Arm32Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
    301   EmitMemOpAddressMode3(cond, L | B7 | H | B4, rd, ad);
    302 }
    303 
    304 
    305 void Arm32Assembler::strh(Register rd, const Address& ad, Condition cond) {
    306   EmitMemOpAddressMode3(cond, B7 | H | B4, rd, ad);
    307 }
    308 
    309 
    310 void Arm32Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
    311   EmitMemOpAddressMode3(cond, L | B7 | B6 | B4, rd, ad);
    312 }
    313 
    314 
    315 void Arm32Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
    316   EmitMemOpAddressMode3(cond, L | B7 | B6 | H | B4, rd, ad);
    317 }
    318 
    319 
    320 void Arm32Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
    321   CHECK_EQ(rd % 2, 0);
    322   EmitMemOpAddressMode3(cond, B7 | B6 | B4, rd, ad);
    323 }
    324 
    325 
    326 void Arm32Assembler::strd(Register rd, const Address& ad, Condition cond) {
    327   CHECK_EQ(rd % 2, 0);
    328   EmitMemOpAddressMode3(cond, B7 | B6 | B5 | B4, rd, ad);
    329 }
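// Note: LDRD/STRD transfer the register pair {rd, rd+1}, which is why both helpers
// above require an even-numbered first register.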
    330 
    331 
    332 void Arm32Assembler::ldm(BlockAddressMode am,
    333                        Register base,
    334                        RegList regs,
    335                        Condition cond) {
    336   EmitMultiMemOp(cond, am, true, base, regs);
    337 }
    338 
    339 
    340 void Arm32Assembler::stm(BlockAddressMode am,
    341                        Register base,
    342                        RegList regs,
    343                        Condition cond) {
    344   EmitMultiMemOp(cond, am, false, base, regs);
    345 }
    346 
    347 
    348 void Arm32Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
    349   EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
    350 }
    351 
    352 
    353 void Arm32Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
    354   EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
    355 }
    356 
    357 
    358 bool Arm32Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
    359   uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
    360   if (((imm32 & ((1 << 19) - 1)) == 0) &&
    361       ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
    362        (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    363     uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
    364         ((imm32 >> 19) & ((1 << 6) -1));
    365     EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
    366                sd, S0, S0);
    367     return true;
    368   }
    369   return false;
    370 }
    371 
    372 
    373 bool Arm32Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
    374   uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
    375   if (((imm64 & ((1LL << 48) - 1)) == 0) &&
    376       ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
    377        (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    378     uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
    379         ((imm64 >> 48) & ((1 << 6) -1));
    380     EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
    381                dd, D0, D0);
    382     return true;
    383   }
    384   return false;
    385 }
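// Note: the VFP immediate forms above can only encode values of the form
// +/-n * 2^-r with integer n in [16, 31] and r in [0, 7] (e.g. 0.5, 1.0, 10.0, 31.0).
// For anything else, such as 0.1, these functions return false and the caller has to
// materialize the constant by other means.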
    386 
    387 
    388 void Arm32Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
    389                            Condition cond) {
    390   EmitVFPsss(cond, B21 | B20, sd, sn, sm);
    391 }
    392 
    393 
    394 void Arm32Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
    395                            Condition cond) {
    396   EmitVFPddd(cond, B21 | B20, dd, dn, dm);
    397 }
    398 
    399 
    400 void Arm32Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
    401                            Condition cond) {
    402   EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
    403 }
    404 
    405 
    406 void Arm32Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
    407                            Condition cond) {
    408   EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
    409 }
    410 
    411 
    412 void Arm32Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
    413                            Condition cond) {
    414   EmitVFPsss(cond, B21, sd, sn, sm);
    415 }
    416 
    417 
    418 void Arm32Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
    419                            Condition cond) {
    420   EmitVFPddd(cond, B21, dd, dn, dm);
    421 }
    422 
    423 
    424 void Arm32Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
    425                            Condition cond) {
    426   EmitVFPsss(cond, 0, sd, sn, sm);
    427 }
    428 
    429 
    430 void Arm32Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
    431                            Condition cond) {
    432   EmitVFPddd(cond, 0, dd, dn, dm);
    433 }
    434 
    435 
    436 void Arm32Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
    437                            Condition cond) {
    438   EmitVFPsss(cond, B6, sd, sn, sm);
    439 }
    440 
    441 
    442 void Arm32Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
    443                            Condition cond) {
    444   EmitVFPddd(cond, B6, dd, dn, dm);
    445 }
    446 
    447 
    448 void Arm32Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
    449                            Condition cond) {
    450   EmitVFPsss(cond, B23, sd, sn, sm);
    451 }
    452 
    453 
    454 void Arm32Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
    455                            Condition cond) {
    456   EmitVFPddd(cond, B23, dd, dn, dm);
    457 }
    458 
    459 
    460 void Arm32Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
    461   EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
    462 }
    463 
    464 
    465 void Arm32Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
    466   EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
    467 }
    468 
    469 
    470 void Arm32Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
    471   EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
    472 }
    473 
    474 
    475 void Arm32Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
    476   EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
    477 }
    478 
    479 
    480 void Arm32Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
    481   EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
    482 }
    483 
    484 void Arm32Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
    485   EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
    486 }
    487 
    488 
    489 void Arm32Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
    490   EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
    491 }
    492 
    493 
    494 void Arm32Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
    495   EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
    496 }
    497 
    498 
    499 void Arm32Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
    500   EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
    501 }
    502 
    503 
    504 void Arm32Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
    505   EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
    506 }
    507 
    508 
    509 void Arm32Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
    510   EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
    511 }
    512 
    513 
    514 void Arm32Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
    515   EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
    516 }
    517 
    518 
    519 void Arm32Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
    520   EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
    521 }
    522 
    523 
    524 void Arm32Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
    525   EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
    526 }
    527 
    528 
    529 void Arm32Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
    530   EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
    531 }
    532 
    533 
    534 void Arm32Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
    535   EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
    536 }
    537 
    538 
    539 void Arm32Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
    540   EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
    541 }
    542 
    543 
    544 void Arm32Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
    545   EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
    546 }
    547 
    548 
    549 void Arm32Assembler::vcmpsz(SRegister sd, Condition cond) {
    550   EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
    551 }
    552 
    553 
    554 void Arm32Assembler::vcmpdz(DRegister dd, Condition cond) {
    555   EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
    556 }
    557 
    558 void Arm32Assembler::b(Label* label, Condition cond) {
    559   EmitBranch(cond, label, false);
    560 }
    561 
    562 
    563 void Arm32Assembler::bl(Label* label, Condition cond) {
    564   EmitBranch(cond, label, true);
    565 }
    566 
    567 
    568 void Arm32Assembler::MarkExceptionHandler(Label* label) {
    569   EmitType01(AL, 1, TST, 1, PC, R0, ShifterOperand(0));
    570   Label l;
    571   b(&l);
    572   EmitBranch(AL, label, false);
    573   Bind(&l);
    574 }
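// Note on the sequence above: "tst pc, #0" is otherwise never generated (tst() refuses
// rn == PC), so it can serve as a recognizable marker word; the branch over the
// following word lets a branch encoding to 'label' be embedded as data without ever
// being executed.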
    575 
    576 
    577 void Arm32Assembler::Emit(int32_t value) {
    578   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
    579   buffer_.Emit<int32_t>(value);
    580 }
    581 
    582 
    583 void Arm32Assembler::EmitType01(Condition cond,
    584                                 int type,
    585                                 Opcode opcode,
    586                                 int set_cc,
    587                                 Register rn,
    588                                 Register rd,
    589                                 const ShifterOperand& so) {
    590   CHECK_NE(rd, kNoRegister);
    591   CHECK_NE(cond, kNoCondition);
    592   int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
    593                      type << kTypeShift |
    594                      static_cast<int32_t>(opcode) << kOpcodeShift |
    595                      set_cc << kSShift |
    596                      static_cast<int32_t>(rn) << kRnShift |
    597                      static_cast<int32_t>(rd) << kRdShift |
    598                      so.encodingArm();
    599   Emit(encoding);
    600 }
    601 
    602 
    603 void Arm32Assembler::EmitType5(Condition cond, int offset, bool link) {
    604   CHECK_NE(cond, kNoCondition);
    605   int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
    606                      5 << kTypeShift |
    607                      (link ? 1 : 0) << kLinkShift;
    608   Emit(Arm32Assembler::EncodeBranchOffset(offset, encoding));
    609 }
    610 
    611 
    612 void Arm32Assembler::EmitMemOp(Condition cond,
    613                                bool load,
    614                                bool byte,
    615                                Register rd,
    616                                const Address& ad) {
    617   CHECK_NE(rd, kNoRegister);
    618   CHECK_NE(cond, kNoCondition);
    619   const Address& addr = static_cast<const Address&>(ad);
    620 
    621   int32_t encoding = 0;
    622   if (!ad.IsImmediate() && ad.GetRegisterOffset() == PC) {
    623     // PC relative LDR(literal)
    624     int32_t offset = ad.GetOffset();
    625     int32_t u = B23;
    626     if (offset < 0) {
    627       offset = -offset;
    628       u = 0;
    629     }
    630     CHECK_LT(offset, (1 << 12));
    631     encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    632          B26 | B24 | u | B20 |
    633          (load ? L : 0) |
    634          (byte ? B : 0) |
    635          (static_cast<int32_t>(rd) << kRdShift) |
    636          0xf << 16 |
    637          (offset & 0xfff);
    638 
    639   } else {
    640     encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    641         B26 |
    642         (load ? L : 0) |
    643         (byte ? B : 0) |
    644         (static_cast<int32_t>(rd) << kRdShift) |
    645         addr.encodingArm();
    646   }
    647   Emit(encoding);
    648 }
    649 
    650 
    651 void Arm32Assembler::EmitMemOpAddressMode3(Condition cond,
    652                                            int32_t mode,
    653                                            Register rd,
    654                                            const Address& ad) {
    655   CHECK_NE(rd, kNoRegister);
    656   CHECK_NE(cond, kNoCondition);
    657   const Address& addr = static_cast<const Address&>(ad);
    658   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    659                      B22  |
    660                      mode |
    661                      (static_cast<int32_t>(rd) << kRdShift) |
    662                      addr.encoding3();
    663   Emit(encoding);
    664 }
    665 
    666 
    667 void Arm32Assembler::EmitMultiMemOp(Condition cond,
    668                                     BlockAddressMode am,
    669                                     bool load,
    670                                     Register base,
    671                                     RegList regs) {
    672   CHECK_NE(base, kNoRegister);
    673   CHECK_NE(cond, kNoCondition);
    674   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    675                      B27 |
    676                      am |
    677                      (load ? L : 0) |
    678                      (static_cast<int32_t>(base) << kRnShift) |
    679                      regs;
    680   Emit(encoding);
    681 }
    682 
    683 
    684 void Arm32Assembler::EmitShiftImmediate(Condition cond,
    685                                         Shift opcode,
    686                                         Register rd,
    687                                         Register rm,
    688                                         const ShifterOperand& so) {
    689   CHECK_NE(cond, kNoCondition);
    690   CHECK(so.IsImmediate());
    691   int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
    692                      static_cast<int32_t>(MOV) << kOpcodeShift |
    693                      static_cast<int32_t>(rd) << kRdShift |
    694                      so.encodingArm() << kShiftImmShift |
    695                      static_cast<int32_t>(opcode) << kShiftShift |
    696                      static_cast<int32_t>(rm);
    697   Emit(encoding);
    698 }
    699 
    700 
    701 void Arm32Assembler::EmitShiftRegister(Condition cond,
    702                                        Shift opcode,
    703                                        Register rd,
    704                                        Register rm,
    705                                        const ShifterOperand& so) {
    706   CHECK_NE(cond, kNoCondition);
    707   CHECK(so.IsRegister());
    708   int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
    709                      static_cast<int32_t>(MOV) << kOpcodeShift |
    710                      static_cast<int32_t>(rd) << kRdShift |
    711                      so.encodingArm() << kShiftRegisterShift |
    712                      static_cast<int32_t>(opcode) << kShiftShift |
    713                      B4 |
    714                      static_cast<int32_t>(rm);
    715   Emit(encoding);
    716 }
    717 
    718 
    719 void Arm32Assembler::EmitBranch(Condition cond, Label* label, bool link) {
    720   if (label->IsBound()) {
    721     EmitType5(cond, label->Position() - buffer_.Size(), link);
    722   } else {
    723     int position = buffer_.Size();
    724     // Use the offset field of the branch instruction for linking the sites.
    725     EmitType5(cond, label->position_, link);
    726     label->LinkTo(position);
    727   }
    728 }
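// Note: while a label is unbound, its branch sites form a singly linked list threaded
// through the offset fields of the emitted branch instructions; Bind() below walks
// that chain and patches each site with the real offset.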
    729 
    730 
    731 void Arm32Assembler::clz(Register rd, Register rm, Condition cond) {
    732   CHECK_NE(rd, kNoRegister);
    733   CHECK_NE(rm, kNoRegister);
    734   CHECK_NE(cond, kNoCondition);
    735   CHECK_NE(rd, PC);
    736   CHECK_NE(rm, PC);
    737   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    738                      B24 | B22 | B21 | (0xf << 16) |
    739                      (static_cast<int32_t>(rd) << kRdShift) |
    740                      (0xf << 8) | B4 | static_cast<int32_t>(rm);
    741   Emit(encoding);
    742 }
    743 
    744 
    745 void Arm32Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
    746   CHECK_NE(cond, kNoCondition);
    747   int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
    748                      B25 | B24 | ((imm16 >> 12) << 16) |
    749                      static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
    750   Emit(encoding);
    751 }
    752 
    753 
    754 void Arm32Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
    755   CHECK_NE(cond, kNoCondition);
    756   int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
    757                      B25 | B24 | B22 | ((imm16 >> 12) << 16) |
    758                      static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
    759   Emit(encoding);
    760 }
    761 
    762 
    763 void Arm32Assembler::EmitMulOp(Condition cond, int32_t opcode,
    764                                Register rd, Register rn,
    765                                Register rm, Register rs) {
    766   CHECK_NE(rd, kNoRegister);
    767   CHECK_NE(rn, kNoRegister);
    768   CHECK_NE(rm, kNoRegister);
    769   CHECK_NE(rs, kNoRegister);
    770   CHECK_NE(cond, kNoCondition);
    771   int32_t encoding = opcode |
    772       (static_cast<int32_t>(cond) << kConditionShift) |
    773       (static_cast<int32_t>(rn) << kRnShift) |
    774       (static_cast<int32_t>(rd) << kRdShift) |
    775       (static_cast<int32_t>(rs) << kRsShift) |
    776       B7 | B4 |
    777       (static_cast<int32_t>(rm) << kRmShift);
    778   Emit(encoding);
    779 }
    780 
    781 
    782 void Arm32Assembler::ldrex(Register rt, Register rn, Condition cond) {
    783   CHECK_NE(rn, kNoRegister);
    784   CHECK_NE(rt, kNoRegister);
    785   CHECK_NE(cond, kNoCondition);
    786   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    787                      B24 |
    788                      B23 |
    789                      L   |
    790                      (static_cast<int32_t>(rn) << kLdExRnShift) |
    791                      (static_cast<int32_t>(rt) << kLdExRtShift) |
    792                      B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
    793   Emit(encoding);
    794 }
    795 
    796 
    797 void Arm32Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
    798   CHECK_NE(rn, kNoRegister);
    799   CHECK_NE(rt, kNoRegister);
    800   CHECK_NE(rt2, kNoRegister);
    801   CHECK_NE(rt, R14);
    802   CHECK_EQ(0u, static_cast<uint32_t>(rt) % 2);
    803   CHECK_EQ(static_cast<uint32_t>(rt) + 1, static_cast<uint32_t>(rt2));
    804   CHECK_NE(cond, kNoCondition);
    805 
    806   int32_t encoding =
    807       (static_cast<uint32_t>(cond) << kConditionShift) |
    808       B24 | B23 | B21 | B20 |
    809       static_cast<uint32_t>(rn) << 16 |
    810       static_cast<uint32_t>(rt) << 12 |
    811       B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
    812   Emit(encoding);
    813 }
    814 
    815 
    816 void Arm32Assembler::strex(Register rd,
    817                            Register rt,
    818                            Register rn,
    819                            Condition cond) {
    820   CHECK_NE(rn, kNoRegister);
    821   CHECK_NE(rd, kNoRegister);
    822   CHECK_NE(rt, kNoRegister);
    823   CHECK_NE(cond, kNoCondition);
    824   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    825                      B24 |
    826                      B23 |
    827                      (static_cast<int32_t>(rn) << kStrExRnShift) |
    828                      (static_cast<int32_t>(rd) << kStrExRdShift) |
    829                      B11 | B10 | B9 | B8 | B7 | B4 |
    830                      (static_cast<int32_t>(rt) << kStrExRtShift);
    831   Emit(encoding);
    832 }
    833 
    834 void Arm32Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
    835   CHECK_NE(rd, kNoRegister);
    836   CHECK_NE(rn, kNoRegister);
    837   CHECK_NE(rt, kNoRegister);
    838   CHECK_NE(rt2, kNoRegister);
    839   CHECK_NE(rt, R14);
    840   CHECK_NE(rd, rt);
    841   CHECK_NE(rd, rt2);
    842   CHECK_EQ(0u, static_cast<uint32_t>(rt) % 2);
    843   CHECK_EQ(static_cast<uint32_t>(rt) + 1, static_cast<uint32_t>(rt2));
    844   CHECK_NE(cond, kNoCondition);
    845 
    846   int32_t encoding =
    847       (static_cast<uint32_t>(cond) << kConditionShift) |
    848       B24 | B23 | B21 |
    849       static_cast<uint32_t>(rn) << 16 |
    850       static_cast<uint32_t>(rd) << 12 |
    851       B11 | B10 | B9 | B8 | B7 | B4 |
    852       static_cast<uint32_t>(rt);
    853   Emit(encoding);
    854 }
    855 
    856 
    857 void Arm32Assembler::clrex(Condition cond) {
    858   CHECK_EQ(cond, AL);   // This cannot be conditional on ARM.
    859   int32_t encoding = (kSpecialCondition << kConditionShift) |
    860                      B26 | B24 | B22 | B21 | B20 | (0xff << 12) | B4 | 0xf;
    861   Emit(encoding);
    862 }
    863 
    864 
    865 void Arm32Assembler::nop(Condition cond) {
    866   CHECK_NE(cond, kNoCondition);
    867   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    868                      B25 | B24 | B21 | (0xf << 12);
    869   Emit(encoding);
    870 }
    871 
    872 
    873 void Arm32Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
    874   CHECK_NE(sn, kNoSRegister);
    875   CHECK_NE(rt, kNoRegister);
    876   CHECK_NE(rt, SP);
    877   CHECK_NE(rt, PC);
    878   CHECK_NE(cond, kNoCondition);
    879   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    880                      B27 | B26 | B25 |
    881                      ((static_cast<int32_t>(sn) >> 1)*B16) |
    882                      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
    883                      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
    884   Emit(encoding);
    885 }
    886 
    887 
    888 void Arm32Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
    889   CHECK_NE(sn, kNoSRegister);
    890   CHECK_NE(rt, kNoRegister);
    891   CHECK_NE(rt, SP);
    892   CHECK_NE(rt, PC);
    893   CHECK_NE(cond, kNoCondition);
    894   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    895                      B27 | B26 | B25 | B20 |
    896                      ((static_cast<int32_t>(sn) >> 1)*B16) |
    897                      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
    898                      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
    899   Emit(encoding);
    900 }
    901 
    902 
    903 void Arm32Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
    904                              Condition cond) {
    905   CHECK_NE(sm, kNoSRegister);
    906   CHECK_NE(sm, S31);
    907   CHECK_NE(rt, kNoRegister);
    908   CHECK_NE(rt, SP);
    909   CHECK_NE(rt, PC);
    910   CHECK_NE(rt2, kNoRegister);
    911   CHECK_NE(rt2, SP);
    912   CHECK_NE(rt2, PC);
    913   CHECK_NE(cond, kNoCondition);
    914   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    915                      B27 | B26 | B22 |
    916                      (static_cast<int32_t>(rt2)*B16) |
    917                      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
    918                      ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
    919                      (static_cast<int32_t>(sm) >> 1);
    920   Emit(encoding);
    921 }
    922 
    923 
    924 void Arm32Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
    925                              Condition cond) {
    926   CHECK_NE(sm, kNoSRegister);
    927   CHECK_NE(sm, S31);
    928   CHECK_NE(rt, kNoRegister);
    929   CHECK_NE(rt, SP);
    930   CHECK_NE(rt, PC);
    931   CHECK_NE(rt2, kNoRegister);
    932   CHECK_NE(rt2, SP);
    933   CHECK_NE(rt2, PC);
    934   CHECK_NE(rt, rt2);
    935   CHECK_NE(cond, kNoCondition);
    936   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    937                      B27 | B26 | B22 | B20 |
    938                      (static_cast<int32_t>(rt2)*B16) |
    939                      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
    940                      ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
    941                      (static_cast<int32_t>(sm) >> 1);
    942   Emit(encoding);
    943 }
    944 
    945 
    946 void Arm32Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
    947                              Condition cond) {
    948   CHECK_NE(dm, kNoDRegister);
    949   CHECK_NE(rt, kNoRegister);
    950   CHECK_NE(rt, SP);
    951   CHECK_NE(rt, PC);
    952   CHECK_NE(rt2, kNoRegister);
    953   CHECK_NE(rt2, SP);
    954   CHECK_NE(rt2, PC);
    955   CHECK_NE(cond, kNoCondition);
    956   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    957                      B27 | B26 | B22 |
    958                      (static_cast<int32_t>(rt2)*B16) |
    959                      (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
    960                      ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
    961                      (static_cast<int32_t>(dm) & 0xf);
    962   Emit(encoding);
    963 }
    964 
    965 
    966 void Arm32Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
    967                              Condition cond) {
    968   CHECK_NE(dm, kNoDRegister);
    969   CHECK_NE(rt, kNoRegister);
    970   CHECK_NE(rt, SP);
    971   CHECK_NE(rt, PC);
    972   CHECK_NE(rt2, kNoRegister);
    973   CHECK_NE(rt2, SP);
    974   CHECK_NE(rt2, PC);
    975   CHECK_NE(rt, rt2);
    976   CHECK_NE(cond, kNoCondition);
    977   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    978                      B27 | B26 | B22 | B20 |
    979                      (static_cast<int32_t>(rt2)*B16) |
    980                      (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
    981                      ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
    982                      (static_cast<int32_t>(dm) & 0xf);
    983   Emit(encoding);
    984 }
    985 
    986 
    987 void Arm32Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
    988   const Address& addr = static_cast<const Address&>(ad);
    989   CHECK_NE(sd, kNoSRegister);
    990   CHECK_NE(cond, kNoCondition);
    991   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
    992                      B27 | B26 | B24 | B20 |
    993                      ((static_cast<int32_t>(sd) & 1)*B22) |
    994                      ((static_cast<int32_t>(sd) >> 1)*B12) |
    995                      B11 | B9 | addr.vencoding();
    996   Emit(encoding);
    997 }
    998 
    999 
   1000 void Arm32Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
   1001   const Address& addr = static_cast<const Address&>(ad);
   1002   CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
   1003   CHECK_NE(sd, kNoSRegister);
   1004   CHECK_NE(cond, kNoCondition);
   1005   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1006                      B27 | B26 | B24 |
   1007                      ((static_cast<int32_t>(sd) & 1)*B22) |
   1008                      ((static_cast<int32_t>(sd) >> 1)*B12) |
   1009                      B11 | B9 | addr.vencoding();
   1010   Emit(encoding);
   1011 }
   1012 
   1013 
   1014 void Arm32Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
   1015   const Address& addr = static_cast<const Address&>(ad);
   1016   CHECK_NE(dd, kNoDRegister);
   1017   CHECK_NE(cond, kNoCondition);
   1018   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1019                      B27 | B26 | B24 | B20 |
   1020                      ((static_cast<int32_t>(dd) >> 4)*B22) |
   1021                      ((static_cast<int32_t>(dd) & 0xf)*B12) |
   1022                      B11 | B9 | B8 | addr.vencoding();
   1023   Emit(encoding);
   1024 }
   1025 
   1026 
   1027 void Arm32Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
   1028   const Address& addr = static_cast<const Address&>(ad);
   1029   CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
   1030   CHECK_NE(dd, kNoDRegister);
   1031   CHECK_NE(cond, kNoCondition);
   1032   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1033                      B27 | B26 | B24 |
   1034                      ((static_cast<int32_t>(dd) >> 4)*B22) |
   1035                      ((static_cast<int32_t>(dd) & 0xf)*B12) |
   1036                      B11 | B9 | B8 | addr.vencoding();
   1037   Emit(encoding);
   1038 }
   1039 
   1040 
   1041 void Arm32Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
   1042   EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
   1043 }
   1044 
   1045 
   1046 void Arm32Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
   1047   EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
   1048 }
   1049 
   1050 
   1051 void Arm32Assembler::vpops(SRegister reg, int nregs, Condition cond) {
   1052   EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
   1053 }
   1054 
   1055 
   1056 void Arm32Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
   1057   EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
   1058 }
   1059 
   1060 
   1061 void Arm32Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
   1062   CHECK_NE(cond, kNoCondition);
   1063   CHECK_GT(nregs, 0);
   1064   uint32_t D;
   1065   uint32_t Vd;
   1066   if (dbl) {
   1067     // Encoded as D:Vd.
   1068     D = (reg >> 4) & 1;
   1069     Vd = reg & 15U /* 0b1111 */;
   1070   } else {
   1071     // Encoded as Vd:D.
   1072     D = reg & 1;
   1073     Vd = (reg >> 1) & 15U /* 0b1111 */;
   1074   }
   1075   int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
   1076                     B11 | B9 |
   1077         (dbl ? B8 : 0) |
   1078         (push ? B24 : (B23 | B20)) |
   1079         static_cast<int32_t>(cond) << kConditionShift |
   1080         nregs << (dbl ? 1 : 0) |
   1081         D << 22 |
   1082         Vd << 12;
   1083   Emit(encoding);
   1084 }
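// Example: vpushd(D8, 8) encodes vpush {d8-d15}: the D:Vd fields become 0 and 8, and
// the register-count field is nregs << 1 = 16, i.e. the number of 32-bit words
// transferred.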
   1085 
   1086 
   1087 void Arm32Assembler::EmitVFPsss(Condition cond, int32_t opcode,
   1088                                 SRegister sd, SRegister sn, SRegister sm) {
   1089   CHECK_NE(sd, kNoSRegister);
   1090   CHECK_NE(sn, kNoSRegister);
   1091   CHECK_NE(sm, kNoSRegister);
   1092   CHECK_NE(cond, kNoCondition);
   1093   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1094                      B27 | B26 | B25 | B11 | B9 | opcode |
   1095                      ((static_cast<int32_t>(sd) & 1)*B22) |
   1096                      ((static_cast<int32_t>(sn) >> 1)*B16) |
   1097                      ((static_cast<int32_t>(sd) >> 1)*B12) |
   1098                      ((static_cast<int32_t>(sn) & 1)*B7) |
   1099                      ((static_cast<int32_t>(sm) & 1)*B5) |
   1100                      (static_cast<int32_t>(sm) >> 1);
   1101   Emit(encoding);
   1102 }
   1103 
   1104 
   1105 void Arm32Assembler::EmitVFPddd(Condition cond, int32_t opcode,
   1106                                 DRegister dd, DRegister dn, DRegister dm) {
   1107   CHECK_NE(dd, kNoDRegister);
   1108   CHECK_NE(dn, kNoDRegister);
   1109   CHECK_NE(dm, kNoDRegister);
   1110   CHECK_NE(cond, kNoCondition);
   1111   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1112                      B27 | B26 | B25 | B11 | B9 | B8 | opcode |
   1113                      ((static_cast<int32_t>(dd) >> 4)*B22) |
   1114                      ((static_cast<int32_t>(dn) & 0xf)*B16) |
   1115                      ((static_cast<int32_t>(dd) & 0xf)*B12) |
   1116                      ((static_cast<int32_t>(dn) >> 4)*B7) |
   1117                      ((static_cast<int32_t>(dm) >> 4)*B5) |
   1118                      (static_cast<int32_t>(dm) & 0xf);
   1119   Emit(encoding);
   1120 }
   1121 
   1122 
   1123 void Arm32Assembler::EmitVFPsd(Condition cond, int32_t opcode,
   1124                                SRegister sd, DRegister dm) {
   1125   CHECK_NE(sd, kNoSRegister);
   1126   CHECK_NE(dm, kNoDRegister);
   1127   CHECK_NE(cond, kNoCondition);
   1128   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1129                      B27 | B26 | B25 | B11 | B9 | opcode |
   1130                      ((static_cast<int32_t>(sd) & 1)*B22) |
   1131                      ((static_cast<int32_t>(sd) >> 1)*B12) |
   1132                      ((static_cast<int32_t>(dm) >> 4)*B5) |
   1133                      (static_cast<int32_t>(dm) & 0xf);
   1134   Emit(encoding);
   1135 }
   1136 
   1137 
   1138 void Arm32Assembler::EmitVFPds(Condition cond, int32_t opcode,
   1139                              DRegister dd, SRegister sm) {
   1140   CHECK_NE(dd, kNoDRegister);
   1141   CHECK_NE(sm, kNoSRegister);
   1142   CHECK_NE(cond, kNoCondition);
   1143   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1144                      B27 | B26 | B25 | B11 | B9 | opcode |
   1145                      ((static_cast<int32_t>(dd) >> 4)*B22) |
   1146                      ((static_cast<int32_t>(dd) & 0xf)*B12) |
   1147                      ((static_cast<int32_t>(sm) & 1)*B5) |
   1148                      (static_cast<int32_t>(sm) >> 1);
   1149   Emit(encoding);
   1150 }
   1151 
   1152 
   1153 void Arm32Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
   1154                          bool setcc, Condition cond) {
   1155   CHECK_LE(shift_imm, 31u);
   1156   if (setcc) {
   1157     movs(rd, ShifterOperand(rm, LSL, shift_imm), cond);
   1158   } else {
   1159     mov(rd, ShifterOperand(rm, LSL, shift_imm), cond);
   1160   }
   1161 }
   1162 
   1163 
   1164 void Arm32Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
   1165                          bool setcc, Condition cond) {
   1166   CHECK(1u <= shift_imm && shift_imm <= 32u);
    1167   if (shift_imm == 32) shift_imm = 0;  // Comply with UAL syntax.
   1168   if (setcc) {
   1169     movs(rd, ShifterOperand(rm, LSR, shift_imm), cond);
   1170   } else {
   1171     mov(rd, ShifterOperand(rm, LSR, shift_imm), cond);
   1172   }
   1173 }
   1174 
   1175 
   1176 void Arm32Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
   1177                          bool setcc, Condition cond) {
   1178   CHECK(1u <= shift_imm && shift_imm <= 32u);
    1179   if (shift_imm == 32) shift_imm = 0;  // Comply with UAL syntax.
   1180   if (setcc) {
   1181     movs(rd, ShifterOperand(rm, ASR, shift_imm), cond);
   1182   } else {
   1183     mov(rd, ShifterOperand(rm, ASR, shift_imm), cond);
   1184   }
   1185 }
   1186 
   1187 
   1188 void Arm32Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
   1189                          bool setcc, Condition cond) {
   1190   CHECK(1u <= shift_imm && shift_imm <= 31u);
   1191   if (setcc) {
   1192     movs(rd, ShifterOperand(rm, ROR, shift_imm), cond);
   1193   } else {
   1194     mov(rd, ShifterOperand(rm, ROR, shift_imm), cond);
   1195   }
   1196 }
   1197 
   1198 void Arm32Assembler::Rrx(Register rd, Register rm, bool setcc, Condition cond) {
   1199   if (setcc) {
   1200     movs(rd, ShifterOperand(rm, ROR, 0), cond);
   1201   } else {
   1202     mov(rd, ShifterOperand(rm, ROR, 0), cond);
   1203   }
   1204 }
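// Note: in the ARM shifter operand, ROR with a shift amount of zero is the encoding
// for RRX (rotate right with extend through the carry flag), which is what Rrx()
// relies on above.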
   1205 
   1206 
   1207 void Arm32Assembler::Lsl(Register rd, Register rm, Register rn,
   1208                          bool setcc, Condition cond) {
   1209   if (setcc) {
   1210     movs(rd, ShifterOperand(rm, LSL, rn), cond);
   1211   } else {
   1212     mov(rd, ShifterOperand(rm, LSL, rn), cond);
   1213   }
   1214 }
   1215 
   1216 
   1217 void Arm32Assembler::Lsr(Register rd, Register rm, Register rn,
   1218                          bool setcc, Condition cond) {
   1219   if (setcc) {
   1220     movs(rd, ShifterOperand(rm, LSR, rn), cond);
   1221   } else {
   1222     mov(rd, ShifterOperand(rm, LSR, rn), cond);
   1223   }
   1224 }
   1225 
   1226 
   1227 void Arm32Assembler::Asr(Register rd, Register rm, Register rn,
   1228                          bool setcc, Condition cond) {
   1229   if (setcc) {
   1230     movs(rd, ShifterOperand(rm, ASR, rn), cond);
   1231   } else {
   1232     mov(rd, ShifterOperand(rm, ASR, rn), cond);
   1233   }
   1234 }
   1235 
   1236 
   1237 void Arm32Assembler::Ror(Register rd, Register rm, Register rn,
   1238                          bool setcc, Condition cond) {
   1239   if (setcc) {
   1240     movs(rd, ShifterOperand(rm, ROR, rn), cond);
   1241   } else {
   1242     mov(rd, ShifterOperand(rm, ROR, rn), cond);
   1243   }
   1244 }
   1245 
   1246 void Arm32Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR
   1247   CHECK_NE(cond, kNoCondition);
   1248   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1249       B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
   1250       (static_cast<int32_t>(PC)*B12) |
   1251       B11 | B9 | B4;
   1252   Emit(encoding);
   1253 }
   1254 
   1255 
   1256 void Arm32Assembler::svc(uint32_t imm24) {
   1257   CHECK(IsUint<24>(imm24)) << imm24;
   1258   int32_t encoding = (AL << kConditionShift) | B27 | B26 | B25 | B24 | imm24;
   1259   Emit(encoding);
   1260 }
   1261 
   1262 
   1263 void Arm32Assembler::bkpt(uint16_t imm16) {
   1264   int32_t encoding = (AL << kConditionShift) | B24 | B21 |
   1265                      ((imm16 >> 4) << 8) | B6 | B5 | B4 | (imm16 & 0xf);
   1266   Emit(encoding);
   1267 }
   1268 
   1269 
   1270 void Arm32Assembler::blx(Register rm, Condition cond) {
   1271   CHECK_NE(rm, kNoRegister);
   1272   CHECK_NE(cond, kNoCondition);
   1273   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1274                      B24 | B21 | (0xfff << 8) | B5 | B4 |
   1275                      (static_cast<int32_t>(rm) << kRmShift);
   1276   Emit(encoding);
   1277 }
   1278 
   1279 
   1280 void Arm32Assembler::bx(Register rm, Condition cond) {
   1281   CHECK_NE(rm, kNoRegister);
   1282   CHECK_NE(cond, kNoCondition);
   1283   int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
   1284                      B24 | B21 | (0xfff << 8) | B4 |
   1285                      (static_cast<int32_t>(rm) << kRmShift);
   1286   Emit(encoding);
   1287 }
   1288 
   1289 
   1290 void Arm32Assembler::Push(Register rd, Condition cond) {
   1291   str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
   1292 }
   1293 
   1294 
   1295 void Arm32Assembler::Pop(Register rd, Condition cond) {
   1296   ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
   1297 }
   1298 
   1299 
   1300 void Arm32Assembler::PushList(RegList regs, Condition cond) {
   1301   stm(DB_W, SP, regs, cond);
   1302 }
   1303 
   1304 
   1305 void Arm32Assembler::PopList(RegList regs, Condition cond) {
   1306   ldm(IA_W, SP, regs, cond);
   1307 }
   1308 
   1309 
   1310 void Arm32Assembler::Mov(Register rd, Register rm, Condition cond) {
   1311   if (rd != rm) {
   1312     mov(rd, ShifterOperand(rm), cond);
   1313   }
   1314 }
   1315 
   1316 
   1317 void Arm32Assembler::Bind(Label* label) {
   1318   CHECK(!label->IsBound());
   1319   int bound_pc = buffer_.Size();
   1320   while (label->IsLinked()) {
   1321     int32_t position = label->Position();
   1322     int32_t next = buffer_.Load<int32_t>(position);
   1323     int32_t encoded = Arm32Assembler::EncodeBranchOffset(bound_pc - position, next);
   1324     buffer_.Store<int32_t>(position, encoded);
   1325     label->position_ = Arm32Assembler::DecodeBranchOffset(next);
   1326   }
   1327   label->BindTo(bound_pc);
   1328 }
   1329 
   1330 
   1331 int32_t Arm32Assembler::EncodeBranchOffset(int offset, int32_t inst) {
   1332   // The offset is off by 8 due to the way the ARM CPUs read PC.
   1333   offset -= 8;
   1334   CHECK_ALIGNED(offset, 4);
   1335   CHECK(IsInt(POPCOUNT(kBranchOffsetMask), offset)) << offset;
   1336 
   1337   // Properly preserve only the bits supported in the instruction.
   1338   offset >>= 2;
   1339   offset &= kBranchOffsetMask;
   1340   return (inst & ~kBranchOffsetMask) | offset;
   1341 }
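// Example: a branch to the instruction immediately following it has offset = +4
// relative to the branch itself; after the PC+8 adjustment and the >> 2 this is
// stored as imm24 = -1 (0xffffff).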
   1342 
   1343 
   1344 int Arm32Assembler::DecodeBranchOffset(int32_t inst) {
   1345   // Sign-extend, left-shift by 2, then add 8.
   1346   return ((((inst & kBranchOffsetMask) << 8) >> 6) + 8);
   1347 }
   1348 
   1349 
   1350 void Arm32Assembler::AddConstant(Register rd, int32_t value, Condition cond) {
   1351   AddConstant(rd, rd, value, cond);
   1352 }
   1353 
   1354 
   1355 void Arm32Assembler::AddConstant(Register rd, Register rn, int32_t value,
   1356                                  Condition cond) {
   1357   if (value == 0) {
   1358     if (rd != rn) {
   1359       mov(rd, ShifterOperand(rn), cond);
   1360     }
   1361     return;
   1362   }
    1363   // We prefer the shorter code sequence over selecting add for positive values
    1364   // and sub for negative ones, even though the latter would slightly improve the
    1365   // readability of the generated code for some constants.
   1366   ShifterOperand shifter_op;
   1367   if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
   1368     add(rd, rn, shifter_op, cond);
   1369   } else if (ShifterOperandCanHoldArm32(-value, &shifter_op)) {
   1370     sub(rd, rn, shifter_op, cond);
   1371   } else {
   1372     CHECK(rn != IP);
   1373     if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
   1374       mvn(IP, shifter_op, cond);
   1375       add(rd, rn, ShifterOperand(IP), cond);
   1376     } else if (ShifterOperandCanHoldArm32(~(-value), &shifter_op)) {
   1377       mvn(IP, shifter_op, cond);
   1378       sub(rd, rn, ShifterOperand(IP), cond);
   1379     } else {
   1380       movw(IP, Low16Bits(value), cond);
   1381       uint16_t value_high = High16Bits(value);
   1382       if (value_high != 0) {
   1383         movt(IP, value_high, cond);
   1384       }
   1385       add(rd, rn, ShifterOperand(IP), cond);
   1386     }
   1387   }
   1388 }
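// Note: the fallback order above is add(imm), sub(-imm), mvn(~imm) + add,
// mvn(~(-imm)) + sub, and finally movw/movt into IP followed by an add, so at most
// three instructions are ever emitted for an arbitrary 32-bit constant.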
   1389 
   1390 
   1391 void Arm32Assembler::AddConstantSetFlags(Register rd, Register rn, int32_t value,
   1392                                          Condition cond) {
   1393   ShifterOperand shifter_op;
   1394   if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
   1395     adds(rd, rn, shifter_op, cond);
   1396   } else if (ShifterOperandCanHoldArm32(-value, &shifter_op)) {
   1397     subs(rd, rn, shifter_op, cond);
   1398   } else {
   1399     CHECK(rn != IP);
   1400     if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
   1401       mvn(IP, shifter_op, cond);
   1402       adds(rd, rn, ShifterOperand(IP), cond);
   1403     } else if (ShifterOperandCanHoldArm32(~(-value), &shifter_op)) {
   1404       mvn(IP, shifter_op, cond);
   1405       subs(rd, rn, ShifterOperand(IP), cond);
   1406     } else {
   1407       movw(IP, Low16Bits(value), cond);
   1408       uint16_t value_high = High16Bits(value);
   1409       if (value_high != 0) {
   1410         movt(IP, value_high, cond);
   1411       }
   1412       adds(rd, rn, ShifterOperand(IP), cond);
   1413     }
   1414   }
   1415 }
   1416 
   1417 void Arm32Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
   1418   ShifterOperand shifter_op;
   1419   if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
   1420     mov(rd, shifter_op, cond);
   1421   } else if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
   1422     mvn(rd, shifter_op, cond);
   1423   } else {
   1424     movw(rd, Low16Bits(value), cond);
   1425     uint16_t value_high = High16Bits(value);
   1426     if (value_high != 0) {
   1427       movt(rd, value_high, cond);
   1428     }
   1429   }
   1430 }
   1431 
   1432 
   1433 // Implementation note: this method must emit at most one instruction when
   1434 // Address::CanHoldLoadOffsetArm.
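         // When the offset cannot be encoded in the addressing mode, it is first
         // materialized into IP and added to the base, which is why the base
         // register must not be IP in that case.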
   1435 void Arm32Assembler::LoadFromOffset(LoadOperandType type,
   1436                                     Register reg,
   1437                                     Register base,
   1438                                     int32_t offset,
   1439                                     Condition cond) {
   1440   if (!Address::CanHoldLoadOffsetArm(type, offset)) {
   1441     CHECK(base != IP);
   1442     LoadImmediate(IP, offset, cond);
   1443     add(IP, IP, ShifterOperand(base), cond);
   1444     base = IP;
   1445     offset = 0;
   1446   }
   1447   CHECK(Address::CanHoldLoadOffsetArm(type, offset));
   1448   switch (type) {
   1449     case kLoadSignedByte:
   1450       ldrsb(reg, Address(base, offset), cond);
   1451       break;
   1452     case kLoadUnsignedByte:
   1453       ldrb(reg, Address(base, offset), cond);
   1454       break;
   1455     case kLoadSignedHalfword:
   1456       ldrsh(reg, Address(base, offset), cond);
   1457       break;
   1458     case kLoadUnsignedHalfword:
   1459       ldrh(reg, Address(base, offset), cond);
   1460       break;
   1461     case kLoadWord:
   1462       ldr(reg, Address(base, offset), cond);
   1463       break;
   1464     case kLoadWordPair:
   1465       ldrd(reg, Address(base, offset), cond);
   1466       break;
   1467     default:
   1468       LOG(FATAL) << "UNREACHABLE";
   1469       UNREACHABLE();
   1470   }
   1471 }
   1472 
   1473 
   1474 // Implementation note: this method must emit at most one instruction when
   1475 // Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
   1476 void Arm32Assembler::LoadSFromOffset(SRegister reg,
   1477                                      Register base,
   1478                                      int32_t offset,
   1479                                      Condition cond) {
   1480   if (!Address::CanHoldLoadOffsetArm(kLoadSWord, offset)) {
   1481     CHECK_NE(base, IP);
   1482     LoadImmediate(IP, offset, cond);
   1483     add(IP, IP, ShifterOperand(base), cond);
   1484     base = IP;
   1485     offset = 0;
   1486   }
   1487   CHECK(Address::CanHoldLoadOffsetArm(kLoadSWord, offset));
   1488   vldrs(reg, Address(base, offset), cond);
   1489 }
   1490 
   1491 
   1492 // Implementation note: this method must emit at most one instruction when
   1493 // Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
   1494 void Arm32Assembler::LoadDFromOffset(DRegister reg,
   1495                                      Register base,
   1496                                      int32_t offset,
   1497                                      Condition cond) {
   1498   if (!Address::CanHoldLoadOffsetArm(kLoadDWord, offset)) {
   1499     CHECK_NE(base, IP);
   1500     LoadImmediate(IP, offset, cond);
   1501     add(IP, IP, ShifterOperand(base), cond);
   1502     base = IP;
   1503     offset = 0;
   1504   }
   1505   CHECK(Address::CanHoldLoadOffsetArm(kLoadDWord, offset));
   1506   vldrd(reg, Address(base, offset), cond);
   1507 }
   1508 
   1509 
   1510 // Implementation note: this method must emit at most one instruction when
   1511 // Address::CanHoldStoreOffsetArm.
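         // As with LoadFromOffset, an out-of-range offset is materialized into IP;
         // here the register being stored must also differ from IP, since IP would
         // otherwise be clobbered before the store.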
   1512 void Arm32Assembler::StoreToOffset(StoreOperandType type,
   1513                                    Register reg,
   1514                                    Register base,
   1515                                    int32_t offset,
   1516                                    Condition cond) {
   1517   if (!Address::CanHoldStoreOffsetArm(type, offset)) {
   1518     CHECK(reg != IP);
   1519     CHECK(base != IP);
   1520     LoadImmediate(IP, offset, cond);
   1521     add(IP, IP, ShifterOperand(base), cond);
   1522     base = IP;
   1523     offset = 0;
   1524   }
   1525   CHECK(Address::CanHoldStoreOffsetArm(type, offset));
   1526   switch (type) {
   1527     case kStoreByte:
   1528       strb(reg, Address(base, offset), cond);
   1529       break;
   1530     case kStoreHalfword:
   1531       strh(reg, Address(base, offset), cond);
   1532       break;
   1533     case kStoreWord:
   1534       str(reg, Address(base, offset), cond);
   1535       break;
   1536     case kStoreWordPair:
   1537       strd(reg, Address(base, offset), cond);
   1538       break;
   1539     default:
   1540       LOG(FATAL) << "UNREACHABLE";
   1541       UNREACHABLE();
   1542   }
   1543 }
   1544 
   1545 
   1546 // Implementation note: this method must emit at most one instruction when
   1547 // Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreToOffset.
   1548 void Arm32Assembler::StoreSToOffset(SRegister reg,
   1549                                     Register base,
   1550                                     int32_t offset,
   1551                                     Condition cond) {
   1552   if (!Address::CanHoldStoreOffsetArm(kStoreSWord, offset)) {
   1553     CHECK_NE(base, IP);
   1554     LoadImmediate(IP, offset, cond);
   1555     add(IP, IP, ShifterOperand(base), cond);
   1556     base = IP;
   1557     offset = 0;
   1558   }
   1559   CHECK(Address::CanHoldStoreOffsetArm(kStoreSWord, offset));
   1560   vstrs(reg, Address(base, offset), cond);
   1561 }
   1562 
   1563 
   1564 // Implementation note: this method must emit at most one instruction when
   1565 // Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreSToOffset.
   1566 void Arm32Assembler::StoreDToOffset(DRegister reg,
   1567                                     Register base,
   1568                                     int32_t offset,
   1569                                     Condition cond) {
   1570   if (!Address::CanHoldStoreOffsetArm(kStoreDWord, offset)) {
   1571     CHECK_NE(base, IP);
   1572     LoadImmediate(IP, offset, cond);
   1573     add(IP, IP, ShifterOperand(base), cond);
   1574     base = IP;
   1575     offset = 0;
   1576   }
   1577   CHECK(Address::CanHoldStoreOffsetArm(kStoreDWord, offset));
   1578   vstrd(reg, Address(base, offset), cond);
   1579 }
   1580 
   1581 
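         // Emits a full "dmb sy" barrier; the mscratch parameter is only checked
         // here (it must be R12), not actually used.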
   1582 void Arm32Assembler::MemoryBarrier(ManagedRegister mscratch) {
   1583   CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
   1584   dmb(SY);
   1585 }
   1586 
   1587 
   1588 void Arm32Assembler::dmb(DmbOptions flavor) {
    1589   int32_t encoding = 0xf57ff050;  // dmb (A1) with the 4-bit barrier option field cleared.
    1590   Emit(encoding | flavor);  // OR in the requested barrier option (SY, ISH, ...).
   1591 }
   1592 
   1593 
   1594 void Arm32Assembler::cbz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
   1595   LOG(FATAL) << "cbz is not supported on ARM32";
   1596 }
   1597 
   1598 
   1599 void Arm32Assembler::cbnz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
   1600   LOG(FATAL) << "cbnz is not supported on ARM32";
   1601 }
   1602 
   1603 
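         // The A32 instruction set has no cbz/cbnz, so compare-and-branch against
         // zero is synthesized as a cmp followed by a conditional branch.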
   1604 void Arm32Assembler::CompareAndBranchIfZero(Register r, Label* label) {
   1605   cmp(r, ShifterOperand(0));
   1606   b(label, EQ);
   1607 }
   1608 
   1609 
   1610 void Arm32Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
   1611   cmp(r, ShifterOperand(0));
   1612   b(label, NE);
   1613 }
   1614 
   1615 
   1616 }  // namespace arm
   1617 }  // namespace art
   1618