/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm32.h"

#include "base/bit_utils.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"

namespace art {
namespace arm {

bool Arm32Assembler::ShifterOperandCanHoldArm32(uint32_t immediate, ShifterOperand* shifter_op) {
  // Avoid the more expensive test for frequent small immediate values.
  if (immediate < (1 << kImmed8Bits)) {
    shifter_op->type_ = ShifterOperand::kImmediate;
    shifter_op->is_rotate_ = true;
    shifter_op->rotate_ = 0;
    shifter_op->immed_ = immediate;
    return true;
  }
  // Note that immediate must be unsigned for the test to work correctly.
  for (int rot = 0; rot < 16; rot++) {
    uint32_t imm8 = (immediate << 2*rot) | (immediate >> (32 - 2*rot));
    if (imm8 < (1 << kImmed8Bits)) {
      shifter_op->type_ = ShifterOperand::kImmediate;
      shifter_op->is_rotate_ = true;
      shifter_op->rotate_ = rot;
      shifter_op->immed_ = imm8;
      return true;
    }
  }
  return false;
}
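
// A worked example of the rotation test above (ARM's "modified immediate"
// form: an 8-bit value rotated right by an even amount, 2 * rotate_):
// 0x3FC00000 rotated left by 10 bits (rot = 5) yields 0xFF, so it is
// encodable as immed_ = 0xFF with rotate_ = 5, i.e. 0xFF ROR 10. A value
// like 0x12345678 matches no rotation, so callers must fall back to a
// register operand, e.g. a movw/movt pair (see LoadImmediate below).
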
bool Arm32Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
  ShifterOperand shifter_op;
  return ShifterOperandCanHoldArm32(immediate, &shifter_op);
}

bool Arm32Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
                                           Register rn ATTRIBUTE_UNUSED,
                                           Opcode opcode ATTRIBUTE_UNUSED,
                                           uint32_t immediate,
                                           SetCc set_cc ATTRIBUTE_UNUSED,
                                           ShifterOperand* shifter_op) {
  return ShifterOperandCanHoldArm32(immediate, shifter_op);
}

void Arm32Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), AND, set_cc, rn, rd, so);
}


void Arm32Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), EOR, set_cc, rn, rd, so);
}


void Arm32Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), SUB, set_cc, rn, rd, so);
}

void Arm32Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), RSB, set_cc, rn, rd, so);
}

void Arm32Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), ADD, set_cc, rn, rd, so);
}


void Arm32Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), ADC, set_cc, rn, rd, so);
}


void Arm32Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), SBC, set_cc, rn, rd, so);
}


void Arm32Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), RSC, set_cc, rn, rd, so);
}


void Arm32Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitType01(cond, so.type(), TST, kCcSet, rn, R0, so);
}


void Arm32Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitType01(cond, so.type(), TEQ, kCcSet, rn, R0, so);
}


void Arm32Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), CMP, kCcSet, rn, R0, so);
}


void Arm32Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitType01(cond, so.type(), CMN, kCcSet, rn, R0, so);
}


void Arm32Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), ORR, set_cc, rn, rd, so);
}


void Arm32Assembler::orn(Register rd ATTRIBUTE_UNUSED,
                         Register rn ATTRIBUTE_UNUSED,
                         const ShifterOperand& so ATTRIBUTE_UNUSED,
                         Condition cond ATTRIBUTE_UNUSED,
                         SetCc set_cc ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "orn is not supported on ARM32";
}


void Arm32Assembler::mov(Register rd, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), MOV, set_cc, R0, rd, so);
}


void Arm32Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), BIC, set_cc, rn, rd, so);
}


void Arm32Assembler::mvn(Register rd, const ShifterOperand& so,
                         Condition cond, SetCc set_cc) {
  EmitType01(cond, so.type(), MVN, set_cc, R0, rd, so);
}


void Arm32Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
  // Assembler registers rd, rn, rm are encoded as rn, rm, rs.
  EmitMulOp(cond, 0, R0, rd, rn, rm);
}


void Arm32Assembler::mla(Register rd, Register rn, Register rm, Register ra,
                         Condition cond) {
  // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
  EmitMulOp(cond, B21, ra, rd, rn, rm);
}


void Arm32Assembler::mls(Register rd, Register rn, Register rm, Register ra,
                         Condition cond) {
  // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
  EmitMulOp(cond, B22 | B21, ra, rd, rn, rm);
}


void Arm32Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
                           Register rm, Condition cond) {
  // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
  EmitMulOp(cond, B23 | B22, rd_lo, rd_hi, rn, rm);
}


void Arm32Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
                           Register rm, Condition cond) {
  // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
  EmitMulOp(cond, B23, rd_lo, rd_hi, rn, rm);
}
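

// A note on the two divide instructions that follow: in the A32 instruction
// set, SDIV and UDIV are an optional extension (not all ARMv7-A cores have
// them), so callers are presumably expected to check the CPU's instruction
// set features before emitting them.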
void Arm32Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = B26 | B25 | B24 | B20 |
      B15 | B14 | B13 | B12 |
      (static_cast<int32_t>(cond) << kConditionShift) |
      (static_cast<int32_t>(rn) << 0) |
      (static_cast<int32_t>(rd) << 16) |
      (static_cast<int32_t>(rm) << 8) |
      B4;
  Emit(encoding);
}


void Arm32Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = B26 | B25 | B24 | B21 | B20 |
      B15 | B14 | B13 | B12 |
      (static_cast<int32_t>(cond) << kConditionShift) |
      (static_cast<int32_t>(rn) << 0) |
      (static_cast<int32_t>(rd) << 16) |
      (static_cast<int32_t>(rm) << 8) |
      B4;
  Emit(encoding);
}


void Arm32Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;

  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B26 | B25 | B24 | B23 | B21 |
      (widthminus1 << 16) |
      (static_cast<uint32_t>(rd) << 12) |
      (lsb << 7) |
      B6 | B4 |
      static_cast<uint32_t>(rn);
  Emit(encoding);
}


void Arm32Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;

  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B26 | B25 | B24 | B23 | B22 | B21 |
      (widthminus1 << 16) |
      (static_cast<uint32_t>(rd) << 12) |
      (lsb << 7) |
      B6 | B4 |
      static_cast<uint32_t>(rn);
  Emit(encoding);
}


void Arm32Assembler::ldr(Register rd, const Address& ad, Condition cond) {
  EmitMemOp(cond, true, false, rd, ad);
}


void Arm32Assembler::str(Register rd, const Address& ad, Condition cond) {
  EmitMemOp(cond, false, false, rd, ad);
}


void Arm32Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  EmitMemOp(cond, true, true, rd, ad);
}


void Arm32Assembler::strb(Register rd, const Address& ad, Condition cond) {
  EmitMemOp(cond, false, true, rd, ad);
}


void Arm32Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  EmitMemOpAddressMode3(cond, L | B7 | H | B4, rd, ad);
}


void Arm32Assembler::strh(Register rd, const Address& ad, Condition cond) {
  EmitMemOpAddressMode3(cond, B7 | H | B4, rd, ad);
}


void Arm32Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  EmitMemOpAddressMode3(cond, L | B7 | B6 | B4, rd, ad);
}


void Arm32Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  EmitMemOpAddressMode3(cond, L | B7 | B6 | H | B4, rd, ad);
}


void Arm32Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  CHECK_EQ(rd % 2, 0);
  EmitMemOpAddressMode3(cond, B7 | B6 | B4, rd, ad);
}


void Arm32Assembler::strd(Register rd, const Address& ad, Condition cond) {
  CHECK_EQ(rd % 2, 0);
  EmitMemOpAddressMode3(cond, B7 | B6 | B5 | B4, rd, ad);
}
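
// The halfword, signed-byte and doubleword accesses above use the ARM
// "addressing mode 3" encodings (the B7 | ... | B4 patterns select among
// LDRH/STRH/LDRSB/LDRSH/LDRD/STRD). This mode only carries an 8-bit
// immediate split into two 4-bit halves, which Address::encoding3()
// presumably produces. LDRD/STRD additionally require an even-numbered
// first register, hence the CHECK_EQ(rd % 2, 0) above.
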
void Arm32Assembler::ldm(BlockAddressMode am,
                         Register base,
                         RegList regs,
                         Condition cond) {
  EmitMultiMemOp(cond, am, true, base, regs);
}


void Arm32Assembler::stm(BlockAddressMode am,
                         Register base,
                         RegList regs,
                         Condition cond) {
  EmitMultiMemOp(cond, am, false, base, regs);
}


void Arm32Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
}


void Arm32Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
}


bool Arm32Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) - 1)))) {
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) - 1));
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}


bool Arm32Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) - 1)))) {
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) - 1));
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
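
// The float/double overloads above use the VFP "modified immediate" form:
// an 8-bit value abcdefgh expands to (-1)^a * 1.cdefgh * 2^(small exponent
// derived from NOT(b) and replicated b bits), covering magnitudes roughly
// from 0.125 to 31.0 but not 0.0. The bit tests check that imm32/imm64
// loses nothing when packed into 8 bits; if they fail, the caller must load
// the constant some other way (e.g. through a core register).
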
void Arm32Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B21 | B20, sd, sn, sm);
}


void Arm32Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B21 | B20, dd, dn, dm);
}


void Arm32Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
}


void Arm32Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
}


void Arm32Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B21, sd, sn, sm);
}


void Arm32Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B21, dd, dn, dm);
}


void Arm32Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, 0, sd, sn, sm);
}


void Arm32Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);
}


void Arm32Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);
}


void Arm32Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);
}


void Arm32Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                           Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);
}


void Arm32Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                           Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);
}


void Arm32Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
}


void Arm32Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
}


void Arm32Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
}


void Arm32Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
}


void Arm32Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
}

void Arm32Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
}


void Arm32Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
}


void Arm32Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
}


void Arm32Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
}


void Arm32Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
}


void Arm32Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
}


void Arm32Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
}


void Arm32Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
}


void Arm32Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
}


void Arm32Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
}


void Arm32Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
}


void Arm32Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
}


void Arm32Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
}


void Arm32Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
}


void Arm32Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
}

void Arm32Assembler::b(Label* label, Condition cond) {
  EmitBranch(cond, label, false);
}


void Arm32Assembler::bl(Label* label, Condition cond) {
  EmitBranch(cond, label, true);
}
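

// The tst-pc form is architecturally almost a no-op, and tst()/teq() above
// refuse to encode it for normal use; MarkExceptionHandler below emits it
// deliberately, presumably as a marker the runtime can recognize, followed
// by a branch around an embedded branch to the handler label so that normal
// control flow skips over it.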
void Arm32Assembler::MarkExceptionHandler(Label* label) {
  EmitType01(AL, 1, TST, kCcSet, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);
  EmitBranch(AL, label, false);
  Bind(&l);
}


void Arm32Assembler::Emit(int32_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int32_t>(value);
}


void Arm32Assembler::EmitType01(Condition cond,
                                int type,
                                Opcode opcode,
                                SetCc set_cc,
                                Register rn,
                                Register rd,
                                const ShifterOperand& so) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     type << kTypeShift |
                     static_cast<int32_t>(opcode) << kOpcodeShift |
                     (set_cc == kCcSet ? 1 : 0) << kSShift |
                     static_cast<int32_t>(rn) << kRnShift |
                     static_cast<int32_t>(rd) << kRdShift |
                     so.encodingArm();
  Emit(encoding);
}
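
// For reference, EmitType01 above assembles the classic ARM data-processing
// word:
//
//   31..28 | 27..25 | 24..21 | 20 | 19..16 | 15..12 | 11..0
//   cond   | type   | opcode | S  | Rn     | Rd     | shifter operand
//
// where type is 0 for a (possibly shifted) register operand and 1 for the
// rotated 8-bit immediate checked by ShifterOperandCanHoldArm32, and S
// selects whether the condition flags are updated.
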
void Arm32Assembler::EmitType5(Condition cond, int offset, bool link) {
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     5 << kTypeShift |
                     (link ? 1 : 0) << kLinkShift;
  Emit(Arm32Assembler::EncodeBranchOffset(offset, encoding));
}


void Arm32Assembler::EmitMemOp(Condition cond,
                               bool load,
                               bool byte,
                               Register rd,
                               const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  const Address& addr = static_cast<const Address&>(ad);

  int32_t encoding = 0;
  if (!ad.IsImmediate() && ad.GetRegisterOffset() == PC) {
    // PC-relative LDR (literal).
    int32_t offset = ad.GetOffset();
    int32_t u = B23;
    if (offset < 0) {
      offset = -offset;
      u = 0;
    }
    CHECK_LT(offset, (1 << 12));
    encoding = (static_cast<int32_t>(cond) << kConditionShift) |
        B26 | B24 | u | B20 |
        (load ? L : 0) |
        (byte ? B : 0) |
        (static_cast<int32_t>(rd) << kRdShift) |
        0xf << 16 |
        (offset & 0xfff);

  } else {
    encoding = (static_cast<int32_t>(cond) << kConditionShift) |
        B26 |
        (load ? L : 0) |
        (byte ? B : 0) |
        (static_cast<int32_t>(rd) << kRdShift) |
        addr.encodingArm();
  }
  Emit(encoding);
}


void Arm32Assembler::EmitMemOpAddressMode3(Condition cond,
                                           int32_t mode,
                                           Register rd,
                                           const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  const Address& addr = static_cast<const Address&>(ad);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B22 |
                     mode |
                     (static_cast<int32_t>(rd) << kRdShift) |
                     addr.encoding3();
  Emit(encoding);
}


void Arm32Assembler::EmitMultiMemOp(Condition cond,
                                    BlockAddressMode am,
                                    bool load,
                                    Register base,
                                    RegList regs) {
  CHECK_NE(base, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 |
                     am |
                     (load ? L : 0) |
                     (static_cast<int32_t>(base) << kRnShift) |
                     regs;
  Emit(encoding);
}


void Arm32Assembler::EmitShiftImmediate(Condition cond,
                                        Shift opcode,
                                        Register rd,
                                        Register rm,
                                        const ShifterOperand& so) {
  CHECK_NE(cond, kNoCondition);
  CHECK(so.IsImmediate());
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     static_cast<int32_t>(MOV) << kOpcodeShift |
                     static_cast<int32_t>(rd) << kRdShift |
                     so.encodingArm() << kShiftImmShift |
                     static_cast<int32_t>(opcode) << kShiftShift |
                     static_cast<int32_t>(rm);
  Emit(encoding);
}


void Arm32Assembler::EmitShiftRegister(Condition cond,
                                       Shift opcode,
                                       Register rd,
                                       Register rm,
                                       const ShifterOperand& so) {
  CHECK_NE(cond, kNoCondition);
  CHECK(so.IsRegister());
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     static_cast<int32_t>(MOV) << kOpcodeShift |
                     static_cast<int32_t>(rd) << kRdShift |
                     so.encodingArm() << kShiftRegisterShift |
                     static_cast<int32_t>(opcode) << kShiftShift |
                     B4 |
                     static_cast<int32_t>(rm);
  Emit(encoding);
}


void Arm32Assembler::EmitBranch(Condition cond, Label* label, bool link) {
  if (label->IsBound()) {
    EmitType5(cond, label->Position() - buffer_.Size(), link);
  } else {
    int position = buffer_.Size();
    // Use the offset field of the branch instruction for linking the sites.
    EmitType5(cond, label->position_, link);
    label->LinkTo(position);
  }
}


void Arm32Assembler::clz(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 | B22 | B21 | (0xf << 16) |
                     (static_cast<int32_t>(rd) << kRdShift) |
                     (0xf << 8) | B4 | static_cast<int32_t>(rm);
  Emit(encoding);
}


void Arm32Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     B25 | B24 | ((imm16 >> 12) << 16) |
                     static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
  Emit(encoding);
}


void Arm32Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
                     B25 | B24 | B22 | ((imm16 >> 12) << 16) |
                     static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
  Emit(encoding);
}
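
// movw/movt above are the ARMv7 16-bit immediate moves: movw writes the low
// halfword and zeroes the high one, movt writes the high halfword and
// preserves the low one, so the pair materializes any 32-bit constant:
//   movw ip, #0x5678
//   movt ip, #0x1234   ; ip = 0x12345678
// LoadImmediate() below uses exactly this fallback when the rotated 8-bit
// immediate forms do not apply.
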
void Arm32Assembler::EmitMiscellaneous(Condition cond, uint8_t op1,
                                       uint8_t op2, uint32_t a_part,
                                       uint32_t rest) {
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B26 | B25 | B23 |
                     (op1 << 20) |
                     (a_part << 16) |
                     (op2 << 5) |
                     B4 |
                     rest;
  Emit(encoding);
}


void Arm32Assembler::EmitReverseBytes(Register rd, Register rm, Condition cond,
                                      uint8_t op1, uint8_t op2) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);

  int32_t encoding = (static_cast<int32_t>(rd) << kRdShift) |
                     (0b1111 << 8) |
                     static_cast<int32_t>(rm);
  EmitMiscellaneous(cond, op1, op2, 0b1111, encoding);
}


void Arm32Assembler::rbit(Register rd, Register rm, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  CHECK_NE(rd, PC);
  CHECK_NE(rm, PC);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B26 | B25 | B23 | B22 | B21 | B20 | (0xf << 16) |
                     (static_cast<int32_t>(rd) << kRdShift) |
                     (0xf << 8) | B5 | B4 | static_cast<int32_t>(rm);
  Emit(encoding);
}


void Arm32Assembler::rev(Register rd, Register rm, Condition cond) {
  EmitReverseBytes(rd, rm, cond, 0b011, 0b001);
}


void Arm32Assembler::rev16(Register rd, Register rm, Condition cond) {
  EmitReverseBytes(rd, rm, cond, 0b011, 0b101);
}


void Arm32Assembler::revsh(Register rd, Register rm, Condition cond) {
  EmitReverseBytes(rd, rm, cond, 0b111, 0b101);
}
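
// Semantics of the bit/byte-reversal group above: rbit reverses all 32 bits;
// rev reverses the four bytes of the word; rev16 reverses the bytes within
// each halfword independently; revsh reverses the bytes of the low halfword
// and sign-extends the result to 32 bits.
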
void Arm32Assembler::EmitMulOp(Condition cond, int32_t opcode,
                               Register rd, Register rn,
                               Register rm, Register rs) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(rs, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = opcode |
      (static_cast<int32_t>(cond) << kConditionShift) |
      (static_cast<int32_t>(rn) << kRnShift) |
      (static_cast<int32_t>(rd) << kRdShift) |
      (static_cast<int32_t>(rs) << kRsShift) |
      B7 | B4 |
      (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}


void Arm32Assembler::ldrex(Register rt, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 |
                     B23 |
                     L |
                     (static_cast<int32_t>(rn) << kLdExRnShift) |
                     (static_cast<int32_t>(rt) << kLdExRtShift) |
                     B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
  Emit(encoding);
}


void Arm32Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, R14);
  CHECK_EQ(0u, static_cast<uint32_t>(rt) % 2);
  CHECK_EQ(static_cast<uint32_t>(rt) + 1, static_cast<uint32_t>(rt2));
  CHECK_NE(cond, kNoCondition);

  int32_t encoding =
      (static_cast<uint32_t>(cond) << kConditionShift) |
      B24 | B23 | B21 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
  Emit(encoding);
}


void Arm32Assembler::strex(Register rd,
                           Register rt,
                           Register rn,
                           Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 |
                     B23 |
                     (static_cast<int32_t>(rn) << kStrExRnShift) |
                     (static_cast<int32_t>(rd) << kStrExRdShift) |
                     B11 | B10 | B9 | B8 | B7 | B4 |
                     (static_cast<int32_t>(rt) << kStrExRtShift);
  Emit(encoding);
}

void Arm32Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, R14);
  CHECK_NE(rd, rt);
  CHECK_NE(rd, rt2);
  CHECK_EQ(0u, static_cast<uint32_t>(rt) % 2);
  CHECK_EQ(static_cast<uint32_t>(rt) + 1, static_cast<uint32_t>(rt2));
  CHECK_NE(cond, kNoCondition);

  int32_t encoding =
      (static_cast<uint32_t>(cond) << kConditionShift) |
      B24 | B23 | B21 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rd) << 12 |
      B11 | B10 | B9 | B8 | B7 | B4 |
      static_cast<uint32_t>(rt);
  Emit(encoding);
}


void Arm32Assembler::clrex(Condition cond) {
  CHECK_EQ(cond, AL);  // This cannot be conditional on ARM.
  int32_t encoding = (kSpecialCondition << kConditionShift) |
                     B26 | B24 | B22 | B21 | B20 | (0xff << 12) | B4 | 0xf;
  Emit(encoding);
}
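
// The exclusive-access instructions above are intended for the usual
// load-exclusive/store-exclusive retry loop; strex writes 0 to its result
// register on success and 1 if the exclusive monitor was lost, e.g.:
//   retry: ldrex r1, [r0]
//          add   r1, r1, #1
//          strex r2, r1, [r0]
//          cmp   r2, #0
//          bne   retry
// clrex abandons any outstanding exclusive reservation.
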
void Arm32Assembler::nop(Condition cond) {
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B25 | B24 | B21 | (0xf << 12);
  Emit(encoding);
}


void Arm32Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit(encoding);
}


void Arm32Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B20 |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit(encoding);
}


void Arm32Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
                             Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}


void Arm32Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                             Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}


void Arm32Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                             Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}


void Arm32Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                             Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}


void Arm32Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B24 | B20 |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     B11 | B9 | addr.vencoding();
  Emit(encoding);
}


void Arm32Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);
  CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B24 |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     B11 | B9 | addr.vencoding();
  Emit(encoding);
}


void Arm32Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B24 | B20 |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     B11 | B9 | B8 | addr.vencoding();
  Emit(encoding);
}


void Arm32Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
  const Address& addr = static_cast<const Address&>(ad);
  CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B24 |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     B11 | B9 | B8 | addr.vencoding();
  Emit(encoding);
}


void Arm32Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
}


void Arm32Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
}


void Arm32Assembler::vpops(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
}


void Arm32Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
}
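

// VFP register numbers do not fit in one 4-bit field: a single-precision
// register sN is encoded as its top four bits plus a separate low bit
// (Vd:D), while a double-precision dN is a separate high bit plus the low
// four bits (D:Vd). This is why EmitVPushPop below, and the EmitVFP*
// helpers after it, split every register number with >> and & before
// placing the pieces into the instruction word.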
void Arm32Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CHECK_NE(cond, kNoCondition);
  CHECK_GT(nregs, 0);
  uint32_t D;
  uint32_t Vd;
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                     B11 | B9 |
                     (dbl ? B8 : 0) |
                     (push ? B24 : (B23 | B20)) |
                     static_cast<int32_t>(cond) << kConditionShift |
                     nregs << (dbl ? 1 : 0) |
                     D << 22 |
                     Vd << 12;
  Emit(encoding);
}


void Arm32Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}


void Arm32Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}


void Arm32Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                               SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit(encoding);
}


void Arm32Assembler::EmitVFPds(Condition cond, int32_t opcode,
                               DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit(encoding);
}
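

// The shift helpers below follow UAL conventions for immediate shifts:
// LSR #32 and ASR #32 exist but are encoded with an immediate of 0, ROR #0
// would actually mean RRX (rotate right by one bit through the carry flag,
// emitted by Rrx below), and LSL only goes up to #31. The CHECKs and the
// shift_imm == 32 rewrites implement exactly these rules.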
void Arm32Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                         Condition cond, SetCc set_cc) {
  CHECK_LE(shift_imm, 31u);
  mov(rd, ShifterOperand(rm, LSL, shift_imm), cond, set_cc);
}


void Arm32Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
                         Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply with UAL syntax.
  mov(rd, ShifterOperand(rm, LSR, shift_imm), cond, set_cc);
}


void Arm32Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
                         Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply with UAL syntax.
  mov(rd, ShifterOperand(rm, ASR, shift_imm), cond, set_cc);
}


void Arm32Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                         Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 31u);
  mov(rd, ShifterOperand(rm, ROR, shift_imm), cond, set_cc);
}

void Arm32Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
  mov(rd, ShifterOperand(rm, ROR, 0), cond, set_cc);
}


void Arm32Assembler::Lsl(Register rd, Register rm, Register rn,
                         Condition cond, SetCc set_cc) {
  mov(rd, ShifterOperand(rm, LSL, rn), cond, set_cc);
}


void Arm32Assembler::Lsr(Register rd, Register rm, Register rn,
                         Condition cond, SetCc set_cc) {
  mov(rd, ShifterOperand(rm, LSR, rn), cond, set_cc);
}


void Arm32Assembler::Asr(Register rd, Register rm, Register rn,
                         Condition cond, SetCc set_cc) {
  mov(rd, ShifterOperand(rm, ASR, rn), cond, set_cc);
}


void Arm32Assembler::Ror(Register rd, Register rm, Register rn,
                         Condition cond, SetCc set_cc) {
  mov(rd, ShifterOperand(rm, ROR, rn), cond, set_cc);
}

void Arm32Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
                     (static_cast<int32_t>(PC)*B12) |
                     B11 | B9 | B4;
  Emit(encoding);
}
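
// In the vmstat encoding above, the Rt field is PC (0b1111), which for VMRS
// is the special "APSR_nzcv" form: the FPSCR condition flags are copied
// into the APSR, so an ordinary conditional branch can follow a VFP compare
// (vcmps/vcmpd, then vmstat, then a conditional b).
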
void Arm32Assembler::svc(uint32_t imm24) {
  CHECK(IsUint<24>(imm24)) << imm24;
  int32_t encoding = (AL << kConditionShift) | B27 | B26 | B25 | B24 | imm24;
  Emit(encoding);
}


void Arm32Assembler::bkpt(uint16_t imm16) {
  int32_t encoding = (AL << kConditionShift) | B24 | B21 |
                     ((imm16 >> 4) << 8) | B6 | B5 | B4 | (imm16 & 0xf);
  Emit(encoding);
}


void Arm32Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 | B21 | (0xfff << 8) | B5 | B4 |
                     (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}


void Arm32Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CHECK_NE(cond, kNoCondition);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B24 | B21 | (0xfff << 8) | B4 |
                     (static_cast<int32_t>(rm) << kRmShift);
  Emit(encoding);
}


void Arm32Assembler::Push(Register rd, Condition cond) {
  str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
}


void Arm32Assembler::Pop(Register rd, Condition cond) {
  ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
}


void Arm32Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}


void Arm32Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}


void Arm32Assembler::Mov(Register rd, Register rm, Condition cond) {
  if (rd != rm) {
    mov(rd, ShifterOperand(rm), cond);
  }
}


void Arm32Assembler::Bind(Label* label) {
  CHECK(!label->IsBound());
  int bound_pc = buffer_.Size();
  while (label->IsLinked()) {
    int32_t position = label->Position();
    int32_t next = buffer_.Load<int32_t>(position);
    int32_t encoded = Arm32Assembler::EncodeBranchOffset(bound_pc - position, next);
    buffer_.Store<int32_t>(position, encoded);
    label->position_ = Arm32Assembler::DecodeBranchOffset(next);
  }
  label->BindTo(bound_pc);
}


int32_t Arm32Assembler::EncodeBranchOffset(int offset, int32_t inst) {
  // The offset is off by 8 due to the way the ARM CPUs read PC.
  offset -= 8;
  CHECK_ALIGNED(offset, 4);
  CHECK(IsInt(POPCOUNT(kBranchOffsetMask), offset)) << offset;

  // Properly preserve only the bits supported in the instruction.
  offset >>= 2;
  offset &= kBranchOffsetMask;
  return (inst & ~kBranchOffsetMask) | offset;
}


int Arm32Assembler::DecodeBranchOffset(int32_t inst) {
  // Sign-extend, left-shift by 2, then add 8.
  return ((((inst & kBranchOffsetMask) << 8) >> 6) + 8);
}
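
// A worked example of the branch offset transformation above: the ARM
// pipeline makes a branch read PC as its own address plus 8, so a branch at
// buffer offset 12 targeting offset 4 computes offset = 4 - 12 = -8, which
// EncodeBranchOffset turns into (-8 - 8) >> 2 = -4 and masks into the low
// 24 bits. DecodeBranchOffset inverts this with (imm24 << 8) >> 6, a sign
// extension and multiply-by-4 in one step, then adds the 8 back.
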
uint32_t Arm32Assembler::GetAdjustedPosition(uint32_t old_position ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}

Literal* Arm32Assembler::NewLiteral(size_t size ATTRIBUTE_UNUSED,
                                    const uint8_t* data ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}

void Arm32Assembler::LoadLiteral(Register rt ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}

void Arm32Assembler::LoadLiteral(Register rt ATTRIBUTE_UNUSED, Register rt2 ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}

void Arm32Assembler::LoadLiteral(SRegister sd ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}

void Arm32Assembler::LoadLiteral(DRegister dd ATTRIBUTE_UNUSED,
                                 Literal* literal ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented.";
  UNREACHABLE();
}


void Arm32Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                 Condition cond, SetCc set_cc) {
  if (value == 0 && set_cc != kCcSet) {
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond, set_cc);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negative ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
    add(rd, rn, shifter_op, cond, set_cc);
  } else if (ShifterOperandCanHoldArm32(-value, &shifter_op)) {
    sub(rd, rn, shifter_op, cond, set_cc);
  } else {
    CHECK(rn != IP);
    if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
      mvn(IP, shifter_op, cond, kCcKeep);
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else if (ShifterOperandCanHoldArm32(~(-value), &shifter_op)) {
      mvn(IP, shifter_op, cond, kCcKeep);
      sub(rd, rn, ShifterOperand(IP), cond, set_cc);
    } else {
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      add(rd, rn, ShifterOperand(IP), cond, set_cc);
    }
  }
}

void Arm32Assembler::CmpConstant(Register rn, int32_t value, Condition cond) {
  ShifterOperand shifter_op;
  if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
    cmp(rn, shifter_op, cond);
  } else if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
    cmn(rn, shifter_op, cond);
  } else {
    movw(IP, Low16Bits(value), cond);
    uint16_t value_high = High16Bits(value);
    if (value_high != 0) {
      movt(IP, value_high, cond);
    }
    cmp(rn, ShifterOperand(IP), cond);
  }
}

void Arm32Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
  ShifterOperand shifter_op;
  if (ShifterOperandCanHoldArm32(value, &shifter_op)) {
    mov(rd, shifter_op, cond);
  } else if (ShifterOperandCanHoldArm32(~value, &shifter_op)) {
    mvn(rd, shifter_op, cond);
  } else {
    movw(rd, Low16Bits(value), cond);
    uint16_t value_high = High16Bits(value);
    if (value_high != 0) {
      movt(rd, value_high, cond);
    }
  }
}
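
// Examples of the constant-loading strategy above: 0xFF fits the rotated
// immediate form directly (mov rd, #255); 0xFFFFFF00 does not, but its
// complement 0xFF does, so it becomes mvn rd, #255; 0x12345678 fits
// neither, so it falls back to movw rd, #0x5678 followed by
// movt rd, #0x1234.
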
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetArm.
void Arm32Assembler::LoadFromOffset(LoadOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(type, offset)) {
    CHECK(base != IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(type, offset));
  switch (type) {
    case kLoadSignedByte:
      ldrsb(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedByte:
      ldrb(reg, Address(base, offset), cond);
      break;
    case kLoadSignedHalfword:
      ldrsh(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedHalfword:
      ldrh(reg, Address(base, offset), cond);
      break;
    case kLoadWord:
      ldr(reg, Address(base, offset), cond);
      break;
    case kLoadWordPair:
      ldrd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}


// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
void Arm32Assembler::LoadSFromOffset(SRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(kLoadSWord, offset)) {
    CHECK_NE(base, IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(kLoadSWord, offset));
  vldrs(reg, Address(base, offset), cond);
}


// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetArm, as expected by JIT::GuardedLoadFromOffset.
void Arm32Assembler::LoadDFromOffset(DRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetArm(kLoadDWord, offset)) {
    CHECK_NE(base, IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetArm(kLoadDWord, offset));
  vldrd(reg, Address(base, offset), cond);
}


// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetArm.
void Arm32Assembler::StoreToOffset(StoreOperandType type,
                                   Register reg,
                                   Register base,
                                   int32_t offset,
                                   Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(type, offset)) {
    CHECK(reg != IP);
    CHECK(base != IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}


// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreToOffset.
void Arm32Assembler::StoreSToOffset(SRegister reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(kStoreSWord, offset)) {
    CHECK_NE(base, IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(kStoreSWord, offset));
  vstrs(reg, Address(base, offset), cond);
}
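
// In all of the offset helpers above and below, IP (r12) is the designated
// scratch register: when an offset does not fit the addressing mode, it is
// materialized into IP and the base is added to it. That is why callers may
// not pass IP as the base register (or, for the core-register stores, as
// the value register) when the offset might be out of range.
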
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetArm, as expected by JIT::GuardedStoreSToOffset.
void Arm32Assembler::StoreDToOffset(DRegister reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  if (!Address::CanHoldStoreOffsetArm(kStoreDWord, offset)) {
    CHECK_NE(base, IP);
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetArm(kStoreDWord, offset));
  vstrd(reg, Address(base, offset), cond);
}


void Arm32Assembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  dmb(SY);
}


void Arm32Assembler::dmb(DmbOptions flavor) {
  int32_t encoding = 0xf57ff05f;  // dmb
  Emit(encoding | flavor);
}


void Arm32Assembler::cbz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "cbz is not supported on ARM32";
}


void Arm32Assembler::cbnz(Register rn ATTRIBUTE_UNUSED, Label* target ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "cbnz is not supported on ARM32";
}


void Arm32Assembler::CompareAndBranchIfZero(Register r, Label* label) {
  cmp(r, ShifterOperand(0));
  b(label, EQ);
}


void Arm32Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
  cmp(r, ShifterOperand(0));
  b(label, NE);
}

JumpTable* Arm32Assembler::CreateJumpTable(std::vector<Label*>&& labels ATTRIBUTE_UNUSED,
                                           Register base_reg ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "CreateJumpTable is not supported on ARM32";
  UNREACHABLE();
}

void Arm32Assembler::EmitJumpTableDispatch(JumpTable* jump_table ATTRIBUTE_UNUSED,
                                           Register displacement_reg ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "EmitJumpTableDispatch is not supported on ARM32";
  UNREACHABLE();
}

void Arm32Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  // Currently the arm32 assembler does not support fixups, and thus no tracking. We must not call
  // FinalizeTrackedLabels(), which would lead to an abort.
}

}  // namespace arm
}  // namespace art