1 /* 2 * Copyright (C) 2015 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #include "intrinsics_mips.h" 18 19 #include "arch/mips/instruction_set_features_mips.h" 20 #include "art_method.h" 21 #include "code_generator_mips.h" 22 #include "entrypoints/quick/quick_entrypoints.h" 23 #include "intrinsics.h" 24 #include "mirror/array-inl.h" 25 #include "mirror/string.h" 26 #include "thread.h" 27 #include "utils/mips/assembler_mips.h" 28 #include "utils/mips/constants_mips.h" 29 30 namespace art { 31 32 namespace mips { 33 34 IntrinsicLocationsBuilderMIPS::IntrinsicLocationsBuilderMIPS(CodeGeneratorMIPS* codegen) 35 : arena_(codegen->GetGraph()->GetArena()) { 36 } 37 38 MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() { 39 return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler()); 40 } 41 42 ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() { 43 return codegen_->GetGraph()->GetArena(); 44 } 45 46 inline bool IntrinsicCodeGeneratorMIPS::IsR2OrNewer() const { 47 return codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(); 48 } 49 50 inline bool IntrinsicCodeGeneratorMIPS::IsR6() const { 51 return codegen_->GetInstructionSetFeatures().IsR6(); 52 } 53 54 inline bool IntrinsicCodeGeneratorMIPS::Is32BitFPU() const { 55 return codegen_->GetInstructionSetFeatures().Is32BitFloatingPoint(); 56 } 57 58 #define __ codegen->GetAssembler()-> 59 60 static void 
MoveFromReturnRegister(Location trg, 61 Primitive::Type type, 62 CodeGeneratorMIPS* codegen) { 63 if (!trg.IsValid()) { 64 DCHECK_EQ(type, Primitive::kPrimVoid); 65 return; 66 } 67 68 DCHECK_NE(type, Primitive::kPrimVoid); 69 70 if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) { 71 Register trg_reg = trg.AsRegister<Register>(); 72 if (trg_reg != V0) { 73 __ Move(V0, trg_reg); 74 } 75 } else { 76 FRegister trg_reg = trg.AsFpuRegister<FRegister>(); 77 if (trg_reg != F0) { 78 if (type == Primitive::kPrimFloat) { 79 __ MovS(F0, trg_reg); 80 } else { 81 __ MovD(F0, trg_reg); 82 } 83 } 84 } 85 } 86 87 static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS* codegen) { 88 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor; 89 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor); 90 } 91 92 // Slow-path for fallback (calling the managed code to handle the 93 // intrinsic) in an intrinsified call. This will copy the arguments 94 // into the positions for a regular call. 95 // 96 // Note: The actual parameters are required to be in the locations 97 // given by the invoke's location summary. If an intrinsic 98 // modifies those locations before a slowpath call, they must be 99 // restored! 
class IntrinsicSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit IntrinsicSlowPathMIPS(HInvoke* invoke) : SlowPathCodeMIPS(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS* codegen = down_cast<CodeGeneratorMIPS*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Preserve every live register across the call to the managed code.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Put the arguments where the regular calling convention expects them.
    MoveArguments(invoke_, codegen);

    // Perform the actual (non-intrinsified) call, with the method pointer
    // passed in A0.
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS);
};

#undef __

// Returns true if |invoke| was recognized as an intrinsic and had an
// intrinsified LocationSummary created for it by Dispatch().
bool IntrinsicLocationsBuilderMIPS::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

// Location summary for intrinsics taking one FPU register in and producing
// one core register out (e.g. the raw-bits conversions below).
static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

// Bit-for-bit move of a float (one GPR) or double (GPR pair) out of an FPU
// register; implements the *ToRaw*Bits intrinsics.
static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();

  if (is64bit) {
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    __ Mfc1(out_lo, in);
    // MoveFromFpuHigh abstracts over 32- vs 64-bit FPU register banks.
    __ MoveFromFpuHigh(out_hi, in);
  } else {
    Register out = locations->Out().AsRegister<Register>();

    __ Mfc1(out, in);
  }
}

// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// Location summary for intrinsics taking one core register (or pair) in and
// producing one FPU register out.
static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke)
{
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

// Bit-for-bit move of an int (one GPR) or long (GPR pair) into an FPU
// register; implements the *BitsTo* intrinsics.
static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  FRegister out = locations->Out().AsFpuRegister<FRegister>();

  if (is64bit) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    __ Mtc1(in_lo, out);
    // MoveToFpuHigh abstracts over 32- vs 64-bit FPU register banks.
    __ MoveToFpuHigh(in_hi, out);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();

    __ Mtc1(in, out);
  }
}

// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// Location summary for core-register-to-core-register intrinsics.
// |overlaps| controls whether the register allocator may assign the output
// to the same register(s) as the input.
static void CreateIntToIntLocations(ArenaAllocator* arena,
                                    HInvoke* invoke,
                                    Location::OutputOverlap overlaps = Location::kNoOutputOverlap) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), overlaps);
}

// Emits code that reverses the bytes of a 16-, 32- or 64-bit value, and
// optionally also the bits within each byte (int/long only); shared by the
// reverseBytes and reverse intrinsics. Pre-r2/pre-r6 targets fall back to
// shift-and-mask sequences (the commented-out instructions show the r2/r6
// instruction each sequence emulates).
static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       bool isR2OrNewer,
                       bool isR6,
                       bool reverseBits,
                       MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimShort ||
         type == Primitive::kPrimInt ||
         type == Primitive::kPrimLong);
  // Bit reversal is only implemented for int and long.
  DCHECK(type != Primitive::kPrimShort || !reverseBits);

  if (type == Primitive::kPrimShort) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      // Swap the two bytes, then sign-extend the 16-bit result.
      __ Wsbh(out, in);
      __ Seh(out, out);
    } else {
      // Low byte -> bits 15..8 with sign extension into the upper half.
      __ Sll(TMP, in, 24);
      __ Sra(TMP, TMP, 16);
      // High byte of the short -> bits 7..0.
      __ Sll(out, in, 16);
      __ Srl(out, out, 24);
      __ Or(out, out, TMP);
    }
  } else if (type == Primitive::kPrimInt) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      // Swap halfwords, then swap the bytes within each halfword.
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
    } else {
      // MIPS32r1
      // __ Rotr(out, in, 16);
      __ Sll(TMP, in, 16);
      __ Srl(out, in, 16);
      __ Or(out, out, TMP);
      // __ Wsbh(out, out);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(TMP, out, AT);
      __ Sll(TMP, TMP, 8);
      __ Srl(out, out, 8);
      __ And(out, out, AT);
      __ Or(out, out, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        __ Bitswap(out, out);
      } else {
        // Classic bit reversal: swap nibbles, then bit pairs, then bits.
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out, out, 4);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out, out, 2);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out, out, 1);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
      }
    }
  } else if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    if (isR2OrNewer) {
      // Byte-reverse each word and swap the two words of the pair.
      __ Rotr(AT, in_hi, 16);
      __ Rotr(TMP, in_lo, 16);
      __ Wsbh(out_lo, AT);
      __ Wsbh(out_hi, TMP);
    } else {
      // When calling CreateIntToIntLocations() we promised that the
      // use of the out_lo/out_hi wouldn't overlap with the use of
      // in_lo/in_hi. Be very careful not to write to out_lo/out_hi
      // until we're completely done reading from in_lo/in_hi.
      // __ Rotr(TMP, in_lo, 16);
      __ Sll(TMP, in_lo, 16);
      __ Srl(AT, in_lo, 16);
      __ Or(TMP, TMP, AT);  // Hold in TMP until it's safe
                            // to write to out_hi.
      // __ Rotr(out_lo, in_hi, 16);
      __ Sll(AT, in_hi, 16);
      __ Srl(out_lo, in_hi, 16);  // Here we are finally done reading
                                  // from in_lo/in_hi so it's okay to
                                  // write to out_lo/out_hi.
      __ Or(out_lo, out_lo, AT);
      // __ Wsbh(out_hi, out_hi);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(out_hi, TMP, AT);
      __ Sll(out_hi, out_hi, 8);
      __ Srl(TMP, TMP, 8);
      __ And(TMP, TMP, AT);
      __ Or(out_hi, out_hi, TMP);
      // __ Wsbh(out_lo, out_lo);
      __ And(TMP, out_lo, AT);  // AT already holds the correct mask value
      __ Sll(TMP, TMP, 8);
      __ Srl(out_lo, out_lo, 8);
      __ And(out_lo, out_lo, AT);
      __ Or(out_lo, out_lo, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        __ Bitswap(out_hi, out_hi);
        __ Bitswap(out_lo, out_lo);
      } else {
        // Same nibble/pair/bit swap sequence as the 32-bit case, applied
        // to both halves; each mask constant is loaded once and reused.
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_hi, out_hi, 4);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_lo, out_lo, 4);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_hi, out_hi, 2);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_lo, out_lo, 2);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_hi, out_hi, 1);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_lo, out_lo, 1);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
      }
    }
  }
}

// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimInt,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimLong,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimShort,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}

// Emits code counting leading zero bits of a 32-bit value or a 64-bit
// register pair. For 64-bit inputs: clz(hi) when hi != 0, else 32 + clz(lo)
// (CLZ of zero yields 32, which supplies the "+32" term).
static void GenNumberOfLeadingZeroes(LocationSummary* locations,
                                     bool is64bit,
                                     bool isR6,
                                     MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();
  if (is64bit) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    if (isR6) {
      __ ClzR6(AT, in_hi);
      __ ClzR6(TMP, in_lo);
      // Only add clz(lo) when the high word is zero.
      __ Seleqz(TMP, TMP, in_hi);
    } else {
      __ ClzR2(AT, in_hi);
      __ ClzR2(TMP, in_lo);
      // Only add clz(lo) when the high word is zero.
      __ Movn(TMP, ZERO, in_hi);
    }
    // out = clz(hi) + (hi == 0 ? clz(lo) : 0); clz(0) == 32 covers the
    // hi == 0 case.
    __ Addu(out, AT, TMP);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();

    if (isR6) {
      __ ClzR6(out, in);
    } else {
      __ ClzR2(out, in);
    }
  }
}

// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, IsR6(), GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, IsR6(), GetAssembler());
}

// Emits code counting trailing zero bits of a 32-bit value or a 64-bit
// register pair. MIPS has no CTZ instruction; r6 bit-reverses the word and
// uses CLZ, while pre-r6 uses the (x ^ (x-1)) & (x-1) trick. Note clz of
// zero is 32, so a zero input yields the correct 32 (or 64) result.
static void GenNumberOfTrailingZeroes(LocationSummary* locations,
                                      bool is64bit,
                                      bool isR6,
                                      MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();
  Register in_lo;
  Register in;

  if (is64bit) {
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    in_lo = locations->InAt(0).AsRegisterPairLow<Register>();

    // If in_lo is zero then count the number of trailing zeroes in in_hi;
    // otherwise count the number of trailing zeroes in in_lo.
    // out = in_lo ? in_lo : in_hi;
    if (isR6) {
      __ Seleqz(out, in_hi, in_lo);
      __ Selnez(TMP, in_lo, in_lo);
      __ Or(out, out, TMP);
    } else {
      __ Movz(out, in_hi, in_lo);
      __ Movn(out, in_lo, in_lo);
    }

    in = out;
  } else {
    in = locations->InAt(0).AsRegister<Register>();
    // Give in_lo a dummy value to keep the compiler from complaining.
    // Since we only get here in the 32-bit case, this value will never
    // be used.
    in_lo = in;
  }

  if (isR6) {
    // We don't have an instruction to count the number of trailing zeroes.
    // Start by flipping the bits end-for-end so we can count the number of
    // leading zeroes instead.
    __ Rotr(out, in, 16);
    __ Wsbh(out, out);
    __ Bitswap(out, out);
    __ ClzR6(out, out);
  } else {
    // Convert trailing zeroes to trailing ones, and bits to their left
    // to zeroes.
    __ Addiu(TMP, in, -1);
    __ Xor(out, TMP, in);
    __ And(out, out, TMP);
    // Count number of leading zeroes.
    __ ClzR2(out, out);
    // Subtract number of leading zeroes from 32 to get number of trailing ones.
    // Remember that the trailing ones were formerly trailing zeroes.
    __ LoadConst32(TMP, 32);
    __ Subu(out, TMP, out);
  }

  if (is64bit) {
    // If in_lo is zero, then we counted the number of trailing zeroes in in_hi so we must add the
    // number of trailing zeroes in in_lo (32) to get the correct final count
    __ LoadConst32(TMP, 32);
    if (isR6) {
      __ Seleqz(TMP, TMP, in_lo);
    } else {
      __ Movn(TMP, ZERO, in_lo);
    }
    __ Addu(out, out, TMP);
  }
}

// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  // Output overlap allowed: the input is fully consumed before out is final.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, IsR6(), GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, IsR6(), GetAssembler());
}

// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimInt,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ true,
             GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimLong,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ true,
             GetAssembler());
}

// Location summary for FPU-register-to-FPU-register intrinsics.
static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

// Emits a branch-free SWAR population count for int or long inputs.
static void GenBitCount(LocationSummary* locations,
                        Primitive::Type type,
                        bool isR6,
                        MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm required 25 instructions.
  //
  // For 64-bit quantities, this algorithm gets executed twice, (once
  // for in_lo, and again for in_hi), but saves a few instructions
  // because the mask values only have to be loaded once. Using this
  // algorithm the count for a 64-bit operand can be performed in 33
  // instructions compared to a loop-based algorithm which required 47
  // instructions.

  if (type == Primitive::kPrimInt) {
    Register in = locations->InAt(0).AsRegister<Register>();

    __ Srl(TMP, in, 1);
    __ LoadConst32(AT, 0x55555555);
    __ And(TMP, TMP, AT);
    __ Subu(TMP, in, TMP);
    __ LoadConst32(AT, 0x33333333);
    __ And(out, TMP, AT);
    __ Srl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Addu(TMP, out, TMP);
    __ Srl(out, TMP, 4);
    __ Addu(out, out, TMP);
    __ LoadConst32(AT, 0x0F0F0F0F);
    __ And(out, out, AT);
    // Multiply by 0x01010101 sums the four byte counts into the top byte.
    __ LoadConst32(TMP, 0x01010101);
    if (isR6) {
      __ MulR6(out, out, TMP);
    } else {
      __ MulR2(out, out, TMP);
    }
    __ Srl(out, out, 24);
  } else {
    DCHECK_EQ(type, Primitive::kPrimLong);
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register tmp_hi = locations->GetTemp(0).AsRegister<Register>();
    Register out_hi = locations->GetTemp(1).AsRegister<Register>();
    Register tmp_lo = TMP;
    Register out_lo = out;

    __ Srl(tmp_lo, in_lo, 1);
    __ Srl(tmp_hi, in_hi, 1);

    __ LoadConst32(AT, 0x55555555);

    __ And(tmp_lo, tmp_lo, AT);
    __ Subu(tmp_lo, in_lo, tmp_lo);

    __ And(tmp_hi, tmp_hi, AT);
    __ Subu(tmp_hi, in_hi, tmp_hi);

    __ LoadConst32(AT, 0x33333333);

    __ And(out_lo, tmp_lo, AT);
    __ Srl(tmp_lo, tmp_lo, 2);
    __ And(tmp_lo, tmp_lo, AT);
    __ Addu(tmp_lo, out_lo, tmp_lo);
    __ Srl(out_lo, tmp_lo, 4);
    __ Addu(out_lo, out_lo, tmp_lo);

    __ And(out_hi, tmp_hi, AT);
    __ Srl(tmp_hi, tmp_hi, 2);
    __ And(tmp_hi, tmp_hi, AT);
    __ Addu(tmp_hi, out_hi, tmp_hi);
    __ Srl(out_hi, tmp_hi, 4);
    __ Addu(out_hi, out_hi, tmp_hi);

    __ LoadConst32(AT, 0x0F0F0F0F);

    __ And(out_lo, out_lo, AT);
    __ And(out_hi, out_hi, AT);

    __ LoadConst32(AT, 0x01010101);

    if (isR6) {
      __ MulR6(out_lo, out_lo, AT);

      __ MulR6(out_hi, out_hi, AT);
    } else {
      __ MulR2(out_lo, out_lo, AT);

      __ MulR2(out_hi, out_hi, AT);
    }

    __ Srl(out_lo, out_lo, 24);
    __ Srl(out_hi, out_hi, 24);

    // Total count is the sum of the per-word counts (each <= 32).
    __ Addu(out, out_hi, out_lo);
  }
}

// int java.lang.Integer.bitCount(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), Primitive::kPrimInt, IsR6(), GetAssembler());
}

// int java.lang.Long.bitCount(long)
void IntrinsicLocationsBuilderMIPS::VisitLongBitCount(HInvoke* invoke) {
  // Two extra temps are needed to process both halves of the long at once.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), Primitive::kPrimLong, IsR6(), GetAssembler());
}

// Emits abs() for float/double using the FPU absolute-value instruction.
static void MathAbsFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister out = locations->Out().AsFpuRegister<FRegister>();

  if (is64bit) {
    __ AbsD(out, in);
  } else {
    __ AbsS(out, in);
  }
}

// double java.lang.Math.abs(double)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Math.abs(float)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void
IntrinsicCodeGeneratorMIPS::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// Emits branch-free abs() for int (out = (in ^ sign) - sign) and its 64-bit
// two's-complement extension with an explicit carry from the low word.
static void GenAbsInteger(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  if (is64bit) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    // The comments in this section show the analogous operations which would
    // be performed if we had 64-bit registers "in", and "out".
    // __ Dsra32(AT, in, 31);
    __ Sra(AT, in_hi, 31);
    // __ Xor(out, in, AT);
    __ Xor(TMP, in_lo, AT);
    __ Xor(out_hi, in_hi, AT);
    // __ Dsubu(out, out, AT);
    __ Subu(out_lo, TMP, AT);
    // Propagate the carry out of the low-word subtraction into the high word.
    __ Sltu(TMP, out_lo, TMP);
    __ Addu(out_hi, out_hi, TMP);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    // AT is 0 for non-negative inputs and -1 for negative ones, so
    // (in ^ AT) - AT is either `in` or its two's-complement negation.
    __ Sra(AT, in, 31);
    __ Xor(out, in, AT);
    __ Subu(out, out, AT);
  }
}

// int java.lang.Math.abs(int)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// long java.lang.Math.abs(long)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// Emits Math.min/max for float/double with Java semantics: NaN inputs
// produce NaN, and -0.0 compares less than +0.0. R6 uses MIN/MAX with an
// explicit NaN pre-check; pre-R6 uses condition-code compares and MOVT/MOVF.
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        Primitive::Type type,
                        bool is_R6,
                        MipsAssembler* assembler) {
  FRegister out = locations->Out().AsFpuRegister<FRegister>();
  FRegister a = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister b = locations->InAt(1).AsFpuRegister<FRegister>();

  if (is_R6) {
    MipsLabel noNaNs;
    MipsLabel done;
    // Use a scratch destination whenever out aliases an input we still read.
    FRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

    // When Java computes min/max it prefers a NaN to a number; the
    // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
    // the inputs is a NaN and the other is a valid number, the MIPS
    // instruction will return the number; Java wants the NaN value
    // returned. This is why there is extra logic preceding the use of
    // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
    // NaN, return the NaN, otherwise return the min/max.
    if (type == Primitive::kPrimDouble) {
      __ CmpUnD(FTMP, a, b);
      __ Bc1eqz(FTMP, &noNaNs);

      // One of the inputs is a NaN
      __ CmpEqD(ftmp, a, a);
      // If a == a then b is the NaN, otherwise a is the NaN.
      __ SelD(ftmp, a, b);

      if (ftmp != out) {
        __ MovD(out, ftmp);
      }

      __ B(&done);

      __ Bind(&noNaNs);

      if (is_min) {
        __ MinD(out, a, b);
      } else {
        __ MaxD(out, a, b);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimFloat);
      __ CmpUnS(FTMP, a, b);
      __ Bc1eqz(FTMP, &noNaNs);

      // One of the inputs is a NaN
      __ CmpEqS(ftmp, a, a);
      // If a == a then b is the NaN, otherwise a is the NaN.
      __ SelS(ftmp, a, b);

      if (ftmp != out) {
        __ MovS(out, ftmp);
      }

      __ B(&done);

      __ Bind(&noNaNs);

      if (is_min) {
        __ MinS(out, a, b);
      } else {
        __ MaxS(out, a, b);
      }
    }

    __ Bind(&done);
  } else {
    MipsLabel ordered;
    MipsLabel compare;
    MipsLabel select;
    MipsLabel done;

    if (type == Primitive::kPrimDouble) {
      __ CunD(a, b);
    } else {
      DCHECK_EQ(type, Primitive::kPrimFloat);
      __ CunS(a, b);
    }
    __ Bc1f(&ordered);

    // a or b (or both) is a NaN. Return one, which is a NaN.
    if (type == Primitive::kPrimDouble) {
      __ CeqD(b, b);
    } else {
      __ CeqS(b, b);
    }
    __ B(&select);

    __ Bind(&ordered);

    // Neither is a NaN.
    // a == b? (-0.0 compares equal with +0.0)
    // If equal, handle zeroes, else compare further.
    if (type == Primitive::kPrimDouble) {
      __ CeqD(a, b);
    } else {
      __ CeqS(a, b);
    }
    __ Bc1f(&compare);

    // a == b either bit for bit or one is -0.0 and the other is +0.0.
    if (type == Primitive::kPrimDouble) {
      __ MoveFromFpuHigh(TMP, a);
      __ MoveFromFpuHigh(AT, b);
    } else {
      __ Mfc1(TMP, a);
      __ Mfc1(AT, b);
    }

    if (is_min) {
      // -0.0 prevails over +0.0.
      __ Or(TMP, TMP, AT);
    } else {
      // +0.0 prevails over -0.0.
      __ And(TMP, TMP, AT);
    }

    if (type == Primitive::kPrimDouble) {
      __ Mfc1(AT, a);
      __ Mtc1(AT, out);
      __ MoveToFpuHigh(TMP, out);
    } else {
      __ Mtc1(TMP, out);
    }
    __ B(&done);

    __ Bind(&compare);

    if (type == Primitive::kPrimDouble) {
      if (is_min) {
        // return (a <= b) ? a : b;
        __ ColeD(a, b);
      } else {
        // return (a >= b) ? a : b;
        __ ColeD(b, a);  // b <= a
      }
    } else {
      if (is_min) {
        // return (a <= b) ? a : b;
        __ ColeS(a, b);
      } else {
        // return (a >= b) ? a : b;
        __ ColeS(b, a);  // b <= a
      }
    }

    __ Bind(&select);

    // cc true selects a, cc false selects b (covers the NaN path too).
    if (type == Primitive::kPrimDouble) {
      __ MovtD(out, a);
      __ MovfD(out, b);
    } else {
      __ MovtS(out, a);
      __ MovfS(out, b);
    }

    __ Bind(&done);
  }
}

// Location summary for two-FPU-register-input, one-FPU-register-output
// intrinsics. Output overlap is allowed; GenMinMaxFP copes with aliasing.
static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kOutputOverlap);
}

// double java.lang.Math.min(double, double)
void IntrinsicLocationsBuilderMIPS::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              Primitive::kPrimDouble,
              IsR6(),
              GetAssembler());
}

// float java.lang.Math.min(float, float)
void IntrinsicLocationsBuilderMIPS::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              Primitive::kPrimFloat,
              IsR6(),
              GetAssembler());
}

// double java.lang.Math.max(double, double)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              Primitive::kPrimDouble,
              IsR6(),
              GetAssembler());
}

// float java.lang.Math.max(float, float)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              Primitive::kPrimFloat,
              IsR6(),
              GetAssembler());
}

// Location summary for two-core-register-input, one-core-register-output
// intrinsics (integer min/max below).
static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Primitive::Type type,
                      bool is_R6,
                      MipsAssembler* assembler) {
  if (is_R6) {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, MOVN, and MOVZ). The SELEQZ and SELNEZ instructions
    // always change the target (output) register. If the condition is
    // true the output register gets the contents of the "rs" register;
    // otherwise, the output register is set to zero. One consequence
    // of this is that to implement something like "rd = c==0 ? rs : rt"
    // MIPS64r6 needs to use a pair of SELEQZ/SELNEZ instructions.
    // After executing this pair of instructions one of the output
    // registers from the pair will necessarily contain zero. Then the
    // code ORs the output registers from the SELEQZ/SELNEZ instructions
    // to get the final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (type == Primitive::kPrimLong) {
      Register a_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register a_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register b_lo = locations->InAt(1).AsRegisterPairLow<Register>();
      Register b_hi = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register out_lo = locations->Out().AsRegisterPairLow<Register>();
      Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

      MipsLabel compare_done;

      if (a_lo == b_lo) {
        // Same register pair for both inputs: min == max == a.
        if (out_lo != a_lo) {
          __ Move(out_lo, a_lo);
          __ Move(out_hi, a_hi);
        }
      } else {
        // TMP = (b < a), comparing high words (signed) first and falling
        // back to the low words (unsigned) only when the high words tie.
        __ Slt(TMP, b_hi, a_hi);
        __ Bne(b_hi, a_hi, &compare_done);

        __ Sltu(TMP, b_lo, a_lo);

        __ Bind(&compare_done);

        if (is_min) {
          __ Seleqz(AT, a_lo, TMP);
          __ Selnez(out_lo, b_lo, TMP);  // Safe even if out_lo == a_lo/b_lo
                                         // because at this point we're
                                         // done using a_lo/b_lo.
        } else {
          __ Selnez(AT, a_lo, TMP);
          __ Seleqz(out_lo, b_lo, TMP);  // ditto
        }
        __ Or(out_lo, out_lo, AT);
        if (is_min) {
          __ Seleqz(AT, a_hi, TMP);
          __ Selnez(out_hi, b_hi, TMP);  // ditto but for out_hi & a_hi/b_hi
        } else {
          __ Selnez(AT, a_hi, TMP);
          __ Seleqz(out_hi, b_hi, TMP);  // ditto but for out_hi & a_hi/b_hi
        }
        __ Or(out_hi, out_hi, AT);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimInt);
      Register a = locations->InAt(0).AsRegister<Register>();
      Register b = locations->InAt(1).AsRegister<Register>();
      Register out = locations->Out().AsRegister<Register>();

      if (a == b) {
        if (out != a) {
          __ Move(out, a);
        }
      } else {
        // AT = (b < a); exactly one of TMP/AT below ends up zero, so the
        // final OR yields the selected operand.
        __ Slt(AT, b, a);
        if (is_min) {
          __ Seleqz(TMP, a, AT);
          __ Selnez(AT, b, AT);
        } else {
          __ Selnez(TMP, a, AT);
          __ Seleqz(AT, b, AT);
        }
        __ Or(out, TMP, AT);
      }
    }
  } else {  // Pre-R6: use MOVN/MOVZ conditional moves.
    if (type == Primitive::kPrimLong) {
      Register a_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register a_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register b_lo = locations->InAt(1).AsRegisterPairLow<Register>();
      Register b_hi = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register out_lo = locations->Out().AsRegisterPairLow<Register>();
      Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

      MipsLabel compare_done;

      if (a_lo == b_lo) {
        if (out_lo != a_lo) {
          __ Move(out_lo, a_lo);
          __ Move(out_hi, a_hi);
        }
      } else {
        // TMP = (a < b), high words signed, low words unsigned on tie.
        __ Slt(TMP, a_hi, b_hi);
        __ Bne(a_hi, b_hi, &compare_done);

        __ Sltu(TMP, a_lo, b_lo);

        __ Bind(&compare_done);

        // The out != a / out != b guards below keep the not-yet-consumed
        // input from being clobbered when it aliases the output pair.
        if (is_min) {
          if (out_lo != a_lo) {
            __ Movn(out_hi, a_hi, TMP);
            __ Movn(out_lo, a_lo, TMP);
          }
          if (out_lo != b_lo) {
            __ Movz(out_hi, b_hi, TMP);
            __ Movz(out_lo, b_lo, TMP);
          }
        } else {
          if (out_lo != a_lo) {
            __ Movz(out_hi, a_hi, TMP);
            __ Movz(out_lo, a_lo, TMP);
          }
          if (out_lo != b_lo) {
            __ Movn(out_hi, b_hi, TMP);
            __ Movn(out_lo, b_lo, TMP);
          }
        }
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimInt);
      Register a = locations->InAt(0).AsRegister<Register>();
      Register b = locations->InAt(1).AsRegister<Register>();
      Register out = locations->Out().AsRegister<Register>();

      if (a == b) {
        if (out != a) {
          __ Move(out, a);
        }
      } else {
        __ Slt(AT, a, b);
        if (is_min) {
          if (out != a) {
            __ Movn(out, a, AT);
          }
          if (out != b) {
            __ Movz(out, b, AT);
          }
        } else {
          if (out != a) {
            __ Movz(out, a, AT);
          }
          if (out != b) {
            __ Movn(out, b, AT);
          }
        }
      }
    }
  }
}

// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ true,
            Primitive::kPrimInt,
            IsR6(),
            GetAssembler());
}

// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ true,
            Primitive::kPrimLong,
            IsR6(),
            GetAssembler());
}

// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ false,
            Primitive::kPrimInt,
            IsR6(),
            GetAssembler());
}

// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ false,
            Primitive::kPrimLong,
            IsR6(),
            GetAssembler());
}

// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MipsAssembler* assembler = GetAssembler();
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister out = locations->Out().AsFpuRegister<FRegister>();

  // Single hardware instruction; no special-casing needed.
  __ SqrtD(out, in);
}

// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekByte(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  // The 64-bit address is in a register pair; only the low 32 bits are
  // usable on a 32-bit processor.
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  __ Lb(out, adr, 0);
}

// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekShortNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  if (IsR6()) {
    // R6 hardware handles unaligned halfword loads directly.
    __ Lh(out, adr, 0);
  } else if (IsR2OrNewer()) {
    // Unlike for words, there are no lhl/lhr instructions to load
    // unaligned halfwords so the code loads individual bytes, in case
    // the address isn't halfword-aligned, and assembles them into a
    // signed halfword.
    __ Lb(AT, adr, 1);   // This byte must be sign-extended.
    __ Lb(out, adr, 0);  // This byte can be either sign-extended, or
                         // zero-extended because the following
                         // instruction overwrites the sign bits.
    __ Ins(out, AT, 8, 24);
  } else {
    // Pre-R2: no INS instruction, so combine the bytes with shift + or.
    __ Lbu(AT, adr, 0);  // This byte must be zero-extended. If it's not
                         // the "or" instruction below will destroy the upper
                         // 24 bits of the final result.
    __ Lb(out, adr, 1);  // This byte must be sign-extended.
    __ Sll(out, out, 8);
    __ Or(out, out, AT);
  }
}

// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekIntNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  if (IsR6()) {
    __ Lw(out, adr, 0);
  } else {
    // Pre-R6: LWR/LWL pair performs a possibly-unaligned word load.
    __ Lwr(out, adr, 0);
    __ Lwl(out, adr, 3);
  }
}

// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekLongNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out_lo =
      invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register out_hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();

  if (IsR6()) {
    __ Lw(out_lo, adr, 0);
    __ Lw(out_hi, adr, 4);
  } else {
    // Pre-R6: LWR/LWL pairs perform possibly-unaligned word loads.
    __ Lwr(out_lo, adr, 0);
    __ Lwl(out_lo, adr, 3);
    __ Lwr(out_hi, adr, 4);
    __ Lwl(out_hi, adr, 7);
  }
}

// Locations helper for (address, value) -> void poke intrinsics.
static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeByte(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();

  __ Sb(val, adr, 0);
}

// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeShortNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();

  if (IsR6()) {
    __ Sh(val, adr, 0);
  } else {
    // Unlike for words, there are no shl/shr instructions to store
    // unaligned halfwords so the code stores individual bytes, in case
    // the address isn't halfword-aligned.
    __ Sb(val, adr, 0);
    __ Srl(AT, val, 8);
    __ Sb(AT, adr, 1);
  }
}

// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeIntNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();

  if (IsR6()) {
    __ Sw(val, adr, 0);
  } else {
    // Pre-R6: SWR/SWL pair performs a possibly-unaligned word store.
    __ Swr(val, adr, 0);
    __ Swl(val, adr, 3);
  }
}

// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeLongNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val_lo = invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>();
  Register val_hi = invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>();

  if (IsR6()) {
    __ Sw(val_lo, adr, 0);
    __ Sw(val_hi, adr, 4);
  } else {
    __ Swr(val_lo, adr, 0);
    __ Swl(val_lo, adr, 3);
    __ Swr(val_hi, adr, 4);
    __ Swl(val_hi, adr, 7);
  }
}

// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitThreadCurrentThread(HInvoke* invoke) {
  MipsAssembler*
      assembler = GetAssembler();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  // Load the Java peer object from the art::Thread stored in TR.
  __ LoadFromOffset(kLoadWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMipsPointerSize>().Int32Value());
}

// Locations helper for Unsafe.get* intrinsics:
// (unused receiver, object, long offset) -> value.
static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call =
      invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
      invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile;
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Emits the load for Unsafe.getInt/getLong/getObject (and volatile
// variants). Clobbers TMP with the effective address base + offset.
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_R6,
                         CodeGeneratorMIPS* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot)) << type;
  MipsAssembler* assembler = codegen->GetAssembler();
  // Object pointer.
  Register base = locations->InAt(1).AsRegister<Register>();
  // The "offset" argument is passed as a "long". Since this code is for
  // a 32-bit processor, we can only use 32-bit addresses, so we only
  // need the low 32-bits of offset.
  Register offset_lo = invoke->GetLocations()->InAt(2).AsRegisterPairLow<Register>();

  __ Addu(TMP, base, offset_lo);
  if (is_volatile) {
    // Memory barrier (MIPS SYNC) before the volatile load.
    __ Sync(0);
  }
  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();

    if (is_R6) {
      __ Lw(trg_lo, TMP, 0);
      __ Lw(trg_hi, TMP, 4);
    } else {
      // Pre-R6: possibly-unaligned loads via LWR/LWL pairs.
      __ Lwr(trg_lo, TMP, 0);
      __ Lwl(trg_lo, TMP, 3);
      __ Lwr(trg_hi, TMP, 4);
      __ Lwl(trg_hi, TMP, 7);
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();

    if (is_R6) {
      __ Lw(trg, TMP, 0);
    } else {
      __ Lwr(trg, TMP, 0);
      __ Lwl(trg, TMP, 3);
    }
  }
}

// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, IsR6(), codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, IsR6(), codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, IsR6(), codegen_);
}

// long sun.misc.Unsafe.getLongVolatile(Object o,
// long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, IsR6(), codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, IsR6(), codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, IsR6(), codegen_);
}

// Locations helper for Unsafe.put* intrinsics:
// (unused receiver, object, long offset, value) -> void.
static void CreateIntIntIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

// Emits the store for Unsafe.putInt/putLong/putObject and their
// ordered/volatile variants. Clobbers TMP with base + offset. Marks the
// GC card after reference stores.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         bool is_R6,
                         CodeGeneratorMIPS* codegen) {
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot)) << type;
  MipsAssembler* assembler = codegen->GetAssembler();
  // Object pointer.
  Register base = locations->InAt(1).AsRegister<Register>();
  // The "offset" argument is passed as a "long", i.e., it's 64-bits in
  // size. Since this code is for a 32-bit processor, we can only use
  // 32-bit addresses, so we only need the low 32-bits of offset.
  Register offset_lo = locations->InAt(2).AsRegisterPairLow<Register>();

  __ Addu(TMP, base, offset_lo);
  if (is_volatile || is_ordered) {
    // Memory barrier (MIPS SYNC) before the store for ordered/volatile
    // semantics.
    __ Sync(0);
  }
  if ((type == Primitive::kPrimInt) || (type == Primitive::kPrimNot)) {
    Register value = locations->InAt(3).AsRegister<Register>();

    if (is_R6) {
      __ Sw(value, TMP, 0);
    } else {
      // Pre-R6: possibly-unaligned store via SWR/SWL pair.
      __ Swr(value, TMP, 0);
      __ Swl(value, TMP, 3);
    }
  } else {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

    if (is_R6) {
      __ Sw(value_lo, TMP, 0);
      __ Sw(value_hi, TMP, 4);
    } else {
      __ Swr(value_lo, TMP, 0);
      __ Swl(value_lo, TMP, 3);
      __ Swr(value_hi, TMP, 4);
      __ Swl(value_hi, TMP, 7);
    }
  }

  if (is_volatile) {
    // Trailing barrier so the volatile store is visible before
    // subsequent memory operations.
    __ Sync(0);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MarkGCCard(base, locations->InAt(3).AsRegister<Register>());
  }
}

// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void
IntrinsicLocationsBuilderMIPS::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void
IntrinsicCodeGeneratorMIPS::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// Locations helper for Unsafe.compareAndSwap* intrinsics:
// (unused receiver, object, long offset, expected, new value) -> boolean.
static void CreateIntIntIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister());
}

// Emits a compare-and-swap loop using LL/SC. 'out' doubles as the LL
// destination, the SC source, and the final boolean result, so it must
// not alias base/offset/expected (enforced by the DCHECKs below).
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorMIPS* codegen) {
  MipsAssembler* assembler = codegen->GetAssembler();
  bool isR6 = codegen->GetInstructionSetFeatures().IsR6();
  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset_lo = locations->InAt(2).AsRegisterPairLow<Register>();
  Register expected = locations->InAt(3).AsRegister<Register>();
  Register value = locations->InAt(4).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  DCHECK_NE(base, out);
  DCHECK_NE(offset_lo, out);
  DCHECK_NE(expected, out);

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    codegen->MarkGCCard(base, value);
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  MipsLabel loop_head, exit_loop;
  __ Addu(TMP, base, offset_lo);
  // Memory barrier (MIPS SYNC) before entering the atomic sequence.
  __ Sync(0);
  __ Bind(&loop_head);
  if ((type == Primitive::kPrimInt) || (type == Primitive::kPrimNot)) {
    if (isR6) {
      __ LlR6(out, TMP);
    } else {
      __ LlR2(out, TMP);
    }
  } else {
    LOG(FATAL) << "Unsupported op size " << type;
    UNREACHABLE();
  }
  __ Subu(out, out, expected);  // If we didn't get the 'expected'
  __ Sltiu(out, out, 1);        // value, set 'out' to false, and
  __ Beqz(out, &exit_loop);     // return.
  __ Move(out, value);  // Use 'out' for the 'store conditional' instruction.
                        // If we use 'value' directly, we would lose 'value'
                        // in the case that the store fails. Whether the
                        // store succeeds, or fails, it will load the
                        // correct boolean value into the 'out' register.
  // This test isn't really necessary. We only support Primitive::kPrimInt,
  // Primitive::kPrimNot, and we already verified that we're working on one
  // of those two types. It's left here in case the code needs to support
  // other types in the future.
  if ((type == Primitive::kPrimInt) || (type == Primitive::kPrimNot)) {
    if (isR6) {
      __ ScR6(out, TMP);
    } else {
      __ ScR2(out, TMP);
    }
  }
  __ Beqz(out, &loop_head);  // If we couldn't do the read-modify-write
                             // cycle atomically then retry.
  __ Bind(&exit_loop);
  // Trailing barrier for the volatile semantics of compareAndSwap.
  __ Sync(0);
}

// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}

// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

// char java.lang.String.charAt(int index)
void IntrinsicLocationsBuilderMIPS::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The inputs will be considered live at the last instruction and restored. This would overwrite
  // the output with kNoOutputOverlap.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MipsAssembler* assembler = GetAssembler();

  // Location of reference to data array
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register idx = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall,
  // though, I think it's not worth the cost.
  // TODO: For simplicity, the index parameter is requested in a
  // register, so different from Quick we will not optimize the
  // code for constants (which would save a register).

  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load the string size
  __ Lw(TMP, obj, count_offset);
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Revert to slow path if idx is too large, or negative.
  // Bgeu is an unsigned compare, so a negative idx also takes the slow path.
  __ Bgeu(idx, TMP, slow_path->GetEntryLabel());

  // out = obj[2*idx].
  __ Sll(TMP, idx, 1);             // idx * 2 (chars are 2 bytes each)
  __ Addu(TMP, TMP, obj);          // Address of char at location idx
  __ Lhu(out, TMP, value_offset);  // Load char at location idx

  __ Bind(slow_path->GetExitLabel());
}

// int java.lang.String.compareTo(String anotherString)
void IntrinsicLocationsBuilderMIPS::VisitStringCompareTo(HInvoke* invoke) {
  // Calls the pStringCompareTo runtime entrypoint, so arguments and the
  // result use the runtime calling convention.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}

void IntrinsicCodeGeneratorMIPS::VisitStringCompareTo(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // A null argument is not handled by the pStringCompareTo entrypoint; defer
  // it to the slow path, which runs the managed implementation.
  Register argument = locations->InAt(1).AsRegister<Register>();
  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqz(argument, slow_path->GetEntryLabel());

  // Load the pStringCompareTo entrypoint from the thread register and call it
  // through T9; the Nop fills the branch delay slot.
  __ LoadFromOffset(kLoadWord,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMipsWordSize,
                                            pStringCompareTo).Int32Value());
  __ Jalr(T9);
  __ Nop();
  // NOTE(review): unlike the other runtime-call intrinsics in this file
  // (e.g. newStringFromBytes), no RecordPcInfo follows this call -- confirm
  // whether stack-map information is needed here.
  __ Bind(slow_path->GetExitLabel());
}

// boolean java.lang.String.equals(Object anObject)
void IntrinsicLocationsBuilderMIPS::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitStringEquals(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();  // Receiver.
  Register arg = locations->InAt(1).AsRegister<Register>();  // Argument.
  Register out = locations->Out().AsRegister<Register>();    // Boolean result.

  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  MipsLabel loop;
  MipsLabel end;
  MipsLabel return_true;
  MipsLabel return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result. (This is a compile-time
  // check on register allocation, so no code is emitted for it.)
  if (str == arg) {
    __ LoadConst32(out, 1);
    return;
  }

  // Check if input is null, return false if it is.
  __ Beqz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Beq(str, arg, &return_true);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Lw(temp1, str, class_offset);
  __ Lw(temp2, arg, class_offset);
  __ Bne(temp1, temp2, &return_false);

  // Load lengths of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if lengths are equal, return false if they're not.
  __ Bne(temp1, temp2, &return_false);
  // Return true if both strings are empty.
  __ Beqz(temp1, &return_true);

  // Don't overwrite input registers
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings 2 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded. temp1 holds the remaining
  // char count; each iteration compares one 32-bit word (two 16-bit chars).
  __ Bind(&loop);
  __ Lw(out, TMP, value_offset);
  __ Lw(temp2, temp3, value_offset);
  __ Bne(out, temp2, &return_false);
  __ Addiu(TMP, TMP, 4);
  __ Addiu(temp3, temp3, 4);
  __ Addiu(temp1, temp1, -2);
  __ Bgtz(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst32(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst32(out, 0);
  __ Bind(&end);
}

// Shared implementation for String.indexOf(ch) (start_at_zero == true) and
// String.indexOf(ch, fromIndex); both end in a call to the pIndexOf stub.
static void GenerateStringIndexOf(HInvoke* invoke,
                                  bool start_at_zero,
                                  MipsAssembler* assembler,
                                  CodeGeneratorMIPS* codegen,
                                  ArenaAllocator* allocator) {
  LocationSummary* locations = invoke->GetLocations();
  // With start_at_zero the temp doubles as the start-index argument register;
  // otherwise the scratch register TMP suffices.
  Register tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<Register>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we
  // don't know statically, or directly dispatch if we have a constant.
  SlowPathCodeMIPS* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
      // Always needs the slow-path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathMIPS(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    // The "bltu" conditional branch tests to see if the character value
    // fits in a valid 16-bit (MIPS halfword) value. If it doesn't then
    // the character being searched for, if it exists in the string, is
    // encoded using UTF-16 and stored in the string as two (16-bit)
    // halfwords. Currently the assembly code used to implement this
    // intrinsic doesn't support searching for a character stored as
    // two halfwords so we fallback to using the generic implementation
    // of indexOf().
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (allocator) IntrinsicSlowPathMIPS(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltu(tmp_reg, char_reg, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  }

  // Call the pIndexOf stub through T9; the Nop fills the branch delay slot.
  // NOTE(review): no RecordPcInfo follows this call, unlike the StringFactory
  // intrinsics below -- confirm whether stack-map info is needed here.
  __ LoadFromOffset(kLoadWord,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMipsWordSize, pIndexOf).Int32Value());
  __ Jalr(T9);
  __ Nop();

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

// int java.lang.String.indexOf(int ch)
void IntrinsicLocationsBuilderMIPS::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorMIPS::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke,
                        /* start_at_zero */ true,
                        GetAssembler(),
                        codegen_,
                        GetAllocator());
}

// int java.lang.String.indexOf(int ch, int fromIndex)
void IntrinsicLocationsBuilderMIPS::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(invoke,
                        /* start_at_zero */ false,
                        GetAssembler(),
                        codegen_,
                        GetAllocator());
}

// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromBytes(HInvoke* invoke) {
  // Runtime call: all four arguments and the result follow the runtime
  // calling convention.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}

void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromBytes(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // A null byte array is handed off to the slow path (managed code).
  Register byte_array = locations->InAt(0).AsRegister<Register>();
  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqz(byte_array, slow_path->GetEntryLabel());

  // Call pAllocStringFromBytes through T9; Nop fills the branch delay slot.
  __ LoadFromOffset(kLoadWord,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMipsWordSize, pAllocStringFromBytes).Int32Value());
  __ Jalr(T9);
  __ Nop();
  // Record the call PC so the runtime can map it back to this dex instruction.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}

void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromChars(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.

  __ LoadFromOffset(kLoadWord,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMipsWordSize, pAllocStringFromChars).Int32Value());
  __ Jalr(T9);
  __ Nop();
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

// java.lang.StringFactory.newStringFromString(String toCopy)
void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}

void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromString(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // A null source string is handed off to the slow path (managed code).
  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqz(string_to_copy, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadWord,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMipsWordSize, pAllocStringFromString).Int32Value());
  __ Jalr(T9);
  __ Nop();
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

// Emits code setting `out` to 1 if the FP input is +/-infinity, else 0.
static void GenIsInfinite(LocationSummary* locations,
                          const Primitive::Type type,
                          const bool isR6,
                          MipsAssembler* assembler) {
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  Register out = locations->Out().AsRegister<Register>();

  DCHECK(type == Primitive::kPrimFloat || type == Primitive::kPrimDouble);

  if (isR6) {
    // R6 provides CLASS.fmt, which classifies the operand directly.
    if (type ==
        Primitive::kPrimDouble) {
      __ ClassD(FTMP, in);
    } else {
      __ ClassS(FTMP, in);
    }
    // out = (class bits for +inf or -inf are set) ? 1 : 0.
    __ Mfc1(out, FTMP);
    __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
    __ Sltu(out, ZERO, out);
  } else {
    // If one, or more, of the exponent bits is zero, then the number can't be infinite.
    if (type == Primitive::kPrimDouble) {
      __ MoveFromFpuHigh(TMP, in);
      __ LoadConst32(AT, 0x7FF00000);  // Double exponent mask (high word).
    } else {
      __ Mfc1(TMP, in);
      __ LoadConst32(AT, 0x7F800000);  // Float exponent mask.
    }
    // XOR with the mask: the exponent bits of TMP are now zero iff they were
    // all ones, while the sign/significand bits are unchanged.
    __ Xor(TMP, TMP, AT);

    // Shift the sign bit out so it cannot affect the final test.
    __ Sll(TMP, TMP, 1);

    if (type == Primitive::kPrimDouble) {
      // Fold in the low word of the double: any set bit means "not infinite".
      __ Mfc1(AT, in);
      __ Or(TMP, TMP, AT);
    }
    // If any of the significand bits are one, then the number is not infinite.
    __ Sltiu(out, TMP, 1);  // out = (TMP == 0).
  }
}

// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), Primitive::kPrimFloat, IsR6(), GetAssembler());
}

// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), Primitive::kPrimDouble, IsR6(), GetAssembler());
}

// Emits code computing Integer/Long.highestOneBit: keeps only the most
// significant set bit of the input (zero input yields zero). Technique:
// count leading zeros, shift 0x80000000 right by that count, and mask.
static void GenHighestOneBit(LocationSummary* locations,
                             const Primitive::Type type,
                             bool isR6,
                             MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    // out_hi = highest set bit of in_hi.
    if (isR6) {
      __ ClzR6(TMP, in_hi);
    } else {
      __ ClzR2(TMP, in_hi);
    }
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(out_hi, AT, TMP);
    __ And(out_hi, out_hi, in_hi);
    // out_lo = highest set bit of in_lo (AT still holds 0x80000000).
    if (isR6) {
      __ ClzR6(TMP, in_lo);
    } else {
      __ ClzR2(TMP, in_lo);
    }
    __ Srlv(out_lo, AT, TMP);
    __ And(out_lo, out_lo, in_lo);
    // If the high word has any set bit, the low-word result must be zero.
    if (isR6) {
      __ Seleqz(out_lo, out_lo, out_hi);
    } else {
      __ Movn(out_lo, ZERO, out_hi);
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR6) {
      __ ClzR6(TMP, in);
    } else {
      __ ClzR2(TMP, in);
    }
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(AT, AT, TMP);  // Srlv shifts in the range of [0;31] bits (lower 5 bits of arg).
    __ And(out, AT, in);   // So this is required for 0 (=shift by 32).
  }
}

// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimInt, IsR6(), GetAssembler());
}

// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS::VisitLongHighestOneBit(HInvoke* invoke) {
  // kOutputOverlap: GenHighestOneBit writes out_hi before it reads in_lo, so
  // the output pair must not share registers with the input pair.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimLong, IsR6(), GetAssembler());
}

// Emits code computing Integer/Long.lowestOneBit via the x & -x identity.
static void GenLowestOneBit(LocationSummary* locations,
                            const Primitive::Type type,
                            bool isR6,
                            MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    // Per-word x & -x.
    __ Subu(TMP, ZERO, in_lo);
    __ And(out_lo, TMP, in_lo);
    __ Subu(TMP, ZERO, in_hi);
    __ And(out_hi, TMP, in_hi);
    // The high-word bit only counts when the low word is entirely zero.
    if (isR6) {
      __ Seleqz(out_hi, out_hi, out_lo);
    } else {
      __ Movn(out_hi, ZERO, out_lo);
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    __ Subu(TMP, ZERO, in);  // TMP = -in.
    __ And(out, TMP, in);    // out = in & -in.
  }
}

// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimInt, IsR6(), GetAssembler());
}

// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimLong, IsR6(), GetAssembler());
}

// Unimplemented intrinsics. These calls take the generic (non-intrinsified)
// invoke path on MIPS -- see the UNIMPLEMENTED_INTRINSIC macro definition
// for the exact expansion.

UNIMPLEMENTED_INTRINSIC(MIPS, MathCeil)
UNIMPLEMENTED_INTRINSIC(MIPS, MathFloor)
UNIMPLEMENTED_INTRINSIC(MIPS, MathRint)
UNIMPLEMENTED_INTRINSIC(MIPS, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MIPS, MathRoundFloat)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeCASLong)

UNIMPLEMENTED_INTRINSIC(MIPS, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(MIPS, StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(MIPS, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(MIPS, SystemArrayCopy)

UNIMPLEMENTED_INTRINSIC(MIPS, MathCos)
UNIMPLEMENTED_INTRINSIC(MIPS, MathSin)
UNIMPLEMENTED_INTRINSIC(MIPS, MathAcos)
UNIMPLEMENTED_INTRINSIC(MIPS, MathAsin)
UNIMPLEMENTED_INTRINSIC(MIPS, MathAtan)
UNIMPLEMENTED_INTRINSIC(MIPS, MathAtan2)
UNIMPLEMENTED_INTRINSIC(MIPS, MathCbrt)
UNIMPLEMENTED_INTRINSIC(MIPS, MathCosh)
UNIMPLEMENTED_INTRINSIC(MIPS, MathExp)
UNIMPLEMENTED_INTRINSIC(MIPS, MathExpm1)
UNIMPLEMENTED_INTRINSIC(MIPS, MathHypot)
UNIMPLEMENTED_INTRINSIC(MIPS, MathLog)
UNIMPLEMENTED_INTRINSIC(MIPS, MathLog10)
UNIMPLEMENTED_INTRINSIC(MIPS, MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MIPS, MathSinh)
UNIMPLEMENTED_INTRINSIC(MIPS, MathTan)
UNIMPLEMENTED_INTRINSIC(MIPS, MathTanh)

// 1.8 (Java 8 Unsafe get-and-update operations).
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(MIPS)

#undef __

}  // namespace mips
}  // namespace art