/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_mips64.h"

#include "arch/mips64/instruction_set_features_mips64.h"
#include "art_method.h"
#include "code_generator_mips64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/mips64/assembler_mips64.h"
#include "utils/mips64/constants_mips64.h"

namespace art {

namespace mips64 {

IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
  : arena_(codegen->GetGraph()->GetArena()) {
}

Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
  return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorMIPS64* codegen) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
    if (trg_reg != V0) {
      __ Move(trg_reg, V0);
    }
  } else {
    FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
    if (trg_reg != F0) {
      if (type == Primitive::kPrimFloat) {
        __ MovS(trg_reg, F0);
      } else {
        __ MovD(trg_reg, F0);
      }
    }
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
     : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};

#undef __

bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  FpuRegister in  = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dmfc1(out, in);
  } else {
    __ Mfc1(out, in);
  }
}

// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in  = locations->InAt(0).AsRegister<GpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is64bit) {
    __ Dmtc1(in, out);
  } else {
    __ Mtc1(in, out);
  }
}

// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            Mips64Assembler* assembler) {
  GpuRegister in  = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  switch (type) {
    case Primitive::kPrimShort:
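      // DSBH swaps the bytes within each 16-bit halfword; SEH then
      // sign-extends the byte-swapped low halfword to produce the short.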
      __ Dsbh(out, in);
      __ Seh(out, out);
      break;
    case Primitive::kPrimInt:
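      // ROTR by 16 swaps the two halfwords of the word; WSBH then swaps
      // the bytes within each halfword, completing the 32-bit byte reversal.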
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
      break;
    case Primitive::kPrimLong:
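      // DSBH swaps the bytes within each halfword; DSHD then reverses the
      // order of the four halfwords, completing the 64-bit byte reversal.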
      __ Dsbh(out, in);
      __ Dshd(out, out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void GenNumberOfLeadingZeroes(LocationSummary* locations,
                                     bool is64bit,
                                     Mips64Assembler* assembler) {
  GpuRegister in  = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dclz(out, in);
  } else {
    __ Clz(out, in);
  }
}

// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

static void GenNumberOfTrailingZeroes(LocationSummary* locations,
                                      bool is64bit,
                                      Mips64Assembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

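  // MIPS64R6 has no count-trailing-zeros instruction, so the value is fully
  // bit-reversed first (a byte reversal followed by BITSWAP/DBITSWAP, which
  // reverses the bits within each byte) and the leading zeros of the result
  // are counted instead.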
  if (is64bit) {
    __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
    __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  } else {
    __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
    __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  }
}

// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  GpuRegister in  = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

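  // A full bit reversal is composed from a byte reversal (ROTR+WSBH for
  // words, DSBH+DSHD for doublewords) followed by BITSWAP/DBITSWAP, which
  // reverses the bits within each individual byte.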
  if (type == Primitive::kPrimInt) {
    __ Rotr(out, in, 16);
    __ Wsbh(out, out);
    __ Bitswap(out, out);
  } else {
    __ Dsbh(out, in);
    __ Dshd(out, out);
    __ Dbitswap(out, out);
  }
}

// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

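  // On MIPSR6, ABS.fmt operates in IEEE 754-2008 (ABS2008) mode: it simply
  // clears the sign bit, which matches Math.abs semantics for NaNs and
  // signed zeros.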
  if (is64bit) {
    __ AbsD(out, in);
  } else {
    __ AbsS(out, in);
  }
}

// double java.lang.Math.abs(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Math.abs(float)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in  = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

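  // Branchless abs: AT is filled with the sign bit (0 for non-negative
  // inputs, -1 for negative ones), and abs(in) = (in ^ AT) - AT. For
  // example, with in = -5: AT = -1, (-5 ^ -1) = 4, and 4 - (-1) = 5.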
  if (is64bit) {
    __ Dsra32(AT, in, 31);
    __ Xor(out, in, AT);
    __ Dsubu(out, out, AT);
  } else {
    __ Sra(AT, in, 31);
    __ Xor(out, in, AT);
    __ Subu(out, out, AT);
  }
}

// int java.lang.Math.abs(int)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// long java.lang.Math.abs(long)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        Primitive::Type type,
                        Mips64Assembler* assembler) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == Primitive::kPrimDouble) {
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
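    // SEL.D uses bit 0 of the destination as the condition: ftmp is all
    // ones when a == a (so ft = b, the NaN, is picked) and all zeros when
    // a is the NaN (so fs = a is picked); the NaN is selected either way.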
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimFloat);
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

// double java.lang.Math.min(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimDouble, GetAssembler());
}

// float java.lang.Math.min(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimFloat, GetAssembler());
}

// double java.lang.Math.max(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimDouble, GetAssembler());
}

// float java.lang.Math.max(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimFloat, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Mips64Assembler* assembler) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (lhs == rhs) {
    if (out != lhs) {
      __ Move(out, lhs);
    }
  } else {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
    // change the target (output) register.  If the condition is true the
    // output register gets the contents of the "rs" register; otherwise,
    // the output register is set to zero. One consequence of this is
    // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
    // needs to use a pair of SELEQZ/SELNEZ instructions.  After
    // executing this pair of instructions one of the output registers
    // from the pair will necessarily contain zero. Then the code ORs the
    // output registers from the SELEQZ/SELNEZ instructions to get the
    // final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (out == lhs) {
      __ Slt(AT, rhs, lhs);
      if (is_min) {
        __ Seleqz(out, lhs, AT);
        __ Selnez(AT, rhs, AT);
      } else {
        __ Selnez(out, lhs, AT);
        __ Seleqz(AT, rhs, AT);
      }
    } else {
      __ Slt(AT, lhs, rhs);
      if (is_min) {
        __ Seleqz(out, rhs, AT);
        __ Selnez(AT, lhs, AT);
      } else {
        __ Selnez(out, rhs, AT);
        __ Seleqz(AT, lhs, AT);
      }
    }
    __ Or(out, out, AT);
  }
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ SqrtD(out, in);
}

static void CreateFPToFP(ArenaAllocator* arena,
                         HInvoke* invoke,
                         Location::OutputOverlap overlaps = Location::kOutputOverlap) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), overlaps);
}

// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke, Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ RintD(out, in);
}

// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke);
}

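// CLASS.D classifies its operand and sets one result bit per class; for
// operands in any of the classes below, floor/ceil must return the input
// unchanged.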
constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
                                       kPositiveInfinity |
                                       kNegativeZero |
                                       kNegativeInfinity |
                                       kQuietNaN |
                                       kSignalingNaN;

enum FloatRoundingMode {
  kFloor,
  kCeil,
};

static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  DCHECK_NE(in, out);

  Mips64Label done;

  // double floor/ceil(double in) {
  //     if in.isNaN || in.isInfinite || in.isZero {
  //         return in;
  //     }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);   // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //     Long outLong = floor/ceil(in);
  //     if outLong == Long.MAX_VALUE {
  //         // floor()/ceil() has almost certainly returned a value
  //         // which can't be successfully represented as a signed
  //         // 64-bit number.  Java expects that the input value will
  //         // be returned in these cases.
  //         // There is also a small probability that floor(in)/ceil(in)
  //         // correctly truncates/rounds up the input value to
  //         // Long.MAX_VALUE.  In that case, this exception handling
  //         // code still does the correct thing.
  //         return in;
  //     }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ LoadConst64(TMP, kPrimLongMax);
  __ Beqc(AT, TMP, &done);

  //     double out = outLong;
  //     return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}

void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}

// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}

// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lb(out, adr, 0);
}

// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lh(out, adr, 0);
}

// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lw(out, adr, 0);
}

// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Ld(out, adr, 0);
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sb(val, adr, 0);
}

// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sh(val, adr, 0);
}

// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sw(val, adr, 0);
}

// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sd(val, adr, 0);
}

// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

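  // TR holds the current Thread*. The thread's Java peer (the
  // java.lang.Thread object) is a 32-bit heap reference, hence the
  // zero-extending word load.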
  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister trg = locations->Out().AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
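  // For a volatile get, SYNC with stype 0 (a full memory barrier) keeps the
  // load from being reordered with the surrounding accesses.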
  if (is_volatile) {
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
      __ Lw(trg, TMP, 0);
      break;

    case Primitive::kPrimNot:
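      // Heap references are 32 bits in ART even on 64-bit targets, so
      // references are loaded with a zero-extending Lwu.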
      __ Lwu(trg, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Ld(trg, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}

// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}

// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
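  // Ordered and volatile stores are preceded by a full barrier (SYNC 0) so
  // that earlier writes become visible before the store; volatile stores
  // are also followed by a second barrier below.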
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      __ Sw(value, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    __ Sync(0);
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}

// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}

// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}

// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister());
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorMIPS64* codegen) {
  Mips64Assembler* assembler = codegen->GetAssembler();
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  DCHECK_NE(base, out);
  DCHECK_NE(offset, out);
  DCHECK_NE(expected, out);

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  Mips64Label loop_head, exit_loop;
  __ Daddu(TMP, base, offset);
  __ Sync(0);
  __ Bind(&loop_head);
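  // Lld/Ll perform a load-linked; the matching Scd/Sc store succeeds (and
  // writes 1 to its register) only if the location has not been written in
  // the meantime, otherwise it writes 0 and the loop retries.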
   1235   if (type == Primitive::kPrimLong) {
   1236     __ Lld(out, TMP);
   1237   } else {
   1238     // Note: We will need a read barrier here, when read barrier
   1239     // support is added to the MIPS64 back end.
   1240     __ Ll(out, TMP);
   1241   }
   1242   __ Dsubu(out, out, expected);         // If we didn't get the 'expected'
   1243   __ Sltiu(out, out, 1);                // value, set 'out' to false, and
   1244   __ Beqzc(out, &exit_loop);            // return.
   1245   __ Move(out, value);  // Use 'out' for the 'store conditional' instruction.
   1246                         // If we use 'value' directly, we would lose 'value'
   1247                         // in the case that the store fails.  Whether the
   1248                         // store succeeds, or fails, it will load the
   1249                         // correct boolean value into the 'out' register.
   1250   if (type == Primitive::kPrimLong) {
   1251     __ Scd(out, TMP);
   1252   } else {
   1253     __ Sc(out, TMP);
   1254   }
   1255   __ Beqzc(out, &loop_head);    // If we couldn't do the read-modify-write
   1256                                 // cycle atomically then retry.
   1257   __ Bind(&exit_loop);
   1258   __ Sync(0);
   1259 }
   1260 
// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}

// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}

// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

// char java.lang.String.charAt(int index)
void IntrinsicLocationsBuilderMIPS64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicCodeGeneratorMIPS64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();

  // Location of the reference to the data array.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of the count field.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister idx = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // TODO: Maybe we can support range check elimination. Overall,
  //       though, it is probably not worth the cost.
  // TODO: For simplicity, the index parameter is requested in a
  //       register, so, unlike Quick, we do not optimize the code
  //       for constants (which would save a register).

  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load the string size.
  __ Lw(TMP, obj, count_offset);
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Branch to the slow path if idx is out of range, i.e. too large or negative.
  __ Bgeuc(idx, TMP, slow_path->GetEntryLabel());

  // out = obj[2*idx].
  __ Sll(TMP, idx, 1);                  // idx * 2
  __ Daddu(TMP, TMP, obj);              // Address of the char at index idx.
  __ Lhu(out, TMP, value_offset);       // Load the char at index idx.

  __ Bind(slow_path->GetExitLabel());
}

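// For reference, the fast path of VisitStringCharAt() above is equivalent to
// the following C-level sketch (illustrative only). A single unsigned
// comparison covers both failure modes, because a negative int32_t index
// becomes a very large uint32_t value:
//
//   if ((uint32_t)idx >= (uint32_t)count) goto slow_path;    // idx < 0 or idx >= count
//   out = *(const uint16_t*)(obj + value_offset + 2 * idx);  // zero-extending load (Lhu)
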
// int java.lang.String.compareTo(String anotherString)
void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(argument, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize, pStringCompareTo).Int32Value());
  __ Jalr(T9);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}

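// For reference, the call sequence above follows the pattern used throughout
// this file for runtime entrypoints: the entrypoint pointer is loaded from
// the Thread object (register TR) into T9, and the call goes through T9,
// which the MIPS ABI expects to hold the callee's address. Roughly, as a
// sketch (the field access is illustrative only):
//
//   t9 = self->quick_entrypoints.pStringCompareTo;
//   v0 = (*t9)(this_string, anotherString);  // Arguments are already in A0/A1.
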
// boolean java.lang.String.equals(Object anObject)
void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this" and the register
  // containing the pointer to "anObject" are the same register, then
  // "this" and "anObject" are the same object, and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  // Check if input is null, return false if it is.
  __ Beqzc(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Lw(temp1, str, class_offset);
  __ Lw(temp2, arg, class_offset);
  __ Bnec(temp1, temp2, &return_false);

  // Load lengths of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if lengths are equal, return false if they're not.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty.
  __ Beqzc(temp1, &return_true);

  // Do not overwrite the input registers.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop comparing the strings 4 characters at a time, starting at the
  // beginning of the string. This is safe because strings are zero-padded
  // up to an 8-byte-aligned size.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  __ Addiu(temp1, temp1, -4);
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function. If the loop did not find a
  // difference, the strings are equal.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}

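// For reference, the comparison loop in VisitStringEquals() above is
// equivalent to the following C-level sketch (illustrative only): the
// strings are compared in 8-byte chunks, i.e. four UTF-16 chars per
// iteration, relying on the zero padding noted above for lengths that
// are not multiples of four.
//
//   const uint64_t* a = (const uint64_t*)(str + value_offset);
//   const uint64_t* b = (const uint64_t*)(arg + value_offset);
//   for (int32_t n = count; n > 0; n -= 4) {
//     if (*a++ != *b++) return false;
//   }
//   return true;
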
static void GenerateStringIndexOf(HInvoke* invoke,
                                  Mips64Assembler* assembler,
                                  CodeGeneratorMIPS64* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either emit a run-time slow-path check
  // when the value is not known statically, or dispatch directly to the
  // slow path if we have an out-of-range constant.
  SlowPathCodeMIPS64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
      // This always needs the slow path. We could dispatch to it directly,
      // but this case should be rare, so for simplicity just emit the
      // full slow path and branch to it unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
      codegen->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel());    // Not a valid UTF-16 code unit.
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  }

  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize, pIndexOf).Int32Value());
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
  __ Jalr(T9);
  __ Nop();

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

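// For reference, both indexOf intrinsics below funnel into the pIndexOf
// entrypoint through GenerateStringIndexOf(); a C-level sketch of the
// dispatch (illustrative only):
//
//   // indexOf(ch):            fromIndex = 0 (A2 is cleared above).
//   // indexOf(ch, fromIndex): fromIndex is already in A2.
//   if ((uint32_t)ch > 0xFFFF) return SlowPath(str, ch, fromIndex);  // Supplementary code point.
//   return pIndexOf(str, ch, fromIndex);
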
// int java.lang.String.indexOf(int ch)
void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention, so it is best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));

  // Need a temp for the slow-path code-point compare, and to pass start-index = 0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

// int java.lang.String.indexOf(int ch, int fromIndex)
void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention, so it is best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize,
                                            pAllocStringFromBytes).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Jalr(T9);
  __ Nop();
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize,
                                            pAllocStringFromChars).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ Jalr(T9);
  __ Nop();
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

// java.lang.StringFactory.newStringFromString(String toCopy)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize,
                                            pAllocStringFromString).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Jalr(T9);
  __ Nop();
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

static void GenIsInfinite(LocationSummary* locations,
                          bool is64bit,
                          Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ ClassD(FTMP, in);
  } else {
    __ ClassS(FTMP, in);
  }
  __ Mfc1(out, FTMP);
  __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
  __ Sltu(out, ZERO, out);
}

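// For reference, CLASS.S/CLASS.D (ClassS/ClassD above) produce a bitmask
// classifying the input (zero, subnormal, normal, infinity, NaN, in signed
// variants). GenIsInfinite() reduces that mask to a boolean; a C-level
// sketch (fclass is an illustrative stand-in for the instruction):
//
//   uint32_t mask = fclass(in);  // ClassS/ClassD result, moved to a GPR by Mfc1.
//   bool out = (mask & (kPositiveInfinity | kNegativeInfinity)) != 0;  // Andi + Sltu.
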
// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

UNIMPLEMENTED_INTRINSIC(MIPS64, IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(MIPS64, LongBitCount)

UNIMPLEMENTED_INTRINSIC(MIPS64, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathRoundFloat)

UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(MIPS64, StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)

UNIMPLEMENTED_INTRINSIC(MIPS64, MathCos)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathSin)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathAcos)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathAsin)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathAtan)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathAtan2)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathCbrt)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathCosh)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathExp)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathExpm1)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathHypot)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathLog)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathLog10)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathSinh)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathTan)
UNIMPLEMENTED_INTRINSIC(MIPS64, MathTanh)

UNIMPLEMENTED_INTRINSIC(MIPS64, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(MIPS64, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(MIPS64, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(MIPS64, LongLowestOneBit)

// Java 1.8.
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(MIPS64)

#undef __

}  // namespace mips64
}  // namespace art