1 /* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #ifndef ART_COMPILER_OPTIMIZING_NODES_H_ 18 #define ART_COMPILER_OPTIMIZING_NODES_H_ 19 20 #include <algorithm> 21 #include <array> 22 #include <type_traits> 23 24 #include "base/arena_bit_vector.h" 25 #include "base/arena_containers.h" 26 #include "base/arena_object.h" 27 #include "base/array_ref.h" 28 #include "base/iteration_range.h" 29 #include "base/stl_util.h" 30 #include "base/transform_array_ref.h" 31 #include "dex_file.h" 32 #include "dex_file_types.h" 33 #include "deoptimization_kind.h" 34 #include "entrypoints/quick/quick_entrypoints_enum.h" 35 #include "handle.h" 36 #include "handle_scope.h" 37 #include "invoke_type.h" 38 #include "intrinsics_enum.h" 39 #include "locations.h" 40 #include "method_reference.h" 41 #include "mirror/class.h" 42 #include "offsets.h" 43 #include "primitive.h" 44 #include "utils/intrusive_forward_list.h" 45 46 namespace art { 47 48 class GraphChecker; 49 class HBasicBlock; 50 class HConstructorFence; 51 class HCurrentMethod; 52 class HDoubleConstant; 53 class HEnvironment; 54 class HFloatConstant; 55 class HGraphBuilder; 56 class HGraphVisitor; 57 class HInstruction; 58 class HIntConstant; 59 class HInvoke; 60 class HLongConstant; 61 class HNullConstant; 62 class HParameterValue; 63 class HPhi; 64 class HSuspendCheck; 65 class HTryBoundary; 66 class LiveInterval; 67 class LocationSummary; 68 class 
SlowPathCode;
class SsaBuilder;

namespace mirror {
class DexCache;
}  // namespace mirror

// Default reservation sizes for the arena containers below. These are only
// pre-allocation hints, not hard limits.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

// All-bits-set sentinels used when field/class-def information is unavailable.
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc for instructions that do not correspond to a dex instruction.
static constexpr uint32_t kNoDexPc = -1;

inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
  // For the purposes of the compiler, the dex files must actually be the same object
  // if we want to safely treat them as the same. This is especially important for JIT
  // as custom class loaders can open the same underlying file (or memory) multiple
  // times and provide different class resolution but no two class loaders should ever
  // use the same DexFile object - doing so is an unsupported hack that can lead to
  // all sorts of weird failures.
  return &lhs == &rhs;
}

enum IfCondition {
  // All types.
  kCondEQ,  // ==
  kCondNE,  // !=
  // Signed integers and floating-point numbers.
  kCondLT,  // <
  kCondLE,  // <=
  kCondGT,  // >
  kCondGE,  // >=
  // Unsigned integers.
  kCondB,   // <
  kCondBE,  // <=
  kCondA,   // >
  kCondAE,  // >=
  // First and last aliases.
  kCondFirst = kCondEQ,
  kCondLast = kCondAE,
};

// Possible outcomes of graph construction and analysis passes.
enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};

// Casts `x` to the unsigned integral type of the same width as T.
template <typename T>
static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
  return static_cast<typename std::make_unsigned<T>::type>(x);
}

// Ordered list of the instructions (or phis) of one basic block, linked
// through the instructions themselves (first/last pointers only live here).
class HInstructionList : public ValueObject {
 public:
  HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}

  void AddInstruction(HInstruction* instruction);
  void RemoveInstruction(HInstruction* instruction);

  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);

  // Return true if this list contains `instruction`.
  bool Contains(HInstruction* instruction) const;

  // Return true if `instruction1` is found before `instruction2` in
  // this instruction list and false otherwise. Abort if none
  // of these instructions is found.
  bool FoundBefore(const HInstruction* instruction1,
                   const HInstruction* instruction2) const;

  bool IsEmpty() const { return first_instruction_ == nullptr; }
  void Clear() { first_instruction_ = last_instruction_ = nullptr; }

  // Update the block of all instructions to be `block`.
  void SetBlockOfInstructions(HBasicBlock* block) const;

  void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
  void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
  void Add(const HInstructionList& instruction_list);

  // Return the number of instructions in the list. This is an expensive operation.
  size_t CountSize() const;

 private:
  HInstruction* first_instruction_;
  HInstruction* last_instruction_;

  friend class HBasicBlock;
  friend class HGraph;
  friend class HInstruction;
  friend class HInstructionIterator;
  friend class HInstructionIteratorHandleChanges;
  friend class HBackwardInstructionIterator;

  DISALLOW_COPY_AND_ASSIGN(HInstructionList);
};

// Static type information for a reference value: the class (as a GC-visible
// handle) plus whether that class is known exactly or is only an upper bound.
class ReferenceTypeInfo : ValueObject {
 public:
  typedef Handle<mirror::Class> TypeHandle;

  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);

  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
    return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
  }

  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
    return ReferenceTypeInfo(type_handle, is_exact);
  }

  static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }

  static bool IsValidHandle(TypeHandle handle) {
    return handle.GetReference() != nullptr;
  }

  bool IsValid() const {
    return IsValidHandle(type_handle_);
  }

  bool IsExact() const { return is_exact_; }

  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsObjectClass();
  }

  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsStringClass();
  }

  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
  }

  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsInterface();
  }

  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass();
  }

  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsPrimitiveArray();
  }

  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
  }

  // Conservative: only answers true when this type is an exact array class
  // whose component type can hold a value of type `rti`.
  bool CanArrayHold(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // As above, but checks assignability of `rti`'s component type (both sides
  // must be array classes).
  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    if (!IsExact()) return false;
    if (!IsArrayClass()) return false;
    if (!rti.IsArrayClass()) return false;
    return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
        rti.GetTypeHandle()->GetComponentType());
  }

  Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }

  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
    DCHECK(rti.IsValid());
    return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
        GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
  }

  // Returns true if the type information provide the same amount of details.
  // Note that it does not mean that the instructions have the same actual type
  // (because the type can be the result of a merge).
  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsValid() && !rti.IsValid()) {
      // Invalid types are equal.
      return true;
    }
    if (!IsValid() || !rti.IsValid()) {
      // One is valid, the other not.
      return false;
    }
    return IsExact() == rti.IsExact()
        && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
  }

 private:
  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
      : type_handle_(type_handle), is_exact_(is_exact) { }

  // The class of the object.
  TypeHandle type_handle_;
  // Whether or not the type is exact or a superclass of the actual type.
  // Whether or not we have any information about this type.
  bool is_exact_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);

// Control-flow graph of a method. Contains a list of basic blocks.
class HGraph : public ArenaObject<kArenaAllocGraph> {
 public:
  // `start_instruction_id` seeds the instruction-id counter (see
  // GetNextInstructionId); all other state starts empty/false.
  HGraph(ArenaAllocator* arena,
         const DexFile& dex_file,
         uint32_t method_idx,
         InstructionSet instruction_set,
         InvokeType invoke_type = kInvalidInvokeType,
         bool debuggable = false,
         bool osr = false,
         int start_instruction_id = 0)
      : arena_(arena),
        blocks_(arena->Adapter(kArenaAllocBlockList)),
        reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
        linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
        entry_block_(nullptr),
        exit_block_(nullptr),
        maximum_number_of_out_vregs_(0),
        number_of_vregs_(0),
        number_of_in_vregs_(0),
        temporaries_vreg_slots_(0),
        has_bounds_checks_(false),
        has_try_catch_(false),
        has_simd_(false),
        has_loops_(false),
        has_irreducible_loops_(false),
        debuggable_(debuggable),
        current_instruction_id_(start_instruction_id),
        dex_file_(dex_file),
        method_idx_(method_idx),
        invoke_type_(invoke_type),
        in_ssa_form_(false),
        number_of_cha_guards_(0),
        instruction_set_(instruction_set),
        cached_null_constant_(nullptr),
        cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
        cached_current_method_(nullptr),
        art_method_(nullptr),
        inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
        osr_(osr),
        cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
    blocks_.reserve(kDefaultNumberOfBlocks);
  }

  // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);

  ArenaAllocator* GetArena() const { return arena_; }
  const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }

  bool IsInSsaForm() const { return in_ssa_form_; }
  void SetInSsaForm() { in_ssa_form_ = true; }

  HBasicBlock* GetEntryBlock() const { return entry_block_; }
  HBasicBlock* GetExitBlock() const { return exit_block_; }
  bool HasExitBlock() const { return exit_block_ != nullptr; }

  void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
  void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }

  void AddBlock(HBasicBlock* block);

  void ComputeDominanceInformation();
  void ClearDominanceInformation();
  void ClearLoopInformation();
  void FindBackEdges(ArenaBitVector* visited);
  GraphAnalysisResult BuildDominatorTree();
  void SimplifyCFG();
  void SimplifyCatchBlocks();

  // Analyze all natural loops in this graph. Returns a code specifying that it
  // was successful or the reason for failure. The method will fail if a loop
  // is a throw-catch loop, i.e. the header is a catch block.
  GraphAnalysisResult AnalyzeLoops() const;

  // Iterate over blocks to compute try block membership. Needs reverse post
  // order and loop information.
  void ComputeTryBlockInformation();

  // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
  // Returns the instruction to replace the invoke expression or null if the
  // invoke is for a void method. Note that the caller is responsible for replacing
  // and removing the invoke instruction.
  HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);

  // Update the loop and try membership of `block`, which was spawned from `reference`.
  // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
  // should be the new back edge.
  void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                             HBasicBlock* reference,
                                             bool replace_if_back_edge);

  // Need to add a couple of blocks to test if the loop body is entered and
  // put deoptimization instructions, etc.
  void TransformLoopHeaderForBCE(HBasicBlock* header);

  // Adds a new loop directly after the loop with the given header and exit.
  // Returns the new preheader.
  HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
                                             HBasicBlock* body,
                                             HBasicBlock* exit);

  // Removes `block` from the graph. Assumes `block` has been disconnected from
  // other blocks and has no instructions or phis.
  void DeleteDeadEmptyBlock(HBasicBlock* block);

  // Splits the edge between `block` and `successor` while preserving the
  // indices in the predecessor/successor lists. If there are multiple edges
  // between the blocks, the lowest indices are used.
  // Returns the new block which is empty and has the same dex pc as `successor`.
  HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);

  void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
  void SimplifyLoop(HBasicBlock* header);

  // Hands out the next fresh instruction id; CHECK-fails on id exhaustion.
  int32_t GetNextInstructionId() {
    CHECK_NE(current_instruction_id_, INT32_MAX);
    return current_instruction_id_++;
  }

  int32_t GetCurrentInstructionId() const {
    return current_instruction_id_;
  }

  // The counter may only move forward (CHECK-enforced).
  void SetCurrentInstructionId(int32_t id) {
    CHECK_GE(id, current_instruction_id_);
    current_instruction_id_ = id;
  }

  uint16_t GetMaximumNumberOfOutVRegs() const {
    return maximum_number_of_out_vregs_;
  }

  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
    maximum_number_of_out_vregs_ = new_value;
  }

  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
  }

  void UpdateTemporariesVRegSlots(size_t slots) {
    temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
  }

  size_t GetTemporariesVRegSlots() const {
    DCHECK(!in_ssa_form_);
    return temporaries_vreg_slots_;
  }

  void SetNumberOfVRegs(uint16_t number_of_vregs) {
    number_of_vregs_ = number_of_vregs;
  }

  uint16_t GetNumberOfVRegs() const {
    return number_of_vregs_;
  }

  void SetNumberOfInVRegs(uint16_t value) {
    number_of_in_vregs_ = value;
  }

  uint16_t GetNumberOfInVRegs() const {
    return number_of_in_vregs_;
  }

  uint16_t GetNumberOfLocalVRegs() const {
    DCHECK(!in_ssa_form_);
    return number_of_vregs_ - number_of_in_vregs_;
  }

  const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
    return reverse_post_order_;
  }

  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
    DCHECK(GetReversePostOrder()[0] == entry_block_);
    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
    return ReverseRange(GetReversePostOrder());
  }

  const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
    return linear_order_;
  }

  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
    return ReverseRange(GetLinearOrder());
  }

  bool HasBoundsChecks() const {
    return has_bounds_checks_;
  }

  void SetHasBoundsChecks(bool value) {
    has_bounds_checks_ = value;
  }

  bool IsDebuggable() const { return debuggable_; }

  // Returns a constant of the given type and value. If it does not exist
  // already, it is created and inserted into the graph. This method is only for
  // integral types.
  HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);

  // TODO: This is problematic for the consistency of reference type propagation
  // because it can be created anytime after the pass and thus it will be left
  // with an invalid type.
  HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);

  // Per-value constant accessors; each creates and caches the constant on
  // first use (see CreateConstant). Float/double values are keyed by their
  // raw bit pattern so that e.g. -0.0 and +0.0 get distinct cache entries.
  HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_int_constants_, dex_pc);
  }
  HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(value, &cached_long_constants_, dex_pc);
  }
  HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
  }
  HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
    return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
  }

  HCurrentMethod* GetCurrentMethod();

  const DexFile& GetDexFile() const {
    return dex_file_;
  }

  uint32_t GetMethodIdx() const {
    return method_idx_;
  }

  // Get the method name (without the signature), e.g. "<init>"
  const char* GetMethodName() const;

  // Get the pretty method name (class + name + optionally signature).
  std::string PrettyMethod(bool with_signature = true) const;

  InvokeType GetInvokeType() const {
    return invoke_type_;
  }

  InstructionSet GetInstructionSet() const {
    return instruction_set_;
  }

  bool IsCompilingOsr() const { return osr_; }

  ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
    return cha_single_implementation_list_;
  }

  void AddCHASingleImplementationDependency(ArtMethod* method) {
    cha_single_implementation_list_.insert(method);
  }

  // True when at least one CHA guard exists (see number_of_cha_guards_).
  bool HasShouldDeoptimizeFlag() const {
    return number_of_cha_guards_ != 0;
  }

  bool HasTryCatch() const { return has_try_catch_; }
  void SetHasTryCatch(bool value) { has_try_catch_ = value; }

  bool HasSIMD() const { return has_simd_; }
  void SetHasSIMD(bool value) { has_simd_ = value; }

  bool HasLoops() const { return has_loops_; }
  void SetHasLoops(bool value) { has_loops_ = value; }

  bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
  void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }

  ArtMethod* GetArtMethod() const { return art_method_; }
  void SetArtMethod(ArtMethod* method) { art_method_ = method; }

  // Returns an instruction with the opposite Boolean value from 'cond'.
  // The instruction has been inserted into the graph, either as a constant, or
  // before cursor.
  HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);

  ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }

  uint32_t GetNumberOfCHAGuards() { return number_of_cha_guards_; }
  void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
  void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }

 private:
  void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
  void RemoveDeadBlocks(const ArenaBitVector& visited);

  // Looks up `value` in `cache` and returns the cached instruction, creating
  // (and inserting into the graph) a new one on a miss or if the cached
  // instruction was removed from its block.
  template <class InstructionType, typename ValueType>
  InstructionType* CreateConstant(ValueType value,
                                  ArenaSafeMap<ValueType, InstructionType*>* cache,
                                  uint32_t dex_pc = kNoDexPc) {
    // Try to find an existing constant of the given value.
    InstructionType* constant = nullptr;
    auto cached_constant = cache->find(value);
    if (cached_constant != cache->end()) {
      constant = cached_constant->second;
    }

    // If not found or previously deleted, create and cache a new instruction.
    // Don't bother reviving a previously deleted instruction, for simplicity.
    if (constant == nullptr || constant->GetBlock() == nullptr) {
      constant = new (arena_) InstructionType(value, dex_pc);
      cache->Overwrite(value, constant);
      InsertConstant(constant);
    }
    return constant;
  }

  void InsertConstant(HConstant* instruction);

  // Cache a float constant into the graph. This method should only be
  // called by the SsaBuilder when creating "equivalent" instructions.
  void CacheFloatConstant(HFloatConstant* constant);

  // See CacheFloatConstant comment.
  void CacheDoubleConstant(HDoubleConstant* constant);

  ArenaAllocator* const arena_;

  // List of blocks in insertion order.
  ArenaVector<HBasicBlock*> blocks_;

  // List of blocks to perform a reverse post order tree traversal.
  ArenaVector<HBasicBlock*> reverse_post_order_;

  // List of blocks to perform a linear order tree traversal. Unlike the reverse
  // post order, this order is not incrementally kept up-to-date.
  ArenaVector<HBasicBlock*> linear_order_;

  HBasicBlock* entry_block_;
  HBasicBlock* exit_block_;

  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
  uint16_t maximum_number_of_out_vregs_;

  // The number of virtual registers in this method. Contains the parameters.
  uint16_t number_of_vregs_;

  // The number of virtual registers used by parameters of this method.
  uint16_t number_of_in_vregs_;

  // Number of vreg size slots that the temporaries use (used in baseline compiler).
  size_t temporaries_vreg_slots_;

  // Flag whether there are bounds checks in the graph. We can skip
  // BCE if it's false. It's only best effort to keep it up to date in
  // the presence of code elimination so there might be false positives.
  bool has_bounds_checks_;

  // Flag whether there are try/catch blocks in the graph. We will skip
  // try/catch-related passes if it's false. It's only best effort to keep
  // it up to date in the presence of code elimination so there might be
  // false positives.
  bool has_try_catch_;

  // Flag whether SIMD instructions appear in the graph. If true, the
  // code generators may have to be more careful spilling the wider
  // contents of SIMD registers.
  bool has_simd_;

  // Flag whether there are any loops in the graph. We can skip loop
  // optimization if it's false. It's only best effort to keep it up
  // to date in the presence of code elimination so there might be false
  // positives.
  bool has_loops_;

  // Flag whether there are any irreducible loops in the graph. It's only
  // best effort to keep it up to date in the presence of code elimination
  // so there might be false positives.
  bool has_irreducible_loops_;

  // Indicates whether the graph should be compiled in a way that
  // ensures full debuggability. If false, we can apply more
  // aggressive optimizations that may limit the level of debugging.
  const bool debuggable_;

  // The current id to assign to a newly added instruction. See HInstruction.id_.
  int32_t current_instruction_id_;

  // The dex file from which the method is from.
  const DexFile& dex_file_;

  // The method index in the dex file.
  const uint32_t method_idx_;

  // If inlined, this encodes how the callee is being invoked.
  const InvokeType invoke_type_;

  // Whether the graph has been transformed to SSA form. Only used
  // in debug mode to ensure we are not using properties only valid
  // for non-SSA form (like the number of temporaries).
  bool in_ssa_form_;

  // Number of CHA guards in the graph. Used to short-circuit the
  // CHA guard optimization pass when there is no CHA guard left.
  uint32_t number_of_cha_guards_;

  const InstructionSet instruction_set_;

  // Cached constants.
  HNullConstant* cached_null_constant_;
  ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
  ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
  ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
  ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;

  HCurrentMethod* cached_current_method_;

  // The ArtMethod this graph is for. Note that for AOT, it may be null,
  // for example for methods whose declaring class could not be resolved
  // (such as when the superclass could not be found).
  ArtMethod* art_method_;

  // Keep the RTI of inexact Object to avoid having to pass stack handle
  // collection pointer to passes which may create NullConstant.
  ReferenceTypeInfo inexact_object_rti_;

  // Whether we are compiling this graph for on stack replacement: this will
  // make all loops seen as irreducible and emit special stack maps to mark
  // compiled code entries which the interpreter can directly jump to.
  const bool osr_;

  // List of methods that are assumed to have single implementation.
  ArenaSet<ArtMethod*> cha_single_implementation_list_;

  friend class SsaBuilder;           // For caching constants.
  friend class SsaLivenessAnalysis;  // For the linear order.
  friend class HInliner;             // For the reverse post order.
  ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
  DISALLOW_COPY_AND_ASSIGN(HGraph);
};

// Information about one loop: its header, its back edges and the set of
// blocks it contains.
class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
 public:
  HLoopInformation(HBasicBlock* header, HGraph* graph)
      : header_(header),
        suspend_check_(nullptr),
        irreducible_(false),
        contains_irreducible_loop_(false),
        back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
        // Make bit vector growable, as the number of blocks may change.
        blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
    back_edges_.reserve(kDefaultNumberOfBackEdges);
  }

  bool IsIrreducible() const { return irreducible_; }
  bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }

  void Dump(std::ostream& os);

  HBasicBlock* GetHeader() const {
    return header_;
  }

  void SetHeader(HBasicBlock* block) {
    header_ = block;
  }

  HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
  void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
  bool HasSuspendCheck() const { return suspend_check_ != nullptr; }

  void AddBackEdge(HBasicBlock* back_edge) {
    back_edges_.push_back(back_edge);
  }

  void RemoveBackEdge(HBasicBlock* back_edge) {
    RemoveElement(back_edges_, back_edge);
  }

  bool IsBackEdge(const HBasicBlock& block) const {
    return ContainsElement(back_edges_, &block);
  }

  size_t NumberOfBackEdges() const {
    return back_edges_.size();
  }

  HBasicBlock* GetPreHeader() const;

  const ArenaVector<HBasicBlock*>& GetBackEdges() const {
    return back_edges_;
  }

  // Returns the lifetime position of the back edge that has the
  // greatest lifetime position.
  size_t GetLifetimeEnd() const;

  void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
    ReplaceElement(back_edges_, existing, new_back_edge);
  }

  // Finds blocks that are part of this loop.
  void Populate();

  // Returns whether this loop information contains `block`.
  // Note that this loop information *must* be populated before entering this function.
  bool Contains(const HBasicBlock& block) const;

  // Returns whether this loop information is an inner loop of `other`.
  // Note that `other` *must* be populated before entering this function.
  bool IsIn(const HLoopInformation& other) const;

  // Returns true if instruction is not defined within this loop.
  bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;

  const ArenaBitVector& GetBlocks() const { return blocks_; }

  void Add(HBasicBlock* block);
  void Remove(HBasicBlock* block);

  void ClearAllBlocks() {
    blocks_.ClearAllBits();
  }

  bool HasBackEdgeNotDominatedByHeader() const;

  // True once Populate() has recorded at least one member block.
  bool IsPopulated() const {
    return blocks_.GetHighestBitSet() != -1;
  }

  bool DominatesAllBackEdges(HBasicBlock* block);

  bool HasExitEdge() const;

 private:
  // Internal recursive implementation of `Populate`.
  void PopulateRecursive(HBasicBlock* block);
  void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);

  HBasicBlock* header_;
  HSuspendCheck* suspend_check_;
  bool irreducible_;
  bool contains_irreducible_loop_;
  ArenaVector<HBasicBlock*> back_edges_;
  // Membership bit vector indexed by block id.
  ArenaBitVector blocks_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
};

// Stores try/catch information for basic blocks.
// Note that HGraph is constructed so that catch blocks cannot simultaneously
// be try blocks.
class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
 public:
  // Try block information constructor.
  explicit TryCatchInformation(const HTryBoundary& try_entry)
      : try_entry_(&try_entry),
        catch_dex_file_(nullptr),
        catch_type_index_(DexFile::kDexNoIndex16) {
    DCHECK(try_entry_ != nullptr);
  }

  // Catch block information constructor.
  TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
      : try_entry_(nullptr),
        catch_dex_file_(&dex_file),
        catch_type_index_(catch_type_index) {}

  bool IsTryBlock() const { return try_entry_ != nullptr; }

  const HTryBoundary& GetTryEntry() const {
    DCHECK(IsTryBlock());
    return *try_entry_;
  }

  bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }

  // A catch-all handler is encoded as an invalid catch type index.
  bool IsCatchAllTypeIndex() const {
    DCHECK(IsCatchBlock());
    return !catch_type_index_.IsValid();
  }

  dex::TypeIndex GetCatchTypeIndex() const {
    DCHECK(IsCatchBlock());
    return catch_type_index_;
  }

  const DexFile& GetCatchDexFile() const {
    DCHECK(IsCatchBlock());
    return *catch_dex_file_;
  }

 private:
  // One of possibly several TryBoundary instructions entering the block's try.
  // Only set for try blocks.
  const HTryBoundary* try_entry_;

  // Exception type information. Only set for catch blocks.
  const DexFile* catch_dex_file_;
  const dex::TypeIndex catch_type_index_;
};

static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);

// A block in a method. Contains the list of instructions represented
// as a double linked list. Each block knows its predecessors and
// successors.
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
 public:
  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
      : graph_(graph),
        predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
        successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
        loop_information_(nullptr),
        dominator_(nullptr),
        dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
        block_id_(kInvalidBlockId),
        dex_pc_(dex_pc),
        lifetime_start_(kNoLifetime),
        lifetime_end_(kNoLifetime),
        try_catch_information_(nullptr) {
    predecessors_.reserve(kDefaultNumberOfPredecessors);
    successors_.reserve(kDefaultNumberOfSuccessors);
    dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
  }

  const ArenaVector<HBasicBlock*>& GetPredecessors() const {
    return predecessors_;
  }

  const ArenaVector<HBasicBlock*>& GetSuccessors() const {
    return successors_;
  }

  // Views over the successor list split into normal and exceptional
  // (catch-handler) edges. Defined out of line.
  ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
  ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;

  // Returns true if `block` appears in the successor list at or after
  // index `start_from`.
  bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
    return ContainsElement(successors_, block, start_from);
  }

  const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
    return dominated_blocks_;
  }

  bool IsEntryBlock() const {
    return graph_->GetEntryBlock() == this;
  }

  bool IsExitBlock() const {
    return graph_->GetExitBlock() == this;
  }

  bool IsSingleGoto() const;
  bool IsSingleTryBoundary() const;

  // Returns true if this block emits nothing but a jump.
  bool IsSingleJump() const {
    HLoopInformation* loop_info = GetLoopInformation();
    return (IsSingleGoto() || IsSingleTryBoundary())
           // Back edges generate a suspend check.
           && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
  }

  // Records `back_edge` on this block's loop information, lazily creating
  // the loop information with this block as header on first use.
  void AddBackEdge(HBasicBlock* back_edge) {
    if (loop_information_ == nullptr) {
      loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
    }
    DCHECK_EQ(loop_information_->GetHeader(), this);
    loop_information_->AddBackEdge(back_edge);
  }

  HGraph* GetGraph() const { return graph_; }
  void SetGraph(HGraph* graph) { graph_ = graph; }

  uint32_t GetBlockId() const { return block_id_; }
  void SetBlockId(int id) { block_id_ = id; }
  uint32_t GetDexPc() const { return dex_pc_; }

  HBasicBlock* GetDominator() const { return dominator_; }
  void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
  void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }

  void RemoveDominatedBlock(HBasicBlock* block) {
    RemoveElement(dominated_blocks_, block);
  }

  void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
    ReplaceElement(dominated_blocks_, existing, new_block);
  }

  void ClearDominanceInformation();

  int NumberOfBackEdges() const {
    // Only a loop header owns back edges; any other block reports zero.
    return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
  }

  HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
  HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
  const HInstructionList& GetInstructions() const { return instructions_; }
  HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
  HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
  const HInstructionList& GetPhis() const { return phis_; }

  HInstruction* GetFirstInstructionDisregardMoves() const;

  // Appends `block` as a successor and registers `this` as its predecessor.
  void AddSuccessor(HBasicBlock* block) {
    successors_.push_back(block);
    block->predecessors_.push_back(this);
  }

  // Rewires the edge `this`->`existing` to `this`->`new_block`, keeping the
  // successor index stable.
  void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t successor_index = GetSuccessorIndexOf(existing);
    existing->RemovePredecessor(this);
    new_block->predecessors_.push_back(this);
    successors_[successor_index] = new_block;
  }

  // Rewires the edge `existing`->`this` to `new_block`->`this`, keeping the
  // predecessor index stable.
  void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
    size_t predecessor_index = GetPredecessorIndexOf(existing);
    existing->RemoveSuccessor(this);
    new_block->successors_.push_back(this);
    predecessors_[predecessor_index] = new_block;
  }

  // Insert `this` between `predecessor` and `successor`. This method
  // preserves the indices, and will update the first edge found between
  // `predecessor` and `successor`.
  void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
    size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
    size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
    successor->predecessors_[predecessor_index] = this;
    predecessor->successors_[successor_index] = this;
    successors_.push_back(successor);
    predecessors_.push_back(predecessor);
  }

  void RemovePredecessor(HBasicBlock* block) {
    predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
  }

  void RemoveSuccessor(HBasicBlock* block) {
    successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
  }

  void ClearAllPredecessors() {
    predecessors_.clear();
  }

  void AddPredecessor(HBasicBlock* block) {
    predecessors_.push_back(block);
    block->successors_.push_back(this);
  }

  void SwapPredecessors() {
    DCHECK_EQ(predecessors_.size(), 2u);
    std::swap(predecessors_[0], predecessors_[1]);
  }

  void SwapSuccessors() {
    DCHECK_EQ(successors_.size(), 2u);
    std::swap(successors_[0], successors_[1]);
  }

  // Index of the first occurrence of `predecessor` in the predecessor list.
  size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
    return IndexOfElement(predecessors_, predecessor);
  }

  // Index of the first occurrence of `successor` in the successor list.
  size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
    return IndexOfElement(successors_, successor);
  }

  HBasicBlock* GetSinglePredecessor() const {
    DCHECK_EQ(GetPredecessors().size(), 1u);
    return GetPredecessors()[0];
  }

  HBasicBlock* GetSingleSuccessor() const {
    DCHECK_EQ(GetSuccessors().size(), 1u);
    return GetSuccessors()[0];
  }

  // Returns whether the first occurrence of `predecessor` in the list of
  // predecessors is at index `idx`.
  bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
    DCHECK_EQ(GetPredecessors()[idx], predecessor);
    return GetPredecessorIndexOf(predecessor) == idx;
  }

  // Create a new block between this block and its predecessors. The new block
  // is added to the graph, all predecessor edges are relinked to it and an edge
  // is created to `this`. Returns the new empty block. Reverse post order or
  // loop and try/catch information are not updated.
  HBasicBlock* CreateImmediateDominator();

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created, latter block. Note that this method will add the block to the
  // graph, create a Goto at the end of the former block and will create an edge
  // between the blocks. It will not, however, update the reverse post order or
  // loop and try/catch information.
  HBasicBlock* SplitBefore(HInstruction* cursor);

  // Split the block into two blocks just before `cursor`. Returns the newly
  // created block. Note that this method just updates raw block information,
  // like predecessors, successors, dominators, and instruction list. It does not
  // update the graph, reverse post order, loop information, nor make sure the
  // blocks are consistent (for example ending with a control flow instruction).
  HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);

  // Similar to `SplitBeforeForInlining` but does it after `cursor`.
  HBasicBlock* SplitAfterForInlining(HInstruction* cursor);

  // Merge `other` at the end of `this`. Successors and dominated blocks of
  // `other` are changed to be successors and dominated blocks of `this`. Note
  // that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void MergeWithInlined(HBasicBlock* other);

  // Replace `this` with `other`. Predecessors, successors, and dominated blocks
  // of `this` are moved to `other`.
  // Note that this method does not update the graph, reverse post order, loop
  // information, nor make sure the blocks are consistent (for example ending
  // with a control flow instruction).
  void ReplaceWith(HBasicBlock* other);

  // Merges the instructions of `other` at the end of `this`.
  void MergeInstructionsWith(HBasicBlock* other);

  // Merge `other` at the end of `this`. This method updates loops, reverse post
  // order, links to predecessors, successors, dominators and deletes the block
  // from the graph. The two blocks must be successive, i.e. `this` the only
  // predecessor of `other` and vice versa.
  void MergeWith(HBasicBlock* other);

  // Disconnects `this` from all its predecessors, successors and dominator,
  // removes it from all loops it is included in and eventually from the graph.
  // The block must not dominate any other block. Predecessors and successors
  // are safely updated.
  void DisconnectAndDelete();

  void AddInstruction(HInstruction* instruction);
  // Insert `instruction` before/after an existing instruction `cursor`.
  void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
  void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
  // Replace instruction `initial` with `replacement` within this block.
  void ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                       HInstruction* replacement);
  void AddPhi(HPhi* phi);
  void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
  // RemoveInstruction and RemovePhi delete a given instruction from the respective
  // instruction list. With 'ensure_safety' set to true, it verifies that the
  // instruction is not in use and removes it from the use lists of its inputs.
  void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
  void RemovePhi(HPhi* phi, bool ensure_safety = true);
  void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);

  bool IsLoopHeader() const {
    return IsInLoop() && (loop_information_->GetHeader() == this);
  }

  bool IsLoopPreHeaderFirstPredecessor() const {
    DCHECK(IsLoopHeader());
    return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
  }

  bool IsFirstPredecessorBackEdge() const {
    DCHECK(IsLoopHeader());
    return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
  }

  HLoopInformation* GetLoopInformation() const {
    return loop_information_;
  }

  // Set the loop_information_ on this block. Overrides the current
  // loop_information if it is an outer loop of the passed loop information.
  // Note that this method is called while creating the loop information.
  void SetInLoop(HLoopInformation* info) {
    if (IsLoopHeader()) {
      // Nothing to do. This just means `info` is an outer loop.
    } else if (!IsInLoop()) {
      loop_information_ = info;
    } else if (loop_information_->Contains(*info->GetHeader())) {
      // Block is currently part of an outer loop. Make it part of this inner loop.
      // Note that a non loop header having a loop information means this loop information
      // has already been populated.
      loop_information_ = info;
    } else {
      // Block is part of an inner loop. Do not update the loop information.
      // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
      // at this point, because this method is being called while populating `info`.
    }
  }

  // Raw update of the loop information.
  void SetLoopInformation(HLoopInformation* info) {
    loop_information_ = info;
  }

  bool IsInLoop() const { return loop_information_ != nullptr; }

  TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }

  void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
    try_catch_information_ = try_catch_information;
  }

  bool IsTryBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
  }

  bool IsCatchBlock() const {
    return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
  }

  // Returns the try entry that this block's successors should have. They will
  // be in the same try, unless the block ends in a try boundary. In that case,
  // the appropriate try entry will be returned.
  const HTryBoundary* ComputeTryEntryOfSuccessors() const;

  bool HasThrowingInstructions() const;

  // Returns whether this block dominates the block passed as parameter.
  bool Dominates(HBasicBlock* block) const;

  size_t GetLifetimeStart() const { return lifetime_start_; }
  size_t GetLifetimeEnd() const { return lifetime_end_; }

  void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
  void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }

  bool EndsWithControlFlowInstruction() const;
  bool EndsWithIf() const;
  bool EndsWithTryBoundary() const;
  bool HasSinglePhi() const;

 private:
  HGraph* graph_;
  ArenaVector<HBasicBlock*> predecessors_;
  ArenaVector<HBasicBlock*> successors_;
  HInstructionList instructions_;
  HInstructionList phis_;
  HLoopInformation* loop_information_;
  HBasicBlock* dominator_;
  ArenaVector<HBasicBlock*> dominated_blocks_;
  uint32_t block_id_;
  // The dex program counter of the first instruction of this block.
  const uint32_t dex_pc_;
  size_t lifetime_start_;
  size_t lifetime_end_;
  TryCatchInformation* try_catch_information_;

  friend class HGraph;
  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
};

// Iterates over the LoopInformation of all loops which contain 'block'
// from the innermost to the outermost.
class HLoopInformationOutwardIterator : public ValueObject {
 public:
  explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
      : current_(block.GetLoopInformation()) {}

  bool Done() const { return current_ == nullptr; }

  // Steps to the enclosing loop by following the current loop's
  // pre-header, which carries the outer loop's information (or null).
  void Advance() {
    DCHECK(!Done());
    current_ = current_->GetPreHeader()->GetLoopInformation();
  }

  HLoopInformation* Current() const {
    DCHECK(!Done());
    return current_;
  }

 private:
  HLoopInformation* current_;

  DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
};

// X-macro listing the concrete instructions common to all architectures.
// `M` is expanded as M(InstructionName, BaseClassName). Continues below.
#define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
  M(Above, Condition) \
  M(AboveOrEqual, Condition) \
  M(Add, BinaryOperation) \
  M(And, BinaryOperation) \
  M(ArrayGet, Instruction) \
  M(ArrayLength, Instruction) \
  M(ArraySet, Instruction) \
  M(Below, Condition) \
  M(BelowOrEqual, Condition) \
  M(BooleanNot, UnaryOperation) \
  M(BoundsCheck, Instruction) \
  M(BoundType, Instruction) \
  M(CheckCast, Instruction) \
  M(ClassTableGet, Instruction) \
  M(ClearException, Instruction) \
  M(ClinitCheck, Instruction) \
  M(Compare, BinaryOperation) \
  M(ConstructorFence, Instruction) \
  M(CurrentMethod, Instruction) \
  M(ShouldDeoptimizeFlag, Instruction) \
  M(Deoptimize, Instruction) \
  M(Div, BinaryOperation) \
  M(DivZeroCheck, Instruction) \
  M(DoubleConstant, Constant) \
  M(Equal, Condition) \
  M(Exit, Instruction) \
  M(FloatConstant, Constant) \
  M(Goto, Instruction) \
  M(GreaterThan, Condition) \
  M(GreaterThanOrEqual, Condition) \
  M(If, Instruction) \
  M(InstanceFieldGet, Instruction) \
  M(InstanceFieldSet, Instruction) \
  M(InstanceOf, Instruction) \
  M(IntConstant, Constant) \
  M(InvokeUnresolved, Invoke) \
  M(InvokeInterface, Invoke) \
  M(InvokeStaticOrDirect, Invoke) \
  M(InvokeVirtual, Invoke) \
  M(InvokePolymorphic, Invoke) \
  M(LessThan, Condition) \
  M(LessThanOrEqual, Condition) \
  M(LoadClass, Instruction) \
  M(LoadException, Instruction) \
  M(LoadString, Instruction) \
  M(LongConstant, Constant) \
  M(MemoryBarrier, Instruction) \
  M(MonitorOperation, Instruction) \
  M(Mul, BinaryOperation) \
  M(NativeDebugInfo, Instruction) \
  M(Neg, UnaryOperation) \
  M(NewArray, Instruction) \
  M(NewInstance, Instruction) \
  M(Not, UnaryOperation) \
  M(NotEqual, Condition) \
  M(NullConstant, Instruction) \
  M(NullCheck, Instruction) \
  M(Or, BinaryOperation) \
  M(PackedSwitch, Instruction) \
  M(ParallelMove, Instruction) \
  M(ParameterValue, Instruction) \
  M(Phi, Instruction) \
  M(Rem, BinaryOperation) \
  M(Return, Instruction) \
  M(ReturnVoid, Instruction) \
  M(Ror, BinaryOperation) \
  M(Shl, BinaryOperation) \
  M(Shr, BinaryOperation) \
  M(StaticFieldGet, Instruction) \
  M(StaticFieldSet, Instruction) \
  M(UnresolvedInstanceFieldGet, Instruction) \
  M(UnresolvedInstanceFieldSet, Instruction) \
  M(UnresolvedStaticFieldGet, Instruction) \
  M(UnresolvedStaticFieldSet, Instruction) \
  M(Select, Instruction) \
  M(Sub, BinaryOperation) \
  M(SuspendCheck, Instruction) \
  M(Throw, Instruction) \
  M(TryBoundary, Instruction) \
  M(TypeConversion, Instruction) \
  M(UShr, BinaryOperation) \
  M(Xor, BinaryOperation) \
  M(VecReplicateScalar, VecUnaryOperation) \
  M(VecSumReduce, VecUnaryOperation) \
  M(VecCnv, VecUnaryOperation) \
  M(VecNeg, VecUnaryOperation) \
  M(VecAbs, VecUnaryOperation) \
  M(VecNot, VecUnaryOperation) \
  M(VecAdd, VecBinaryOperation) \
  M(VecHalvingAdd, VecBinaryOperation) \
  M(VecSub, VecBinaryOperation) \
  M(VecMul, VecBinaryOperation) \
  M(VecDiv, VecBinaryOperation) \
  M(VecMin, VecBinaryOperation) \
  M(VecMax, VecBinaryOperation) \
  M(VecAnd, VecBinaryOperation) \
  M(VecAndNot, VecBinaryOperation) \
  M(VecOr, VecBinaryOperation) \
  M(VecXor, VecBinaryOperation) \
  M(VecShl, VecBinaryOperation) \
  M(VecShr, VecBinaryOperation) \
  M(VecUShr, VecBinaryOperation) \
  M(VecSetScalars, VecOperation) \
  M(VecMultiplyAccumulate, VecOperation) \
  M(VecLoad, VecMemoryOperation) \
  M(VecStore, VecMemoryOperation) \

/*
 * Instructions, shared across several (not all) architectures.
 */
#if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
  M(BitwiseNegatedRight, Instruction) \
  M(DataProcWithShifterOp, Instruction) \
  M(MultiplyAccumulate, Instruction) \
  M(IntermediateAddress, Instruction) \
  M(IntermediateAddressIndex, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)

#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)

#ifndef ART_ENABLE_CODEGEN_mips
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) \
  M(MipsComputeBaseMethodAddress, Instruction) \
  M(MipsPackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)

#ifndef ART_ENABLE_CODEGEN_x86
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
  M(X86ComputeBaseMethodAddress, Instruction) \
  M(X86LoadFromConstantTable, Instruction) \
  M(X86FPNeg, Instruction) \
  M(X86PackedSwitch, Instruction)
#endif

#define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

// All concrete instructions: the architecture-independent list plus the
// architecture-specific lists enabled for this build.
#define FOR_EACH_CONCRETE_INSTRUCTION(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
  FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)

#define FOR_EACH_ABSTRACT_INSTRUCTION(M) \
  M(Condition, BinaryOperation) \
  M(Constant, Instruction) \
  M(UnaryOperation, Instruction) \
  M(BinaryOperation, Instruction) \
  M(Invoke, Instruction) \
  M(VecOperation, Instruction) \
  M(VecUnaryOperation, VecOperation) \
  M(VecBinaryOperation, VecOperation) \
  M(VecMemoryOperation, VecOperation)

#define FOR_EACH_INSTRUCTION(M) \
  FOR_EACH_CONCRETE_INSTRUCTION(M) \
  FOR_EACH_ABSTRACT_INSTRUCTION(M)

// Forward-declare every H<Name> instruction class.
#define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
#undef FORWARD_DECLARATION

// Boilerplate overrides placed in every concrete instruction class body.
#define DECLARE_INSTRUCTION(type) \
  InstructionKind GetKindInternal() const OVERRIDE { return k##type; } \
  const char* DebugName() const OVERRIDE { return #type; } \
  bool InstructionTypeEquals(const HInstruction* other) const OVERRIDE { \
    return other->Is##type(); \
  } \
  void Accept(HGraphVisitor* visitor) OVERRIDE

// Boilerplate Is/As helpers placed in every abstract instruction class body.
#define DECLARE_ABSTRACT_INSTRUCTION(type) \
  bool Is##type() const { return As##type() != nullptr; } \
  const H##type* As##type() const { return this; } \
  H##type* As##type() { return this; }

// Node of an arena-allocated intrusive singly-linked use list. T is the
// user type (e.g. HInstruction* or HEnvironment*).
template <typename T>
class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
                     public IntrusiveForwardListNode<HUseListNode<T>> {
 public:
  // Get the instruction which has this use as one of the inputs.
  T GetUser() const { return user_; }
  // Get the position of the input record that this use corresponds to.
  size_t GetIndex() const { return index_; }
  // Set the position of the input record that this use corresponds to.
  void SetIndex(size_t index) { index_ = index; }

 private:
  // Nodes are created only by HInstruction (see friend declaration below).
  HUseListNode(T user, size_t index)
      : user_(user), index_(index) {}

  T const user_;
  size_t index_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HUseListNode);
};

template <typename T>
using HUseList = IntrusiveForwardList<HUseListNode<T>>;

// This class is used by HEnvironment and HInstruction classes to record the
// instructions they use and pointers to the corresponding HUseListNodes kept
// by the used instructions.
template <typename T>
class HUserRecord : public ValueObject {
 public:
  HUserRecord() : instruction_(nullptr), before_use_node_() {}
  explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}

  HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
      : HUserRecord(old_record.instruction_, before_use_node) {}
  HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
      : instruction_(instruction), before_use_node_(before_use_node) {
    DCHECK(instruction_ != nullptr);
  }

  HInstruction* GetInstruction() const { return instruction_; }
  typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
  // The use node itself is the successor of the stored before-iterator.
  typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }

 private:
  // Instruction used by the user.
  HInstruction* instruction_;

  // Iterator before the corresponding entry in the use list kept by 'instruction_'.
  typename HUseList<T>::iterator before_use_node_;
};

// Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
// This is used for HInstruction::GetInputs() to return a container wrapper providing
// HInstruction* values even though the underlying container has HUserRecord<>s.
struct HInputExtractor {
  HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
  const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
    return record.GetInstruction();
  }
};

using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;

/**
 * Side-effects representation.
 *
 * For write/read dependences on fields/arrays, the dependence analysis uses
 * type disambiguation (e.g. a float field write cannot modify the value of an
 * integer field read) and the access type (e.g. a reference array write cannot
 * modify the value of a reference field read [although it may modify the
 * reference fetch prior to reading the field, which is represented by its own
 * write/read dependence]). The analysis makes conservative points-to
 * assumptions on reference types (e.g. two same typed arrays are assumed to be
 * the same, and any reference read depends on any reference read without
 * further regard of its type).
 *
 * The internal representation uses 38-bit and is described in the table below.
 * The first line indicates the side effect, and for field/array accesses the
 * second line indicates the type of the access (in the order of the
 * Primitive::Type enum).
 * The two numbered lines below indicate the bit position in the bitfield (read
 * vertically).
 *
 * |Depends on GC|ARRAY-R  |FIELD-R  |Can trigger GC|ARRAY-W  |FIELD-W  |
 * +-------------+---------+---------+--------------+---------+---------+
 * |             |DFJISCBZL|DFJISCBZL|              |DFJISCBZL|DFJISCBZL|
 * |      3      |333333322|222222221|      1       |111111110|000000000|
 * |      7      |654321098|765432109|      8       |765432109|876543210|
 *
 * Note that, to ease the implementation, 'changes' bits are least significant
 * bits, while 'dependency' bits are most significant bits.
 */
class SideEffects : public ValueObject {
 public:
  SideEffects() : flags_(0) {}

  static SideEffects None() {
    return SideEffects(0);
  }

  static SideEffects All() {
    return SideEffects(kAllChangeBits | kAllDependOnBits);
  }

  static SideEffects AllChanges() {
    return SideEffects(kAllChangeBits);
  }

  static SideEffects AllDependencies() {
    return SideEffects(kAllDependOnBits);
  }

  static SideEffects AllExceptGCDependency() {
    return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
  }

  static SideEffects AllWritesAndReads() {
    return SideEffects(kAllWrites | kAllReads);
  }

  static SideEffects AllWrites() {
    return SideEffects(kAllWrites);
  }

  static SideEffects AllReads() {
    return SideEffects(kAllReads);
  }

  // Volatile accesses are modeled conservatively as all writes and reads.
  static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldWriteOffset));
  }

  static SideEffects ArrayWriteOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayWriteOffset));
  }

  static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
    return is_volatile
        ? AllWritesAndReads()
        : SideEffects(TypeFlag(type, kFieldReadOffset));
  }

  static SideEffects ArrayReadOfType(Primitive::Type type) {
    return SideEffects(TypeFlag(type, kArrayReadOffset));
  }

  static SideEffects CanTriggerGC() {
    return SideEffects(1ULL << kCanTriggerGCBit);
  }

  static SideEffects DependsOnGC() {
    return SideEffects(1ULL << kDependsOnGCBit);
  }

  // Combines the side-effects of this and the other.
  SideEffects Union(SideEffects other) const {
    return SideEffects(flags_ | other.flags_);
  }

  // Removes `other`'s flags from this set.
  SideEffects Exclusion(SideEffects other) const {
    return SideEffects(flags_ & ~other.flags_);
  }

  void Add(SideEffects other) {
    flags_ |= other.flags_;
  }

  // True if every flag in `other` is also set here.
  bool Includes(SideEffects other) const {
    return (other.flags_ & flags_) == other.flags_;
  }

  bool HasSideEffects() const {
    return (flags_ & kAllChangeBits);
  }

  bool HasDependencies() const {
    return (flags_ & kAllDependOnBits);
  }

  // Returns true if there are no side effects or dependencies.
  bool DoesNothing() const {
    return flags_ == 0;
  }

  // Returns true if something is written.
  bool DoesAnyWrite() const {
    return (flags_ & kAllWrites);
  }

  // Returns true if something is read.
  bool DoesAnyRead() const {
    return (flags_ & kAllReads);
  }

  // Returns true if potentially everything is written and read
  // (every type and every kind of access).
  bool DoesAllReadWrite() const {
    return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
  }

  bool DoesAll() const {
    return flags_ == (kAllChangeBits | kAllDependOnBits);
  }

  // Returns true if `this` may read something written by `other`.
  bool MayDependOn(SideEffects other) const {
    // Shift this set's 'dependency' bits down onto the 'change' bit
    // positions and intersect with `other`'s change bits.
    const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
    return (other.flags_ & depends_on_flags);
  }

  // Returns string representation of flags (for debugging only).
  // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
  std::string ToString() const {
    std::string flags = "|";
    for (int s = kLastBit; s >= 0; s--) {
      bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
      if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
        // This is a bit for the GC side effect.
        if (current_bit_is_set) {
          flags += "GC";
        }
        flags += "|";
      } else {
        // This is a bit for the array/field analysis.
        // The underscore character stands for the 'can trigger GC' bit.
        static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
        if (current_bit_is_set) {
          flags += kDebug[s];
        }
        if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
            (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
          flags += "|";
        }
      }
    }
    return flags;
  }

  bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }

 private:
  // Nine bits per access kind: one bit per Primitive::Type.
  static constexpr int kFieldArrayAnalysisBits = 9;

  static constexpr int kFieldWriteOffset = 0;
  static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;

  static constexpr int kChangeBits = kCanTriggerGCBit + 1;

  static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
  static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
  static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
  static constexpr int kDependsOnGCBit = kLastBitForReads + 1;

  static constexpr int kLastBit = kDependsOnGCBit;
  static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;

  // Aliases.

  static_assert(kChangeBits == kDependOnBits,
                "the 'change' bits should match the 'depend on' bits.");

  static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
  static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
  static constexpr uint64_t kAllWrites =
      ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
  static constexpr uint64_t kAllReads =
      ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;

  // Translates type to bit flag.
  static uint64_t TypeFlag(Primitive::Type type, int offset) {
    CHECK_NE(type, Primitive::kPrimVoid);
    const uint64_t one = 1;
    const int shift = type;  // 0-based consecutive enum
    DCHECK_LE(kFieldWriteOffset, shift);
    DCHECK_LT(shift, kArrayWriteOffset);
    return one << (type + offset);
  }

  // Private constructor on direct flags value.
  explicit SideEffects(uint64_t flags) : flags_(flags) {}

  uint64_t flags_;
};

// A HEnvironment object contains the values of virtual registers at a given location.
class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
 public:
  // Builds an environment with `number_of_vregs` (initially empty) vreg
  // records. Locations are allocated lazily via AllocateLocations().
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena,
                             size_t number_of_vregs,
                             ArtMethod* method,
                             uint32_t dex_pc,
                             HInstruction* holder)
      : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
        locations_(arena->Adapter(kArenaAllocEnvironmentLocations)),
        parent_(nullptr),
        method_(method),
        dex_pc_(dex_pc),
        holder_(holder) {
  }

  // Copies only the shape (size, method, dex pc) of `to_copy`; the vreg
  // contents must be copied separately with CopyFrom().
  ALWAYS_INLINE HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
      : HEnvironment(arena,
                     to_copy.Size(),
                     to_copy.GetMethod(),
                     to_copy.GetDexPc(),
                     holder) {}

  // Allocates one Location slot per vreg. Must be called at most once.
  void AllocateLocations() {
    DCHECK(locations_.empty());
    locations_.resize(vregs_.size());
  }

  // Appends a copy of `parent` (and, recursively, its own parent chain) at
  // the end of this environment's parent chain.
  void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
    if (parent_ != nullptr) {
      parent_->SetAndCopyParentChain(allocator, parent);
    } else {
      parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
      parent_->CopyFrom(parent);
      if (parent->GetParent() != nullptr) {
        parent_->SetAndCopyParentChain(allocator, parent->GetParent());
      }
    }
  }

  void CopyFrom(const ArenaVector<HInstruction*>& locals);
  void CopyFrom(HEnvironment* environment);

  // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
  // input to the loop phi instead. This is for inserting instructions that
  // require an environment (like HDeoptimization) in the loop pre-header.
  void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);

  // Raw: does not update the instruction's env-use list.
  void SetRawEnvAt(size_t index, HInstruction* instruction) {
    vregs_[index] = HUserRecord<HEnvironment*>(instruction);
  }

  HInstruction* GetInstructionAt(size_t index) const {
    return vregs_[index].GetInstruction();
  }

  void RemoveAsUserOfInput(size_t index) const;

  size_t Size() const { return vregs_.size(); }

  HEnvironment* GetParent() const { return parent_; }

  void SetLocationAt(size_t index, Location location) {
    locations_[index] = location;
  }

  Location GetLocationAt(size_t index) const {
    return locations_[index];
  }

  uint32_t GetDexPc() const {
    return dex_pc_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  HInstruction* GetHolder() const {
    return holder_;
  }

  // An environment has a parent iff it was created for an inlined invoke.
  bool IsFromInlinedInvoke() const {
    return GetParent() != nullptr;
  }

 private:
  ArenaVector<HUserRecord<HEnvironment*>> vregs_;
  ArenaVector<Location> locations_;
  HEnvironment* parent_;
  ArtMethod* method_;
  const uint32_t dex_pc_;

  // The instruction that holds this environment.
  HInstruction* const holder_;

  friend class HInstruction;

  DISALLOW_COPY_AND_ASSIGN(HEnvironment);
};

// Base class of all nodes in the HGraph IR. Carries the intrusive list links,
// side-effect flags, use lists, environment, and codegen/liveness state shared
// by every instruction kind.
class HInstruction : public ArenaObject<kArenaAllocInstruction> {
 public:
  HInstruction(SideEffects side_effects, uint32_t dex_pc)
      : previous_(nullptr),
        next_(nullptr),
        block_(nullptr),
        dex_pc_(dex_pc),
        id_(-1),
        ssa_index_(-1),
        packed_fields_(0u),
        environment_(nullptr),
        locations_(nullptr),
        live_interval_(nullptr),
        lifetime_position_(kNoLifetime),
        side_effects_(side_effects),
        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
  }

  virtual ~HInstruction() {}

#define DECLARE_KIND(type, super) k##type,
  enum InstructionKind {
    FOR_EACH_INSTRUCTION(DECLARE_KIND)
  };
#undef DECLARE_KIND

  HInstruction* GetNext() const { return next_; }
  HInstruction* GetPrevious() const { return previous_; }

  HInstruction* GetNextDisregardingMoves() const;
  HInstruction* GetPreviousDisregardingMoves() const;

  HBasicBlock* GetBlock() const { return block_; }
  ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  bool IsInBlock() const { return block_ != nullptr; }
  bool IsInLoop() const { return block_->IsInLoop(); }
  bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
  bool IsIrreducibleLoopHeaderPhi() const {
    return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
  }

  virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;

  ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
    // One virtual method is enough, just const_cast<> and then re-add the const.
    return ArrayRef<const HUserRecord<HInstruction*>>(
        const_cast<HInstruction*>(this)->GetInputRecords());
  }

  HInputsRef GetInputs() {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  HConstInputsRef GetInputs() const {
    return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
  }

  size_t InputCount() const { return GetInputRecords().size(); }
  HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }

  // Linear scan over the inputs; returns true if `input` appears among them.
  bool HasInput(HInstruction* input) const {
    for (const HInstruction* i : GetInputs()) {
      if (i == input) {
        return true;
      }
    }
    return false;
  }

  // Raw: does not update `input`'s use list (see AddUseAt for that).
  void SetRawInputAt(size_t index, HInstruction* input) {
    SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
  }

  virtual void Accept(HGraphVisitor* visitor) = 0;
  virtual const char* DebugName() const = 0;

  virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }

  virtual bool NeedsEnvironment() const { return false; }

  uint32_t GetDexPc() const { return dex_pc_; }

  virtual bool IsControlFlow() const { return false; }

  // Can the instruction throw?
  // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
  // could throw OOME, but it is still OK to remove them if they are unused.
  virtual bool CanThrow() const { return false; }
  bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }

  bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
  bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }

  // Does not apply for all instructions, but having this at top level greatly
  // simplifies the null check elimination.
  // TODO: Consider merging can_be_null into ReferenceTypeInfo.
  virtual bool CanBeNull() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
    return true;
  }

  virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
    return false;
  }

  virtual bool IsActualObject() const {
    return GetType() == Primitive::kPrimNot;
  }

  void SetReferenceTypeInfo(ReferenceTypeInfo rti);

  ReferenceTypeInfo GetReferenceTypeInfo() const {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    // Reassemble the RTI from the handle plus the IsExact flag kept in
    // packed_fields_.
    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
  }

  // Records that `user` uses this instruction as its input number `index`,
  // then re-links the affected user records to the new use-list node.
  void AddUseAt(HInstruction* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: fixup_end remains valid across push_front().
    auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
    HUseListNode<HInstruction*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
    uses_.push_front(*new_node);
    FixUpUserRecordsAfterUseInsertion(fixup_end);
  }

  // Environment counterpart of AddUseAt.
  void AddEnvUseAt(HEnvironment* user, size_t index) {
    DCHECK(user != nullptr);
    // Note: env_fixup_end remains valid across push_front().
    auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
    HUseListNode<HEnvironment*>* new_node =
        new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
    env_uses_.push_front(*new_node);
    FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
  }

  // Removes `this` from the use list of its `input`-th input.
  void RemoveAsUserOfInput(size_t input) {
    HUserRecord<HInstruction*> input_use = InputRecordAt(input);
    HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
    input_use.GetInstruction()->uses_.erase_after(before_use_node);
    input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
  }

  void RemoveAsUserOfAllInputs() {
    for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
      HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
      input_use.GetInstruction()->uses_.erase_after(before_use_node);
      input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
    }
  }

  const HUseList<HInstruction*>& GetUses() const { return uses_; }
  const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }

  bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
  bool HasEnvironmentUses() const { return !env_uses_.empty(); }
  bool HasNonEnvironmentUses() const { return !uses_.empty(); }
  bool HasOnlyOneNonEnvironmentUse() const {
    return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
  }

  // True if removing this instruction (assuming it is unused) cannot change
  // observable behavior.
  bool IsRemovable() const {
    return
        !DoesAnyWrite() &&
        !CanThrow() &&
        !IsSuspendCheck() &&
        !IsControlFlow() &&
        !IsNativeDebugInfo() &&
        !IsParameterValue() &&
        // If we added an explicit barrier then we should keep it.
        !IsMemoryBarrier() &&
        !IsConstructorFence();
  }

  bool IsDeadAndRemovable() const {
    return IsRemovable() && !HasUses();
  }

  // Does this instruction strictly dominate `other_instruction`?
  // Returns false if this instruction and `other_instruction` are the same.
  // Aborts if this instruction and `other_instruction` are both phis.
  bool StrictlyDominates(HInstruction* other_instruction) const;

  int GetId() const { return id_; }
  void SetId(int id) { id_ = id; }

  int GetSsaIndex() const { return ssa_index_; }
  void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
  bool HasSsaIndex() const { return ssa_index_ != -1; }

  bool HasEnvironment() const { return environment_ != nullptr; }
  HEnvironment* GetEnvironment() const { return environment_; }
  // Set the `environment_` field. Raw because this method does not
  // update the uses lists.
  void SetRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    environment_ = environment;
  }

  // Prepends `environment` to the existing environment chain (the current
  // environment becomes its parent). Raw: uses lists are not updated.
  void InsertRawEnvironment(HEnvironment* environment) {
    DCHECK(environment_ != nullptr);
    DCHECK_EQ(environment->GetHolder(), this);
    DCHECK(environment->GetParent() == nullptr);
    environment->parent_ = environment_;
    environment_ = environment;
  }

  void RemoveEnvironment();

  // Set the environment of this instruction, copying it from `environment`. While
  // copying, the uses lists are being updated.
  void CopyEnvironmentFrom(HEnvironment* environment) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFrom(environment);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Like CopyEnvironmentFrom, but loop phis of `block` are replaced by their
  // first input (see HEnvironment::CopyFromWithLoopPhiAdjustment).
  void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
                                                HBasicBlock* block) {
    DCHECK(environment_ == nullptr);
    ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
    environment_ = new (allocator) HEnvironment(allocator, *environment, this);
    environment_->CopyFromWithLoopPhiAdjustment(environment, block);
    if (environment->GetParent() != nullptr) {
      environment_->SetAndCopyParentChain(allocator, environment->GetParent());
    }
  }

  // Returns the number of entries in the environment. Typically, that is the
  // number of dex registers in a method. It could be more in case of inlining.
  size_t EnvironmentSize() const;

  LocationSummary* GetLocations() const { return locations_; }
  void SetLocations(LocationSummary* locations) { locations_ = locations; }

  void ReplaceWith(HInstruction* instruction);
  void ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
  void ReplaceInput(HInstruction* replacement, size_t index);

  // This is almost the same as doing `ReplaceWith()`. But in this helper, the
  // uses of this instruction by `other` are *not* updated.
  void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
    ReplaceWith(other);
    other->ReplaceInput(this, use_index);
  }

  // Move `this` instruction before `cursor`.
  void MoveBefore(HInstruction* cursor, bool do_checks = true);

  // Move `this` before its first user and out of any loops. If there is no
  // out-of-loop user that dominates all other users, move the instruction
  // to the end of the out-of-loop common dominator of the user's blocks.
  //
  // This can be used only on non-throwing instructions with no side effects that
  // have at least one use but no environment uses.
  void MoveBeforeFirstUserAndOutOfLoops();

  // Declarations of Is##type()/As##type() for every concrete instruction;
  // defined out-of-line (after all instruction classes are declared).
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const;                                                       \
  const H##type* As##type() const;                                             \
  H##type* As##type();

  FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Abstract instructions get virtual As##type() that subclasses override.
#define INSTRUCTION_TYPE_CHECK(type, super)                                    \
  bool Is##type() const { return (As##type() != nullptr); }                    \
  virtual const H##type* As##type() const { return nullptr; }                  \
  virtual H##type* As##type() { return nullptr; }
  FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
#undef INSTRUCTION_TYPE_CHECK

  // Returns whether the instruction can be moved within the graph.
  // TODO: this method is used by LICM and GVN with possibly different
  // meanings? split and rename?
  virtual bool CanBeMoved() const { return false; }

  // Returns whether the two instructions are of the same kind.
  virtual bool InstructionTypeEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether any data encoded in the two instructions is equal.
  // This method does not look at the inputs. Both instructions must be
  // of the same type, otherwise the method has undefined behavior.
  virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
    return false;
  }

  // Returns whether two instructions are equal, that is:
  // 1) They have the same type and contain the same data (InstructionDataEquals).
  // 2) Their inputs are identical.
  bool Equals(const HInstruction* other) const;

  // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
  // is adopted and implemented by our C++ compiler(s). For now, we need to hide
  // the virtual function because the __attribute__((__pure__)) doesn't really
  // apply the strong requirement for virtual functions, preventing optimizations.
  InstructionKind GetKind() const PURE;
  virtual InstructionKind GetKindInternal() const = 0;

  // Hash combining the kind and the input ids; used by GVN-style dedup.
  virtual size_t ComputeHashCode() const {
    size_t result = GetKind();
    for (const HInstruction* input : GetInputs()) {
      result = (result * 31) + input->GetId();
    }
    return result;
  }

  SideEffects GetSideEffects() const { return side_effects_; }
  void SetSideEffects(SideEffects other) { side_effects_ = other; }
  void AddSideEffects(SideEffects other) { side_effects_.Add(other); }

  size_t GetLifetimePosition() const { return lifetime_position_; }
  void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
  LiveInterval* GetLiveInterval() const { return live_interval_; }
  void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
  bool HasLiveInterval() const { return live_interval_ != nullptr; }

  bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }

  // Returns whether the code generation of the instruction will require to have access
  // to the current method. Such instructions are:
  // (1): Instructions that require an environment, as calling the runtime requires
  //      to walk the stack and have the current method stored at a specific stack address.
  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
  //      to access the dex cache.
  bool NeedsCurrentMethod() const {
    return NeedsEnvironment() || IsCurrentMethod();
  }

  // Returns whether the code generation of the instruction will require to have access
  // to the dex cache of the current method's declaring class via the current method.
  virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }

  // Does this instruction have any use in an environment before
  // control flow hits 'other'?
  bool HasAnyEnvironmentUseBefore(HInstruction* other);

  // Remove all references to environment uses of this instruction.
  // The caller must ensure that this is safe to do.
  void RemoveEnvironmentUsers();

  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

 protected:
  // If set, the machine code for this instruction is assumed to be generated by
  // its users. Used by liveness analysis to compute use positions accordingly.
  static constexpr size_t kFlagEmittedAtUseSite = 0u;
  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
  // Subclasses may pack additional fields starting at this bit.
  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;

  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
    return GetInputRecords()[i];
  }

  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
    ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
    input_records[index] = input;
  }

  uint32_t GetPackedFields() const {
    return packed_fields_;
  }

  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  }

  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  }

  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  }

  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  }

 private:
  // After nodes were pushed onto the front of uses_, re-point the input
  // records of every user in [begin, fixup_end) at their new list nodes.
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
    }
  }

  // After erase_after(before_use_node), the node following before_use_node
  // changed; re-point that user's input record at its new predecessor.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseInsertion.
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
    }
  }

  // Environment counterpart of FixUpUserRecordsAfterUseRemoval.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
    }
  }

  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;

  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  int id_;

  // When doing liveness analysis, instructions that have uses get an SSA index.
  int ssa_index_;

  // Packed fields.
  uint32_t packed_fields_;

  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;

  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;

  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;

  // Set by the code generator.
  LocationSummary* locations_;

  // Set by the liveness analysis.
  LiveInterval* live_interval_;

  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;

  SideEffects side_effects_;

  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;

  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;

  DISALLOW_COPY_AND_ASSIGN(HInstruction);
};
std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);

// Iterates over the instructions, while preserving the next instruction
// in case the current instruction gets removed from the list by the user
// of this iterator.
class HInstructionIterator : public ValueObject {
 public:
  explicit HInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    // Pre-fetch the following instruction so Current() may be unlinked.
    next_ = Done() ? nullptr : instruction_->GetNext();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
};

// Iterates over the instructions without saving the next instruction,
// therefore handling changes in the graph potentially made by the user
// of this iterator.
class HInstructionIteratorHandleChanges : public ValueObject {
 public:
  explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = instruction_->GetNext();
  }

 private:
  HInstruction* instruction_;

  DISALLOW_COPY_AND_ASSIGN(HInstructionIteratorHandleChanges);
};


// Iterates over the instructions from last to first, preserving the previous
// instruction in case the current one gets removed.
class HBackwardInstructionIterator : public ValueObject {
 public:
  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.last_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
  void Advance() {
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  }

 private:
  HInstruction* instruction_;
  HInstruction* next_;

  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
};

// Base class for instructions whose number of inputs may change after
// construction (e.g. phis and invokes); inputs live in an ArenaVector.
class HVariableInputSizeInstruction : public HInstruction {
 public:
  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

  void AddInput(HInstruction* input);
  void InsertInputAt(size_t index, HInstruction* input);
  void RemoveInputAt(size_t index);

  // Removes all the inputs.
  // Also removes this instructions from each input's use list
  // (for non-environment uses only).
  void RemoveAllInputs();

 protected:
  HVariableInputSizeInstruction(SideEffects side_effects,
                                uint32_t dex_pc,
                                ArenaAllocator* arena,
                                size_t number_of_inputs,
                                ArenaAllocKind kind)
      : HInstruction(side_effects, dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kind)) {}

  ArenaVector<HUserRecord<HInstruction*>> inputs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
};

// Base class for instructions with a compile-time fixed number of inputs (N),
// stored inline in a std::array.
template<size_t N>
class HTemplateInstruction: public HInstruction {
 public:
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
  }

 private:
  std::array<HUserRecord<HInstruction*>, N> inputs_;

  friend class SsaBuilder;
};

// HTemplateInstruction specialization for N=0.
template<>
class HTemplateInstruction<0>: public HInstruction {
 public:
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}

  virtual ~HTemplateInstruction() {}

  using HInstruction::GetInputRecords;  // Keep the const version visible.
  ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL {
    // No inputs: return an empty view.
    return ArrayRef<HUserRecord<HInstruction*>>();
  }

 private:
  friend class SsaBuilder;
};

// An instruction that produces a value of a given primitive type; the type is
// kept in the packed fields (see TypeField below).
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
 public:
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  }
  virtual ~HExpression() {}

  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  }

 protected:
  // Packed-field layout: the type starts right after the generic bits.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
};

// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
 public:
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(ReturnVoid);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
};

// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
 public:
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(Return);

 private:
  DISALLOW_COPY_AND_ASSIGN(HReturn);
};

// SSA phi: merges the values of one dex register (`reg_number`) coming from
// each predecessor of its block.
class HPhi FINAL : public HVariableInputSizeInstruction {
 public:
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HVariableInputSizeInstruction(
            SideEffects::None(),
            dex_pc,
            arena,
            number_of_inputs,
            kArenaAllocPhiInputs),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  }

  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  }

  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }

  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    // (1) int  -> float/ref (primitive type propagation),
    // (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

  uint32_t GetRegNumber() const { return reg_number_; }

  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

  // True if `other` is a phi for the same dex register in the same block.
  bool IsVRegEquivalentOf(const HInstruction* other) const {
    return other != nullptr
        && other->IsPhi()
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  }

  // Checks the adjacent phis only; relies on equivalent phis being kept
  // adjacent in the phi list.
  bool HasEquivalentPhi() const {
    if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
      return true;
    }
    return false;
  }

  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
2628 HPhi* GetNextEquivalentPhiWithSameType() { 2629 HInstruction* next = GetNext(); 2630 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) { 2631 if (next->GetType() == GetType()) { 2632 return next->AsPhi(); 2633 } 2634 next = next->GetNext(); 2635 } 2636 return nullptr; 2637 } 2638 2639 DECLARE_INSTRUCTION(Phi); 2640 2641 private: 2642 static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits; 2643 static constexpr size_t kFieldTypeSize = 2644 MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast)); 2645 static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize; 2646 static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1; 2647 static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1; 2648 static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields."); 2649 using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>; 2650 2651 const uint32_t reg_number_; 2652 2653 DISALLOW_COPY_AND_ASSIGN(HPhi); 2654 }; 2655 2656 // The exit instruction is the only instruction of the exit block. 2657 // Instructions aborting the method (HThrow and HReturn) must branch to the 2658 // exit block. 2659 class HExit FINAL : public HTemplateInstruction<0> { 2660 public: 2661 explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2662 2663 bool IsControlFlow() const OVERRIDE { return true; } 2664 2665 DECLARE_INSTRUCTION(Exit); 2666 2667 private: 2668 DISALLOW_COPY_AND_ASSIGN(HExit); 2669 }; 2670 2671 // Jumps from one block to another. 
2672 class HGoto FINAL : public HTemplateInstruction<0> { 2673 public: 2674 explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {} 2675 2676 bool IsControlFlow() const OVERRIDE { return true; } 2677 2678 HBasicBlock* GetSuccessor() const { 2679 return GetBlock()->GetSingleSuccessor(); 2680 } 2681 2682 DECLARE_INSTRUCTION(Goto); 2683 2684 private: 2685 DISALLOW_COPY_AND_ASSIGN(HGoto); 2686 }; 2687 2688 class HConstant : public HExpression<0> { 2689 public: 2690 explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 2691 : HExpression(type, SideEffects::None(), dex_pc) {} 2692 2693 bool CanBeMoved() const OVERRIDE { return true; } 2694 2695 // Is this constant -1 in the arithmetic sense? 2696 virtual bool IsMinusOne() const { return false; } 2697 // Is this constant 0 in the arithmetic sense? 2698 virtual bool IsArithmeticZero() const { return false; } 2699 // Is this constant a 0-bit pattern? 2700 virtual bool IsZeroBitPattern() const { return false; } 2701 // Is this constant 1 in the arithmetic sense? 2702 virtual bool IsOne() const { return false; } 2703 2704 virtual uint64_t GetValueAsUint64() const = 0; 2705 2706 DECLARE_ABSTRACT_INSTRUCTION(Constant); 2707 2708 private: 2709 DISALLOW_COPY_AND_ASSIGN(HConstant); 2710 }; 2711 2712 class HNullConstant FINAL : public HConstant { 2713 public: 2714 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 2715 return true; 2716 } 2717 2718 uint64_t GetValueAsUint64() const OVERRIDE { return 0; } 2719 2720 size_t ComputeHashCode() const OVERRIDE { return 0; } 2721 2722 // The null constant representation is a 0-bit pattern. 
2723 virtual bool IsZeroBitPattern() const { return true; } 2724 2725 DECLARE_INSTRUCTION(NullConstant); 2726 2727 private: 2728 explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {} 2729 2730 friend class HGraph; 2731 DISALLOW_COPY_AND_ASSIGN(HNullConstant); 2732 }; 2733 2734 // Constants of the type int. Those can be from Dex instructions, or 2735 // synthesized (for example with the if-eqz instruction). 2736 class HIntConstant FINAL : public HConstant { 2737 public: 2738 int32_t GetValue() const { return value_; } 2739 2740 uint64_t GetValueAsUint64() const OVERRIDE { 2741 return static_cast<uint64_t>(static_cast<uint32_t>(value_)); 2742 } 2743 2744 bool InstructionDataEquals(const HInstruction* other) const OVERRIDE { 2745 DCHECK(other->IsIntConstant()) << other->DebugName(); 2746 return other->AsIntConstant()->value_ == value_; 2747 } 2748 2749 size_t ComputeHashCode() const OVERRIDE { return GetValue(); } 2750 2751 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2752 bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; } 2753 bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; } 2754 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2755 2756 // Integer constants are used to encode Boolean values as well, 2757 // where 1 means true and 0 means false. 2758 bool IsTrue() const { return GetValue() == 1; } 2759 bool IsFalse() const { return GetValue() == 0; } 2760 2761 DECLARE_INSTRUCTION(IntConstant); 2762 2763 private: 2764 explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2765 : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {} 2766 explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc) 2767 : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 
1 : 0) {} 2768 2769 const int32_t value_; 2770 2771 friend class HGraph; 2772 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore); 2773 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast); 2774 DISALLOW_COPY_AND_ASSIGN(HIntConstant); 2775 }; 2776 2777 class HLongConstant FINAL : public HConstant { 2778 public: 2779 int64_t GetValue() const { return value_; } 2780 2781 uint64_t GetValueAsUint64() const OVERRIDE { return value_; } 2782 2783 bool InstructionDataEquals(const HInstruction* other) const OVERRIDE { 2784 DCHECK(other->IsLongConstant()) << other->DebugName(); 2785 return other->AsLongConstant()->value_ == value_; 2786 } 2787 2788 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2789 2790 bool IsMinusOne() const OVERRIDE { return GetValue() == -1; } 2791 bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; } 2792 bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; } 2793 bool IsOne() const OVERRIDE { return GetValue() == 1; } 2794 2795 DECLARE_INSTRUCTION(LongConstant); 2796 2797 private: 2798 explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 2799 : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {} 2800 2801 const int64_t value_; 2802 2803 friend class HGraph; 2804 DISALLOW_COPY_AND_ASSIGN(HLongConstant); 2805 }; 2806 2807 class HFloatConstant FINAL : public HConstant { 2808 public: 2809 float GetValue() const { return value_; } 2810 2811 uint64_t GetValueAsUint64() const OVERRIDE { 2812 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_)); 2813 } 2814 2815 bool InstructionDataEquals(const HInstruction* other) const OVERRIDE { 2816 DCHECK(other->IsFloatConstant()) << other->DebugName(); 2817 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64(); 2818 } 2819 2820 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2821 2822 bool IsMinusOne() const OVERRIDE { 2823 return bit_cast<uint32_t, float>(value_) == 
bit_cast<uint32_t, float>((-1.0f)); 2824 } 2825 bool IsArithmeticZero() const OVERRIDE { 2826 return std::fpclassify(value_) == FP_ZERO; 2827 } 2828 bool IsArithmeticPositiveZero() const { 2829 return IsArithmeticZero() && !std::signbit(value_); 2830 } 2831 bool IsArithmeticNegativeZero() const { 2832 return IsArithmeticZero() && std::signbit(value_); 2833 } 2834 bool IsZeroBitPattern() const OVERRIDE { 2835 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f); 2836 } 2837 bool IsOne() const OVERRIDE { 2838 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f); 2839 } 2840 bool IsNaN() const { 2841 return std::isnan(value_); 2842 } 2843 2844 DECLARE_INSTRUCTION(FloatConstant); 2845 2846 private: 2847 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc) 2848 : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {} 2849 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc) 2850 : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {} 2851 2852 const float value_; 2853 2854 // Only the SsaBuilder and HGraph can create floating-point constants. 
2855 friend class SsaBuilder; 2856 friend class HGraph; 2857 DISALLOW_COPY_AND_ASSIGN(HFloatConstant); 2858 }; 2859 2860 class HDoubleConstant FINAL : public HConstant { 2861 public: 2862 double GetValue() const { return value_; } 2863 2864 uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); } 2865 2866 bool InstructionDataEquals(const HInstruction* other) const OVERRIDE { 2867 DCHECK(other->IsDoubleConstant()) << other->DebugName(); 2868 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64(); 2869 } 2870 2871 size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); } 2872 2873 bool IsMinusOne() const OVERRIDE { 2874 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0)); 2875 } 2876 bool IsArithmeticZero() const OVERRIDE { 2877 return std::fpclassify(value_) == FP_ZERO; 2878 } 2879 bool IsArithmeticPositiveZero() const { 2880 return IsArithmeticZero() && !std::signbit(value_); 2881 } 2882 bool IsArithmeticNegativeZero() const { 2883 return IsArithmeticZero() && std::signbit(value_); 2884 } 2885 bool IsZeroBitPattern() const OVERRIDE { 2886 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0)); 2887 } 2888 bool IsOne() const OVERRIDE { 2889 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0); 2890 } 2891 bool IsNaN() const { 2892 return std::isnan(value_); 2893 } 2894 2895 DECLARE_INSTRUCTION(DoubleConstant); 2896 2897 private: 2898 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) 2899 : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {} 2900 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc) 2901 : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {} 2902 2903 const double value_; 2904 2905 // Only the SsaBuilder and HGraph can create floating-point constants. 
2906 friend class SsaBuilder; 2907 friend class HGraph; 2908 DISALLOW_COPY_AND_ASSIGN(HDoubleConstant); 2909 }; 2910 2911 // Conditional branch. A block ending with an HIf instruction must have 2912 // two successors. 2913 class HIf FINAL : public HTemplateInstruction<1> { 2914 public: 2915 explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc) 2916 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2917 SetRawInputAt(0, input); 2918 } 2919 2920 bool IsControlFlow() const OVERRIDE { return true; } 2921 2922 HBasicBlock* IfTrueSuccessor() const { 2923 return GetBlock()->GetSuccessors()[0]; 2924 } 2925 2926 HBasicBlock* IfFalseSuccessor() const { 2927 return GetBlock()->GetSuccessors()[1]; 2928 } 2929 2930 DECLARE_INSTRUCTION(If); 2931 2932 private: 2933 DISALLOW_COPY_AND_ASSIGN(HIf); 2934 }; 2935 2936 2937 // Abstract instruction which marks the beginning and/or end of a try block and 2938 // links it to the respective exception handlers. Behaves the same as a Goto in 2939 // non-exceptional control flow. 2940 // Normal-flow successor is stored at index zero, exception handlers under 2941 // higher indices in no particular order. 2942 class HTryBoundary FINAL : public HTemplateInstruction<0> { 2943 public: 2944 enum class BoundaryKind { 2945 kEntry, 2946 kExit, 2947 kLast = kExit 2948 }; 2949 2950 explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc) 2951 : HTemplateInstruction(SideEffects::None(), dex_pc) { 2952 SetPackedField<BoundaryKindField>(kind); 2953 } 2954 2955 bool IsControlFlow() const OVERRIDE { return true; } 2956 2957 // Returns the block's non-exceptional successor (index zero). 
2958 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; } 2959 2960 ArrayRef<HBasicBlock* const> GetExceptionHandlers() const { 2961 return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u); 2962 } 2963 2964 // Returns whether `handler` is among its exception handlers (non-zero index 2965 // successors). 2966 bool HasExceptionHandler(const HBasicBlock& handler) const { 2967 DCHECK(handler.IsCatchBlock()); 2968 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */); 2969 } 2970 2971 // If not present already, adds `handler` to its block's list of exception 2972 // handlers. 2973 void AddExceptionHandler(HBasicBlock* handler) { 2974 if (!HasExceptionHandler(*handler)) { 2975 GetBlock()->AddSuccessor(handler); 2976 } 2977 } 2978 2979 BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); } 2980 bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; } 2981 2982 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const; 2983 2984 DECLARE_INSTRUCTION(TryBoundary); 2985 2986 private: 2987 static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits; 2988 static constexpr size_t kFieldBoundaryKindSize = 2989 MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast)); 2990 static constexpr size_t kNumberOfTryBoundaryPackedBits = 2991 kFieldBoundaryKind + kFieldBoundaryKindSize; 2992 static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits, 2993 "Too many packed fields."); 2994 using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>; 2995 2996 DISALLOW_COPY_AND_ASSIGN(HTryBoundary); 2997 }; 2998 2999 // Deoptimize to interpreter, upon checking a condition. 3000 class HDeoptimize FINAL : public HVariableInputSizeInstruction { 3001 public: 3002 // Use this constructor when the `HDeoptimize` acts as a barrier, where no code can move 3003 // across. 
3004 HDeoptimize(ArenaAllocator* arena, HInstruction* cond, DeoptimizationKind kind, uint32_t dex_pc) 3005 : HVariableInputSizeInstruction( 3006 SideEffects::All(), 3007 dex_pc, 3008 arena, 3009 /* number_of_inputs */ 1, 3010 kArenaAllocMisc) { 3011 SetPackedFlag<kFieldCanBeMoved>(false); 3012 SetPackedField<DeoptimizeKindField>(kind); 3013 SetRawInputAt(0, cond); 3014 } 3015 3016 // Use this constructor when the `HDeoptimize` guards an instruction, and any user 3017 // that relies on the deoptimization to pass should have its input be the `HDeoptimize` 3018 // instead of `guard`. 3019 // We set CanTriggerGC to prevent any intermediate address to be live 3020 // at the point of the `HDeoptimize`. 3021 HDeoptimize(ArenaAllocator* arena, 3022 HInstruction* cond, 3023 HInstruction* guard, 3024 DeoptimizationKind kind, 3025 uint32_t dex_pc) 3026 : HVariableInputSizeInstruction( 3027 SideEffects::CanTriggerGC(), 3028 dex_pc, 3029 arena, 3030 /* number_of_inputs */ 2, 3031 kArenaAllocMisc) { 3032 SetPackedFlag<kFieldCanBeMoved>(true); 3033 SetPackedField<DeoptimizeKindField>(kind); 3034 SetRawInputAt(0, cond); 3035 SetRawInputAt(1, guard); 3036 } 3037 3038 bool CanBeMoved() const OVERRIDE { return GetPackedFlag<kFieldCanBeMoved>(); } 3039 3040 bool InstructionDataEquals(const HInstruction* other) const OVERRIDE { 3041 return (other->CanBeMoved() == CanBeMoved()) && (other->AsDeoptimize()->GetKind() == GetKind()); 3042 } 3043 3044 bool NeedsEnvironment() const OVERRIDE { return true; } 3045 3046 bool CanThrow() const OVERRIDE { return true; } 3047 3048 DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); } 3049 3050 Primitive::Type GetType() const OVERRIDE { 3051 return GuardsAnInput() ? 
GuardedInput()->GetType() : Primitive::kPrimVoid; 3052 } 3053 3054 bool GuardsAnInput() const { 3055 return InputCount() == 2; 3056 } 3057 3058 HInstruction* GuardedInput() const { 3059 DCHECK(GuardsAnInput()); 3060 return InputAt(1); 3061 } 3062 3063 void RemoveGuard() { 3064 RemoveInputAt(1); 3065 } 3066 3067 DECLARE_INSTRUCTION(Deoptimize); 3068 3069 private: 3070 static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits; 3071 static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1; 3072 static constexpr size_t kFieldDeoptimizeKindSize = 3073 MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast)); 3074 static constexpr size_t kNumberOfDeoptimizePackedBits = 3075 kFieldDeoptimizeKind + kFieldDeoptimizeKindSize; 3076 static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits, 3077 "Too many packed fields."); 3078 using DeoptimizeKindField = 3079 BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>; 3080 3081 DISALLOW_COPY_AND_ASSIGN(HDeoptimize); 3082 }; 3083 3084 // Represents a should_deoptimize flag. Currently used for CHA-based devirtualization. 3085 // The compiled code checks this flag value in a guard before devirtualized call and 3086 // if it's true, starts to do deoptimization. 3087 // It has a 4-byte slot on stack. 3088 // TODO: allocate a register for this flag. 3089 class HShouldDeoptimizeFlag FINAL : public HVariableInputSizeInstruction { 3090 public: 3091 // CHA guards are only optimized in a separate pass and it has no side effects 3092 // with regard to other passes. 
3093 HShouldDeoptimizeFlag(ArenaAllocator* arena, uint32_t dex_pc) 3094 : HVariableInputSizeInstruction(SideEffects::None(), dex_pc, arena, 0, kArenaAllocCHA) { 3095 } 3096 3097 Primitive::Type GetType() const OVERRIDE { return Primitive::kPrimInt; } 3098 3099 // We do all CHA guard elimination/motion in a single pass, after which there is no 3100 // further guard elimination/motion since a guard might have been used for justification 3101 // of the elimination of another guard. Therefore, we pretend this guard cannot be moved 3102 // to avoid other optimizations trying to move it. 3103 bool CanBeMoved() const OVERRIDE { return false; } 3104 3105 DECLARE_INSTRUCTION(ShouldDeoptimizeFlag); 3106 3107 private: 3108 DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag); 3109 }; 3110 3111 // Represents the ArtMethod that was passed as a first argument to 3112 // the method. It is used by instructions that depend on it, like 3113 // instructions that work with the dex cache. 3114 class HCurrentMethod FINAL : public HExpression<0> { 3115 public: 3116 explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc) 3117 : HExpression(type, SideEffects::None(), dex_pc) {} 3118 3119 DECLARE_INSTRUCTION(CurrentMethod); 3120 3121 private: 3122 DISALLOW_COPY_AND_ASSIGN(HCurrentMethod); 3123 }; 3124 3125 // Fetches an ArtMethod from the virtual table or the interface method table 3126 // of a class. 
3127 class HClassTableGet FINAL : public HExpression<1> { 3128 public: 3129 enum class TableKind { 3130 kVTable, 3131 kIMTable, 3132 kLast = kIMTable 3133 }; 3134 HClassTableGet(HInstruction* cls, 3135 Primitive::Type type, 3136 TableKind kind, 3137 size_t index, 3138 uint32_t dex_pc) 3139 : HExpression(type, SideEffects::None(), dex_pc), 3140 index_(index) { 3141 SetPackedField<TableKindField>(kind); 3142 SetRawInputAt(0, cls); 3143 } 3144 3145 bool CanBeMoved() const OVERRIDE { return true; } 3146 bool InstructionDataEquals(const HInstruction* other) const OVERRIDE { 3147 return other->AsClassTableGet()->GetIndex() == index_ && 3148 other->AsClassTableGet()->GetPackedFields() == GetPackedFields(); 3149 } 3150 3151 TableKind GetTableKind() const { return GetPackedField<TableKindField>(); } 3152 size_t GetIndex() const { return index_; } 3153 3154 DECLARE_INSTRUCTION(ClassTableGet); 3155 3156 private: 3157 static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits; 3158 static constexpr size_t kFieldTableKindSize = 3159 MinimumBitsToStore(static_cast<size_t>(TableKind::kLast)); 3160 static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize; 3161 static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits, 3162 "Too many packed fields."); 3163 using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKind>; 3164 3165 // The index of the ArtMethod in the table. 3166 const size_t index_; 3167 3168 DISALLOW_COPY_AND_ASSIGN(HClassTableGet); 3169 }; 3170 3171 // PackedSwitch (jump table). A block ending with a PackedSwitch instruction will 3172 // have one successor for each entry in the switch table, and the final successor 3173 // will be the block containing the next Dex opcode. 
class HPackedSwitch FINAL : public HTemplateInstruction<1> {
 public:
  HPackedSwitch(int32_t start_value,
                uint32_t num_entries,
                HInstruction* input,
                uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        start_value_(start_value),
        num_entries_(num_entries) {
    SetRawInputAt(0, input);
  }

  bool IsControlFlow() const OVERRIDE { return true; }

  // First case value; entry i handles `start_value_ + i`.
  int32_t GetStartValue() const { return start_value_; }

  uint32_t GetNumEntries() const { return num_entries_; }

  HBasicBlock* GetDefaultBlock() const {
    // Last entry is the default block.
    return GetBlock()->GetSuccessors()[num_entries_];
  }
  DECLARE_INSTRUCTION(PackedSwitch);

 private:
  const int32_t start_value_;
  const uint32_t num_entries_;

  DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
};

// Abstract base class for side-effect-free operations with a single input.
class HUnaryOperation : public HExpression<1> {
 public:
  HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  }

  HInstruction* GetInput() const { return InputAt(0); }
  Primitive::Type GetResultType() const { return GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation. If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x`.
  virtual HConstant* Evaluate(HIntConstant* x) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x) const = 0;
  virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;

  DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
};

// Abstract base class for operations with two inputs (left at index 0,
// right at index 1).
class HBinaryOperation : public HExpression<2> {
 public:
  HBinaryOperation(Primitive::Type result_type,
                   HInstruction* left,
                   HInstruction* right,
                   SideEffects side_effects = SideEffects::None(),
                   uint32_t dex_pc = kNoDexPc)
      : HExpression(result_type, side_effects, dex_pc) {
    SetRawInputAt(0, left);
    SetRawInputAt(1, right);
  }

  HInstruction* GetLeft() const { return InputAt(0); }
  HInstruction* GetRight() const { return InputAt(1); }
  Primitive::Type GetResultType() const { return GetType(); }

  virtual bool IsCommutative() const { return false; }

  // Put constant on the right.
  // Returns whether order is changed.
  bool OrderInputsWithConstantOnTheRight() {
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left->IsConstant() && !right->IsConstant()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
      return true;
    }
    return false;
  }

  // Order inputs by instruction id, but favor constant on the right side.
  // This helps GVN for commutative ops.
  void OrderInputs() {
    DCHECK(IsCommutative());
    HInstruction* left = InputAt(0);
    HInstruction* right = InputAt(1);
    if (left == right || (!left->IsConstant() && right->IsConstant())) {
      return;
    }
    if (OrderInputsWithConstantOnTheRight()) {
      return;
    }
    // Order according to instruction id.
    if (left->GetId() > right->GetId()) {
      ReplaceInput(right, 0);
      ReplaceInput(left, 1);
    }
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  // Try to statically evaluate `this` and return a HConstant
  // containing the result of this evaluation. If `this` cannot
  // be evaluated as a constant, return null.
  HConstant* TryStaticEvaluation() const;

  // Apply this operation to `x` and `y`. Combinations that are meaningless
  // for a given subclass fall through to the LOG(FATAL) defaults below.
  virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                              HNullConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
  virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
                              HIntConstant* y ATTRIBUTE_UNUSED) const {
    LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
    UNREACHABLE();
  }
  virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
  virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;

  // Returns an input that can legally be used as the right input and is
  // constant, or null.
  HConstant* GetConstantRight() const;

  // If `GetConstantRight()` returns one of the input, this returns the other
  // one. Otherwise it returns null.
  HInstruction* GetLeastConstantLeft() const;

  DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
};

// The comparison bias applies for floating point operations and indicates how NaN
// comparisons are treated:
enum class ComparisonBias {
  kNoBias,  // bias is not applicable (i.e. for long operation)
  kGtBias,  // return 1 for NaN comparisons
  kLtBias,  // return -1 for NaN comparisons
  kLast = kLtBias
};

std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);

// Abstract base class of all comparisons that produce a boolean.
class HCondition : public HBinaryOperation {
 public:
  HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
  }

  // For code generation purposes, returns whether this instruction is just before
  // `instruction`, and disregard moves in between.
  bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;

  DECLARE_ABSTRACT_INSTRUCTION(Condition);

  virtual IfCondition GetCondition() const = 0;

  virtual IfCondition GetOppositeCondition() const = 0;

  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
  bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }

  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    return GetPackedFields() == other->AsCondition()->GetPackedFields();
  }

  // Whether this FP comparison evaluates to true when an operand is NaN,
  // derived from the condition code and the bias.
  bool IsFPConditionTrueIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondNE) {
      return true;
    } else if (if_cond == kCondEQ) {
      return false;
    }
    return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
  }

  bool IsFPConditionFalseIfNaN() const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    IfCondition if_cond = GetCondition();
    if (if_cond == kCondEQ) {
      return true;
    } else if (if_cond == kCondNE) {
      return false;
    }
    return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
  }

 protected:
  // Needed if we merge a HCompare into a HCondition.
  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldComparisonBiasSize =
      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
  static constexpr size_t kNumberOfConditionPackedBits =
      kFieldComparisonBias + kFieldComparisonBiasSize;
  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ComparisonBiasField =
      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;

  // Three-way compare: 1, -1 or 0 (the HCompare contract).
  template <typename T>
  int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }

  template <typename T>
  int32_t CompareFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
    DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
    // Handle the bias.
    return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
  }

  // Return an integer constant containing the result of a condition evaluated at compile time.
  HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
    return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(HCondition);
};

// Instruction to check if two inputs are equal to each other.
class HEqual FINAL : public HCondition {
 public:
  HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // null == null is always true.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(true, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HEqual instruction; evaluate it as
  // `Compare(x, y) == 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
                                 GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(Equal);

  IfCondition GetCondition() const OVERRIDE {
    return kCondEQ;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondNE;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x == y; }

  DISALLOW_COPY_AND_ASSIGN(HEqual);
};

class HNotEqual FINAL : public HCondition {
 public:
  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  // null != null is always false.
  HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
                      HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    return MakeConstantCondition(false, GetDexPc());
  }
  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HNotEqual instruction; evaluate it as
  // `Compare(x, y) != 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(NotEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondNE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondEQ;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x != y; }

  DISALLOW_COPY_AND_ASSIGN(HNotEqual);
};

class HLessThan FINAL : public HCondition {
 public:
  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HLessThan instruction; evaluate it as
  // `Compare(x, y) < 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThan);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLT;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGE;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x < y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThan);
};

class HLessThanOrEqual FINAL : public HCondition {
 public:
  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HLessThanOrEqual instruction; evaluate it as
  // `Compare(x, y) <= 0`.
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
  }

  DECLARE_INSTRUCTION(LessThanOrEqual);

  IfCondition GetCondition() const OVERRIDE {
    return kCondLE;
  }

  IfCondition GetOppositeCondition() const OVERRIDE {
    return kCondGT;
  }

 private:
  template <typename T> static bool Compute(T x, T y) { return x <= y; }

  DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
};

class HGreaterThan FINAL : public HCondition {
 public:
  HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
      : HCondition(first, second, dex_pc) {}

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  // In the following Evaluate methods, a HCompare instruction has
  // been merged into this HGreaterThan instruction; evaluate it as
  // `Compare(x, y) > 0`.
3591 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3592 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc()); 3593 } 3594 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE { 3595 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc()); 3596 } 3597 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE { 3598 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc()); 3599 } 3600 3601 DECLARE_INSTRUCTION(GreaterThan); 3602 3603 IfCondition GetCondition() const OVERRIDE { 3604 return kCondGT; 3605 } 3606 3607 IfCondition GetOppositeCondition() const OVERRIDE { 3608 return kCondLE; 3609 } 3610 3611 private: 3612 template <typename T> static bool Compute(T x, T y) { return x > y; } 3613 3614 DISALLOW_COPY_AND_ASSIGN(HGreaterThan); 3615 }; 3616 3617 class HGreaterThanOrEqual FINAL : public HCondition { 3618 public: 3619 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3620 : HCondition(first, second, dex_pc) {} 3621 3622 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3623 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3624 } 3625 // In the following Evaluate methods, a HCompare instruction has 3626 // been merged into this HGreaterThanOrEqual instruction; evaluate it as 3627 // `Compare(x, y) >= 0`. 
3628 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3629 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc()); 3630 } 3631 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE { 3632 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc()); 3633 } 3634 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE { 3635 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc()); 3636 } 3637 3638 DECLARE_INSTRUCTION(GreaterThanOrEqual); 3639 3640 IfCondition GetCondition() const OVERRIDE { 3641 return kCondGE; 3642 } 3643 3644 IfCondition GetOppositeCondition() const OVERRIDE { 3645 return kCondLT; 3646 } 3647 3648 private: 3649 template <typename T> static bool Compute(T x, T y) { return x >= y; } 3650 3651 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual); 3652 }; 3653 3654 class HBelow FINAL : public HCondition { 3655 public: 3656 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3657 : HCondition(first, second, dex_pc) {} 3658 3659 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3660 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3661 } 3662 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3663 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3664 } 3665 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, 3666 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 3667 LOG(FATAL) << DebugName() << " is not defined for float values"; 3668 UNREACHABLE(); 3669 } 3670 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, 3671 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 3672 LOG(FATAL) << DebugName() << " is not defined for double values"; 3673 UNREACHABLE(); 3674 } 3675 3676 DECLARE_INSTRUCTION(Below); 3677 3678 IfCondition GetCondition() const 
OVERRIDE { 3679 return kCondB; 3680 } 3681 3682 IfCondition GetOppositeCondition() const OVERRIDE { 3683 return kCondAE; 3684 } 3685 3686 private: 3687 template <typename T> static bool Compute(T x, T y) { 3688 return MakeUnsigned(x) < MakeUnsigned(y); 3689 } 3690 3691 DISALLOW_COPY_AND_ASSIGN(HBelow); 3692 }; 3693 3694 class HBelowOrEqual FINAL : public HCondition { 3695 public: 3696 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3697 : HCondition(first, second, dex_pc) {} 3698 3699 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3700 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3701 } 3702 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3703 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3704 } 3705 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, 3706 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 3707 LOG(FATAL) << DebugName() << " is not defined for float values"; 3708 UNREACHABLE(); 3709 } 3710 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, 3711 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 3712 LOG(FATAL) << DebugName() << " is not defined for double values"; 3713 UNREACHABLE(); 3714 } 3715 3716 DECLARE_INSTRUCTION(BelowOrEqual); 3717 3718 IfCondition GetCondition() const OVERRIDE { 3719 return kCondBE; 3720 } 3721 3722 IfCondition GetOppositeCondition() const OVERRIDE { 3723 return kCondA; 3724 } 3725 3726 private: 3727 template <typename T> static bool Compute(T x, T y) { 3728 return MakeUnsigned(x) <= MakeUnsigned(y); 3729 } 3730 3731 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual); 3732 }; 3733 3734 class HAbove FINAL : public HCondition { 3735 public: 3736 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3737 : HCondition(first, second, dex_pc) {} 3738 3739 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3740 return 
MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3741 } 3742 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3743 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3744 } 3745 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, 3746 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 3747 LOG(FATAL) << DebugName() << " is not defined for float values"; 3748 UNREACHABLE(); 3749 } 3750 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, 3751 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 3752 LOG(FATAL) << DebugName() << " is not defined for double values"; 3753 UNREACHABLE(); 3754 } 3755 3756 DECLARE_INSTRUCTION(Above); 3757 3758 IfCondition GetCondition() const OVERRIDE { 3759 return kCondA; 3760 } 3761 3762 IfCondition GetOppositeCondition() const OVERRIDE { 3763 return kCondBE; 3764 } 3765 3766 private: 3767 template <typename T> static bool Compute(T x, T y) { 3768 return MakeUnsigned(x) > MakeUnsigned(y); 3769 } 3770 3771 DISALLOW_COPY_AND_ASSIGN(HAbove); 3772 }; 3773 3774 class HAboveOrEqual FINAL : public HCondition { 3775 public: 3776 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc) 3777 : HCondition(first, second, dex_pc) {} 3778 3779 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3780 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3781 } 3782 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3783 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3784 } 3785 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, 3786 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 3787 LOG(FATAL) << DebugName() << " is not defined for float values"; 3788 UNREACHABLE(); 3789 } 3790 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, 3791 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 3792 LOG(FATAL) << DebugName() 
<< " is not defined for double values"; 3793 UNREACHABLE(); 3794 } 3795 3796 DECLARE_INSTRUCTION(AboveOrEqual); 3797 3798 IfCondition GetCondition() const OVERRIDE { 3799 return kCondAE; 3800 } 3801 3802 IfCondition GetOppositeCondition() const OVERRIDE { 3803 return kCondB; 3804 } 3805 3806 private: 3807 template <typename T> static bool Compute(T x, T y) { 3808 return MakeUnsigned(x) >= MakeUnsigned(y); 3809 } 3810 3811 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual); 3812 }; 3813 3814 // Instruction to check how two inputs compare to each other. 3815 // Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1. 3816 class HCompare FINAL : public HBinaryOperation { 3817 public: 3818 // Note that `comparison_type` is the type of comparison performed 3819 // between the comparison's inputs, not the type of the instantiated 3820 // HCompare instruction (which is always Primitive::kPrimInt). 3821 HCompare(Primitive::Type comparison_type, 3822 HInstruction* first, 3823 HInstruction* second, 3824 ComparisonBias bias, 3825 uint32_t dex_pc) 3826 : HBinaryOperation(Primitive::kPrimInt, 3827 first, 3828 second, 3829 SideEffectsForArchRuntimeCalls(comparison_type), 3830 dex_pc) { 3831 SetPackedField<ComparisonBiasField>(bias); 3832 DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType())); 3833 DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType())); 3834 } 3835 3836 template <typename T> 3837 int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); } 3838 3839 template <typename T> 3840 int32_t ComputeFP(T x, T y) const { 3841 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType(); 3842 DCHECK_NE(GetBias(), ComparisonBias::kNoBias); 3843 // Handle the bias. 3844 return std::isunordered(x, y) ? (IsGtBias() ? 
1 : -1) : Compute(x, y); 3845 } 3846 3847 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 3848 // Note that there is no "cmp-int" Dex instruction so we shouldn't 3849 // reach this code path when processing a freshly built HIR 3850 // graph. However HCompare integer instructions can be synthesized 3851 // by the instruction simplifier to implement IntegerCompare and 3852 // IntegerSignum intrinsics, so we have to handle this case. 3853 return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3854 } 3855 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 3856 return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc()); 3857 } 3858 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE { 3859 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc()); 3860 } 3861 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE { 3862 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc()); 3863 } 3864 3865 bool InstructionDataEquals(const HInstruction* other) const OVERRIDE { 3866 return GetPackedFields() == other->AsCompare()->GetPackedFields(); 3867 } 3868 3869 ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); } 3870 3871 // Does this compare instruction have a "gt bias" (vs an "lt bias")? 3872 // Only meaningful for floating-point comparisons. 3873 bool IsGtBias() const { 3874 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType(); 3875 return GetBias() == ComparisonBias::kGtBias; 3876 } 3877 3878 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) { 3879 // Comparisons do not require a runtime call in any back end. 
3880 return SideEffects::None(); 3881 } 3882 3883 DECLARE_INSTRUCTION(Compare); 3884 3885 protected: 3886 static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits; 3887 static constexpr size_t kFieldComparisonBiasSize = 3888 MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast)); 3889 static constexpr size_t kNumberOfComparePackedBits = 3890 kFieldComparisonBias + kFieldComparisonBiasSize; 3891 static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields."); 3892 using ComparisonBiasField = 3893 BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>; 3894 3895 // Return an integer constant containing the result of a comparison evaluated at compile time. 3896 HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const { 3897 DCHECK(value == -1 || value == 0 || value == 1) << value; 3898 return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc); 3899 } 3900 3901 private: 3902 DISALLOW_COPY_AND_ASSIGN(HCompare); 3903 }; 3904 3905 class HNewInstance FINAL : public HExpression<1> { 3906 public: 3907 HNewInstance(HInstruction* cls, 3908 uint32_t dex_pc, 3909 dex::TypeIndex type_index, 3910 const DexFile& dex_file, 3911 bool finalizable, 3912 QuickEntrypointEnum entrypoint) 3913 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc), 3914 type_index_(type_index), 3915 dex_file_(dex_file), 3916 entrypoint_(entrypoint) { 3917 SetPackedFlag<kFlagFinalizable>(finalizable); 3918 SetRawInputAt(0, cls); 3919 } 3920 3921 dex::TypeIndex GetTypeIndex() const { return type_index_; } 3922 const DexFile& GetDexFile() const { return dex_file_; } 3923 3924 // Calls runtime so needs an environment. 3925 bool NeedsEnvironment() const OVERRIDE { return true; } 3926 3927 // Can throw errors when out-of-memory or if it's not instantiable/accessible. 
3928 bool CanThrow() const OVERRIDE { return true; } 3929 3930 bool NeedsChecks() const { 3931 return entrypoint_ == kQuickAllocObjectWithChecks; 3932 } 3933 3934 bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); } 3935 3936 bool CanBeNull() const OVERRIDE { return false; } 3937 3938 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; } 3939 3940 void SetEntrypoint(QuickEntrypointEnum entrypoint) { 3941 entrypoint_ = entrypoint; 3942 } 3943 3944 HLoadClass* GetLoadClass() const { 3945 HInstruction* input = InputAt(0); 3946 if (input->IsClinitCheck()) { 3947 input = input->InputAt(0); 3948 } 3949 DCHECK(input->IsLoadClass()); 3950 return input->AsLoadClass(); 3951 } 3952 3953 bool IsStringAlloc() const; 3954 3955 DECLARE_INSTRUCTION(NewInstance); 3956 3957 private: 3958 static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits; 3959 static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1; 3960 static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits, 3961 "Too many packed fields."); 3962 3963 const dex::TypeIndex type_index_; 3964 const DexFile& dex_file_; 3965 QuickEntrypointEnum entrypoint_; 3966 3967 DISALLOW_COPY_AND_ASSIGN(HNewInstance); 3968 }; 3969 3970 enum IntrinsicNeedsEnvironmentOrCache { 3971 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache. 3972 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache. 3973 }; 3974 3975 enum IntrinsicSideEffects { 3976 kNoSideEffects, // Intrinsic does not have any heap memory side effects. 3977 kReadSideEffects, // Intrinsic may read heap memory. 3978 kWriteSideEffects, // Intrinsic may write heap memory. 3979 kAllSideEffects // Intrinsic may read or write heap memory, or trigger GC. 3980 }; 3981 3982 enum IntrinsicExceptions { 3983 kNoThrow, // Intrinsic does not throw any exceptions. 3984 kCanThrow // Intrinsic may throw exceptions. 
3985 }; 3986 3987 class HInvoke : public HVariableInputSizeInstruction { 3988 public: 3989 bool NeedsEnvironment() const OVERRIDE; 3990 3991 void SetArgumentAt(size_t index, HInstruction* argument) { 3992 SetRawInputAt(index, argument); 3993 } 3994 3995 // Return the number of arguments. This number can be lower than 3996 // the number of inputs returned by InputCount(), as some invoke 3997 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument 3998 // inputs at the end of their list of inputs. 3999 uint32_t GetNumberOfArguments() const { return number_of_arguments_; } 4000 4001 Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); } 4002 4003 uint32_t GetDexMethodIndex() const { return dex_method_index_; } 4004 4005 InvokeType GetInvokeType() const { 4006 return GetPackedField<InvokeTypeField>(); 4007 } 4008 4009 Intrinsics GetIntrinsic() const { 4010 return intrinsic_; 4011 } 4012 4013 void SetIntrinsic(Intrinsics intrinsic, 4014 IntrinsicNeedsEnvironmentOrCache needs_env_or_cache, 4015 IntrinsicSideEffects side_effects, 4016 IntrinsicExceptions exceptions); 4017 4018 bool IsFromInlinedInvoke() const { 4019 return GetEnvironment()->IsFromInlinedInvoke(); 4020 } 4021 4022 void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); } 4023 4024 bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); } 4025 4026 bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); } 4027 4028 bool InstructionDataEquals(const HInstruction* other) const OVERRIDE { 4029 return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_; 4030 } 4031 4032 uint32_t* GetIntrinsicOptimizations() { 4033 return &intrinsic_optimizations_; 4034 } 4035 4036 const uint32_t* GetIntrinsicOptimizations() const { 4037 return &intrinsic_optimizations_; 4038 } 4039 4040 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; } 4041 4042 ArtMethod* GetResolvedMethod() const { 
return resolved_method_; } 4043 void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; } 4044 4045 DECLARE_ABSTRACT_INSTRUCTION(Invoke); 4046 4047 protected: 4048 static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits; 4049 static constexpr size_t kFieldInvokeTypeSize = 4050 MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType)); 4051 static constexpr size_t kFieldReturnType = 4052 kFieldInvokeType + kFieldInvokeTypeSize; 4053 static constexpr size_t kFieldReturnTypeSize = 4054 MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast)); 4055 static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize; 4056 static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1; 4057 static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields."); 4058 using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>; 4059 using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>; 4060 4061 HInvoke(ArenaAllocator* arena, 4062 uint32_t number_of_arguments, 4063 uint32_t number_of_other_inputs, 4064 Primitive::Type return_type, 4065 uint32_t dex_pc, 4066 uint32_t dex_method_index, 4067 ArtMethod* resolved_method, 4068 InvokeType invoke_type) 4069 : HVariableInputSizeInstruction( 4070 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays. 
4071 dex_pc, 4072 arena, 4073 number_of_arguments + number_of_other_inputs, 4074 kArenaAllocInvokeInputs), 4075 number_of_arguments_(number_of_arguments), 4076 resolved_method_(resolved_method), 4077 dex_method_index_(dex_method_index), 4078 intrinsic_(Intrinsics::kNone), 4079 intrinsic_optimizations_(0) { 4080 SetPackedField<ReturnTypeField>(return_type); 4081 SetPackedField<InvokeTypeField>(invoke_type); 4082 SetPackedFlag<kFlagCanThrow>(true); 4083 } 4084 4085 uint32_t number_of_arguments_; 4086 ArtMethod* resolved_method_; 4087 const uint32_t dex_method_index_; 4088 Intrinsics intrinsic_; 4089 4090 // A magic word holding optimizations for intrinsics. See intrinsics.h. 4091 uint32_t intrinsic_optimizations_; 4092 4093 private: 4094 DISALLOW_COPY_AND_ASSIGN(HInvoke); 4095 }; 4096 4097 class HInvokeUnresolved FINAL : public HInvoke { 4098 public: 4099 HInvokeUnresolved(ArenaAllocator* arena, 4100 uint32_t number_of_arguments, 4101 Primitive::Type return_type, 4102 uint32_t dex_pc, 4103 uint32_t dex_method_index, 4104 InvokeType invoke_type) 4105 : HInvoke(arena, 4106 number_of_arguments, 4107 0u /* number_of_other_inputs */, 4108 return_type, 4109 dex_pc, 4110 dex_method_index, 4111 nullptr, 4112 invoke_type) { 4113 } 4114 4115 DECLARE_INSTRUCTION(InvokeUnresolved); 4116 4117 private: 4118 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved); 4119 }; 4120 4121 class HInvokePolymorphic FINAL : public HInvoke { 4122 public: 4123 HInvokePolymorphic(ArenaAllocator* arena, 4124 uint32_t number_of_arguments, 4125 Primitive::Type return_type, 4126 uint32_t dex_pc, 4127 uint32_t dex_method_index) 4128 : HInvoke(arena, 4129 number_of_arguments, 4130 0u /* number_of_other_inputs */, 4131 return_type, 4132 dex_pc, 4133 dex_method_index, 4134 nullptr, 4135 kVirtual) {} 4136 4137 DECLARE_INSTRUCTION(InvokePolymorphic); 4138 4139 private: 4140 DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic); 4141 }; 4142 4143 class HInvokeStaticOrDirect FINAL : public HInvoke { 4144 public: 4145 // 
Requirements of this method call regarding the class 4146 // initialization (clinit) check of its declaring class. 4147 enum class ClinitCheckRequirement { 4148 kNone, // Class already initialized. 4149 kExplicit, // Static call having explicit clinit check as last input. 4150 kImplicit, // Static call implicitly requiring a clinit check. 4151 kLast = kImplicit 4152 }; 4153 4154 // Determines how to load the target ArtMethod*. 4155 enum class MethodLoadKind { 4156 // Use a String init ArtMethod* loaded from Thread entrypoints. 4157 kStringInit, 4158 4159 // Use the method's own ArtMethod* loaded by the register allocator. 4160 kRecursive, 4161 4162 // Use PC-relative boot image ArtMethod* address that will be known at link time. 4163 // Used for boot image methods referenced by boot image code. 4164 kBootImageLinkTimePcRelative, 4165 4166 // Use ArtMethod* at a known address, embed the direct address in the code. 4167 // Used for app->boot calls with non-relocatable image and for JIT-compiled calls. 4168 kDirectAddress, 4169 4170 // Load from an entry in the .bss section using a PC-relative load. 4171 // Used for classes outside boot image when .bss is accessible with a PC-relative load. 4172 kBssEntry, 4173 4174 // Make a runtime call to resolve and call the method. This is the last-resort-kind 4175 // used when other kinds are unimplemented on a particular architecture. 4176 kRuntimeCall, 4177 }; 4178 4179 // Determines the location of the code pointer. 4180 enum class CodePtrLocation { 4181 // Recursive call, use local PC-relative call instruction. 4182 kCallSelf, 4183 4184 // Use code pointer from the ArtMethod*. 4185 // Used when we don't know the target code. This is also the last-resort-kind used when 4186 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture. 
4187 kCallArtMethod, 4188 }; 4189 4190 struct DispatchInfo { 4191 MethodLoadKind method_load_kind; 4192 CodePtrLocation code_ptr_location; 4193 // The method load data holds 4194 // - thread entrypoint offset for kStringInit method if this is a string init invoke. 4195 // Note that there are multiple string init methods, each having its own offset. 4196 // - the method address for kDirectAddress 4197 uint64_t method_load_data; 4198 }; 4199 4200 HInvokeStaticOrDirect(ArenaAllocator* arena, 4201 uint32_t number_of_arguments, 4202 Primitive::Type return_type, 4203 uint32_t dex_pc, 4204 uint32_t method_index, 4205 ArtMethod* resolved_method, 4206 DispatchInfo dispatch_info, 4207 InvokeType invoke_type, 4208 MethodReference target_method, 4209 ClinitCheckRequirement clinit_check_requirement) 4210 : HInvoke(arena, 4211 number_of_arguments, 4212 // There is potentially one extra argument for the HCurrentMethod node, and 4213 // potentially one other if the clinit check is explicit, and potentially 4214 // one other if the method is a string factory. 4215 (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) + 4216 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u), 4217 return_type, 4218 dex_pc, 4219 method_index, 4220 resolved_method, 4221 invoke_type), 4222 target_method_(target_method), 4223 dispatch_info_(dispatch_info) { 4224 SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement); 4225 } 4226 4227 void SetDispatchInfo(const DispatchInfo& dispatch_info) { 4228 bool had_current_method_input = HasCurrentMethodInput(); 4229 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind); 4230 4231 // Using the current method is the default and once we find a better 4232 // method load kind, we should not go back to using the current method. 
4233 DCHECK(had_current_method_input || !needs_current_method_input); 4234 4235 if (had_current_method_input && !needs_current_method_input) { 4236 DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod()); 4237 RemoveInputAt(GetSpecialInputIndex()); 4238 } 4239 dispatch_info_ = dispatch_info; 4240 } 4241 4242 DispatchInfo GetDispatchInfo() const { 4243 return dispatch_info_; 4244 } 4245 4246 void AddSpecialInput(HInstruction* input) { 4247 // We allow only one special input. 4248 DCHECK(!IsStringInit() && !HasCurrentMethodInput()); 4249 DCHECK(InputCount() == GetSpecialInputIndex() || 4250 (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck())); 4251 InsertInputAt(GetSpecialInputIndex(), input); 4252 } 4253 4254 using HInstruction::GetInputRecords; // Keep the const version visible. 4255 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE { 4256 ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords(); 4257 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) { 4258 DCHECK(!input_records.empty()); 4259 DCHECK_GT(input_records.size(), GetNumberOfArguments()); 4260 HInstruction* last_input = input_records.back().GetInstruction(); 4261 // Note: `last_input` may be null during arguments setup. 4262 if (last_input != nullptr) { 4263 // `last_input` is the last input of a static invoke marked as having 4264 // an explicit clinit check. It must either be: 4265 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or 4266 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation. 4267 DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName(); 4268 } 4269 } 4270 return input_records; 4271 } 4272 4273 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE { 4274 // We access the method via the dex cache so we can't do an implicit null check. 
    // TODO: for intrinsics we can generate implicit null checks.
    return false;
  }

  bool CanBeNull() const OVERRIDE {
    // Only reference-typed results can be null, and a StringFactory call
    // (string-init) never returns null.
    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
  }

  // Get the index of the special input, if any.
  //
  // If the invoke HasCurrentMethodInput(), the "special input" is the current
  // method pointer; otherwise there may be one platform-specific special input,
  // such as PC-relative addressing base.
  uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
  // True when an input exists beyond the declared arguments.
  bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

  MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
  CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
  bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
  bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
  bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
  bool HasPcRelativeMethodLoadKind() const {
    return GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative ||
           GetMethodLoadKind() == MethodLoadKind::kBssEntry;
  }
  bool HasCurrentMethodInput() const {
    // This function can be called only after the invoke has been fully initialized by the builder.
    if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
      DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return true;
    } else {
      DCHECK(InputCount() == GetSpecialInputIndex() ||
             !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
      return false;
    }
  }

  QuickEntrypointEnum GetStringInitEntryPoint() const {
    // For string-init calls, method_load_data holds the entrypoint enum value.
    DCHECK(IsStringInit());
    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
  }

  uint64_t GetMethodAddress() const {
    // For direct-address calls, method_load_data holds the method address.
    DCHECK(HasMethodAddress());
    return dispatch_info_.method_load_data;
  }

  const DexFile& GetDexFileForPcRelativeDexCache() const;

  ClinitCheckRequirement GetClinitCheckRequirement() const {
    return GetPackedField<ClinitCheckRequirementField>();
  }

  // Is this instruction a call to a static method?
  bool IsStatic() const {
    return GetInvokeType() == kStatic;
  }

  MethodReference GetTargetMethod() const {
    return target_method_;
  }

  // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
  // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
  // instruction; only relevant for static calls with explicit clinit check.
  void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
    DCHECK(IsStaticWithExplicitClinitCheck());
    size_t last_input_index = inputs_.size() - 1u;
    HInstruction* last_input = inputs_.back().GetInstruction();
    DCHECK(last_input != nullptr);
    DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
    RemoveAsUserOfInput(last_input_index);
    inputs_.pop_back();
    SetPackedField<ClinitCheckRequirementField>(new_requirement);
    DCHECK(!IsStaticWithExplicitClinitCheck());
  }

  // Is this a call to a static method whose declaring class has an
  // explicit initialization check in the graph?
  bool IsStaticWithExplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
  }

  // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
  bool IsStaticWithImplicitClinitCheck() const {
    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
  }

  // Does this method load kind need the current method as an input?
  static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
    return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kRuntimeCall;
  }

  DECLARE_INSTRUCTION(InvokeStaticOrDirect);

 private:
  // Packed field layout: the clinit-check requirement is stored after the
  // bits used by the HInvoke base class.
  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
  static constexpr size_t kFieldClinitCheckRequirementSize =
      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                               kFieldClinitCheckRequirement,
                                               kFieldClinitCheckRequirementSize>;

  // Cached values of the resolved method, to avoid needing the mutator lock.
  MethodReference target_method_;
  DispatchInfo dispatch_info_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);

// Invocation of a virtual method, dispatched through the receiver's vtable
// at the cached vtable index.
class HInvokeVirtual FINAL : public HInvoke {
 public:
  HInvokeVirtual(ArenaAllocator* arena,
                 uint32_t number_of_arguments,
                 Primitive::Type return_type,
                 uint32_t dex_pc,
                 uint32_t dex_method_index,
                 ArtMethod* resolved_method,
                 uint32_t vtable_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kVirtual),
        vtable_index_(vtable_index) {}

  bool CanBeNull() const OVERRIDE {
    // These intrinsics are known to never return null.
    switch (GetIntrinsic()) {
      case Intrinsics::kThreadCurrentThread:
      case Intrinsics::kStringBufferAppend:
      case Intrinsics::kStringBufferToString:
      case Intrinsics::kStringBuilderAppend:
      case Intrinsics::kStringBuilderToString:
        return false;
      default:
        return HInvoke::CanBeNull();
    }
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  uint32_t GetVTableIndex() const { return vtable_index_; }

  DECLARE_INSTRUCTION(InvokeVirtual);

 private:
  // Cached value of the resolved method, to avoid needing the mutator lock.
  const uint32_t vtable_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
};

// Invocation of an interface method, dispatched via the cached IMT index.
class HInvokeInterface FINAL : public HInvoke {
 public:
  HInvokeInterface(ArenaAllocator* arena,
                   uint32_t number_of_arguments,
                   Primitive::Type return_type,
                   uint32_t dex_pc,
                   uint32_t dex_method_index,
                   ArtMethod* resolved_method,
                   uint32_t imt_index)
      : HInvoke(arena,
                number_of_arguments,
                0u,
                return_type,
                dex_pc,
                dex_method_index,
                resolved_method,
                kInterface),
        imt_index_(imt_index) {}

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    // TODO: Add implicit null checks in intrinsics.
    return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
  }

  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
    // The assembly stub currently needs it.
    return true;
  }

  uint32_t GetImtIndex() const { return imt_index_; }
  uint32_t GetDexMethodIndex() const { return dex_method_index_; }

  DECLARE_INSTRUCTION(InvokeInterface);

 private:
  // Cached value of the resolved method, to avoid needing the mutator lock.
  const uint32_t imt_index_;

  DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};

// Arithmetic negation (-x) of a single input of the same primitive kind.
class HNeg FINAL : public HUnaryOperation {
 public:
  HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
  }

  template <typename T> static T Compute(T x) { return -x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Neg);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNeg);
};

// Allocation of a new array. Input 0 is the HLoadClass of the array class,
// input 1 is the length; the result is never null.
class HNewArray FINAL : public HExpression<2> {
 public:
  HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc)
      : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cls);
    SetRawInputAt(1, length);
  }

  // Calls runtime so needs an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  // May throw NegativeArraySizeException, OutOfMemoryError, etc.
  bool CanThrow() const OVERRIDE { return true; }

  bool CanBeNull() const OVERRIDE { return false; }

  HLoadClass* GetLoadClass() const {
    DCHECK(InputAt(0)->IsLoadClass());
    return InputAt(0)->AsLoadClass();
  }

  HInstruction* GetLength() const {
    return InputAt(1);
  }

  DECLARE_INSTRUCTION(NewArray);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNewArray);
};

// Addition (left + right). Commutative; constant-folds for all numeric types.
class HAdd FINAL : public HBinaryOperation {
 public:
  HAdd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x + y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Add);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAdd);
};

// Subtraction (left - right).
class HSub FINAL : public HBinaryOperation {
 public:
  HSub(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(),
                         dex_pc) {}

  template <typename T> static T Compute(T x, T y) { return x - y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Sub);

 private:
  DISALLOW_COPY_AND_ASSIGN(HSub);
};

// Multiplication (left * right). Commutative.
class HMul FINAL : public HBinaryOperation {
 public:
  HMul(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x * y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Mul);

 private:
  DISALLOW_COPY_AND_ASSIGN(HMul);
};

// Division (left / right). Integral constant folding assumes a non-zero
// divisor (guarded by HDivZeroCheck in the graph).
class HDiv FINAL : public HBinaryOperation {
 public:
  HDiv(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? -x : x / y;
  }

  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return x / y;
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Div);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDiv);
};

// Remainder (left % right); fmod for floating-point types.
class
HRem FINAL : public HBinaryOperation {
 public:
  HRem(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  template <typename T>
  T ComputeIntegral(T x, T y) const {
    DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
    // Our graph structure ensures we never have 0 for `y` during
    // constant folding.
    DCHECK_NE(y, 0);
    // Special case -1 to avoid getting a SIGFPE on x86(_64).
    return (y == -1) ? 0 : x % y;
  }

  template <typename T>
  T ComputeFP(T x, T y) const {
    DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
    return std::fmod(x, y);
  }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetFloatConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetDoubleConstant(
        ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
  }

  DECLARE_INSTRUCTION(Rem);

 private:
  DISALLOW_COPY_AND_ASSIGN(HRem);
};

// Checks that its input is not zero; throws on zero.
class HDivZeroCheck FINAL : public HExpression<1> {
 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
  // constructor.
  HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
      : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, value);
  }

  Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }

  DECLARE_INSTRUCTION(DivZeroCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
};

// Left shift; the shift distance is masked with `max_shift_distance` before use.
class HShl FINAL : public HBinaryOperation {
 public:
  HShl(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value << (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shl);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShl);
};

// Arithmetic (sign-extending) right shift; the distance is masked before use.
class HShr FINAL : public HBinaryOperation {
 public:
  HShr(Primitive::Type result_type,
       HInstruction* value,
       HInstruction* distance,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    return value >> (distance & max_shift_distance);
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance
                          ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Shr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HShr);
};

// Logical (zero-extending) right shift, computed on the unsigned
// representation of the value; the distance is masked before use.
class HUShr FINAL : public HBinaryOperation {
 public:
  HUShr(Primitive::Type result_type,
        HInstruction* value,
        HInstruction* distance,
        uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
    DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
  }

  template <typename T>
  static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
    typedef typename std::make_unsigned<T>::type V;
    V ux = static_cast<V>(value);
    return static_cast<T>(ux >> (distance & max_shift_distance));
  }

  HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
                      HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
                      HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
                      HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(UShr);

 private:
  DISALLOW_COPY_AND_ASSIGN(HUShr);
};

// Bitwise AND. Commutative; only defined for integral types.
class HAnd FINAL : public HBinaryOperation {
 public:
  HAnd(Primitive::Type result_type,
       HInstruction* left,
       HInstruction* right,
       uint32_t dex_pc = kNoDexPc)
      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}

  bool IsCommutative() const OVERRIDE { return true; }

  template <typename T> static T Compute(T x, T y) { return x & y; }

  HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(
        Compute(x->GetValue(), y->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
                      HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
                      HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(And);

 private:
  DISALLOW_COPY_AND_ASSIGN(HAnd);
};

// Bitwise OR. Commutative; only defined for integral types.
class HOr FINAL : public HBinaryOperation {
 public:
  HOr(Primitive::Type result_type,
      HInstruction* left,
      HInstruction* right,
      uint32_t dex_pc = kNoDexPc)
      :
HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4948 4949 bool IsCommutative() const OVERRIDE { return true; } 4950 4951 template <typename T> static T Compute(T x, T y) { return x | y; } 4952 4953 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4954 return GetBlock()->GetGraph()->GetIntConstant( 4955 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4956 } 4957 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4958 return GetBlock()->GetGraph()->GetLongConstant( 4959 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4960 } 4961 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, 4962 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 4963 LOG(FATAL) << DebugName() << " is not defined for float values"; 4964 UNREACHABLE(); 4965 } 4966 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, 4967 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 4968 LOG(FATAL) << DebugName() << " is not defined for double values"; 4969 UNREACHABLE(); 4970 } 4971 4972 DECLARE_INSTRUCTION(Or); 4973 4974 private: 4975 DISALLOW_COPY_AND_ASSIGN(HOr); 4976 }; 4977 4978 class HXor FINAL : public HBinaryOperation { 4979 public: 4980 HXor(Primitive::Type result_type, 4981 HInstruction* left, 4982 HInstruction* right, 4983 uint32_t dex_pc = kNoDexPc) 4984 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {} 4985 4986 bool IsCommutative() const OVERRIDE { return true; } 4987 4988 template <typename T> static T Compute(T x, T y) { return x ^ y; } 4989 4990 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE { 4991 return GetBlock()->GetGraph()->GetIntConstant( 4992 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4993 } 4994 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE { 4995 return GetBlock()->GetGraph()->GetLongConstant( 4996 Compute(x->GetValue(), y->GetValue()), GetDexPc()); 4997 } 4998 HConstant* Evaluate(HFloatConstant* x 
ATTRIBUTE_UNUSED, 4999 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 5000 LOG(FATAL) << DebugName() << " is not defined for float values"; 5001 UNREACHABLE(); 5002 } 5003 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, 5004 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE { 5005 LOG(FATAL) << DebugName() << " is not defined for double values"; 5006 UNREACHABLE(); 5007 } 5008 5009 DECLARE_INSTRUCTION(Xor); 5010 5011 private: 5012 DISALLOW_COPY_AND_ASSIGN(HXor); 5013 }; 5014 5015 class HRor FINAL : public HBinaryOperation { 5016 public: 5017 HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance) 5018 : HBinaryOperation(result_type, value, distance) { 5019 DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType())); 5020 DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType())); 5021 } 5022 5023 template <typename T> 5024 static T Compute(T value, int32_t distance, int32_t max_shift_value) { 5025 typedef typename std::make_unsigned<T>::type V; 5026 V ux = static_cast<V>(value); 5027 if ((distance & max_shift_value) == 0) { 5028 return static_cast<T>(ux); 5029 } else { 5030 const V reg_bits = sizeof(T) * 8; 5031 return static_cast<T>(ux >> (distance & max_shift_value)) | 5032 (value << (reg_bits - (distance & max_shift_value))); 5033 } 5034 } 5035 5036 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE { 5037 return GetBlock()->GetGraph()->GetIntConstant( 5038 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc()); 5039 } 5040 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE { 5041 return GetBlock()->GetGraph()->GetLongConstant( 5042 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc()); 5043 } 5044 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED, 5045 HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE { 5046 LOG(FATAL) << DebugName() << " is not defined for the 
(long, long) case."; 5047 UNREACHABLE(); 5048 } 5049 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED, 5050 HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE { 5051 LOG(FATAL) << DebugName() << " is not defined for float values"; 5052 UNREACHABLE(); 5053 } 5054 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED, 5055 HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE { 5056 LOG(FATAL) << DebugName() << " is not defined for double values"; 5057 UNREACHABLE(); 5058 } 5059 5060 DECLARE_INSTRUCTION(Ror); 5061 5062 private: 5063 DISALLOW_COPY_AND_ASSIGN(HRor); 5064 }; 5065 5066 // The value of a parameter in this method. Its location depends on 5067 // the calling convention. 5068 class HParameterValue FINAL : public HExpression<0> { 5069 public: 5070 HParameterValue(const DexFile& dex_file, 5071 dex::TypeIndex type_index, 5072 uint8_t index, 5073 Primitive::Type parameter_type, 5074 bool is_this = false) 5075 : HExpression(parameter_type, SideEffects::None(), kNoDexPc), 5076 dex_file_(dex_file), 5077 type_index_(type_index), 5078 index_(index) { 5079 SetPackedFlag<kFlagIsThis>(is_this); 5080 SetPackedFlag<kFlagCanBeNull>(!is_this); 5081 } 5082 5083 const DexFile& GetDexFile() const { return dex_file_; } 5084 dex::TypeIndex GetTypeIndex() const { return type_index_; } 5085 uint8_t GetIndex() const { return index_; } 5086 bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); } 5087 5088 bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); } 5089 void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); } 5090 5091 DECLARE_INSTRUCTION(ParameterValue); 5092 5093 private: 5094 // Whether or not the parameter value corresponds to 'this' argument. 
  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const DexFile& dex_file_;
  const dex::TypeIndex type_index_;
  // The index of this parameter in the parameters list. Must be less
  // than HGraph::number_of_in_vregs_.
  const uint8_t index_;

  DISALLOW_COPY_AND_ASSIGN(HParameterValue);
};

// Bitwise complement (~x); only defined for integral types.
class HNot FINAL : public HUnaryOperation {
 public:
  HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(result_type, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> static T Compute(T x) { return ~x; }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(Not);

 private:
  DISALLOW_COPY_AND_ASSIGN(HNot);
};

// Logical negation of a boolean (0/1) input.
class HBooleanNot FINAL : public HUnaryOperation {
 public:
  explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  template <typename T> static bool Compute(T x) {
    // The input must already be a boolean value (0 or 1).
    DCHECK(IsUint<1>(x)) << x;
    return !x;
  }

  HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
    return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
  }
  HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for long values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for float values";
    UNREACHABLE();
  }
  HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << DebugName() << " is not defined for double values";
    UNREACHABLE();
  }

  DECLARE_INSTRUCTION(BooleanNot);

 private:
  DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
};

// Conversion of the single input to `result_type`.
class HTypeConversion FINAL : public HExpression<1> {
 public:
  // Instantiate a type conversion of `input` to `result_type`.
  HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
      : HExpression(result_type, SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
    // Invariant: We should never generate a conversion to a Boolean value.
5187 DCHECK_NE(Primitive::kPrimBoolean, result_type); 5188 } 5189 5190 HInstruction* GetInput() const { return InputAt(0); } 5191 Primitive::Type GetInputType() const { return GetInput()->GetType(); } 5192 Primitive::Type GetResultType() const { return GetType(); } 5193 5194 bool CanBeMoved() const OVERRIDE { return true; } 5195 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5196 return true; 5197 } 5198 5199 // Try to statically evaluate the conversion and return a HConstant 5200 // containing the result. If the input cannot be converted, return nullptr. 5201 HConstant* TryStaticEvaluation() const; 5202 5203 DECLARE_INSTRUCTION(TypeConversion); 5204 5205 private: 5206 DISALLOW_COPY_AND_ASSIGN(HTypeConversion); 5207 }; 5208 5209 static constexpr uint32_t kNoRegNumber = -1; 5210 5211 class HNullCheck FINAL : public HExpression<1> { 5212 public: 5213 // `HNullCheck` can trigger GC, as it may call the `NullPointerException` 5214 // constructor. 5215 HNullCheck(HInstruction* value, uint32_t dex_pc) 5216 : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) { 5217 SetRawInputAt(0, value); 5218 } 5219 5220 bool CanBeMoved() const OVERRIDE { return true; } 5221 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 5222 return true; 5223 } 5224 5225 bool NeedsEnvironment() const OVERRIDE { return true; } 5226 5227 bool CanThrow() const OVERRIDE { return true; } 5228 5229 bool CanBeNull() const OVERRIDE { return false; } 5230 5231 5232 DECLARE_INSTRUCTION(NullCheck); 5233 5234 private: 5235 DISALLOW_COPY_AND_ASSIGN(HNullCheck); 5236 }; 5237 5238 // Embeds an ArtField and all the information required by the compiler. We cache 5239 // that information to avoid requiring the mutator lock every time we need it. 
class FieldInfo : public ValueObject {
 public:
  FieldInfo(ArtField* field,
            MemberOffset field_offset,
            Primitive::Type field_type,
            bool is_volatile,
            uint32_t index,
            uint16_t declaring_class_def_index,
            const DexFile& dex_file)
      : field_(field),
        field_offset_(field_offset),
        field_type_(field_type),
        is_volatile_(is_volatile),
        index_(index),
        declaring_class_def_index_(declaring_class_def_index),
        dex_file_(dex_file) {}

  ArtField* GetField() const { return field_; }
  MemberOffset GetFieldOffset() const { return field_offset_; }
  Primitive::Type GetFieldType() const { return field_type_; }
  uint32_t GetFieldIndex() const { return index_; }
  uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
  const DexFile& GetDexFile() const { return dex_file_; }
  bool IsVolatile() const { return is_volatile_; }

 private:
  ArtField* const field_;
  const MemberOffset field_offset_;
  const Primitive::Type field_type_;
  const bool is_volatile_;
  // Field index, exposed via GetFieldIndex(); presumably the dex field index
  // in dex_file_ — confirm against the callers that construct FieldInfo.
  const uint32_t index_;
  const uint16_t declaring_class_def_index_;
  const DexFile& dex_file_;
};

class HInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HInstanceFieldGet(HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, value);
  }

  // Volatile reads must stay in place to preserve their ordering semantics.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  // Equality for GVN purposes is keyed on the field offset only.
  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(InstanceFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
};

class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  // Inputs: 0 = object written to, 1 = value stored.
  HInstanceFieldSet(HInstruction* object,
                    HInstruction* value,
                    ArtField* field,
                    Primitive::Type field_type,
                    MemberOffset field_offset,
                    bool is_volatile,
                    uint32_t field_idx,
                    uint16_t declaring_class_def_index,
                    const DexFile& dex_file,
                    uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    // Conservatively assume the stored value may be null; analyses may clear it.
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, value);
  }

  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }
  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(InstanceFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
};

class HArrayGet FINAL : public HExpression<2> {
 public:
  // Inputs: 0 = array reference, 1 = index.
  HArrayGet(HInstruction* array,
            HInstruction* index,
            Primitive::Type type,
            uint32_t dex_pc,
            bool is_string_char_at = false)
      : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
    SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: We can be smarter here.
    // Currently, unless the array is the result of NewArray, the array access is always
    // preceded by some form of null check necessary for the bounds check, usually
    // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
    // dynamic BCE. There are cases when these could be removed to produce better code.
    // If we ever add optimizations to do so we should allow an implicit check here
    // (as long as the address falls in the first page).
    //
    // As an example of such fancy optimization, we could eliminate BoundsCheck for
    //     a = cond ? new int[1] : null;
    //     a[0];  // The Phi does not need bounds check for either input.
    return false;
  }

  // True if `other` is the int/long vs float/double "equivalent" of this get,
  // i.e. the same access at the same dex pc with the reinterpreted type.
  bool IsEquivalentOf(HArrayGet* other) const {
    bool result = (GetDexPc() == other->GetDexPc());
    if (kIsDebugBuild && result) {
      DCHECK_EQ(GetBlock(), other->GetBlock());
      DCHECK_EQ(GetArray(), other->GetArray());
      DCHECK_EQ(GetIndex(), other->GetIndex());
      if (Primitive::IsIntOrLongType(GetType())) {
        DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
      } else {
        DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
        DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
      }
    }
    return result;
  }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }

  DECLARE_INSTRUCTION(ArrayGet);

 private:
  // We treat a String as an array, creating the HArrayGet from String.charAt()
  // intrinsic in the instruction simplifier. We can always determine whether
  // a particular HArrayGet is actually a String.charAt() by looking at the type
  // of the input but that requires holding the mutator lock, so we prefer to use
  // a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
  static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayGet);
};

class HArraySet FINAL : public HTemplateInstruction<3> {
 public:
  // Inputs: 0 = array reference, 1 = index, 2 = value stored.
  HArraySet(HInstruction* array,
            HInstruction* index,
            HInstruction* value,
            Primitive::Type expected_component_type,
            uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
    // Storing a reference may require a runtime type check (ArrayStoreException).
    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
    SetRawInputAt(0, array);
    SetRawInputAt(1, index);
    SetRawInputAt(2, value);
    // Make a best guess now, may be refined during SSA building.
    ComputeSideEffects();
  }

  bool NeedsEnvironment() const OVERRIDE {
    // We call a runtime method to throw ArrayStoreException.
    return NeedsTypeCheck();
  }

  // Can throw ArrayStoreException.
  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

  bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
    // TODO: Same as for ArrayGet.
    return false;
  }

  void ClearNeedsTypeCheck() {
    SetPackedFlag<kFlagNeedsTypeCheck>(false);
  }

  void ClearValueCanBeNull() {
    SetPackedFlag<kFlagValueCanBeNull>(false);
  }

  void SetStaticTypeOfArrayIsObjectArray() {
    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
  }

  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
  bool StaticTypeOfArrayIsObjectArray() const {
    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
  }

  HInstruction* GetArray() const { return InputAt(0); }
  HInstruction* GetIndex() const { return InputAt(1); }
  HInstruction* GetValue() const { return InputAt(2); }

  Primitive::Type GetComponentType() const {
    // The Dex format does not type floating point index operations. Since the
    // `expected_component_type_` is set during building and can therefore not
    // be correct, we also check what is the value type. If it is a floating
    // point type, we must use that type.
    Primitive::Type value_type = GetValue()->GetType();
    return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
        ? value_type
        : GetRawExpectedComponentType();
  }

  Primitive::Type GetRawExpectedComponentType() const {
    return GetPackedField<ExpectedComponentTypeField>();
  }

  // Recompute side effects from the (possibly refined) component type.
  void ComputeSideEffects() {
    Primitive::Type type = GetComponentType();
    SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
        SideEffectsForArchRuntimeCalls(type)));
  }

  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
    return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC()
                                               : SideEffects::None();
  }

  DECLARE_INSTRUCTION(ArraySet);

 private:
  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldExpectedComponentTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagNeedsTypeCheck =
      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
  // Cached information for the reference_type_info_ so that codegen
  // does not need to inspect the static type.
  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
  static constexpr size_t kNumberOfArraySetPackedBits =
      kFlagStaticTypeOfArrayIsObjectArray + 1;
  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using ExpectedComponentTypeField =
      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

  DISALLOW_COPY_AND_ASSIGN(HArraySet);
};

class HArrayLength FINAL : public HExpression<1> {
 public:
  HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
      : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
    SetPackedFlag<kFlagIsStringLength>(is_string_length);
    // Note that arrays do not change length, so the instruction does not
    // depend on any write.
    SetRawInputAt(0, array);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }
  bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
    return obj == InputAt(0);
  }

  bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }

  DECLARE_INSTRUCTION(ArrayLength);

 private:
  // We treat a String as an array, creating the HArrayLength from String.length()
  // or String.isEmpty() intrinsic in the instruction simplifier. We can always
  // determine whether a particular HArrayLength is actually a String.length() by
  // looking at the type of the input but that requires holding the mutator lock, so
  // we prefer to use a flag, so that code generators don't need to do the locking.
  static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
  static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
  static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");

  DISALLOW_COPY_AND_ASSIGN(HArrayLength);
};

class HBoundsCheck FINAL : public HExpression<2> {
 public:
  // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
  // constructor. Inputs: 0 = index, 1 = length.
  HBoundsCheck(HInstruction* index,
               HInstruction* length,
               uint32_t dex_pc,
               bool string_char_at = false)
      : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
    DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
    SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
    SetRawInputAt(0, index);
    SetRawInputAt(1, length);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE { return true; }

  bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }

  HInstruction* GetIndex() const { return InputAt(0); }

  DECLARE_INSTRUCTION(BoundsCheck);

 private:
  static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;

  DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};

class HSuspendCheck FINAL : public HTemplateInstruction<0> {
 public:
  explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}

  bool NeedsEnvironment() const OVERRIDE {
    return true;
  }

  void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
  SlowPathCode* GetSlowPath() const { return slow_path_; }

  DECLARE_INSTRUCTION(SuspendCheck);

 private:
  // Only used for code generation, in order to share the same slow path between back edges
  // of a same loop.
  SlowPathCode* slow_path_;

  DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
};

// Pseudo-instruction which provides the native debugger with mapping information.
// It ensures that we can generate line number and local variables at this point.
5640 class HNativeDebugInfo : public HTemplateInstruction<0> { 5641 public: 5642 explicit HNativeDebugInfo(uint32_t dex_pc) 5643 : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {} 5644 5645 bool NeedsEnvironment() const OVERRIDE { 5646 return true; 5647 } 5648 5649 DECLARE_INSTRUCTION(NativeDebugInfo); 5650 5651 private: 5652 DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo); 5653 }; 5654 5655 /** 5656 * Instruction to load a Class object. 5657 */ 5658 class HLoadClass FINAL : public HInstruction { 5659 public: 5660 // Determines how to load the Class. 5661 enum class LoadKind { 5662 // We cannot load this class. See HSharpening::SharpenLoadClass. 5663 kInvalid = -1, 5664 5665 // Use the Class* from the method's own ArtMethod*. 5666 kReferrersClass, 5667 5668 // Use PC-relative boot image Class* address that will be known at link time. 5669 // Used for boot image classes referenced by boot image code. 5670 kBootImageLinkTimePcRelative, 5671 5672 // Use a known boot image Class* address, embedded in the code by the codegen. 5673 // Used for boot image classes referenced by apps in AOT- and JIT-compiled code. 5674 kBootImageAddress, 5675 5676 // Load from an entry in the .bss section using a PC-relative load. 5677 // Used for classes outside boot image when .bss is accessible with a PC-relative load. 5678 kBssEntry, 5679 5680 // Load from the root table associated with the JIT compiled method. 5681 kJitTableAddress, 5682 5683 // Load using a simple runtime call. This is the fall-back load kind when 5684 // the codegen is unable to use another appropriate kind. 
5685 kRuntimeCall, 5686 5687 kLast = kRuntimeCall 5688 }; 5689 5690 HLoadClass(HCurrentMethod* current_method, 5691 dex::TypeIndex type_index, 5692 const DexFile& dex_file, 5693 Handle<mirror::Class> klass, 5694 bool is_referrers_class, 5695 uint32_t dex_pc, 5696 bool needs_access_check) 5697 : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc), 5698 special_input_(HUserRecord<HInstruction*>(current_method)), 5699 type_index_(type_index), 5700 dex_file_(dex_file), 5701 klass_(klass), 5702 loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) { 5703 // Referrers class should not need access check. We never inline unverified 5704 // methods so we can't possibly end up in this situation. 5705 DCHECK(!is_referrers_class || !needs_access_check); 5706 5707 SetPackedField<LoadKindField>( 5708 is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall); 5709 SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check); 5710 SetPackedFlag<kFlagIsInBootImage>(false); 5711 SetPackedFlag<kFlagGenerateClInitCheck>(false); 5712 } 5713 5714 void SetLoadKind(LoadKind load_kind); 5715 5716 LoadKind GetLoadKind() const { 5717 return GetPackedField<LoadKindField>(); 5718 } 5719 5720 bool CanBeMoved() const OVERRIDE { return true; } 5721 5722 bool InstructionDataEquals(const HInstruction* other) const; 5723 5724 size_t ComputeHashCode() const OVERRIDE { return type_index_.index_; } 5725 5726 bool CanBeNull() const OVERRIDE { return false; } 5727 5728 bool NeedsEnvironment() const OVERRIDE { 5729 return CanCallRuntime(); 5730 } 5731 5732 void SetMustGenerateClinitCheck(bool generate_clinit_check) { 5733 // The entrypoint the code generator is going to call does not do 5734 // clinit of the class. 
5735 DCHECK(!NeedsAccessCheck()); 5736 SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check); 5737 } 5738 5739 bool CanCallRuntime() const { 5740 return NeedsAccessCheck() || 5741 MustGenerateClinitCheck() || 5742 GetLoadKind() == LoadKind::kRuntimeCall || 5743 GetLoadKind() == LoadKind::kBssEntry; 5744 } 5745 5746 bool CanThrow() const OVERRIDE { 5747 return NeedsAccessCheck() || 5748 MustGenerateClinitCheck() || 5749 // If the class is in the boot image, the lookup in the runtime call cannot throw. 5750 // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and 5751 // PIC and subsequently avoids a DCE behavior dependency on the PIC option. 5752 ((GetLoadKind() == LoadKind::kRuntimeCall || 5753 GetLoadKind() == LoadKind::kBssEntry) && 5754 !IsInBootImage()); 5755 } 5756 5757 ReferenceTypeInfo GetLoadedClassRTI() { 5758 return loaded_class_rti_; 5759 } 5760 5761 void SetLoadedClassRTI(ReferenceTypeInfo rti) { 5762 // Make sure we only set exact types (the loaded class should never be merged). 
5763 DCHECK(rti.IsExact()); 5764 loaded_class_rti_ = rti; 5765 } 5766 5767 dex::TypeIndex GetTypeIndex() const { return type_index_; } 5768 const DexFile& GetDexFile() const { return dex_file_; } 5769 5770 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { 5771 return GetLoadKind() == LoadKind::kRuntimeCall; 5772 } 5773 5774 static SideEffects SideEffectsForArchRuntimeCalls() { 5775 return SideEffects::CanTriggerGC(); 5776 } 5777 5778 bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; } 5779 bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); } 5780 bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); } 5781 bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); } 5782 5783 void MarkInBootImage() { 5784 SetPackedFlag<kFlagIsInBootImage>(true); 5785 } 5786 5787 void AddSpecialInput(HInstruction* special_input); 5788 5789 using HInstruction::GetInputRecords; // Keep the const version visible. 5790 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL { 5791 return ArrayRef<HUserRecord<HInstruction*>>( 5792 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u); 5793 } 5794 5795 Primitive::Type GetType() const OVERRIDE { 5796 return Primitive::kPrimNot; 5797 } 5798 5799 Handle<mirror::Class> GetClass() const { 5800 return klass_; 5801 } 5802 5803 DECLARE_INSTRUCTION(LoadClass); 5804 5805 private: 5806 static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits; 5807 static constexpr size_t kFlagIsInBootImage = kFlagNeedsAccessCheck + 1; 5808 // Whether this instruction must generate the initialization check. 5809 // Used for code generation. 
5810 static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1; 5811 static constexpr size_t kFieldLoadKind = kFlagGenerateClInitCheck + 1; 5812 static constexpr size_t kFieldLoadKindSize = 5813 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast)); 5814 static constexpr size_t kNumberOfLoadClassPackedBits = kFieldLoadKind + kFieldLoadKindSize; 5815 static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields."); 5816 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>; 5817 5818 static bool HasTypeReference(LoadKind load_kind) { 5819 return load_kind == LoadKind::kReferrersClass || 5820 load_kind == LoadKind::kBootImageLinkTimePcRelative || 5821 load_kind == LoadKind::kBssEntry || 5822 load_kind == LoadKind::kRuntimeCall; 5823 } 5824 5825 void SetLoadKindInternal(LoadKind load_kind); 5826 5827 // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass. 5828 // For other load kinds it's empty or possibly some architecture-specific instruction 5829 // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative. 5830 HUserRecord<HInstruction*> special_input_; 5831 5832 // A type index and dex file where the class can be accessed. The dex file can be: 5833 // - The compiling method's dex file if the class is defined there too. 5834 // - The compiling method's dex file if the class is referenced there. 5835 // - The dex file where the class is defined. When the load kind can only be 5836 // kBssEntry or kRuntimeCall, we cannot emit code for this `HLoadClass`. 5837 const dex::TypeIndex type_index_; 5838 const DexFile& dex_file_; 5839 5840 Handle<mirror::Class> klass_; 5841 5842 ReferenceTypeInfo loaded_class_rti_; 5843 5844 DISALLOW_COPY_AND_ASSIGN(HLoadClass); 5845 }; 5846 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs); 5847 5848 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind). 
5849 inline void HLoadClass::AddSpecialInput(HInstruction* special_input) { 5850 // The special input is used for PC-relative loads on some architectures, 5851 // including literal pool loads, which are PC-relative too. 5852 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative || 5853 GetLoadKind() == LoadKind::kBootImageAddress || 5854 GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind(); 5855 DCHECK(special_input_.GetInstruction() == nullptr); 5856 special_input_ = HUserRecord<HInstruction*>(special_input); 5857 special_input->AddUseAt(this, 0); 5858 } 5859 5860 class HLoadString FINAL : public HInstruction { 5861 public: 5862 // Determines how to load the String. 5863 enum class LoadKind { 5864 // Use PC-relative boot image String* address that will be known at link time. 5865 // Used for boot image strings referenced by boot image code. 5866 kBootImageLinkTimePcRelative, 5867 5868 // Use a known boot image String* address, embedded in the code by the codegen. 5869 // Used for boot image strings referenced by apps in AOT- and JIT-compiled code. 5870 kBootImageAddress, 5871 5872 // Load from an entry in the .bss section using a PC-relative load. 5873 // Used for strings outside boot image when .bss is accessible with a PC-relative load. 5874 kBssEntry, 5875 5876 // Load from the root table associated with the JIT compiled method. 5877 kJitTableAddress, 5878 5879 // Load using a simple runtime call. This is the fall-back load kind when 5880 // the codegen is unable to use another appropriate kind. 
5881 kRuntimeCall, 5882 5883 kLast = kRuntimeCall, 5884 }; 5885 5886 HLoadString(HCurrentMethod* current_method, 5887 dex::StringIndex string_index, 5888 const DexFile& dex_file, 5889 uint32_t dex_pc) 5890 : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc), 5891 special_input_(HUserRecord<HInstruction*>(current_method)), 5892 string_index_(string_index), 5893 dex_file_(dex_file) { 5894 SetPackedField<LoadKindField>(LoadKind::kRuntimeCall); 5895 } 5896 5897 void SetLoadKind(LoadKind load_kind); 5898 5899 LoadKind GetLoadKind() const { 5900 return GetPackedField<LoadKindField>(); 5901 } 5902 5903 const DexFile& GetDexFile() const { 5904 return dex_file_; 5905 } 5906 5907 dex::StringIndex GetStringIndex() const { 5908 return string_index_; 5909 } 5910 5911 Handle<mirror::String> GetString() const { 5912 return string_; 5913 } 5914 5915 void SetString(Handle<mirror::String> str) { 5916 string_ = str; 5917 } 5918 5919 bool CanBeMoved() const OVERRIDE { return true; } 5920 5921 bool InstructionDataEquals(const HInstruction* other) const OVERRIDE; 5922 5923 size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; } 5924 5925 // Will call the runtime if we need to load the string through 5926 // the dex cache and the string is not guaranteed to be there yet. 
5927 bool NeedsEnvironment() const OVERRIDE { 5928 LoadKind load_kind = GetLoadKind(); 5929 if (load_kind == LoadKind::kBootImageLinkTimePcRelative || 5930 load_kind == LoadKind::kBootImageAddress || 5931 load_kind == LoadKind::kJitTableAddress) { 5932 return false; 5933 } 5934 return true; 5935 } 5936 5937 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { 5938 return GetLoadKind() == LoadKind::kRuntimeCall; 5939 } 5940 5941 bool CanBeNull() const OVERRIDE { return false; } 5942 bool CanThrow() const OVERRIDE { return NeedsEnvironment(); } 5943 5944 static SideEffects SideEffectsForArchRuntimeCalls() { 5945 return SideEffects::CanTriggerGC(); 5946 } 5947 5948 void AddSpecialInput(HInstruction* special_input); 5949 5950 using HInstruction::GetInputRecords; // Keep the const version visible. 5951 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() OVERRIDE FINAL { 5952 return ArrayRef<HUserRecord<HInstruction*>>( 5953 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u); 5954 } 5955 5956 Primitive::Type GetType() const OVERRIDE { 5957 return Primitive::kPrimNot; 5958 } 5959 5960 DECLARE_INSTRUCTION(LoadString); 5961 5962 private: 5963 static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits; 5964 static constexpr size_t kFieldLoadKindSize = 5965 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast)); 5966 static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize; 5967 static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields."); 5968 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>; 5969 5970 void SetLoadKindInternal(LoadKind load_kind); 5971 5972 // The special input is the HCurrentMethod for kRuntimeCall. 5973 // For other load kinds it's empty or possibly some architecture-specific instruction 5974 // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative. 
  // Extra input used by PC-relative load kinds; stays null until
  // AddSpecialInput() is called (see below).
  HUserRecord<HInstruction*> special_input_;

  // String index within dex_file_.
  dex::StringIndex string_index_;
  const DexFile& dex_file_;

  // Handle to the resolved String object, when available.
  Handle<mirror::String> string_;

  DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);

// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
  // The special input is used for PC-relative loads on some architectures,
  // including literal pool loads, which are PC-relative too.
  DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
         GetLoadKind() == LoadKind::kBssEntry ||
         GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
  // HLoadString::GetInputRecords() returns an empty array at this point,
  // so use the GetInputRecords() from the base class to set the input record.
  DCHECK(special_input_.GetInstruction() == nullptr);
  special_input_ = HUserRecord<HInstruction*>(special_input);
  special_input->AddUseAt(this, 0);
}

/**
 * Performs an initialization check on its Class object input.
 */
class HClinitCheck FINAL : public HExpression<1> {
 public:
  HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
      : HExpression(
            Primitive::kPrimNot,
            SideEffects::AllChanges(),  // Assume write/read on all fields/arrays.
            dex_pc) {
    SetRawInputAt(0, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }
  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // May call runtime to initialize the class.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  // Input 0 is always the HLoadClass producing the class to check.
  HLoadClass* GetLoadClass() const {
    DCHECK(InputAt(0)->IsLoadClass());
    return InputAt(0)->AsLoadClass();
  }

  DECLARE_INSTRUCTION(ClinitCheck);

 private:
  DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
};

// Reads a static field described by field_info_. Input 0 is the class
// (or class-producing instruction) that owns the field.
class HStaticFieldGet FINAL : public HExpression<1> {
 public:
  HStaticFieldGet(HInstruction* cls,
                  ArtField* field,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetRawInputAt(0, cls);
  }

  // Volatile gets must stay where they are; non-volatile gets can be moved.
  bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }

  bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
    const HStaticFieldGet* other_get = other->AsStaticFieldGet();
    return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
  }

  size_t ComputeHashCode() const OVERRIDE {
    return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  DECLARE_INSTRUCTION(StaticFieldGet);

 private:
  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
};

// Writes a value to a static field. Input 0 is the class, input 1 the value.
class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
 public:
  HStaticFieldSet(HInstruction* cls,
                  HInstruction* value,
                  ArtField* field,
                  Primitive::Type field_type,
                  MemberOffset field_offset,
                  bool is_volatile,
                  uint32_t field_idx,
                  uint16_t declaring_class_def_index,
                  const DexFile& dex_file,
                  uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
        field_info_(field,
                    field_offset,
                    field_type,
                    is_volatile,
                    field_idx,
                    declaring_class_def_index,
                    dex_file) {
    SetPackedFlag<kFlagValueCanBeNull>(true);
    SetRawInputAt(0, cls);
    SetRawInputAt(1, value);
  }

  const FieldInfo& GetFieldInfo() const { return field_info_; }
  MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
  Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
  bool IsVolatile() const { return field_info_.IsVolatile(); }

  HInstruction* GetValue() const { return InputAt(1); }
  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

  DECLARE_INSTRUCTION(StaticFieldSet);

 private:
  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");

  const FieldInfo field_info_;

  DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};

// Reads an instance field that could not be resolved at compile time;
// resolution (and the actual access) happens at runtime.
class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
 public:
  HUnresolvedInstanceFieldGet(HInstruction* obj,
                              Primitive::Type field_type,
                              uint32_t field_index,
                              uint32_t dex_pc)
      : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
        field_index_(field_index) {
    SetRawInputAt(0, obj);
  }

  bool NeedsEnvironment() const OVERRIDE {
return true; } 6142 bool CanThrow() const OVERRIDE { return true; } 6143 6144 Primitive::Type GetFieldType() const { return GetType(); } 6145 uint32_t GetFieldIndex() const { return field_index_; } 6146 6147 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet); 6148 6149 private: 6150 const uint32_t field_index_; 6151 6152 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet); 6153 }; 6154 6155 class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> { 6156 public: 6157 HUnresolvedInstanceFieldSet(HInstruction* obj, 6158 HInstruction* value, 6159 Primitive::Type field_type, 6160 uint32_t field_index, 6161 uint32_t dex_pc) 6162 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 6163 field_index_(field_index) { 6164 SetPackedField<FieldTypeField>(field_type); 6165 DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType())); 6166 SetRawInputAt(0, obj); 6167 SetRawInputAt(1, value); 6168 } 6169 6170 bool NeedsEnvironment() const OVERRIDE { return true; } 6171 bool CanThrow() const OVERRIDE { return true; } 6172 6173 Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); } 6174 uint32_t GetFieldIndex() const { return field_index_; } 6175 6176 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet); 6177 6178 private: 6179 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits; 6180 static constexpr size_t kFieldFieldTypeSize = 6181 MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast)); 6182 static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits = 6183 kFieldFieldType + kFieldFieldTypeSize; 6184 static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits, 6185 "Too many packed fields."); 6186 using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>; 6187 6188 const uint32_t field_index_; 6189 6190 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet); 6191 }; 6192 6193 class 
HUnresolvedStaticFieldGet FINAL : public HExpression<0> { 6194 public: 6195 HUnresolvedStaticFieldGet(Primitive::Type field_type, 6196 uint32_t field_index, 6197 uint32_t dex_pc) 6198 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc), 6199 field_index_(field_index) { 6200 } 6201 6202 bool NeedsEnvironment() const OVERRIDE { return true; } 6203 bool CanThrow() const OVERRIDE { return true; } 6204 6205 Primitive::Type GetFieldType() const { return GetType(); } 6206 uint32_t GetFieldIndex() const { return field_index_; } 6207 6208 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet); 6209 6210 private: 6211 const uint32_t field_index_; 6212 6213 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet); 6214 }; 6215 6216 class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> { 6217 public: 6218 HUnresolvedStaticFieldSet(HInstruction* value, 6219 Primitive::Type field_type, 6220 uint32_t field_index, 6221 uint32_t dex_pc) 6222 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc), 6223 field_index_(field_index) { 6224 SetPackedField<FieldTypeField>(field_type); 6225 DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType())); 6226 SetRawInputAt(0, value); 6227 } 6228 6229 bool NeedsEnvironment() const OVERRIDE { return true; } 6230 bool CanThrow() const OVERRIDE { return true; } 6231 6232 Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); } 6233 uint32_t GetFieldIndex() const { return field_index_; } 6234 6235 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet); 6236 6237 private: 6238 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits; 6239 static constexpr size_t kFieldFieldTypeSize = 6240 MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast)); 6241 static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits = 6242 kFieldFieldType + kFieldFieldTypeSize; 6243 static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= 
HInstruction::kMaxNumberOfPackedBits, 6244 "Too many packed fields."); 6245 using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>; 6246 6247 const uint32_t field_index_; 6248 6249 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet); 6250 }; 6251 6252 // Implement the move-exception DEX instruction. 6253 class HLoadException FINAL : public HExpression<0> { 6254 public: 6255 explicit HLoadException(uint32_t dex_pc = kNoDexPc) 6256 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {} 6257 6258 bool CanBeNull() const OVERRIDE { return false; } 6259 6260 DECLARE_INSTRUCTION(LoadException); 6261 6262 private: 6263 DISALLOW_COPY_AND_ASSIGN(HLoadException); 6264 }; 6265 6266 // Implicit part of move-exception which clears thread-local exception storage. 6267 // Must not be removed because the runtime expects the TLS to get cleared. 6268 class HClearException FINAL : public HTemplateInstruction<0> { 6269 public: 6270 explicit HClearException(uint32_t dex_pc = kNoDexPc) 6271 : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {} 6272 6273 DECLARE_INSTRUCTION(ClearException); 6274 6275 private: 6276 DISALLOW_COPY_AND_ASSIGN(HClearException); 6277 }; 6278 6279 class HThrow FINAL : public HTemplateInstruction<1> { 6280 public: 6281 HThrow(HInstruction* exception, uint32_t dex_pc) 6282 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) { 6283 SetRawInputAt(0, exception); 6284 } 6285 6286 bool IsControlFlow() const OVERRIDE { return true; } 6287 6288 bool NeedsEnvironment() const OVERRIDE { return true; } 6289 6290 bool CanThrow() const OVERRIDE { return true; } 6291 6292 6293 DECLARE_INSTRUCTION(Throw); 6294 6295 private: 6296 DISALLOW_COPY_AND_ASSIGN(HThrow); 6297 }; 6298 6299 /** 6300 * Implementation strategies for the code generator of a HInstanceOf 6301 * or `HCheckCast`. 6302 */ 6303 enum class TypeCheckKind { 6304 kUnresolvedCheck, // Check against an unresolved type. 
  kExactCheck,            // Can do a single class compare.
  kClassHierarchyCheck,   // Can just walk the super class chain.
  kAbstractClassCheck,    // Can just walk the super class chain, starting one up.
  kInterfaceCheck,        // No optimization yet when checking against an interface.
  kArrayObjectCheck,      // Can just check if the array is not primitive.
  kArrayCheck,            // No optimization yet when checking against a generic array.
  kLast = kArrayCheck
};

std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);

// Computes whether input 0 (an object reference) is an instance of the class
// produced by input 1 (an HLoadClass); result type is boolean.
class HInstanceOf FINAL : public HExpression<2> {
 public:
  HInstanceOf(HInstruction* object,
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
      : HExpression(Primitive::kPrimBoolean,
                    SideEffectsForArchRuntimeCalls(check_kind),
                    dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    return CanCallRuntime(GetTypeCheckKind());
  }

  // Used only in code generation.
  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  static bool CanCallRuntime(TypeCheckKind check_kind) {
    // Mips currently does runtime calls for any other checks.
    return check_kind != TypeCheckKind::kExactCheck;
  }

  static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
    return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
  }

  DECLARE_INSTRUCTION(InstanceOf);

 private:
  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
};

// Narrows the static reference type information of its input (e.g. after a
// type guard); see the comment on upper_bound_ below.
class HBoundType FINAL : public HExpression<1> {
 public:
  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
    SetPackedFlag<kFlagUpperCanBeNull>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
    DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
    SetRawInputAt(0, input);
  }

  // {Get,Set}Upper* should only be used in reference type propagation.
  const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
  void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

  void SetCanBeNull(bool can_be_null) {
    DCHECK(GetUpperCanBeNull() || !can_be_null);
    SetPackedFlag<kFlagCanBeNull>(can_be_null);
  }

  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

  DECLARE_INSTRUCTION(BoundType);

 private:
  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
  // is false then CanBeNull() cannot be true).
  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");

  // Encodes the most upper class that this instruction can have. In other words
  // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
  // It is used to bound the type in cases like:
  //   if (x instanceof ClassX) {
  //     // upper_bound_ will be ClassX
  //   }
  ReferenceTypeInfo upper_bound_;

  DISALLOW_COPY_AND_ASSIGN(HBoundType);
};

// Checks that input 0 can be cast to the class produced by input 1 (an
// HLoadClass); may throw at runtime (see NeedsEnvironment/CanThrow below).
class HCheckCast FINAL : public HTemplateInstruction<2> {
 public:
  HCheckCast(HInstruction* object,
             HLoadClass* constant,
             TypeCheckKind check_kind,
             uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetPackedField<TypeCheckKindField>(check_kind);
    SetPackedFlag<kFlagMustDoNullCheck>(true);
    SetRawInputAt(0, object);
    SetRawInputAt(1, constant);
  }

  bool CanBeMoved() const OVERRIDE { return true; }

  bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
    return true;
  }

  bool NeedsEnvironment() const OVERRIDE {
    // Instruction may throw a CheckCastError.
    return true;
  }

  bool CanThrow() const OVERRIDE { return true; }

  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

  DECLARE_INSTRUCTION(CheckCast);

 private:
  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeCheckKindSize =
      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HCheckCast);
};

/**
 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
 * @details We define the combined barrier types that are actually required
 * by the Java Memory Model, rather than using exactly the terminology from
 * the JSR-133 cookbook.  These should, in many cases, be replaced by acquire/release
 * primitives.  Note that the JSR-133 cookbook generally does not deal with
 * store atomicity issues, and the recipes there are not always entirely sufficient.
 * The current recipe is as follows:
 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
 * -# Use AnyAny barrier after volatile store.  (StoreLoad is as expensive.)
 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
 * -# Use StoreStore barrier after all stores but before return from any constructor whose
 *    class has final fields.
 * -# Use NTStoreStore to order non-temporal stores with respect to all later
 *    store-to-memory instructions.  Only generated together with non-temporal stores.
 */
enum MemBarrierKind {
  kAnyStore,
  kLoadAny,
  kStoreStore,
  kAnyAny,
  kNTStoreStore,
  kLastBarrierKind = kNTStoreStore
};
std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind);

// Emits a memory barrier of the given kind (see MemBarrierKind above).
class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
 public:
  explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(
            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
    SetPackedField<BarrierKindField>(barrier_kind);
  }

  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

  DECLARE_INSTRUCTION(MemoryBarrier);

 private:
  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBarrierKindSize =
      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
      kFieldBarrierKind + kFieldBarrierKindSize;
  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;

  DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
};

// A constructor fence orders all prior stores to fields that could be accessed via a final field of
// the specified object(s), with respect to any subsequent store that might "publish"
// (i.e. make visible) the specified object to another thread.
//
// JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
// for all final fields (that were set) at the end of the invoked constructor.
//
// The constructor fence models the freeze actions for the final fields of an object
// being constructed (semantically at the end of the constructor). Constructor fences
// have a per-object affinity; two separate objects being constructed get two separate
// constructor fences.
//
// (Note: that if calling a super-constructor or forwarding to another constructor,
// the freezes would happen at the end of *that* constructor being invoked).
//
// The memory model guarantees that when the object being constructed is "published" after
// constructor completion (i.e. escapes the current thread via a store), then any final field
// writes must be observable on other threads (once they observe that publication).
//
// Further, anything written before the freeze, and read by dereferencing through the final field,
// must also be visible (so final object field could itself have an object with non-final fields;
// yet the freeze must also extend to them).
//
// Constructor example:
//
// class HasFinal {
//    final int field;                              Optimizing IR for <init>()V:
//    HasFinal() {
//      field = 123;                                HInstanceFieldSet(this, HasFinal.field, 123)
//      // freeze(this.field);                      HConstructorFence(this)
//    }                                             HReturn
// }
//
// HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
// already-initialized classes; in that case the allocation must act as a "default-initializer"
// of the object which effectively writes the class pointer "final field".
//
// For example, we can model default-initialization as roughly the equivalent of the following:
//
// class Object {
//   private final Class header;
// }
//
//    Java code:                                     Optimizing IR:
//
//    T new_instance<T>() {
//      Object obj = allocate_memory(T.class.size);  obj = HInvoke(art_quick_alloc_object, T)
//      obj.header = T.class;                        // header write is done by above call.
//      // freeze(obj.header)                        HConstructorFence(obj)
//      return (T)obj;
//    }
//
// See also:
// * CompilerDriver::RequiresConstructorBarrier
// * QuasiAtomic::ThreadFenceForConstructor
//
class HConstructorFence FINAL : public HVariableInputSizeInstruction {
  // A fence has variable inputs because the inputs can be removed
  // after prepare_for_register_allocation phase.
  // (TODO: In the future a fence could freeze multiple objects
  //        after merging two fences together.)
 public:
  // `fence_object` is the reference that needs to be protected for correct publication.
  //
  // It makes sense in the following situations:
  // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
  // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
  //
  // After construction the `fence_object` becomes the 0th input.
  // This is not an input in a real sense, but just a convenient place to stash the information
  // about the associated object.
  HConstructorFence(HInstruction* fence_object,
                    uint32_t dex_pc,
                    ArenaAllocator* arena)
    // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
    // constraints described in the class header. We claim that these SideEffects constraints
    // enforce a superset of the real constraints.
    //
    // The ordering described above is conservatively modeled with SideEffects as follows:
    //
    // * To prevent reordering of the publication stores:
    // ----> "Reads of objects" is the initial SideEffect.
    // * For every primitive final field store in the constructor:
    // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
    // * If there are any stores to reference final fields in the constructor:
    // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
    //       that are reachable from `fence_object` also need to be prevented for reordering
    //       (and we do not want to do alias analysis to figure out what those stores are).
    //
    // In the implementation, this initially starts out as an "all reads" side effect; this is an
    // even more conservative approach than the one described above, and prevents all of the
    // above reordering without analyzing any of the instructions in the constructor.
    //
    // If in a later phase we discover that there are no writes to reference final fields,
    // we can refine the side effect to a smaller set of type reads (see above constraints).
      : HVariableInputSizeInstruction(SideEffects::AllReads(),
                                      dex_pc,
                                      arena,
                                      /* number_of_inputs */ 1,
                                      kArenaAllocConstructorFenceInputs) {
    DCHECK(fence_object != nullptr);
    SetRawInputAt(0, fence_object);
  }

  // The object associated with this constructor fence.
  //
  // (Note: This will be null after the prepare_for_register_allocation phase,
  // as all constructor fence inputs are removed there).
  HInstruction* GetFenceObject() const {
    return InputAt(0);
  }

  // Find all the HConstructorFence uses (`fence_use`) for `this` and:
  // - Delete `fence_use` from `this`'s use list.
  // - Delete `this` from `fence_use`'s inputs list.
  // - If the `fence_use` is dead, remove it from the graph.
  //
  // A fence is considered dead once it no longer has any uses
  // and all of the inputs are dead.
  //
  // This must *not* be called during/after prepare_for_register_allocation,
  // because that removes all the inputs to the fences but the fence is actually
  // still considered live.
  static void RemoveConstructorFences(HInstruction* instruction);

  // Check if this constructor fence is protecting
  // an HNewInstance or HNewArray that is also the immediate
  // predecessor of `this`.
  //
  // Returns the associated HNewArray or HNewInstance,
  // or null otherwise.
  HInstruction* GetAssociatedAllocation();

  DECLARE_INSTRUCTION(ConstructorFence);

 private:
  DISALLOW_COPY_AND_ASSIGN(HConstructorFence);
};

// Monitor-enter / monitor-exit (`synchronized` support); input 0 is the object
// whose monitor is operated on.
class HMonitorOperation FINAL : public HTemplateInstruction<1> {
 public:
  enum class OperationKind {
    kEnter,
    kExit,
    kLast = kExit
  };

  HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
    : HTemplateInstruction(
          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
          dex_pc) {
    SetPackedField<OperationKindField>(kind);
    SetRawInputAt(0, object);
  }

  // Instruction may go into runtime, so we need an environment.
  bool NeedsEnvironment() const OVERRIDE { return true; }

  bool CanThrow() const OVERRIDE {
    // Verifier guarantees that monitor-exit cannot throw.
    // This is important because it allows the HGraphBuilder to remove
    // a dead throw-catch loop generated for `synchronized` blocks/methods.
6672 return IsEnter(); 6673 } 6674 6675 OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); } 6676 bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; } 6677 6678 DECLARE_INSTRUCTION(MonitorOperation); 6679 6680 private: 6681 static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits; 6682 static constexpr size_t kFieldOperationKindSize = 6683 MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast)); 6684 static constexpr size_t kNumberOfMonitorOperationPackedBits = 6685 kFieldOperationKind + kFieldOperationKindSize; 6686 static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits, 6687 "Too many packed fields."); 6688 using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>; 6689 6690 private: 6691 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation); 6692 }; 6693 6694 class HSelect FINAL : public HExpression<3> { 6695 public: 6696 HSelect(HInstruction* condition, 6697 HInstruction* true_value, 6698 HInstruction* false_value, 6699 uint32_t dex_pc) 6700 : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) { 6701 DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType())); 6702 6703 // First input must be `true_value` or `false_value` to allow codegens to 6704 // use the SameAsFirstInput allocation policy. We make it `false_value`, so 6705 // that architectures which implement HSelect as a conditional move also 6706 // will not need to invert the condition. 
6707 SetRawInputAt(0, false_value); 6708 SetRawInputAt(1, true_value); 6709 SetRawInputAt(2, condition); 6710 } 6711 6712 HInstruction* GetFalseValue() const { return InputAt(0); } 6713 HInstruction* GetTrueValue() const { return InputAt(1); } 6714 HInstruction* GetCondition() const { return InputAt(2); } 6715 6716 bool CanBeMoved() const OVERRIDE { return true; } 6717 bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { 6718 return true; 6719 } 6720 6721 bool CanBeNull() const OVERRIDE { 6722 return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull(); 6723 } 6724 6725 DECLARE_INSTRUCTION(Select); 6726 6727 private: 6728 DISALLOW_COPY_AND_ASSIGN(HSelect); 6729 }; 6730 6731 class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> { 6732 public: 6733 MoveOperands(Location source, 6734 Location destination, 6735 Primitive::Type type, 6736 HInstruction* instruction) 6737 : source_(source), destination_(destination), type_(type), instruction_(instruction) {} 6738 6739 Location GetSource() const { return source_; } 6740 Location GetDestination() const { return destination_; } 6741 6742 void SetSource(Location value) { source_ = value; } 6743 void SetDestination(Location value) { destination_ = value; } 6744 6745 // The parallel move resolver marks moves as "in-progress" by clearing the 6746 // destination (but not the source). 6747 Location MarkPending() { 6748 DCHECK(!IsPending()); 6749 Location dest = destination_; 6750 destination_ = Location::NoLocation(); 6751 return dest; 6752 } 6753 6754 void ClearPending(Location dest) { 6755 DCHECK(IsPending()); 6756 destination_ = dest; 6757 } 6758 6759 bool IsPending() const { 6760 DCHECK(source_.IsValid() || destination_.IsInvalid()); 6761 return destination_.IsInvalid() && source_.IsValid(); 6762 } 6763 6764 // True if this blocks a move from the given location. 
6765 bool Blocks(Location loc) const { 6766 return !IsEliminated() && source_.OverlapsWith(loc); 6767 } 6768 6769 // A move is redundant if it's been eliminated, if its source and 6770 // destination are the same, or if its destination is unneeded. 6771 bool IsRedundant() const { 6772 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_); 6773 } 6774 6775 // We clear both operands to indicate move that's been eliminated. 6776 void Eliminate() { 6777 source_ = destination_ = Location::NoLocation(); 6778 } 6779 6780 bool IsEliminated() const { 6781 DCHECK(!source_.IsInvalid() || destination_.IsInvalid()); 6782 return source_.IsInvalid(); 6783 } 6784 6785 Primitive::Type GetType() const { return type_; } 6786 6787 bool Is64BitMove() const { 6788 return Primitive::Is64BitType(type_); 6789 } 6790 6791 HInstruction* GetInstruction() const { return instruction_; } 6792 6793 private: 6794 Location source_; 6795 Location destination_; 6796 // The type this move is for. 6797 Primitive::Type type_; 6798 // The instruction this move is assocatied with. Null when this move is 6799 // for moving an input in the expected locations of user (including a phi user). 6800 // This is only used in debug mode, to ensure we do not connect interval siblings 6801 // in the same parallel move. 
  HInstruction* instruction_;
};

std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);

static constexpr size_t kDefaultNumberOfMoves = 4;

// A set of moves that must behave as if they were executed in parallel;
// built from MoveOperands (see above) and later sequenced by the resolver.
class HParallelMove FINAL : public HTemplateInstruction<0> {
 public:
  explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc),
        moves_(arena->Adapter(kArenaAllocMoveOperands)) {
    moves_.reserve(kDefaultNumberOfMoves);
  }

  void AddMove(Location source,
               Location destination,
               Primitive::Type type,
               HInstruction* instruction) {
    DCHECK(source.IsValid());
    DCHECK(destination.IsValid());
    if (kIsDebugBuild) {
      if (instruction != nullptr) {
        for (const MoveOperands& move : moves_) {
          if (move.GetInstruction() == instruction) {
            // Special case the situation where the move is for the spill slot
            // of the instruction.
            if ((GetPrevious() == instruction)
                || ((GetPrevious() == nullptr)
                    && instruction->IsPhi()
                    && instruction->GetBlock() == GetBlock())) {
              DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
                  << "Doing parallel moves for the same instruction.";
            } else {
              DCHECK(false) << "Doing parallel moves for the same instruction.";
            }
          }
        }
      }
      // Two moves in the same parallel move must never write the same location.
      for (const MoveOperands& move : moves_) {
        DCHECK(!destination.OverlapsWith(move.GetDestination()))
            << "Overlapped destination for two moves in a parallel move: "
            << move.GetSource() << " ==> " << move.GetDestination() << " and "
            << source << " ==> " << destination;
      }
    }
    moves_.emplace_back(source, destination, type, instruction);
  }

  MoveOperands* MoveOperandsAt(size_t index) {
    return &moves_[index];
  }

  size_t NumMoves() const { return moves_.size(); }

  DECLARE_INSTRUCTION(ParallelMove);

 private:
  ArenaVector<MoveOperands> moves_;

  DISALLOW_COPY_AND_ASSIGN(HParallelMove);
};

}  // namespace art

#include "nodes_vector.h"

#if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
#include "nodes_shared.h"
#endif
#ifdef ART_ENABLE_CODEGEN_mips
#include "nodes_mips.h"
#endif
#ifdef ART_ENABLE_CODEGEN_x86
#include "nodes_x86.h"
#endif

namespace art {

// Base visitor over all instructions of a graph; per-instruction Visit##name
// hooks (generated below) default to VisitInstruction.
class HGraphVisitor : public ValueObject {
 public:
  explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
  virtual ~HGraphVisitor() {}

  virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
  virtual void VisitBasicBlock(HBasicBlock* block);

  // Visit the graph following basic block insertion order.
  void VisitInsertionOrder();

  // Visit the graph following dominator tree reverse post-order.
  void VisitReversePostOrder();

  HGraph* GetGraph() const { return graph_; }

  // Visit functions for instruction classes.
#define DECLARE_VISIT_INSTRUCTION(name, super)                                        \
  virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }

  FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

 private:
  HGraph* const graph_;

  DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
};

// Visitor whose per-instruction hooks delegate to the hook of the instruction's
// superclass by default, so overrides can be placed at any hierarchy level.
class HGraphDelegateVisitor : public HGraphVisitor {
 public:
  explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
  virtual ~HGraphDelegateVisitor() {}

  // Visit functions that delegate to the super class.
6917 #define DECLARE_VISIT_INSTRUCTION(name, super) \ 6918 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); } 6919 6920 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION) 6921 6922 #undef DECLARE_VISIT_INSTRUCTION 6923 6924 private: 6925 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor); 6926 }; 6927 6928 // Iterator over the blocks that art part of the loop. Includes blocks part 6929 // of an inner loop. The order in which the blocks are iterated is on their 6930 // block id. 6931 class HBlocksInLoopIterator : public ValueObject { 6932 public: 6933 explicit HBlocksInLoopIterator(const HLoopInformation& info) 6934 : blocks_in_loop_(info.GetBlocks()), 6935 blocks_(info.GetHeader()->GetGraph()->GetBlocks()), 6936 index_(0) { 6937 if (!blocks_in_loop_.IsBitSet(index_)) { 6938 Advance(); 6939 } 6940 } 6941 6942 bool Done() const { return index_ == blocks_.size(); } 6943 HBasicBlock* Current() const { return blocks_[index_]; } 6944 void Advance() { 6945 ++index_; 6946 for (size_t e = blocks_.size(); index_ < e; ++index_) { 6947 if (blocks_in_loop_.IsBitSet(index_)) { 6948 break; 6949 } 6950 } 6951 } 6952 6953 private: 6954 const BitVector& blocks_in_loop_; 6955 const ArenaVector<HBasicBlock*>& blocks_; 6956 size_t index_; 6957 6958 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator); 6959 }; 6960 6961 // Iterator over the blocks that art part of the loop. Includes blocks part 6962 // of an inner loop. The order in which the blocks are iterated is reverse 6963 // post order. 
class HBlocksInLoopReversePostOrderIterator : public ValueObject {
 public:
  explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
      : blocks_in_loop_(info.GetBlocks()),
        blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
        index_(0) {
    // The first RPO entry may not belong to the loop; skip ahead to one that does.
    if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
      Advance();
    }
  }

  bool Done() const { return index_ == blocks_.size(); }
  HBasicBlock* Current() const { return blocks_[index_]; }
  // Advance to the next RPO position whose block id is set in the loop's
  // bit vector.
  void Advance() {
    ++index_;
    for (size_t e = blocks_.size(); index_ < e; ++index_) {
      if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
        break;
      }
    }
  }

 private:
  const BitVector& blocks_in_loop_;
  const ArenaVector<HBasicBlock*>& blocks_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
};

// Returns int64_t value of a properly typed constant.
inline int64_t Int64FromConstant(HConstant* constant) {
  if (constant->IsIntConstant()) {
    return constant->AsIntConstant()->GetValue();
  } else if (constant->IsLongConstant()) {
    return constant->AsLongConstant()->GetValue();
  } else {
    // A null reference constant widens to integral 0.
    DCHECK(constant->IsNullConstant()) << constant->DebugName();
    return 0;
  }
}

// Returns true iff instruction is an integral constant (and sets value on success).
7007 inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) { 7008 if (instruction->IsIntConstant()) { 7009 *value = instruction->AsIntConstant()->GetValue(); 7010 return true; 7011 } else if (instruction->IsLongConstant()) { 7012 *value = instruction->AsLongConstant()->GetValue(); 7013 return true; 7014 } else if (instruction->IsNullConstant()) { 7015 *value = 0; 7016 return true; 7017 } 7018 return false; 7019 } 7020 7021 #define INSTRUCTION_TYPE_CHECK(type, super) \ 7022 inline bool HInstruction::Is##type() const { return GetKind() == k##type; } \ 7023 inline const H##type* HInstruction::As##type() const { \ 7024 return Is##type() ? down_cast<const H##type*>(this) : nullptr; \ 7025 } \ 7026 inline H##type* HInstruction::As##type() { \ 7027 return Is##type() ? static_cast<H##type*>(this) : nullptr; \ 7028 } 7029 7030 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK) 7031 #undef INSTRUCTION_TYPE_CHECK 7032 7033 // Create space in `blocks` for adding `number_of_new_blocks` entries 7034 // starting at location `at`. Blocks after `at` are moved accordingly. 7035 inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks, 7036 size_t number_of_new_blocks, 7037 size_t after) { 7038 DCHECK_LT(after, blocks->size()); 7039 size_t old_size = blocks->size(); 7040 size_t new_size = old_size + number_of_new_blocks; 7041 blocks->resize(new_size); 7042 std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end()); 7043 } 7044 7045 /* 7046 * Hunt "under the hood" of array lengths (leading to array references), 7047 * null checks (also leading to array references), and new arrays 7048 * (leading to the actual length). This makes it more likely related 7049 * instructions become actually comparable. 
7050 */ 7051 inline HInstruction* HuntForDeclaration(HInstruction* instruction) { 7052 while (instruction->IsArrayLength() || 7053 instruction->IsNullCheck() || 7054 instruction->IsNewArray()) { 7055 instruction = instruction->IsNewArray() 7056 ? instruction->AsNewArray()->GetLength() 7057 : instruction->InputAt(0); 7058 } 7059 return instruction; 7060 } 7061 7062 } // namespace art 7063 7064 #endif // ART_COMPILER_OPTIMIZING_NODES_H_ 7065