/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "nodes.h"

#include <cfloat>

#include "art_method-inl.h"
#include "class_linker-inl.h"
#include "code_generator.h"
#include "common_dominator.h"
#include "ssa_builder.h"
#include "base/bit_vector-inl.h"
#include "base/bit_utils.h"
#include "base/stl_util.h"
#include "intrinsics.h"
#include "mirror/class-inl.h"
#include "scoped_thread_state_change-inl.h"

namespace art {

// Enable floating-point static evaluation during constant folding
// only if all floating-point operations and constants evaluate in the
// range and precision of the type used (i.e., 32-bit float, 64-bit
// double). FLT_EVAL_METHOD == 0 (from <cfloat>) guarantees exactly that.
static constexpr bool kEnableFloatingPointStaticEvaluation = (FLT_EVAL_METHOD == 0);

// Creates the "inexact java.lang.Object" ReferenceTypeInfo and caches it in
// this graph (inexact_object_rti_). Requires a handle scope to hold the
// class root; takes the mutator lock via ScopedObjectAccess.
void HGraph::InitializeInexactObjectRTI(VariableSizedHandleScope* handles) {
  ScopedObjectAccess soa(Thread::Current());
  // Create the inexact Object reference type and store it in the HGraph.
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  inexact_object_rti_ = ReferenceTypeInfo::Create(
      handles->NewHandle(linker->GetClassRoot(ClassLinker::kJavaLangObject)),
      /* is_exact */ false);
}

// Appends `block` to the graph's block list and assigns it the next block id.
void HGraph::AddBlock(HBasicBlock* block) {
  block->SetBlockId(blocks_.size());
  blocks_.push_back(block);
}

// Iterative DFS from the entry block. Registers every edge that closes a
// cycle as a back edge on its target (successor->AddBackEdge(current)).
// On return, `visited` holds exactly the blocks reachable from the entry
// block; unreachable blocks are later treated as dead.
void HGraph::FindBackEdges(ArenaBitVector* visited) {
  // "visited" must be empty on entry, it's an output argument for all visited (i.e. live) blocks.
  DCHECK_EQ(visited->GetHighestBitSet(), -1);

  // Nodes that we're currently visiting, indexed by block id.
  ArenaBitVector visiting(arena_, blocks_.size(), false, kArenaAllocGraphBuilder);
  // Number of successors visited from a given node, indexed by block id.
  ArenaVector<size_t> successors_visited(blocks_.size(),
                                         0u,
                                         arena_->Adapter(kArenaAllocGraphBuilder));
  // Stack of nodes that we're currently visiting (same as marked in "visiting" above).
  ArenaVector<HBasicBlock*> worklist(arena_->Adapter(kArenaAllocGraphBuilder));
  constexpr size_t kDefaultWorklistSize = 8;
  worklist.reserve(kDefaultWorklistSize);
  visited->SetBit(entry_block_->GetBlockId());
  visiting.SetBit(entry_block_->GetBlockId());
  worklist.push_back(entry_block_);

  while (!worklist.empty()) {
    HBasicBlock* current = worklist.back();
    uint32_t current_id = current->GetBlockId();
    if (successors_visited[current_id] == current->GetSuccessors().size()) {
      // All successors explored: pop the node and leave the "visiting" set.
      visiting.ClearBit(current_id);
      worklist.pop_back();
    } else {
      HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
      uint32_t successor_id = successor->GetBlockId();
      if (visiting.IsBitSet(successor_id)) {
        // Edge to a node still on the DFS stack: this is a back edge.
        DCHECK(ContainsElement(worklist, successor));
        successor->AddBackEdge(current);
      } else if (!visited->IsBitSet(successor_id)) {
        visited->SetBit(successor_id);
        visiting.SetBit(successor_id);
        worklist.push_back(successor);
      }
    }
  }
}

static void RemoveEnvironmentUses(HInstruction* instruction) { 94 for (HEnvironment* environment = instruction->GetEnvironment(); 95 environment != nullptr; 96 environment = environment->GetParent()) { 97 for (size_t i = 0, e = environment->Size(); i < e; ++i) { 98 if (environment->GetInstructionAt(i) != nullptr) { 99 environment->RemoveAsUserOfInput(i); 100 } 101 } 102 } 103 } 104 105 static void RemoveAsUser(HInstruction* instruction) { 106 instruction->RemoveAsUserOfAllInputs(); 107 RemoveEnvironmentUses(instruction); 108 } 109 110 void HGraph::RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const { 111 for (size_t i = 0; i < blocks_.size(); ++i) { 112 if (!visited.IsBitSet(i)) { 113 HBasicBlock* block = blocks_[i]; 114 if (block == nullptr) continue; 115 DCHECK(block->GetPhis().IsEmpty()) << "Phis are not inserted at this stage"; 116 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { 117 RemoveAsUser(it.Current()); 118 } 119 } 120 } 121 } 122 123 void HGraph::RemoveDeadBlocks(const ArenaBitVector& visited) { 124 for (size_t i = 0; i < blocks_.size(); ++i) { 125 if (!visited.IsBitSet(i)) { 126 HBasicBlock* block = blocks_[i]; 127 if (block == nullptr) continue; 128 // We only need to update the successor, which might be live. 129 for (HBasicBlock* successor : block->GetSuccessors()) { 130 successor->RemovePredecessor(block); 131 } 132 // Remove the block from the list of blocks, so that further analyses 133 // never see it. 134 blocks_[i] = nullptr; 135 if (block->IsExitBlock()) { 136 SetExitBlock(nullptr); 137 } 138 // Mark the block as removed. This is used by the HGraphBuilder to discard 139 // the block as a branch target. 140 block->SetGraph(nullptr); 141 } 142 } 143 } 144 145 GraphAnalysisResult HGraph::BuildDominatorTree() { 146 ArenaBitVector visited(arena_, blocks_.size(), false, kArenaAllocGraphBuilder); 147 148 // (1) Find the back edges in the graph doing a DFS traversal. 
149 FindBackEdges(&visited); 150 151 // (2) Remove instructions and phis from blocks not visited during 152 // the initial DFS as users from other instructions, so that 153 // users can be safely removed before uses later. 154 RemoveInstructionsAsUsersFromDeadBlocks(visited); 155 156 // (3) Remove blocks not visited during the initial DFS. 157 // Step (5) requires dead blocks to be removed from the 158 // predecessors list of live blocks. 159 RemoveDeadBlocks(visited); 160 161 // (4) Simplify the CFG now, so that we don't need to recompute 162 // dominators and the reverse post order. 163 SimplifyCFG(); 164 165 // (5) Compute the dominance information and the reverse post order. 166 ComputeDominanceInformation(); 167 168 // (6) Analyze loops discovered through back edge analysis, and 169 // set the loop information on each block. 170 GraphAnalysisResult result = AnalyzeLoops(); 171 if (result != kAnalysisSuccess) { 172 return result; 173 } 174 175 // (7) Precompute per-block try membership before entering the SSA builder, 176 // which needs the information to build catch block phis from values of 177 // locals at throwing instructions inside try blocks. 
178 ComputeTryBlockInformation(); 179 180 return kAnalysisSuccess; 181 } 182 183 void HGraph::ClearDominanceInformation() { 184 for (HBasicBlock* block : GetReversePostOrder()) { 185 block->ClearDominanceInformation(); 186 } 187 reverse_post_order_.clear(); 188 } 189 190 void HGraph::ClearLoopInformation() { 191 SetHasIrreducibleLoops(false); 192 for (HBasicBlock* block : GetReversePostOrder()) { 193 block->SetLoopInformation(nullptr); 194 } 195 } 196 197 void HBasicBlock::ClearDominanceInformation() { 198 dominated_blocks_.clear(); 199 dominator_ = nullptr; 200 } 201 202 HInstruction* HBasicBlock::GetFirstInstructionDisregardMoves() const { 203 HInstruction* instruction = GetFirstInstruction(); 204 while (instruction->IsParallelMove()) { 205 instruction = instruction->GetNext(); 206 } 207 return instruction; 208 } 209 210 static bool UpdateDominatorOfSuccessor(HBasicBlock* block, HBasicBlock* successor) { 211 DCHECK(ContainsElement(block->GetSuccessors(), successor)); 212 213 HBasicBlock* old_dominator = successor->GetDominator(); 214 HBasicBlock* new_dominator = 215 (old_dominator == nullptr) ? block 216 : CommonDominator::ForPair(old_dominator, block); 217 218 if (old_dominator == new_dominator) { 219 return false; 220 } else { 221 successor->SetDominator(new_dominator); 222 return true; 223 } 224 } 225 226 void HGraph::ComputeDominanceInformation() { 227 DCHECK(reverse_post_order_.empty()); 228 reverse_post_order_.reserve(blocks_.size()); 229 reverse_post_order_.push_back(entry_block_); 230 231 // Number of visits of a given node, indexed by block id. 232 ArenaVector<size_t> visits(blocks_.size(), 0u, arena_->Adapter(kArenaAllocGraphBuilder)); 233 // Number of successors visited from a given node, indexed by block id. 234 ArenaVector<size_t> successors_visited(blocks_.size(), 235 0u, 236 arena_->Adapter(kArenaAllocGraphBuilder)); 237 // Nodes for which we need to visit successors. 
238 ArenaVector<HBasicBlock*> worklist(arena_->Adapter(kArenaAllocGraphBuilder)); 239 constexpr size_t kDefaultWorklistSize = 8; 240 worklist.reserve(kDefaultWorklistSize); 241 worklist.push_back(entry_block_); 242 243 while (!worklist.empty()) { 244 HBasicBlock* current = worklist.back(); 245 uint32_t current_id = current->GetBlockId(); 246 if (successors_visited[current_id] == current->GetSuccessors().size()) { 247 worklist.pop_back(); 248 } else { 249 HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++]; 250 UpdateDominatorOfSuccessor(current, successor); 251 252 // Once all the forward edges have been visited, we know the immediate 253 // dominator of the block. We can then start visiting its successors. 254 if (++visits[successor->GetBlockId()] == 255 successor->GetPredecessors().size() - successor->NumberOfBackEdges()) { 256 reverse_post_order_.push_back(successor); 257 worklist.push_back(successor); 258 } 259 } 260 } 261 262 // Check if the graph has back edges not dominated by their respective headers. 263 // If so, we need to update the dominators of those headers and recursively of 264 // their successors. We do that with a fix-point iteration over all blocks. 265 // The algorithm is guaranteed to terminate because it loops only if the sum 266 // of all dominator chains has decreased in the current iteration. 
267 bool must_run_fix_point = false; 268 for (HBasicBlock* block : blocks_) { 269 if (block != nullptr && 270 block->IsLoopHeader() && 271 block->GetLoopInformation()->HasBackEdgeNotDominatedByHeader()) { 272 must_run_fix_point = true; 273 break; 274 } 275 } 276 if (must_run_fix_point) { 277 bool update_occurred = true; 278 while (update_occurred) { 279 update_occurred = false; 280 for (HBasicBlock* block : GetReversePostOrder()) { 281 for (HBasicBlock* successor : block->GetSuccessors()) { 282 update_occurred |= UpdateDominatorOfSuccessor(block, successor); 283 } 284 } 285 } 286 } 287 288 // Make sure that there are no remaining blocks whose dominator information 289 // needs to be updated. 290 if (kIsDebugBuild) { 291 for (HBasicBlock* block : GetReversePostOrder()) { 292 for (HBasicBlock* successor : block->GetSuccessors()) { 293 DCHECK(!UpdateDominatorOfSuccessor(block, successor)); 294 } 295 } 296 } 297 298 // Populate `dominated_blocks_` information after computing all dominators. 299 // The potential presence of irreducible loops requires to do it after. 300 for (HBasicBlock* block : GetReversePostOrder()) { 301 if (!block->IsEntryBlock()) { 302 block->GetDominator()->AddDominatedBlock(block); 303 } 304 } 305 } 306 307 HBasicBlock* HGraph::SplitEdge(HBasicBlock* block, HBasicBlock* successor) { 308 HBasicBlock* new_block = new (arena_) HBasicBlock(this, successor->GetDexPc()); 309 AddBlock(new_block); 310 // Use `InsertBetween` to ensure the predecessor index and successor index of 311 // `block` and `successor` are preserved. 312 new_block->InsertBetween(block, successor); 313 return new_block; 314 } 315 316 void HGraph::SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor) { 317 // Insert a new node between `block` and `successor` to split the 318 // critical edge. 
319 HBasicBlock* new_block = SplitEdge(block, successor); 320 new_block->AddInstruction(new (arena_) HGoto(successor->GetDexPc())); 321 if (successor->IsLoopHeader()) { 322 // If we split at a back edge boundary, make the new block the back edge. 323 HLoopInformation* info = successor->GetLoopInformation(); 324 if (info->IsBackEdge(*block)) { 325 info->RemoveBackEdge(block); 326 info->AddBackEdge(new_block); 327 } 328 } 329 } 330 331 void HGraph::SimplifyLoop(HBasicBlock* header) { 332 HLoopInformation* info = header->GetLoopInformation(); 333 334 // Make sure the loop has only one pre header. This simplifies SSA building by having 335 // to just look at the pre header to know which locals are initialized at entry of the 336 // loop. Also, don't allow the entry block to be a pre header: this simplifies inlining 337 // this graph. 338 size_t number_of_incomings = header->GetPredecessors().size() - info->NumberOfBackEdges(); 339 if (number_of_incomings != 1 || (GetEntryBlock()->GetSingleSuccessor() == header)) { 340 HBasicBlock* pre_header = new (arena_) HBasicBlock(this, header->GetDexPc()); 341 AddBlock(pre_header); 342 pre_header->AddInstruction(new (arena_) HGoto(header->GetDexPc())); 343 344 for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) { 345 HBasicBlock* predecessor = header->GetPredecessors()[pred]; 346 if (!info->IsBackEdge(*predecessor)) { 347 predecessor->ReplaceSuccessor(header, pre_header); 348 pred--; 349 } 350 } 351 pre_header->AddSuccessor(header); 352 } 353 354 // Make sure the first predecessor of a loop header is the incoming block. 
355 if (info->IsBackEdge(*header->GetPredecessors()[0])) { 356 HBasicBlock* to_swap = header->GetPredecessors()[0]; 357 for (size_t pred = 1, e = header->GetPredecessors().size(); pred < e; ++pred) { 358 HBasicBlock* predecessor = header->GetPredecessors()[pred]; 359 if (!info->IsBackEdge(*predecessor)) { 360 header->predecessors_[pred] = to_swap; 361 header->predecessors_[0] = predecessor; 362 break; 363 } 364 } 365 } 366 367 HInstruction* first_instruction = header->GetFirstInstruction(); 368 if (first_instruction != nullptr && first_instruction->IsSuspendCheck()) { 369 // Called from DeadBlockElimination. Update SuspendCheck pointer. 370 info->SetSuspendCheck(first_instruction->AsSuspendCheck()); 371 } 372 } 373 374 void HGraph::ComputeTryBlockInformation() { 375 // Iterate in reverse post order to propagate try membership information from 376 // predecessors to their successors. 377 for (HBasicBlock* block : GetReversePostOrder()) { 378 if (block->IsEntryBlock() || block->IsCatchBlock()) { 379 // Catch blocks after simplification have only exceptional predecessors 380 // and hence are never in tries. 381 continue; 382 } 383 384 // Infer try membership from the first predecessor. Having simplified loops, 385 // the first predecessor can never be a back edge and therefore it must have 386 // been visited already and had its try membership set. 387 HBasicBlock* first_predecessor = block->GetPredecessors()[0]; 388 DCHECK(!block->IsLoopHeader() || !block->GetLoopInformation()->IsBackEdge(*first_predecessor)); 389 const HTryBoundary* try_entry = first_predecessor->ComputeTryEntryOfSuccessors(); 390 if (try_entry != nullptr && 391 (block->GetTryCatchInformation() == nullptr || 392 try_entry != &block->GetTryCatchInformation()->GetTryEntry())) { 393 // We are either setting try block membership for the first time or it 394 // has changed. 
395 block->SetTryCatchInformation(new (arena_) TryCatchInformation(*try_entry)); 396 } 397 } 398 } 399 400 void HGraph::SimplifyCFG() { 401 // Simplify the CFG for future analysis, and code generation: 402 // (1): Split critical edges. 403 // (2): Simplify loops by having only one preheader. 404 // NOTE: We're appending new blocks inside the loop, so we need to use index because iterators 405 // can be invalidated. We remember the initial size to avoid iterating over the new blocks. 406 for (size_t block_id = 0u, end = blocks_.size(); block_id != end; ++block_id) { 407 HBasicBlock* block = blocks_[block_id]; 408 if (block == nullptr) continue; 409 if (block->GetSuccessors().size() > 1) { 410 // Only split normal-flow edges. We cannot split exceptional edges as they 411 // are synthesized (approximate real control flow), and we do not need to 412 // anyway. Moves that would be inserted there are performed by the runtime. 413 ArrayRef<HBasicBlock* const> normal_successors = block->GetNormalSuccessors(); 414 for (size_t j = 0, e = normal_successors.size(); j < e; ++j) { 415 HBasicBlock* successor = normal_successors[j]; 416 DCHECK(!successor->IsCatchBlock()); 417 if (successor == exit_block_) { 418 // (Throw/Return/ReturnVoid)->TryBoundary->Exit. Special case which we 419 // do not want to split because Goto->Exit is not allowed. 420 DCHECK(block->IsSingleTryBoundary()); 421 } else if (successor->GetPredecessors().size() > 1) { 422 SplitCriticalEdge(block, successor); 423 // SplitCriticalEdge could have invalidated the `normal_successors` 424 // ArrayRef. We must re-acquire it. 
425 normal_successors = block->GetNormalSuccessors(); 426 DCHECK_EQ(normal_successors[j]->GetSingleSuccessor(), successor); 427 DCHECK_EQ(e, normal_successors.size()); 428 } 429 } 430 } 431 if (block->IsLoopHeader()) { 432 SimplifyLoop(block); 433 } else if (!block->IsEntryBlock() && 434 block->GetFirstInstruction() != nullptr && 435 block->GetFirstInstruction()->IsSuspendCheck()) { 436 // We are being called by the dead code elimiation pass, and what used to be 437 // a loop got dismantled. Just remove the suspend check. 438 block->RemoveInstruction(block->GetFirstInstruction()); 439 } 440 } 441 } 442 443 GraphAnalysisResult HGraph::AnalyzeLoops() const { 444 // We iterate post order to ensure we visit inner loops before outer loops. 445 // `PopulateRecursive` needs this guarantee to know whether a natural loop 446 // contains an irreducible loop. 447 for (HBasicBlock* block : GetPostOrder()) { 448 if (block->IsLoopHeader()) { 449 if (block->IsCatchBlock()) { 450 // TODO: Dealing with exceptional back edges could be tricky because 451 // they only approximate the real control flow. Bail out for now. 452 return kAnalysisFailThrowCatchLoop; 453 } 454 block->GetLoopInformation()->Populate(); 455 } 456 } 457 return kAnalysisSuccess; 458 } 459 460 void HLoopInformation::Dump(std::ostream& os) { 461 os << "header: " << header_->GetBlockId() << std::endl; 462 os << "pre header: " << GetPreHeader()->GetBlockId() << std::endl; 463 for (HBasicBlock* block : back_edges_) { 464 os << "back edge: " << block->GetBlockId() << std::endl; 465 } 466 for (HBasicBlock* block : header_->GetPredecessors()) { 467 os << "predecessor: " << block->GetBlockId() << std::endl; 468 } 469 for (uint32_t idx : blocks_.Indexes()) { 470 os << " in loop: " << idx << std::endl; 471 } 472 } 473 474 void HGraph::InsertConstant(HConstant* constant) { 475 // New constants are inserted before the SuspendCheck at the bottom of the 476 // entry block. 
Note that this method can be called from the graph builder and 477 // the entry block therefore may not end with SuspendCheck->Goto yet. 478 HInstruction* insert_before = nullptr; 479 480 HInstruction* gota = entry_block_->GetLastInstruction(); 481 if (gota != nullptr && gota->IsGoto()) { 482 HInstruction* suspend_check = gota->GetPrevious(); 483 if (suspend_check != nullptr && suspend_check->IsSuspendCheck()) { 484 insert_before = suspend_check; 485 } else { 486 insert_before = gota; 487 } 488 } 489 490 if (insert_before == nullptr) { 491 entry_block_->AddInstruction(constant); 492 } else { 493 entry_block_->InsertInstructionBefore(constant, insert_before); 494 } 495 } 496 497 HNullConstant* HGraph::GetNullConstant(uint32_t dex_pc) { 498 // For simplicity, don't bother reviving the cached null constant if it is 499 // not null and not in a block. Otherwise, we need to clear the instruction 500 // id and/or any invariants the graph is assuming when adding new instructions. 501 if ((cached_null_constant_ == nullptr) || (cached_null_constant_->GetBlock() == nullptr)) { 502 cached_null_constant_ = new (arena_) HNullConstant(dex_pc); 503 cached_null_constant_->SetReferenceTypeInfo(inexact_object_rti_); 504 InsertConstant(cached_null_constant_); 505 } 506 if (kIsDebugBuild) { 507 ScopedObjectAccess soa(Thread::Current()); 508 DCHECK(cached_null_constant_->GetReferenceTypeInfo().IsValid()); 509 } 510 return cached_null_constant_; 511 } 512 513 HCurrentMethod* HGraph::GetCurrentMethod() { 514 // For simplicity, don't bother reviving the cached current method if it is 515 // not null and not in a block. Otherwise, we need to clear the instruction 516 // id and/or any invariants the graph is assuming when adding new instructions. 517 if ((cached_current_method_ == nullptr) || (cached_current_method_->GetBlock() == nullptr)) { 518 cached_current_method_ = new (arena_) HCurrentMethod( 519 Is64BitInstructionSet(instruction_set_) ? 
Primitive::kPrimLong : Primitive::kPrimInt, 520 entry_block_->GetDexPc()); 521 if (entry_block_->GetFirstInstruction() == nullptr) { 522 entry_block_->AddInstruction(cached_current_method_); 523 } else { 524 entry_block_->InsertInstructionBefore( 525 cached_current_method_, entry_block_->GetFirstInstruction()); 526 } 527 } 528 return cached_current_method_; 529 } 530 531 HConstant* HGraph::GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc) { 532 switch (type) { 533 case Primitive::Type::kPrimBoolean: 534 DCHECK(IsUint<1>(value)); 535 FALLTHROUGH_INTENDED; 536 case Primitive::Type::kPrimByte: 537 case Primitive::Type::kPrimChar: 538 case Primitive::Type::kPrimShort: 539 case Primitive::Type::kPrimInt: 540 DCHECK(IsInt(Primitive::ComponentSize(type) * kBitsPerByte, value)); 541 return GetIntConstant(static_cast<int32_t>(value), dex_pc); 542 543 case Primitive::Type::kPrimLong: 544 return GetLongConstant(value, dex_pc); 545 546 default: 547 LOG(FATAL) << "Unsupported constant type"; 548 UNREACHABLE(); 549 } 550 } 551 552 void HGraph::CacheFloatConstant(HFloatConstant* constant) { 553 int32_t value = bit_cast<int32_t, float>(constant->GetValue()); 554 DCHECK(cached_float_constants_.find(value) == cached_float_constants_.end()); 555 cached_float_constants_.Overwrite(value, constant); 556 } 557 558 void HGraph::CacheDoubleConstant(HDoubleConstant* constant) { 559 int64_t value = bit_cast<int64_t, double>(constant->GetValue()); 560 DCHECK(cached_double_constants_.find(value) == cached_double_constants_.end()); 561 cached_double_constants_.Overwrite(value, constant); 562 } 563 564 void HLoopInformation::Add(HBasicBlock* block) { 565 blocks_.SetBit(block->GetBlockId()); 566 } 567 568 void HLoopInformation::Remove(HBasicBlock* block) { 569 blocks_.ClearBit(block->GetBlockId()); 570 } 571 572 void HLoopInformation::PopulateRecursive(HBasicBlock* block) { 573 if (blocks_.IsBitSet(block->GetBlockId())) { 574 return; 575 } 576 577 
blocks_.SetBit(block->GetBlockId()); 578 block->SetInLoop(this); 579 if (block->IsLoopHeader()) { 580 // We're visiting loops in post-order, so inner loops must have been 581 // populated already. 582 DCHECK(block->GetLoopInformation()->IsPopulated()); 583 if (block->GetLoopInformation()->IsIrreducible()) { 584 contains_irreducible_loop_ = true; 585 } 586 } 587 for (HBasicBlock* predecessor : block->GetPredecessors()) { 588 PopulateRecursive(predecessor); 589 } 590 } 591 592 void HLoopInformation::PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized) { 593 size_t block_id = block->GetBlockId(); 594 595 // If `block` is in `finalized`, we know its membership in the loop has been 596 // decided and it does not need to be revisited. 597 if (finalized->IsBitSet(block_id)) { 598 return; 599 } 600 601 bool is_finalized = false; 602 if (block->IsLoopHeader()) { 603 // If we hit a loop header in an irreducible loop, we first check if the 604 // pre header of that loop belongs to the currently analyzed loop. If it does, 605 // then we visit the back edges. 606 // Note that we cannot use GetPreHeader, as the loop may have not been populated 607 // yet. 608 HBasicBlock* pre_header = block->GetPredecessors()[0]; 609 PopulateIrreducibleRecursive(pre_header, finalized); 610 if (blocks_.IsBitSet(pre_header->GetBlockId())) { 611 block->SetInLoop(this); 612 blocks_.SetBit(block_id); 613 finalized->SetBit(block_id); 614 is_finalized = true; 615 616 HLoopInformation* info = block->GetLoopInformation(); 617 for (HBasicBlock* back_edge : info->GetBackEdges()) { 618 PopulateIrreducibleRecursive(back_edge, finalized); 619 } 620 } 621 } else { 622 // Visit all predecessors. If one predecessor is part of the loop, this 623 // block is also part of this loop. 
624 for (HBasicBlock* predecessor : block->GetPredecessors()) { 625 PopulateIrreducibleRecursive(predecessor, finalized); 626 if (!is_finalized && blocks_.IsBitSet(predecessor->GetBlockId())) { 627 block->SetInLoop(this); 628 blocks_.SetBit(block_id); 629 finalized->SetBit(block_id); 630 is_finalized = true; 631 } 632 } 633 } 634 635 // All predecessors have been recursively visited. Mark finalized if not marked yet. 636 if (!is_finalized) { 637 finalized->SetBit(block_id); 638 } 639 } 640 641 void HLoopInformation::Populate() { 642 DCHECK_EQ(blocks_.NumSetBits(), 0u) << "Loop information has already been populated"; 643 // Populate this loop: starting with the back edge, recursively add predecessors 644 // that are not already part of that loop. Set the header as part of the loop 645 // to end the recursion. 646 // This is a recursive implementation of the algorithm described in 647 // "Advanced Compiler Design & Implementation" (Muchnick) p192. 648 HGraph* graph = header_->GetGraph(); 649 blocks_.SetBit(header_->GetBlockId()); 650 header_->SetInLoop(this); 651 652 bool is_irreducible_loop = HasBackEdgeNotDominatedByHeader(); 653 654 if (is_irreducible_loop) { 655 ArenaBitVector visited(graph->GetArena(), 656 graph->GetBlocks().size(), 657 /* expandable */ false, 658 kArenaAllocGraphBuilder); 659 // Stop marking blocks at the loop header. 660 visited.SetBit(header_->GetBlockId()); 661 662 for (HBasicBlock* back_edge : GetBackEdges()) { 663 PopulateIrreducibleRecursive(back_edge, &visited); 664 } 665 } else { 666 for (HBasicBlock* back_edge : GetBackEdges()) { 667 PopulateRecursive(back_edge); 668 } 669 } 670 671 if (!is_irreducible_loop && graph->IsCompilingOsr()) { 672 // When compiling in OSR mode, all loops in the compiled method may be entered 673 // from the interpreter. We treat this OSR entry point just like an extra entry 674 // to an irreducible loop, so we need to mark the method's loops as irreducible. 
675 // This does not apply to inlined loops which do not act as OSR entry points. 676 if (suspend_check_ == nullptr) { 677 // Just building the graph in OSR mode, this loop is not inlined. We never build an 678 // inner graph in OSR mode as we can do OSR transition only from the outer method. 679 is_irreducible_loop = true; 680 } else { 681 // Look at the suspend check's environment to determine if the loop was inlined. 682 DCHECK(suspend_check_->HasEnvironment()); 683 if (!suspend_check_->GetEnvironment()->IsFromInlinedInvoke()) { 684 is_irreducible_loop = true; 685 } 686 } 687 } 688 if (is_irreducible_loop) { 689 irreducible_ = true; 690 contains_irreducible_loop_ = true; 691 graph->SetHasIrreducibleLoops(true); 692 } 693 graph->SetHasLoops(true); 694 } 695 696 HBasicBlock* HLoopInformation::GetPreHeader() const { 697 HBasicBlock* block = header_->GetPredecessors()[0]; 698 DCHECK(irreducible_ || (block == header_->GetDominator())); 699 return block; 700 } 701 702 bool HLoopInformation::Contains(const HBasicBlock& block) const { 703 return blocks_.IsBitSet(block.GetBlockId()); 704 } 705 706 bool HLoopInformation::IsIn(const HLoopInformation& other) const { 707 return other.blocks_.IsBitSet(header_->GetBlockId()); 708 } 709 710 bool HLoopInformation::IsDefinedOutOfTheLoop(HInstruction* instruction) const { 711 return !blocks_.IsBitSet(instruction->GetBlock()->GetBlockId()); 712 } 713 714 size_t HLoopInformation::GetLifetimeEnd() const { 715 size_t last_position = 0; 716 for (HBasicBlock* back_edge : GetBackEdges()) { 717 last_position = std::max(back_edge->GetLifetimeEnd(), last_position); 718 } 719 return last_position; 720 } 721 722 bool HLoopInformation::HasBackEdgeNotDominatedByHeader() const { 723 for (HBasicBlock* back_edge : GetBackEdges()) { 724 DCHECK(back_edge->GetDominator() != nullptr); 725 if (!header_->Dominates(back_edge)) { 726 return true; 727 } 728 } 729 return false; 730 } 731 732 bool HLoopInformation::DominatesAllBackEdges(HBasicBlock* block) { 
733 for (HBasicBlock* back_edge : GetBackEdges()) { 734 if (!block->Dominates(back_edge)) { 735 return false; 736 } 737 } 738 return true; 739 } 740 741 742 bool HLoopInformation::HasExitEdge() const { 743 // Determine if this loop has at least one exit edge. 744 HBlocksInLoopReversePostOrderIterator it_loop(*this); 745 for (; !it_loop.Done(); it_loop.Advance()) { 746 for (HBasicBlock* successor : it_loop.Current()->GetSuccessors()) { 747 if (!Contains(*successor)) { 748 return true; 749 } 750 } 751 } 752 return false; 753 } 754 755 bool HBasicBlock::Dominates(HBasicBlock* other) const { 756 // Walk up the dominator tree from `other`, to find out if `this` 757 // is an ancestor. 758 HBasicBlock* current = other; 759 while (current != nullptr) { 760 if (current == this) { 761 return true; 762 } 763 current = current->GetDominator(); 764 } 765 return false; 766 } 767 768 static void UpdateInputsUsers(HInstruction* instruction) { 769 HInputsRef inputs = instruction->GetInputs(); 770 for (size_t i = 0; i < inputs.size(); ++i) { 771 inputs[i]->AddUseAt(instruction, i); 772 } 773 // Environment should be created later. 774 DCHECK(!instruction->HasEnvironment()); 775 } 776 777 void HBasicBlock::ReplaceAndRemoveInstructionWith(HInstruction* initial, 778 HInstruction* replacement) { 779 DCHECK(initial->GetBlock() == this); 780 if (initial->IsControlFlow()) { 781 // We can only replace a control flow instruction with another control flow instruction. 
782 DCHECK(replacement->IsControlFlow()); 783 DCHECK_EQ(replacement->GetId(), -1); 784 DCHECK_EQ(replacement->GetType(), Primitive::kPrimVoid); 785 DCHECK_EQ(initial->GetBlock(), this); 786 DCHECK_EQ(initial->GetType(), Primitive::kPrimVoid); 787 DCHECK(initial->GetUses().empty()); 788 DCHECK(initial->GetEnvUses().empty()); 789 replacement->SetBlock(this); 790 replacement->SetId(GetGraph()->GetNextInstructionId()); 791 instructions_.InsertInstructionBefore(replacement, initial); 792 UpdateInputsUsers(replacement); 793 } else { 794 InsertInstructionBefore(replacement, initial); 795 initial->ReplaceWith(replacement); 796 } 797 RemoveInstruction(initial); 798 } 799 800 static void Add(HInstructionList* instruction_list, 801 HBasicBlock* block, 802 HInstruction* instruction) { 803 DCHECK(instruction->GetBlock() == nullptr); 804 DCHECK_EQ(instruction->GetId(), -1); 805 instruction->SetBlock(block); 806 instruction->SetId(block->GetGraph()->GetNextInstructionId()); 807 UpdateInputsUsers(instruction); 808 instruction_list->AddInstruction(instruction); 809 } 810 811 void HBasicBlock::AddInstruction(HInstruction* instruction) { 812 Add(&instructions_, this, instruction); 813 } 814 815 void HBasicBlock::AddPhi(HPhi* phi) { 816 Add(&phis_, this, phi); 817 } 818 819 void HBasicBlock::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) { 820 DCHECK(!cursor->IsPhi()); 821 DCHECK(!instruction->IsPhi()); 822 DCHECK_EQ(instruction->GetId(), -1); 823 DCHECK_NE(cursor->GetId(), -1); 824 DCHECK_EQ(cursor->GetBlock(), this); 825 DCHECK(!instruction->IsControlFlow()); 826 instruction->SetBlock(this); 827 instruction->SetId(GetGraph()->GetNextInstructionId()); 828 UpdateInputsUsers(instruction); 829 instructions_.InsertInstructionBefore(instruction, cursor); 830 } 831 832 void HBasicBlock::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) { 833 DCHECK(!cursor->IsPhi()); 834 DCHECK(!instruction->IsPhi()); 835 DCHECK_EQ(instruction->GetId(), -1); 
836 DCHECK_NE(cursor->GetId(), -1); 837 DCHECK_EQ(cursor->GetBlock(), this); 838 DCHECK(!instruction->IsControlFlow()); 839 DCHECK(!cursor->IsControlFlow()); 840 instruction->SetBlock(this); 841 instruction->SetId(GetGraph()->GetNextInstructionId()); 842 UpdateInputsUsers(instruction); 843 instructions_.InsertInstructionAfter(instruction, cursor); 844 } 845 846 void HBasicBlock::InsertPhiAfter(HPhi* phi, HPhi* cursor) { 847 DCHECK_EQ(phi->GetId(), -1); 848 DCHECK_NE(cursor->GetId(), -1); 849 DCHECK_EQ(cursor->GetBlock(), this); 850 phi->SetBlock(this); 851 phi->SetId(GetGraph()->GetNextInstructionId()); 852 UpdateInputsUsers(phi); 853 phis_.InsertInstructionAfter(phi, cursor); 854 } 855 856 static void Remove(HInstructionList* instruction_list, 857 HBasicBlock* block, 858 HInstruction* instruction, 859 bool ensure_safety) { 860 DCHECK_EQ(block, instruction->GetBlock()); 861 instruction->SetBlock(nullptr); 862 instruction_list->RemoveInstruction(instruction); 863 if (ensure_safety) { 864 DCHECK(instruction->GetUses().empty()); 865 DCHECK(instruction->GetEnvUses().empty()); 866 RemoveAsUser(instruction); 867 } 868 } 869 870 void HBasicBlock::RemoveInstruction(HInstruction* instruction, bool ensure_safety) { 871 DCHECK(!instruction->IsPhi()); 872 Remove(&instructions_, this, instruction, ensure_safety); 873 } 874 875 void HBasicBlock::RemovePhi(HPhi* phi, bool ensure_safety) { 876 Remove(&phis_, this, phi, ensure_safety); 877 } 878 879 void HBasicBlock::RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety) { 880 if (instruction->IsPhi()) { 881 RemovePhi(instruction->AsPhi(), ensure_safety); 882 } else { 883 RemoveInstruction(instruction, ensure_safety); 884 } 885 } 886 887 void HEnvironment::CopyFrom(const ArenaVector<HInstruction*>& locals) { 888 for (size_t i = 0; i < locals.size(); i++) { 889 HInstruction* instruction = locals[i]; 890 SetRawEnvAt(i, instruction); 891 if (instruction != nullptr) { 892 instruction->AddEnvUseAt(this, i); 893 } 894 } 
}

// Copies another environment's slots, registering this environment as an
// env-use of each non-null entry.
void HEnvironment::CopyFrom(HEnvironment* env) {
  for (size_t i = 0; i < env->Size(); i++) {
    HInstruction* instruction = env->GetInstructionAt(i);
    SetRawEnvAt(i, instruction);
    if (instruction != nullptr) {
      instruction->AddEnvUseAt(this, i);
    }
  }
}

// Like CopyFrom(env), but entries that are loop-header phis of `loop_header`
// are replaced by the phi's first input — the value flowing in from the
// pre-header — since this environment is used at the end of the pre-header.
void HEnvironment::CopyFromWithLoopPhiAdjustment(HEnvironment* env,
                                                 HBasicBlock* loop_header) {
  DCHECK(loop_header->IsLoopHeader());
  for (size_t i = 0; i < env->Size(); i++) {
    HInstruction* instruction = env->GetInstructionAt(i);
    SetRawEnvAt(i, instruction);
    if (instruction == nullptr) {
      continue;
    }
    if (instruction->IsLoopHeaderPhi() && (instruction->GetBlock() == loop_header)) {
      // At the end of the loop pre-header, the corresponding value for instruction
      // is the first input of the phi.
      HInstruction* initial = instruction->AsPhi()->InputAt(0);
      SetRawEnvAt(i, initial);
      initial->AddEnvUseAt(this, i);
    } else {
      instruction->AddEnvUseAt(this, i);
    }
  }
}

// Removes this environment from the env-use list of the instruction held in
// vreg slot `index`, fixing up the cached use-node records afterwards.
void HEnvironment::RemoveAsUserOfInput(size_t index) const {
  const HUserRecord<HEnvironment*>& env_use = vregs_[index];
  HInstruction* user = env_use.GetInstruction();
  auto before_env_use_node = env_use.GetBeforeUseNode();
  user->env_uses_.erase_after(before_env_use_node);
  user->FixUpUserRecordsAfterEnvUseRemoval(before_env_use_node);
}

HInstruction::InstructionKind HInstruction::GetKind() const {
  return GetKindInternal();
}

// Returns the next instruction, skipping over any HParallelMove instructions.
HInstruction* HInstruction::GetNextDisregardingMoves() const {
  HInstruction* next = GetNext();
  while (next != nullptr && next->IsParallelMove()) {
    next = next->GetNext();
  }
  return next;
}

// Returns the previous instruction, skipping over any HParallelMove instructions.
HInstruction* HInstruction::GetPreviousDisregardingMoves() const {
  HInstruction* previous = GetPrevious();
  while (previous != nullptr && previous->IsParallelMove()) {
    previous = previous->GetPrevious();
  }
  return previous;
}

// Appends `instruction` to the intrusive doubly-linked list.
void
HInstructionList::AddInstruction(HInstruction* instruction) {
  if (first_instruction_ == nullptr) {
    DCHECK(last_instruction_ == nullptr);
    first_instruction_ = last_instruction_ = instruction;
  } else {
    last_instruction_->next_ = instruction;
    instruction->previous_ = last_instruction_;
    last_instruction_ = instruction;
  }
}

// Links `instruction` into the list immediately before `cursor`.
void HInstructionList::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(Contains(cursor));
  if (cursor == first_instruction_) {
    cursor->previous_ = instruction;
    instruction->next_ = cursor;
    first_instruction_ = instruction;
  } else {
    instruction->previous_ = cursor->previous_;
    instruction->next_ = cursor;
    cursor->previous_ = instruction;
    instruction->previous_->next_ = instruction;
  }
}

// Links `instruction` into the list immediately after `cursor`.
void HInstructionList::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(Contains(cursor));
  if (cursor == last_instruction_) {
    cursor->next_ = instruction;
    instruction->previous_ = cursor;
    last_instruction_ = instruction;
  } else {
    instruction->next_ = cursor->next_;
    instruction->previous_ = cursor;
    cursor->next_ = instruction;
    instruction->next_->previous_ = instruction;
  }
}

// Unlinks `instruction` from the list. The instruction's own next_/previous_
// pointers are left as-is; only the list structure around it is updated.
void HInstructionList::RemoveInstruction(HInstruction* instruction) {
  if (instruction->previous_ != nullptr) {
    instruction->previous_->next_ = instruction->next_;
  }
  if (instruction->next_ != nullptr) {
    instruction->next_->previous_ = instruction->previous_;
  }
  if (instruction == first_instruction_) {
    first_instruction_ = instruction->next_;
  }
  if (instruction == last_instruction_) {
    last_instruction_ = instruction->previous_;
  }
}

// Linear scan over the list; O(n) in the number of instructions.
bool HInstructionList::Contains(HInstruction* instruction) const {
  for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
    if (it.Current() == instruction) {
      return
          true;
    }
  }
  return false;
}

// Returns whether `instruction1` appears before `instruction2` in this list.
// Both must belong to the same block; aborts if neither is found.
bool HInstructionList::FoundBefore(const HInstruction* instruction1,
                                   const HInstruction* instruction2) const {
  DCHECK_EQ(instruction1->GetBlock(), instruction2->GetBlock());
  for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
    if (it.Current() == instruction1) {
      return true;
    }
    if (it.Current() == instruction2) {
      return false;
    }
  }
  LOG(FATAL) << "Did not find an order between two instructions of the same block.";
  return true;
}

// Returns whether this instruction strictly dominates `other_instruction`:
// block dominance for distinct blocks, list order within a block, with phis
// ordered before non-phis. There is no defined order between two phis.
bool HInstruction::StrictlyDominates(HInstruction* other_instruction) const {
  if (other_instruction == this) {
    // An instruction does not strictly dominate itself.
    return false;
  }
  HBasicBlock* block = GetBlock();
  HBasicBlock* other_block = other_instruction->GetBlock();
  if (block != other_block) {
    return GetBlock()->Dominates(other_instruction->GetBlock());
  } else {
    // If both instructions are in the same block, ensure this
    // instruction comes before `other_instruction`.
    if (IsPhi()) {
      if (!other_instruction->IsPhi()) {
        // Phis appear before non phi-instructions so this instruction
        // dominates `other_instruction`.
        return true;
      } else {
        // There is no order among phis.
        LOG(FATAL) << "There is no dominance between phis of a same block.";
        return false;
      }
    } else {
      // `this` is not a phi.
      if (other_instruction->IsPhi()) {
        // Phis appear before non phi-instructions so this instruction
        // does not dominate `other_instruction`.
        return false;
      } else {
        // Check whether this instruction comes before
        // `other_instruction` in the instruction list.
        return block->GetInstructions().FoundBefore(this, other_instruction);
      }
    }
  }
}

// Drops this instruction's environment after deregistering its env-uses.
void HInstruction::RemoveEnvironment() {
  RemoveEnvironmentUses(this);
  environment_ = nullptr;
}

// Transfers all uses and env-uses of this instruction to `other` by splicing
// the intrusive use lists, then fixes up the cached use-node records.
void HInstruction::ReplaceWith(HInstruction* other) {
  DCHECK(other != nullptr);
  // Note: fixup_end remains valid across splice_after().
  auto fixup_end = other->uses_.empty() ? other->uses_.begin() : ++other->uses_.begin();
  other->uses_.splice_after(other->uses_.before_begin(), uses_);
  other->FixUpUserRecordsAfterUseInsertion(fixup_end);

  // Note: env_fixup_end remains valid across splice_after().
  auto env_fixup_end =
      other->env_uses_.empty() ? other->env_uses_.begin() : ++other->env_uses_.begin();
  other->env_uses_.splice_after(other->env_uses_.before_begin(), env_uses_);
  other->FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);

  DCHECK(uses_.empty());
  DCHECK(env_uses_.empty());
}

// Rewrites only those uses whose user is strictly dominated by `dominator`
// to use `replacement` instead of this instruction.
void HInstruction::ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement) {
  const HUseList<HInstruction*>& uses = GetUses();
  for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
    HInstruction* user = it->GetUser();
    size_t index = it->GetIndex();
    // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
    ++it;
    if (dominator->StrictlyDominates(user)) {
      user->ReplaceInput(replacement, index);
    }
  }
}

// Replaces this instruction's input at `index` with `replacement`, moving the
// corresponding use node from the old input's use list to the replacement's.
void HInstruction::ReplaceInput(HInstruction* replacement, size_t index) {
  HUserRecord<HInstruction*> input_use = InputRecordAt(index);
  if (input_use.GetInstruction() == replacement) {
    // Nothing to do.
    return;
  }
  HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
  // Note: fixup_end remains valid across splice_after().
  auto fixup_end =
      replacement->uses_.empty() ? replacement->uses_.begin() : ++replacement->uses_.begin();
  replacement->uses_.splice_after(replacement->uses_.before_begin(),
                                  input_use.GetInstruction()->uses_,
                                  before_use_node);
  replacement->FixUpUserRecordsAfterUseInsertion(fixup_end);
  input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
}

// Number of vreg slots in the environment, or 0 when there is none.
size_t HInstruction::EnvironmentSize() const {
  return HasEnvironment() ? environment_->Size() : 0;
}

// Appends `input` and registers this instruction as its user at the new index.
void HVariableInputSizeInstruction::AddInput(HInstruction* input) {
  DCHECK(input->GetBlock() != nullptr);
  inputs_.push_back(HUserRecord<HInstruction*>(input));
  input->AddUseAt(this, inputs_.size() - 1);
}

void HVariableInputSizeInstruction::InsertInputAt(size_t index, HInstruction* input) {
  inputs_.insert(inputs_.begin() + index, HUserRecord<HInstruction*>(input));
  input->AddUseAt(this, index);
  // Update indexes in use nodes of inputs that have been pushed further back by the insert().
  for (size_t i = index + 1u, e = inputs_.size(); i < e; ++i) {
    DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i - 1u);
    inputs_[i].GetUseNode()->SetIndex(i);
  }
}

void HVariableInputSizeInstruction::RemoveInputAt(size_t index) {
  RemoveAsUserOfInput(index);
  inputs_.erase(inputs_.begin() + index);
  // Update indexes in use nodes of inputs that have been pulled forward by the erase().
1147 for (size_t i = index, e = inputs_.size(); i < e; ++i) { 1148 DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i + 1u); 1149 inputs_[i].GetUseNode()->SetIndex(i); 1150 } 1151 } 1152 1153 #define DEFINE_ACCEPT(name, super) \ 1154 void H##name::Accept(HGraphVisitor* visitor) { \ 1155 visitor->Visit##name(this); \ 1156 } 1157 1158 FOR_EACH_CONCRETE_INSTRUCTION(DEFINE_ACCEPT) 1159 1160 #undef DEFINE_ACCEPT 1161 1162 void HGraphVisitor::VisitInsertionOrder() { 1163 const ArenaVector<HBasicBlock*>& blocks = graph_->GetBlocks(); 1164 for (HBasicBlock* block : blocks) { 1165 if (block != nullptr) { 1166 VisitBasicBlock(block); 1167 } 1168 } 1169 } 1170 1171 void HGraphVisitor::VisitReversePostOrder() { 1172 for (HBasicBlock* block : graph_->GetReversePostOrder()) { 1173 VisitBasicBlock(block); 1174 } 1175 } 1176 1177 void HGraphVisitor::VisitBasicBlock(HBasicBlock* block) { 1178 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) { 1179 it.Current()->Accept(this); 1180 } 1181 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { 1182 it.Current()->Accept(this); 1183 } 1184 } 1185 1186 HConstant* HTypeConversion::TryStaticEvaluation() const { 1187 HGraph* graph = GetBlock()->GetGraph(); 1188 if (GetInput()->IsIntConstant()) { 1189 int32_t value = GetInput()->AsIntConstant()->GetValue(); 1190 switch (GetResultType()) { 1191 case Primitive::kPrimLong: 1192 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc()); 1193 case Primitive::kPrimFloat: 1194 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc()); 1195 case Primitive::kPrimDouble: 1196 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc()); 1197 default: 1198 return nullptr; 1199 } 1200 } else if (GetInput()->IsLongConstant()) { 1201 int64_t value = GetInput()->AsLongConstant()->GetValue(); 1202 switch (GetResultType()) { 1203 case Primitive::kPrimInt: 1204 return graph->GetIntConstant(static_cast<int32_t>(value), 
GetDexPc()); 1205 case Primitive::kPrimFloat: 1206 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc()); 1207 case Primitive::kPrimDouble: 1208 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc()); 1209 default: 1210 return nullptr; 1211 } 1212 } else if (GetInput()->IsFloatConstant()) { 1213 float value = GetInput()->AsFloatConstant()->GetValue(); 1214 switch (GetResultType()) { 1215 case Primitive::kPrimInt: 1216 if (std::isnan(value)) 1217 return graph->GetIntConstant(0, GetDexPc()); 1218 if (value >= kPrimIntMax) 1219 return graph->GetIntConstant(kPrimIntMax, GetDexPc()); 1220 if (value <= kPrimIntMin) 1221 return graph->GetIntConstant(kPrimIntMin, GetDexPc()); 1222 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc()); 1223 case Primitive::kPrimLong: 1224 if (std::isnan(value)) 1225 return graph->GetLongConstant(0, GetDexPc()); 1226 if (value >= kPrimLongMax) 1227 return graph->GetLongConstant(kPrimLongMax, GetDexPc()); 1228 if (value <= kPrimLongMin) 1229 return graph->GetLongConstant(kPrimLongMin, GetDexPc()); 1230 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc()); 1231 case Primitive::kPrimDouble: 1232 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc()); 1233 default: 1234 return nullptr; 1235 } 1236 } else if (GetInput()->IsDoubleConstant()) { 1237 double value = GetInput()->AsDoubleConstant()->GetValue(); 1238 switch (GetResultType()) { 1239 case Primitive::kPrimInt: 1240 if (std::isnan(value)) 1241 return graph->GetIntConstant(0, GetDexPc()); 1242 if (value >= kPrimIntMax) 1243 return graph->GetIntConstant(kPrimIntMax, GetDexPc()); 1244 if (value <= kPrimLongMin) 1245 return graph->GetIntConstant(kPrimIntMin, GetDexPc()); 1246 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc()); 1247 case Primitive::kPrimLong: 1248 if (std::isnan(value)) 1249 return graph->GetLongConstant(0, GetDexPc()); 1250 if (value >= kPrimLongMax) 1251 return 
graph->GetLongConstant(kPrimLongMax, GetDexPc()); 1252 if (value <= kPrimLongMin) 1253 return graph->GetLongConstant(kPrimLongMin, GetDexPc()); 1254 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc()); 1255 case Primitive::kPrimFloat: 1256 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc()); 1257 default: 1258 return nullptr; 1259 } 1260 } 1261 return nullptr; 1262 } 1263 1264 HConstant* HUnaryOperation::TryStaticEvaluation() const { 1265 if (GetInput()->IsIntConstant()) { 1266 return Evaluate(GetInput()->AsIntConstant()); 1267 } else if (GetInput()->IsLongConstant()) { 1268 return Evaluate(GetInput()->AsLongConstant()); 1269 } else if (kEnableFloatingPointStaticEvaluation) { 1270 if (GetInput()->IsFloatConstant()) { 1271 return Evaluate(GetInput()->AsFloatConstant()); 1272 } else if (GetInput()->IsDoubleConstant()) { 1273 return Evaluate(GetInput()->AsDoubleConstant()); 1274 } 1275 } 1276 return nullptr; 1277 } 1278 1279 HConstant* HBinaryOperation::TryStaticEvaluation() const { 1280 if (GetLeft()->IsIntConstant() && GetRight()->IsIntConstant()) { 1281 return Evaluate(GetLeft()->AsIntConstant(), GetRight()->AsIntConstant()); 1282 } else if (GetLeft()->IsLongConstant()) { 1283 if (GetRight()->IsIntConstant()) { 1284 // The binop(long, int) case is only valid for shifts and rotations. 1285 DCHECK(IsShl() || IsShr() || IsUShr() || IsRor()) << DebugName(); 1286 return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsIntConstant()); 1287 } else if (GetRight()->IsLongConstant()) { 1288 return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsLongConstant()); 1289 } 1290 } else if (GetLeft()->IsNullConstant() && GetRight()->IsNullConstant()) { 1291 // The binop(null, null) case is only valid for equal and not-equal conditions. 
    DCHECK(IsEqual() || IsNotEqual()) << DebugName();
    return Evaluate(GetLeft()->AsNullConstant(), GetRight()->AsNullConstant());
  } else if (kEnableFloatingPointStaticEvaluation) {
    if (GetLeft()->IsFloatConstant() && GetRight()->IsFloatConstant()) {
      return Evaluate(GetLeft()->AsFloatConstant(), GetRight()->AsFloatConstant());
    } else if (GetLeft()->IsDoubleConstant() && GetRight()->IsDoubleConstant()) {
      return Evaluate(GetLeft()->AsDoubleConstant(), GetRight()->AsDoubleConstant());
    }
  }
  return nullptr;
}

// Returns the constant operand, preferring the right-hand side; for
// commutative operations a constant left-hand side also qualifies.
HConstant* HBinaryOperation::GetConstantRight() const {
  if (GetRight()->IsConstant()) {
    return GetRight()->AsConstant();
  } else if (IsCommutative() && GetLeft()->IsConstant()) {
    return GetLeft()->AsConstant();
  } else {
    return nullptr;
  }
}

// If `GetConstantRight()` returns one of the input, this returns the other
// one. Otherwise it returns null.
HInstruction* HBinaryOperation::GetLeastConstantLeft() const {
  HInstruction* most_constant_right = GetConstantRight();
  if (most_constant_right == nullptr) {
    return nullptr;
  } else if (most_constant_right == GetLeft()) {
    return GetRight();
  } else {
    return GetLeft();
  }
}

std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs) {
  switch (rhs) {
    case ComparisonBias::kNoBias:
      return os << "no_bias";
    case ComparisonBias::kGtBias:
      return os << "gt_bias";
    case ComparisonBias::kLtBias:
      return os << "lt_bias";
    default:
      LOG(FATAL) << "Unknown ComparisonBias: " << static_cast<int>(rhs);
      UNREACHABLE();
  }
}

// True when this condition immediately precedes `instruction`, ignoring any
// intervening parallel moves.
bool HCondition::IsBeforeWhenDisregardMoves(HInstruction* instruction) const {
  return this == instruction->GetPreviousDisregardingMoves();
}

// Structural equality: same kind, same instruction-specific data, same type,
// and pairwise-identical inputs. Used by value-numbering style comparisons.
bool HInstruction::Equals(const HInstruction* other) const {
  if (!InstructionTypeEquals(other)) return false;
  DCHECK_EQ(GetKind(), other->GetKind());
  if (!InstructionDataEquals(other)) return false;
  if (GetType() != other->GetType()) return false;
  HConstInputsRef inputs = GetInputs();
  HConstInputsRef other_inputs = other->GetInputs();
  if (inputs.size() != other_inputs.size()) return false;
  for (size_t i = 0; i != inputs.size(); ++i) {
    if (inputs[i] != other_inputs[i]) return false;
  }

  DCHECK_EQ(ComputeHashCode(), other->ComputeHashCode());
  return true;
}

std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs) {
#define DECLARE_CASE(type, super) case HInstruction::k##type: os << #type; break;
  switch (rhs) {
    FOR_EACH_INSTRUCTION(DECLARE_CASE)
    default:
      os << "Unknown instruction kind " << static_cast<int>(rhs);
      break;
  }
#undef DECLARE_CASE
  return os;
}

// Unlinks this instruction from its current position and relinks it right
// before `cursor`, possibly in a different block. The instruction must not be
// the last (control-flow) instruction of its block.
void HInstruction::MoveBefore(HInstruction* cursor, bool do_checks) {
  if (do_checks) {
    DCHECK(!IsPhi());
    DCHECK(!IsControlFlow());
    DCHECK(CanBeMoved() ||
           // HShouldDeoptimizeFlag can only be moved by CHAGuardOptimization.
           IsShouldDeoptimizeFlag());
    DCHECK(!cursor->IsPhi());
  }

  // Unlink from the old position.
  next_->previous_ = previous_;
  if (previous_ != nullptr) {
    previous_->next_ = next_;
  }
  if (block_->instructions_.first_instruction_ == this) {
    block_->instructions_.first_instruction_ = next_;
  }
  DCHECK_NE(block_->instructions_.last_instruction_, this);

  // Relink just before `cursor` and adopt its block.
  previous_ = cursor->previous_;
  if (previous_ != nullptr) {
    previous_->next_ = this;
  }
  next_ = cursor;
  cursor->previous_ = this;
  block_ = cursor->block_;

  if (block_->instructions_.first_instruction_ == cursor) {
    block_->instructions_.first_instruction_ = this;
  }
}

// Hoists a pure, input-free instruction to the closest position that still
// dominates all its users while sitting outside of any loop.
void HInstruction::MoveBeforeFirstUserAndOutOfLoops() {
  DCHECK(!CanThrow());
  DCHECK(!HasSideEffects());
  DCHECK(!HasEnvironmentUses());
  DCHECK(HasNonEnvironmentUses());
  DCHECK(!IsPhi());  // Makes no sense for Phi.
  DCHECK_EQ(InputCount(), 0u);

  // Find the target block.
  auto uses_it = GetUses().begin();
  auto uses_end = GetUses().end();
  HBasicBlock* target_block = uses_it->GetUser()->GetBlock();
  ++uses_it;
  while (uses_it != uses_end && uses_it->GetUser()->GetBlock() == target_block) {
    ++uses_it;
  }
  if (uses_it != uses_end) {
    // This instruction has uses in two or more blocks. Find the common dominator.
    CommonDominator finder(target_block);
    for (; uses_it != uses_end; ++uses_it) {
      finder.Update(uses_it->GetUser()->GetBlock());
    }
    target_block = finder.Get();
    DCHECK(target_block != nullptr);
  }
  // Move to the first dominator not in a loop.
  while (target_block->IsInLoop()) {
    target_block = target_block->GetDominator();
    DCHECK(target_block != nullptr);
  }

  // Find insertion position.
  HInstruction* insert_pos = nullptr;
  for (const HUseListNode<HInstruction*>& use : GetUses()) {
    if (use.GetUser()->GetBlock() == target_block &&
        (insert_pos == nullptr || use.GetUser()->StrictlyDominates(insert_pos))) {
      insert_pos = use.GetUser();
    }
  }
  if (insert_pos == nullptr) {
    // No user in `target_block`, insert before the control flow instruction.
    insert_pos = target_block->GetLastInstruction();
    DCHECK(insert_pos->IsControlFlow());
    // Avoid splitting HCondition from HIf to prevent unnecessary materialization.
    if (insert_pos->IsIf()) {
      HInstruction* if_input = insert_pos->AsIf()->InputAt(0);
      if (if_input == insert_pos->GetPrevious()) {
        insert_pos = if_input;
      }
    }
  }
  MoveBefore(insert_pos);
}

// Splits this block before `cursor`: instructions from `cursor` onwards move
// into a new successor block, this block gets a HGoto to it, and the original
// successors are transferred to the new block. Non-SSA form only.
HBasicBlock* HBasicBlock::SplitBefore(HInstruction* cursor) {
  DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented.";
  DCHECK_EQ(cursor->GetBlock(), this);

  HBasicBlock* new_block = new (GetGraph()->GetArena()) HBasicBlock(GetGraph(),
                                                                    cursor->GetDexPc());
  new_block->instructions_.first_instruction_ = cursor;
  new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
  instructions_.last_instruction_ = cursor->previous_;
  if (cursor->previous_ == nullptr) {
    instructions_.first_instruction_ = nullptr;
  } else {
    cursor->previous_->next_ = nullptr;
    cursor->previous_ = nullptr;
  }

  new_block->instructions_.SetBlockOfInstructions(new_block);
  AddInstruction(new (GetGraph()->GetArena()) HGoto(new_block->GetDexPc()));

  for (HBasicBlock* successor : GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
  }
  new_block->successors_.swap(successors_);
  DCHECK(successors_.empty());
  AddSuccessor(new_block);

  GetGraph()->AddBlock(new_block);
  return new_block;
}
// Inserts a fresh block between this block and all of its predecessors, so
// the new block becomes this block's sole predecessor (and thus its immediate
// dominator). Non-SSA form only; catch blocks are not supported.
HBasicBlock* HBasicBlock::CreateImmediateDominator() {
  DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented.";
  DCHECK(!IsCatchBlock()) << "Support for updating try/catch information not implemented.";

  HBasicBlock* new_block = new (GetGraph()->GetArena()) HBasicBlock(GetGraph(), GetDexPc());

  for (HBasicBlock* predecessor : GetPredecessors()) {
    predecessor->successors_[predecessor->GetSuccessorIndexOf(this)] = new_block;
  }
  new_block->predecessors_.swap(predecessors_);
  DCHECK(predecessors_.empty());
  AddPredecessor(new_block);

  GetGraph()->AddBlock(new_block);
  return new_block;
}

// Like SplitBefore(), but tailored for inlining: no HGoto is appended, the
// block is not registered with the graph, and dominated blocks are
// transferred to the new block. The caller patches up the graph.
HBasicBlock* HBasicBlock::SplitBeforeForInlining(HInstruction* cursor) {
  DCHECK_EQ(cursor->GetBlock(), this);

  HBasicBlock* new_block = new (GetGraph()->GetArena()) HBasicBlock(GetGraph(),
                                                                    cursor->GetDexPc());
  new_block->instructions_.first_instruction_ = cursor;
  new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
  instructions_.last_instruction_ = cursor->previous_;
  if (cursor->previous_ == nullptr) {
    instructions_.first_instruction_ = nullptr;
  } else {
    cursor->previous_->next_ = nullptr;
    cursor->previous_ = nullptr;
  }

  new_block->instructions_.SetBlockOfInstructions(new_block);

  for (HBasicBlock* successor : GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
  }
  new_block->successors_.swap(successors_);
  DCHECK(successors_.empty());

  for (HBasicBlock* dominated : GetDominatedBlocks()) {
    dominated->dominator_ = new_block;
  }
  new_block->dominated_blocks_.swap(dominated_blocks_);
  DCHECK(dominated_blocks_.empty());
  return new_block;
}

// Splits after `cursor` (which must not terminate the block); instructions
// following it, plus successors and dominated blocks, move to the new block.
HBasicBlock* HBasicBlock::SplitAfterForInlining(HInstruction* cursor) {
  DCHECK(!cursor->IsControlFlow());
  DCHECK_NE(instructions_.last_instruction_, cursor);
  DCHECK_EQ(cursor->GetBlock(), this);

  HBasicBlock* new_block = new (GetGraph()->GetArena()) HBasicBlock(GetGraph(), GetDexPc());
  new_block->instructions_.first_instruction_ = cursor->GetNext();
  new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
  cursor->next_->previous_ = nullptr;
  cursor->next_ = nullptr;
  instructions_.last_instruction_ = cursor;

  new_block->instructions_.SetBlockOfInstructions(new_block);
  for (HBasicBlock* successor : GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
  }
  new_block->successors_.swap(successors_);
  DCHECK(successors_.empty());

  for (HBasicBlock* dominated : GetDominatedBlocks()) {
    dominated->dominator_ = new_block;
  }
  new_block->dominated_blocks_.swap(dominated_blocks_);
  DCHECK(dominated_blocks_.empty());
  return new_block;
}

// Returns the try entry that covers this block's successors: the TryBoundary
// itself when this block ends with an entry boundary, this block's own try
// entry when it is a try block not ending with an exit, and null otherwise.
const HTryBoundary* HBasicBlock::ComputeTryEntryOfSuccessors() const {
  if (EndsWithTryBoundary()) {
    HTryBoundary* try_boundary = GetLastInstruction()->AsTryBoundary();
    if (try_boundary->IsEntry()) {
      DCHECK(!IsTryBlock());
      return try_boundary;
    } else {
      DCHECK(IsTryBlock());
      DCHECK(try_catch_information_->GetTryEntry().HasSameExceptionHandlersAs(*try_boundary));
      return nullptr;
    }
  } else if (IsTryBlock()) {
    return &try_catch_information_->GetTryEntry();
  } else {
    return nullptr;
  }
}

// True when any instruction in the block can throw.
bool HBasicBlock::HasThrowingInstructions() const {
  for (HInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
    if (it.Current()->CanThrow()) {
      return true;
    }
  }
  return false;
}

// True when the block holds no phis and exactly one regular instruction.
static bool HasOnlyOneInstruction(const HBasicBlock& block) {
  return block.GetPhis().IsEmpty()
      && !block.GetInstructions().IsEmpty()
      &&
         block.GetFirstInstruction() == block.GetLastInstruction();
}

bool HBasicBlock::IsSingleGoto() const {
  return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsGoto();
}

bool HBasicBlock::IsSingleTryBoundary() const {
  return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsTryBoundary();
}

bool HBasicBlock::EndsWithControlFlowInstruction() const {
  return !GetInstructions().IsEmpty() && GetLastInstruction()->IsControlFlow();
}

bool HBasicBlock::EndsWithIf() const {
  return !GetInstructions().IsEmpty() && GetLastInstruction()->IsIf();
}

bool HBasicBlock::EndsWithTryBoundary() const {
  return !GetInstructions().IsEmpty() && GetLastInstruction()->IsTryBoundary();
}

bool HBasicBlock::HasSinglePhi() const {
  return !GetPhis().IsEmpty() && GetFirstPhi()->GetNext() == nullptr;
}

// Returns the successors reached by normal (non-exceptional) control flow.
ArrayRef<HBasicBlock* const> HBasicBlock::GetNormalSuccessors() const {
  if (EndsWithTryBoundary()) {
    // The normal-flow successor of HTryBoundary is always stored at index zero.
    DCHECK_EQ(successors_[0], GetLastInstruction()->AsTryBoundary()->GetNormalFlowSuccessor());
    return ArrayRef<HBasicBlock* const>(successors_).SubArray(0u, 1u);
  } else {
    // All successors of blocks not ending with TryBoundary are normal.
    return ArrayRef<HBasicBlock* const>(successors_);
  }
}

// Returns the exception-handler successors; empty unless the block ends
// with a TryBoundary.
ArrayRef<HBasicBlock* const> HBasicBlock::GetExceptionalSuccessors() const {
  if (EndsWithTryBoundary()) {
    return GetLastInstruction()->AsTryBoundary()->GetExceptionHandlers();
  } else {
    // Blocks not ending with TryBoundary do not have exceptional successors.
    return ArrayRef<HBasicBlock* const>();
  }
}

// Handler lists must match element-for-element, in the same order.
bool HTryBoundary::HasSameExceptionHandlersAs(const HTryBoundary& other) const {
  ArrayRef<HBasicBlock* const> handlers1 = GetExceptionHandlers();
  ArrayRef<HBasicBlock* const> handlers2 = other.GetExceptionHandlers();

  size_t length = handlers1.size();
  if (length != handlers2.size()) {
    return false;
  }

  // Exception handlers need to be stored in the same order.
  for (size_t i = 0; i < length; ++i) {
    if (handlers1[i] != handlers2[i]) {
      return false;
    }
  }
  return true;
}

// O(n) walk over the intrusive list; the list does not cache its size.
size_t HInstructionList::CountSize() const {
  size_t size = 0;
  HInstruction* current = first_instruction_;
  for (; current != nullptr; current = current->GetNext()) {
    size++;
  }
  return size;
}

// Re-points every instruction in this list at `block`.
void HInstructionList::SetBlockOfInstructions(HBasicBlock* block) const {
  for (HInstruction* current = first_instruction_;
       current != nullptr;
       current = current->GetNext()) {
    current->SetBlock(block);
  }
}

// Splices the whole of `instruction_list` into this list after `cursor`.
// The donor list's own first_/last_ pointers are left untouched.
void HInstructionList::AddAfter(HInstruction* cursor, const HInstructionList& instruction_list) {
  DCHECK(Contains(cursor));
  if (!instruction_list.IsEmpty()) {
    if (cursor == last_instruction_) {
      last_instruction_ = instruction_list.last_instruction_;
    } else {
      cursor->next_->previous_ = instruction_list.last_instruction_;
    }
    instruction_list.last_instruction_->next_ = cursor->next_;
    cursor->next_ = instruction_list.first_instruction_;
    instruction_list.first_instruction_->previous_ = cursor;
  }
}

// Splices the whole of `instruction_list` into this list before `cursor`.
void HInstructionList::AddBefore(HInstruction* cursor, const HInstructionList& instruction_list) {
  DCHECK(Contains(cursor));
  if (!instruction_list.IsEmpty()) {
    if (cursor == first_instruction_) {
      first_instruction_ = instruction_list.first_instruction_;
    } else {
      cursor->previous_->next_ =
          instruction_list.first_instruction_;
    }
    instruction_list.last_instruction_->next_ = cursor;
    instruction_list.first_instruction_->previous_ = cursor->previous_;
    cursor->previous_ = instruction_list.last_instruction_;
  }
}

// Appends the whole of `instruction_list` to this list.
void HInstructionList::Add(const HInstructionList& instruction_list) {
  if (IsEmpty()) {
    first_instruction_ = instruction_list.first_instruction_;
    last_instruction_ = instruction_list.last_instruction_;
  } else {
    AddAfter(last_instruction_, instruction_list);
  }
}

// Should be called on instructions in a dead block in post order. This method
// assumes `insn` has been removed from all users with the exception of catch
// phis because of missing exceptional edges in the graph. It removes the
// instruction from catch phi uses, together with inputs of other catch phis in
// the catch block at the same index, as these must be dead too.
static void RemoveUsesOfDeadInstruction(HInstruction* insn) {
  DCHECK(!insn->HasEnvironmentUses());
  while (insn->HasNonEnvironmentUses()) {
    const HUseListNode<HInstruction*>& use = insn->GetUses().front();
    size_t use_index = use.GetIndex();
    HBasicBlock* user_block = use.GetUser()->GetBlock();
    DCHECK(use.GetUser()->IsPhi() && user_block->IsCatchBlock());
    for (HInstructionIterator phi_it(user_block->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
      phi_it.Current()->AsPhi()->RemoveInputAt(use_index);
    }
  }
}

void HBasicBlock::DisconnectAndDelete() {
  // Dominators must be removed after all the blocks they dominate. This way
  // a loop header is removed last, a requirement for correct loop information
  // iteration.
  DCHECK(dominated_blocks_.empty());

  // The following steps gradually remove the block from all its dependants in
  // post order (b/27683071).
1739 1740 // (1) Store a basic block that we'll use in step (5) to find loops to be updated. 1741 // We need to do this before step (4) which destroys the predecessor list. 1742 HBasicBlock* loop_update_start = this; 1743 if (IsLoopHeader()) { 1744 HLoopInformation* loop_info = GetLoopInformation(); 1745 // All other blocks in this loop should have been removed because the header 1746 // was their dominator. 1747 // Note that we do not remove `this` from `loop_info` as it is unreachable. 1748 DCHECK(!loop_info->IsIrreducible()); 1749 DCHECK_EQ(loop_info->GetBlocks().NumSetBits(), 1u); 1750 DCHECK_EQ(static_cast<uint32_t>(loop_info->GetBlocks().GetHighestBitSet()), GetBlockId()); 1751 loop_update_start = loop_info->GetPreHeader(); 1752 } 1753 1754 // (2) Disconnect the block from its successors and update their phis. 1755 for (HBasicBlock* successor : successors_) { 1756 // Delete this block from the list of predecessors. 1757 size_t this_index = successor->GetPredecessorIndexOf(this); 1758 successor->predecessors_.erase(successor->predecessors_.begin() + this_index); 1759 1760 // Check that `successor` has other predecessors, otherwise `this` is the 1761 // dominator of `successor` which violates the order DCHECKed at the top. 1762 DCHECK(!successor->predecessors_.empty()); 1763 1764 // Remove this block's entries in the successor's phis. Skip exceptional 1765 // successors because catch phi inputs do not correspond to predecessor 1766 // blocks but throwing instructions. The inputs of the catch phis will be 1767 // updated in step (3). 1768 if (!successor->IsCatchBlock()) { 1769 if (successor->predecessors_.size() == 1u) { 1770 // The successor has just one predecessor left. Replace phis with the only 1771 // remaining input. 
for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
          HPhi* phi = phi_it.Current()->AsPhi();
          // The successor had exactly two predecessors before the erase above,
          // so the surviving input is at index `1 - this_index`.
          phi->ReplaceWith(phi->InputAt(1 - this_index));
          successor->RemovePhi(phi);
        }
      } else {
        for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
          phi_it.Current()->AsPhi()->RemoveInputAt(this_index);
        }
      }
    }
  }
  successors_.clear();

  // (3) Remove instructions and phis. Instructions should have no remaining uses
  //     except in catch phis. If an instruction is used by a catch phi at `index`,
  //     remove `index`-th input of all phis in the catch block since they are
  //     guaranteed dead. Note that we may miss dead inputs this way but the
  //     graph will always remain consistent.
  for (HBackwardInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
    HInstruction* insn = it.Current();
    RemoveUsesOfDeadInstruction(insn);
    RemoveInstruction(insn);
  }
  for (HInstructionIterator it(GetPhis()); !it.Done(); it.Advance()) {
    HPhi* insn = it.Current()->AsPhi();
    RemoveUsesOfDeadInstruction(insn);
    RemovePhi(insn);
  }

  // (4) Disconnect the block from its predecessors and update their
  //     control-flow instructions.
  for (HBasicBlock* predecessor : predecessors_) {
    // We should not see any back edges as they would have been removed by step (3).
    DCHECK(!IsInLoop() || !GetLoopInformation()->IsBackEdge(*predecessor));

    HInstruction* last_instruction = predecessor->GetLastInstruction();
    if (last_instruction->IsTryBoundary() && !IsCatchBlock()) {
      // This block is the only normal-flow successor of the TryBoundary which
      // makes `predecessor` dead. Since DCE removes blocks in post order,
      // exception handlers of this TryBoundary were already visited and any
      // remaining handlers therefore must be live. We remove `predecessor` from
      // their list of predecessors.
      DCHECK_EQ(last_instruction->AsTryBoundary()->GetNormalFlowSuccessor(), this);
      while (predecessor->GetSuccessors().size() > 1) {
        HBasicBlock* handler = predecessor->GetSuccessors()[1];
        DCHECK(handler->IsCatchBlock());
        predecessor->RemoveSuccessor(handler);
        handler->RemovePredecessor(predecessor);
      }
    }

    predecessor->RemoveSuccessor(this);
    uint32_t num_pred_successors = predecessor->GetSuccessors().size();
    if (num_pred_successors == 1u) {
      // If we have one successor after removing one, then we must have
      // had an HIf, HPackedSwitch or HTryBoundary, as they have more than one
      // successor. Replace those with a HGoto.
      DCHECK(last_instruction->IsIf() ||
             last_instruction->IsPackedSwitch() ||
             (last_instruction->IsTryBoundary() && IsCatchBlock()));
      predecessor->RemoveInstruction(last_instruction);
      predecessor->AddInstruction(new (graph_->GetArena()) HGoto(last_instruction->GetDexPc()));
    } else if (num_pred_successors == 0u) {
      // The predecessor has no remaining successors and therefore must be dead.
      // We deliberately leave it without a control-flow instruction so that the
      // GraphChecker fails unless it is not removed during the pass too.
      predecessor->RemoveInstruction(last_instruction);
    } else {
      // There are multiple successors left. The removed block might be a successor
      // of a PackedSwitch which will be completely removed (perhaps replaced with
      // a Goto), or we are deleting a catch block from a TryBoundary. In either
      // case, leave `last_instruction` as is for now.
      DCHECK(last_instruction->IsPackedSwitch() ||
             (last_instruction->IsTryBoundary() && IsCatchBlock()));
    }
  }
  predecessors_.clear();

  // (5) Remove the block from all loops it is included in. Skip the inner-most
  //     loop if this is the loop header (see definition of `loop_update_start`)
  //     because the loop header's predecessor list has been destroyed in step (4).
  for (HLoopInformationOutwardIterator it(*loop_update_start); !it.Done(); it.Advance()) {
    HLoopInformation* loop_info = it.Current();
    loop_info->Remove(this);
    if (loop_info->IsBackEdge(*this)) {
      // If this was the last back edge of the loop, we deliberately leave the
      // loop in an inconsistent state and will fail GraphChecker unless the
      // entire loop is removed during the pass.
      loop_info->RemoveBackEdge(this);
    }
  }

  // (6) Disconnect from the dominator.
  dominator_->RemoveDominatedBlock(this);
  SetDominator(nullptr);

  // (7) Delete from the graph, update reverse post order.
  graph_->DeleteDeadEmptyBlock(this);
  SetGraph(nullptr);
}

// Replaces this block's trailing control-flow instruction with all of
// `other`'s instructions, leaving `other`'s instruction list empty.
void HBasicBlock::MergeInstructionsWith(HBasicBlock* other) {
  DCHECK(EndsWithControlFlowInstruction());
  RemoveInstruction(GetLastInstruction());
  instructions_.Add(other->GetInstructions());
  other->instructions_.SetBlockOfInstructions(this);
  other->instructions_.Clear();
}

// Merges the single successor `other` (same graph, no phis — see DCHECKs)
// into this block and deletes `other` from the graph, updating instructions,
// loop membership, successor links and the dominator tree.
void HBasicBlock::MergeWith(HBasicBlock* other) {
  DCHECK_EQ(GetGraph(), other->GetGraph());
  DCHECK(ContainsElement(dominated_blocks_, other));
  DCHECK_EQ(GetSingleSuccessor(), other);
  DCHECK_EQ(other->GetSinglePredecessor(), this);
  DCHECK(other->GetPhis().IsEmpty());

  // Move instructions from `other` to `this`.
  MergeInstructionsWith(other);

  // Remove `other` from the loops it is included in.
  for (HLoopInformationOutwardIterator it(*other); !it.Done(); it.Advance()) {
    HLoopInformation* loop_info = it.Current();
    loop_info->Remove(other);
    if (loop_info->IsBackEdge(*other)) {
      loop_info->ReplaceBackEdge(other, this);
    }
  }

  // Update links to the successors of `other`.
successors_.clear();
  for (HBasicBlock* successor : other->GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
  }
  successors_.swap(other->successors_);
  DCHECK(other->successors_.empty());

  // Update the dominator tree.
  RemoveDominatedBlock(other);
  for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
    dominated->SetDominator(this);
  }
  dominated_blocks_.insert(
      dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
  other->dominated_blocks_.clear();
  other->dominator_ = nullptr;

  // Clear the list of predecessors of `other` in preparation of deleting it.
  other->predecessors_.clear();

  // Delete `other` from the graph. The function updates reverse post order.
  graph_->DeleteDeadEmptyBlock(other);
  other->SetGraph(nullptr);
}

// Merges block `other` from a different (inlined callee) graph into this
// block. Unlike MergeWith, `other` belongs to another graph (see DCHECK_NE)
// and is simply detached from it rather than deleted through the graph.
void HBasicBlock::MergeWithInlined(HBasicBlock* other) {
  DCHECK_NE(GetGraph(), other->GetGraph());
  DCHECK(GetDominatedBlocks().empty());
  DCHECK(GetSuccessors().empty());
  DCHECK(!EndsWithControlFlowInstruction());
  DCHECK(other->GetSinglePredecessor()->IsEntryBlock());
  DCHECK(other->GetPhis().IsEmpty());
  DCHECK(!other->IsInLoop());

  // Move instructions from `other` to `this`.
  instructions_.Add(other->GetInstructions());
  other->instructions_.SetBlockOfInstructions(this);

  // Update links to the successors of `other`.
  successors_.clear();
  for (HBasicBlock* successor : other->GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
  }
  successors_.swap(other->successors_);
  DCHECK(other->successors_.empty());

  // Update the dominator tree.
for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
    dominated->SetDominator(this);
  }
  dominated_blocks_.insert(
      dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
  other->dominated_blocks_.clear();
  other->dominator_ = nullptr;
  other->graph_ = nullptr;
}

// Redirects all predecessor, successor and dominator links from this block
// to `other`, leaving this block detached from the graph.
void HBasicBlock::ReplaceWith(HBasicBlock* other) {
  while (!GetPredecessors().empty()) {
    HBasicBlock* predecessor = GetPredecessors()[0];
    predecessor->ReplaceSuccessor(this, other);
  }
  while (!GetSuccessors().empty()) {
    HBasicBlock* successor = GetSuccessors()[0];
    successor->ReplacePredecessor(this, other);
  }
  for (HBasicBlock* dominated : GetDominatedBlocks()) {
    other->AddDominatedBlock(dominated);
  }
  GetDominator()->ReplaceDominatedBlock(this, other);
  other->SetDominator(GetDominator());
  dominator_ = nullptr;
  graph_ = nullptr;
}

// Removes an already fully disconnected and empty block (see DCHECKs) from
// the graph's block table and reverse post order.
void HGraph::DeleteDeadEmptyBlock(HBasicBlock* block) {
  DCHECK_EQ(block->GetGraph(), this);
  DCHECK(block->GetSuccessors().empty());
  DCHECK(block->GetPredecessors().empty());
  DCHECK(block->GetDominatedBlocks().empty());
  DCHECK(block->GetDominator() == nullptr);
  DCHECK(block->GetInstructions().IsEmpty());
  DCHECK(block->GetPhis().IsEmpty());

  if (block->IsExitBlock()) {
    SetExitBlock(nullptr);
  }

  RemoveElement(reverse_post_order_, block);
  // The slot is nulled, not erased, so other blocks keep their ids.
  blocks_[block->GetBlockId()] = nullptr;
  block->SetGraph(nullptr);
}

// Updates loop membership and try/catch information of a newly added `block`,
// using `reference` as the block whose position `block` is taking over. If
// `replace_if_back_edge` is set and `reference` was a back edge, `block`
// becomes the back edge instead.
void HGraph::UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                                   HBasicBlock* reference,
                                                   bool replace_if_back_edge) {
  if (block->IsLoopHeader()) {
    // Clear the information of which blocks are contained in that loop. Since the
    // information is stored as a bit vector based on block ids, we have to update
    // it, as those block ids were specific to the callee graph and we are now adding
    // these blocks to the caller graph.
    block->GetLoopInformation()->ClearAllBlocks();
  }

  // If not already in a loop, update the loop information.
  if (!block->IsInLoop()) {
    block->SetLoopInformation(reference->GetLoopInformation());
  }

  // If the block is in a loop, update all its outward loops.
  HLoopInformation* loop_info = block->GetLoopInformation();
  if (loop_info != nullptr) {
    for (HLoopInformationOutwardIterator loop_it(*block);
         !loop_it.Done();
         loop_it.Advance()) {
      loop_it.Current()->Add(block);
    }
    if (replace_if_back_edge && loop_info->IsBackEdge(*reference)) {
      loop_info->ReplaceBackEdge(reference, block);
    }
  }

  // Copy TryCatchInformation if `reference` is a try block, not if it is a catch block.
  TryCatchInformation* try_catch_info = reference->IsTryBlock()
      ? reference->GetTryCatchInformation()
      : nullptr;
  block->SetTryCatchInformation(try_catch_info);
}

HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
  DCHECK(HasExitBlock()) << "Unimplemented scenario";
  // Update the environments in this graph to have the invoke's environment
  // as parent.
  {
    // Skip the entry block, we do not need to update the entry's suspend check.
for (HBasicBlock* block : GetReversePostOrderSkipEntryBlock()) {
      for (HInstructionIterator instr_it(block->GetInstructions());
           !instr_it.Done();
           instr_it.Advance()) {
        HInstruction* current = instr_it.Current();
        if (current->NeedsEnvironment()) {
          DCHECK(current->HasEnvironment());
          current->GetEnvironment()->SetAndCopyParentChain(
              outer_graph->GetArena(), invoke->GetEnvironment());
        }
      }
    }
  }
  outer_graph->UpdateMaximumNumberOfOutVRegs(GetMaximumNumberOfOutVRegs());

  // Propagate the callee graph's properties to the caller graph.
  if (HasBoundsChecks()) {
    outer_graph->SetHasBoundsChecks(true);
  }
  if (HasLoops()) {
    outer_graph->SetHasLoops(true);
  }
  if (HasIrreducibleLoops()) {
    outer_graph->SetHasIrreducibleLoops(true);
  }
  if (HasTryCatch()) {
    outer_graph->SetHasTryCatch(true);
  }
  if (HasSIMD()) {
    outer_graph->SetHasSIMD(true);
  }

  HInstruction* return_value = nullptr;
  if (GetBlocks().size() == 3) {
    // Inliner already made sure we don't inline methods that always throw.
    DCHECK(!GetBlocks()[1]->GetLastInstruction()->IsThrow());
    // Simple case of an entry block, a body block, and an exit block.
    // Put the body block's instruction into `invoke`'s block.
    HBasicBlock* body = GetBlocks()[1];
    DCHECK(GetBlocks()[0]->IsEntryBlock());
    DCHECK(GetBlocks()[2]->IsExitBlock());
    DCHECK(!body->IsExitBlock());
    DCHECK(!body->IsInLoop());
    HInstruction* last = body->GetLastInstruction();

    // Note that we add instructions before the invoke only to simplify polymorphic inlining.
    invoke->GetBlock()->instructions_.AddBefore(invoke, body->GetInstructions());
    body->GetInstructions().SetBlockOfInstructions(invoke->GetBlock());

    // Replace the invoke with the return value of the inlined graph.
    if (last->IsReturn()) {
      return_value = last->InputAt(0);
    } else {
      DCHECK(last->IsReturnVoid());
    }

    invoke->GetBlock()->RemoveInstruction(last);
  } else {
    // Need to inline multiple blocks. We split `invoke`'s block
    // into two blocks, merge the first block of the inlined graph into
    // the first half, and replace the exit block of the inlined graph
    // with the second half.
    ArenaAllocator* allocator = outer_graph->GetArena();
    HBasicBlock* at = invoke->GetBlock();
    // Note that we split before the invoke only to simplify polymorphic inlining.
    HBasicBlock* to = at->SplitBeforeForInlining(invoke);

    HBasicBlock* first = entry_block_->GetSuccessors()[0];
    DCHECK(!first->IsInLoop());
    at->MergeWithInlined(first);
    exit_block_->ReplaceWith(to);

    // Update the meta information surrounding blocks:
    // (1) the graph they are now in,
    // (2) the reverse post order of that graph,
    // (3) their potential loop information, inner and outer,
    // (4) try block membership.
    // Note that we do not need to update catch phi inputs because they
    // correspond to the register file of the outer method which the inlinee
    // cannot modify.

    // We don't add the entry block, the exit block, and the first block, which
    // has been merged with `at`.
    static constexpr int kNumberOfSkippedBlocksInCallee = 3;

    // We add the `to` block.
    static constexpr int kNumberOfNewBlocksInCaller = 1;
    size_t blocks_added = (reverse_post_order_.size() - kNumberOfSkippedBlocksInCallee)
        + kNumberOfNewBlocksInCaller;

    // Find the location of `at` in the outer graph's reverse post order. The new
    // blocks will be added after it.
    size_t index_of_at = IndexOfElement(outer_graph->reverse_post_order_, at);
    MakeRoomFor(&outer_graph->reverse_post_order_, blocks_added, index_of_at);

    // Do a reverse post order of the blocks in the callee and do (1), (2), (3)
    // and (4) to the blocks that apply.
    for (HBasicBlock* current : GetReversePostOrder()) {
      if (current != exit_block_ && current != entry_block_ && current != first) {
        DCHECK(current->GetTryCatchInformation() == nullptr);
        DCHECK(current->GetGraph() == this);
        current->SetGraph(outer_graph);
        outer_graph->AddBlock(current);
        outer_graph->reverse_post_order_[++index_of_at] = current;
        UpdateLoopAndTryInformationOfNewBlock(current, at, /* replace_if_back_edge */ false);
      }
    }

    // Do (1), (2), (3) and (4) to `to`.
    to->SetGraph(outer_graph);
    outer_graph->AddBlock(to);
    outer_graph->reverse_post_order_[++index_of_at] = to;
    // Only `to` can become a back edge, as the inlined blocks
    // are predecessors of `to`.
    UpdateLoopAndTryInformationOfNewBlock(to, at, /* replace_if_back_edge */ true);

    // Update all predecessors of the exit block (now the `to` block)
    // to not `HReturn` but `HGoto` instead. Special case throwing blocks
    // to now get the outer graph exit block as successor. Note that the inliner
    // currently doesn't support inlining methods with try/catch.
    HPhi* return_value_phi = nullptr;
    bool rerun_dominance = false;
    bool rerun_loop_analysis = false;
    for (size_t pred = 0; pred < to->GetPredecessors().size(); ++pred) {
      HBasicBlock* predecessor = to->GetPredecessors()[pred];
      HInstruction* last = predecessor->GetLastInstruction();
      if (last->IsThrow()) {
        DCHECK(!at->IsTryBlock());
        predecessor->ReplaceSuccessor(to, outer_graph->GetExitBlock());
        // The predecessor list shrank; revisit the same index on the next iteration.
        --pred;
        // We need to re-run dominance information, as the exit block now has
        // a new dominator.
        rerun_dominance = true;
        if (predecessor->GetLoopInformation() != nullptr) {
          // The exit block and blocks post dominated by the exit block do not belong
          // to any loop. Because we do not compute the post dominators, we need to re-run
          // loop analysis to get the loop information correct.
          rerun_loop_analysis = true;
        }
      } else {
        if (last->IsReturnVoid()) {
          DCHECK(return_value == nullptr);
          DCHECK(return_value_phi == nullptr);
        } else {
          DCHECK(last->IsReturn());
          if (return_value_phi != nullptr) {
            return_value_phi->AddInput(last->InputAt(0));
          } else if (return_value == nullptr) {
            return_value = last->InputAt(0);
          } else {
            // There will be multiple returns.
            return_value_phi = new (allocator) HPhi(
                allocator, kNoRegNumber, 0, HPhi::ToPhiType(invoke->GetType()), to->GetDexPc());
            to->AddPhi(return_value_phi);
            return_value_phi->AddInput(return_value);
            return_value_phi->AddInput(last->InputAt(0));
            return_value = return_value_phi;
          }
        }
        predecessor->AddInstruction(new (allocator) HGoto(last->GetDexPc()));
        predecessor->RemoveInstruction(last);
      }
    }
    if (rerun_loop_analysis) {
      DCHECK(!outer_graph->HasIrreducibleLoops())
          << "Recomputing loop information in graphs with irreducible loops "
          << "is unsupported, as it could lead to loop header changes";
      outer_graph->ClearLoopInformation();
      outer_graph->ClearDominanceInformation();
      outer_graph->BuildDominatorTree();
    } else if (rerun_dominance) {
      outer_graph->ClearDominanceInformation();
      outer_graph->ComputeDominanceInformation();
    }
  }

  // Walk over the entry block and:
  // - Move constants from the entry block to the outer_graph's entry block,
  // - Replace HParameterValue instructions with their real value.
  // - Remove suspend checks, that hold an environment.
  // We must do this after the other blocks have been inlined, otherwise ids of
  // constants could overlap with the inner graph.
  size_t parameter_index = 0;
  for (HInstructionIterator it(entry_block_->GetInstructions()); !it.Done(); it.Advance()) {
    HInstruction* current = it.Current();
    HInstruction* replacement = nullptr;
    if (current->IsNullConstant()) {
      replacement = outer_graph->GetNullConstant(current->GetDexPc());
    } else if (current->IsIntConstant()) {
      replacement = outer_graph->GetIntConstant(
          current->AsIntConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsLongConstant()) {
      replacement = outer_graph->GetLongConstant(
          current->AsLongConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsFloatConstant()) {
      replacement = outer_graph->GetFloatConstant(
          current->AsFloatConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsDoubleConstant()) {
      replacement = outer_graph->GetDoubleConstant(
          current->AsDoubleConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsParameterValue()) {
      if (kIsDebugBuild
          && invoke->IsInvokeStaticOrDirect()
          && invoke->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck()) {
        // Ensure we do not use the last input of `invoke`, as it
        // contains a clinit check which is not an actual argument.
        size_t last_input_index = invoke->InputCount() - 1;
        DCHECK(parameter_index != last_input_index);
      }
      replacement = invoke->InputAt(parameter_index++);
    } else if (current->IsCurrentMethod()) {
      replacement = outer_graph->GetCurrentMethod();
    } else {
      DCHECK(current->IsGoto() || current->IsSuspendCheck());
      entry_block_->RemoveInstruction(current);
    }
    if (replacement != nullptr) {
      current->ReplaceWith(replacement);
      // If the current is the return value then we need to update the latter.
if (current == return_value) {
        DCHECK_EQ(entry_block_, return_value->GetBlock());
        return_value = replacement;
      }
    }
  }

  return return_value;
}

/*
 * Loop will be transformed to:
 *            old_pre_header
 *                  |
 *               if_block
 *                /    \
 *       true_block   false_block
 *                \    /
 *            new_pre_header
 *                  |
 *                header
 */
void HGraph::TransformLoopHeaderForBCE(HBasicBlock* header) {
  DCHECK(header->IsLoopHeader());
  HBasicBlock* old_pre_header = header->GetDominator();

  // Need extra block to avoid critical edge.
  HBasicBlock* if_block = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* true_block = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* false_block = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_pre_header = new (arena_) HBasicBlock(this, header->GetDexPc());
  AddBlock(if_block);
  AddBlock(true_block);
  AddBlock(false_block);
  AddBlock(new_pre_header);

  // Reroute control flow through the diamond shown above.
  header->ReplacePredecessor(old_pre_header, new_pre_header);
  old_pre_header->successors_.clear();
  old_pre_header->dominated_blocks_.clear();

  old_pre_header->AddSuccessor(if_block);
  if_block->AddSuccessor(true_block);  // True successor
  if_block->AddSuccessor(false_block);  // False successor
  true_block->AddSuccessor(new_pre_header);
  false_block->AddSuccessor(new_pre_header);

  // Rebuild the dominator links for the new blocks.
  old_pre_header->dominated_blocks_.push_back(if_block);
  if_block->SetDominator(old_pre_header);
  if_block->dominated_blocks_.push_back(true_block);
  true_block->SetDominator(if_block);
  if_block->dominated_blocks_.push_back(false_block);
  false_block->SetDominator(if_block);
  if_block->dominated_blocks_.push_back(new_pre_header);
  new_pre_header->SetDominator(if_block);
  new_pre_header->dominated_blocks_.push_back(header);
  header->SetDominator(new_pre_header);

  // Fix reverse post order.
  size_t index_of_header = IndexOfElement(reverse_post_order_, header);
  MakeRoomFor(&reverse_post_order_, 4, index_of_header - 1);
  reverse_post_order_[index_of_header++] = if_block;
  reverse_post_order_[index_of_header++] = true_block;
  reverse_post_order_[index_of_header++] = false_block;
  reverse_post_order_[index_of_header++] = new_pre_header;

  // The pre_header can never be a back edge of a loop.
  DCHECK((old_pre_header->GetLoopInformation() == nullptr) ||
         !old_pre_header->GetLoopInformation()->IsBackEdge(*old_pre_header));
  UpdateLoopAndTryInformationOfNewBlock(
      if_block, old_pre_header, /* replace_if_back_edge */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      true_block, old_pre_header, /* replace_if_back_edge */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      false_block, old_pre_header, /* replace_if_back_edge */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      new_pre_header, old_pre_header, /* replace_if_back_edge */ false);
}

// Inserts a new single-block loop (new_header <-> new_body) between the loop
// of `header` and `exit`, reached through a new pre-header. Returns the new
// pre-header. The caller must add the loop's conditional to the new header
// (see comment below).
HBasicBlock* HGraph::TransformLoopForVectorization(HBasicBlock* header,
                                                   HBasicBlock* body,
                                                   HBasicBlock* exit) {
  DCHECK(header->IsLoopHeader());
  HLoopInformation* loop = header->GetLoopInformation();

  // Add new loop blocks.
  HBasicBlock* new_pre_header = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_header = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_body = new (arena_) HBasicBlock(this, header->GetDexPc());
  AddBlock(new_pre_header);
  AddBlock(new_header);
  AddBlock(new_body);

  // Set up control flow.
  header->ReplaceSuccessor(exit, new_pre_header);
  new_pre_header->AddSuccessor(new_header);
  new_header->AddSuccessor(exit);
  new_header->AddSuccessor(new_body);
  new_body->AddSuccessor(new_header);

  // Set up dominators.
header->ReplaceDominatedBlock(exit, new_pre_header);
  new_pre_header->SetDominator(header);
  new_pre_header->dominated_blocks_.push_back(new_header);
  new_header->SetDominator(new_pre_header);
  new_header->dominated_blocks_.push_back(new_body);
  new_body->SetDominator(new_header);
  new_header->dominated_blocks_.push_back(exit);
  exit->SetDominator(new_header);

  // Fix reverse post order.
  size_t index_of_header = IndexOfElement(reverse_post_order_, header);
  MakeRoomFor(&reverse_post_order_, 2, index_of_header);
  reverse_post_order_[++index_of_header] = new_pre_header;
  reverse_post_order_[++index_of_header] = new_header;
  size_t index_of_body = IndexOfElement(reverse_post_order_, body);
  MakeRoomFor(&reverse_post_order_, 1, index_of_body - 1);
  reverse_post_order_[index_of_body] = new_body;

  // Add gotos and suspend check (client must add conditional in header).
  new_pre_header->AddInstruction(new (arena_) HGoto());
  HSuspendCheck* suspend_check = new (arena_) HSuspendCheck(header->GetDexPc());
  new_header->AddInstruction(suspend_check);
  new_body->AddInstruction(new (arena_) HGoto());
  suspend_check->CopyEnvironmentFromWithLoopPhiAdjustment(
      loop->GetSuspendCheck()->GetEnvironment(), header);

  // Update loop information.
  new_header->AddBackEdge(new_body);
  new_header->GetLoopInformation()->SetSuspendCheck(suspend_check);
  new_header->GetLoopInformation()->Populate();
  new_pre_header->SetLoopInformation(loop->GetPreHeader()->GetLoopInformation());  // outward
  // Register the new blocks with all enclosing (outward) loops; the iterator
  // is advanced once first to skip the new loop itself.
  HLoopInformationOutwardIterator it(*new_header);
  for (it.Advance(); !it.Done(); it.Advance()) {
    it.Current()->Add(new_pre_header);
    it.Current()->Add(new_header);
    it.Current()->Add(new_body);
  }
  return new_pre_header;
}

// DCHECKs that `rti` conforms to `upper_bound_rti`: the upper bound must be a
// supertype, and a final (cannot-be-assigned-from-other-types) upper bound
// forces `rti` to be exact. No-op when `rti` is invalid or in release builds.
static void CheckAgainstUpperBound(ReferenceTypeInfo rti, ReferenceTypeInfo upper_bound_rti)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (rti.IsValid()) {
    DCHECK(upper_bound_rti.IsSupertypeOf(rti))
        << " upper_bound_rti: " << upper_bound_rti
        << " rti: " << rti;
    DCHECK(!upper_bound_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes() || rti.IsExact())
        << " upper_bound_rti: " << upper_bound_rti
        << " rti: " << rti;
  }
}

void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
  if (kIsDebugBuild) {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    ScopedObjectAccess soa(Thread::Current());
    DCHECK(rti.IsValid()) << "Invalid RTI for " << DebugName();
    if (IsBoundType()) {
      // Having the test here spares us from making the method virtual just for
      // the sake of a DCHECK.
CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound());
    }
  }
  // Store the reference type as a handle plus an "is exact" packed flag.
  reference_type_handle_ = rti.GetTypeHandle();
  SetPackedFlag<kFlagReferenceTypeIsExact>(rti.IsExact());
}

// Sets the upper bound of this HBoundType. May only be called once (see the
// DCHECK); also records whether the value may be null.
void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null) {
  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    DCHECK(upper_bound.IsValid());
    DCHECK(!upper_bound_.IsValid()) << "Upper bound should only be set once.";
    CheckAgainstUpperBound(GetReferenceTypeInfo(), upper_bound);
  }
  upper_bound_ = upper_bound;
  SetPackedFlag<kFlagUpperCanBeNull>(can_be_null);
}

ReferenceTypeInfo ReferenceTypeInfo::Create(TypeHandle type_handle, bool is_exact) {
  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    DCHECK(IsValidHandle(type_handle));
    if (!is_exact) {
      DCHECK(!type_handle->CannotBeAssignedFromOtherTypes())
          << "Callers of ReferenceTypeInfo::Create should ensure is_exact is properly computed";
    }
  }
  return ReferenceTypeInfo(type_handle, is_exact);
}

std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs) {
  ScopedObjectAccess soa(Thread::Current());
  os << "["
     << " is_valid=" << rhs.IsValid()
     << " type=" << (!rhs.IsValid() ? "?" : mirror::Class::PrettyClass(rhs.GetTypeHandle().Get()))
     << " is_exact=" << rhs.IsExact()
     << " ]";
  return os;
}

bool HInstruction::HasAnyEnvironmentUseBefore(HInstruction* other) {
  // For now, assume that instructions in different blocks may use the
  // environment.
  // TODO: Use the control flow to decide if this is true.
  if (GetBlock() != other->GetBlock()) {
    return true;
  }

  // We know that we are in the same block. Walk from 'this' to 'other',
  // checking to see if there is any instruction with an environment.
HInstruction* current = this;
  for (; current != other && current != nullptr; current = current->GetNext()) {
    // This is a conservative check, as the instruction result may not be in
    // the referenced environment.
    if (current->HasEnvironment()) {
      return true;
    }
  }

  // We should have been called with 'this' before 'other' in the block.
  // Just confirm this.
  DCHECK(current != nullptr);
  return false;
}

// Marks this invoke as the given intrinsic and adjusts its side effects,
// environment/dex-cache requirements and exception status from the values
// recorded in the intrinsic table.
void HInvoke::SetIntrinsic(Intrinsics intrinsic,
                           IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
                           IntrinsicSideEffects side_effects,
                           IntrinsicExceptions exceptions) {
  intrinsic_ = intrinsic;
  IntrinsicOptimizations opt(this);

  // Adjust method's side effects from intrinsic table.
  switch (side_effects) {
    case kNoSideEffects: SetSideEffects(SideEffects::None()); break;
    case kReadSideEffects: SetSideEffects(SideEffects::AllReads()); break;
    case kWriteSideEffects: SetSideEffects(SideEffects::AllWrites()); break;
    case kAllSideEffects: SetSideEffects(SideEffects::AllExceptGCDependency()); break;
  }

  if (needs_env_or_cache == kNoEnvironmentOrCache) {
    opt.SetDoesNotNeedDexCache();
    opt.SetDoesNotNeedEnvironment();
  } else {
    // If we need an environment, that means there will be a call, which can trigger GC.
    SetSideEffects(GetSideEffects().Union(SideEffects::CanTriggerGC()));
  }
  // Adjust method's exception status from intrinsic table.
SetCanThrow(exceptions == kCanThrow);
}

bool HNewInstance::IsStringAlloc() const {
  ScopedObjectAccess soa(Thread::Current());
  return GetReferenceTypeInfo().IsStringClass();
}

bool HInvoke::NeedsEnvironment() const {
  // Non-intrinsic invokes always need an environment; intrinsics may have
  // been flagged as not needing one (see HInvoke::SetIntrinsic).
  if (!IsIntrinsic()) {
    return true;
  }
  IntrinsicOptimizations opt(*this);
  return !opt.GetDoesNotNeedEnvironment();
}

const DexFile& HInvokeStaticOrDirect::GetDexFileForPcRelativeDexCache() const {
  ArtMethod* caller = GetEnvironment()->GetMethod();
  ScopedObjectAccess soa(Thread::Current());
  // `caller` is null for a top-level graph representing a method whose declaring
  // class was not resolved.
  return caller == nullptr ? GetBlock()->GetGraph()->GetDexFile() : *caller->GetDexFile();
}

bool HInvokeStaticOrDirect::NeedsDexCacheOfDeclaringClass() const {
  // Only the kDexCacheViaMethod load kind goes through the dex cache, and
  // intrinsics may have been flagged as not needing it (see SetIntrinsic).
  if (GetMethodLoadKind() != MethodLoadKind::kDexCacheViaMethod) {
    return false;
  }
  if (!IsIntrinsic()) {
    return true;
  }
  IntrinsicOptimizations opt(*this);
  return !opt.GetDoesNotNeedDexCache();
}

std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs) {
  switch (rhs) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      return os << "string_init";
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      return os << "recursive";
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      return os << "direct";
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
      return os << "dex_cache_pc_relative";
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod:
      return os << "dex_cache_via_method";
    default:
      LOG(FATAL) << "Unknown MethodLoadKind: " << static_cast<int>(rhs);
      UNREACHABLE();
  }
}

std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs) {
  switch (rhs) {
    case HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit:
      return os << "explicit";
    case HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit:
      return os << "implicit";
    case HInvokeStaticOrDirect::ClinitCheckRequirement::kNone:
      return os << "none";
    default:
      LOG(FATAL) << "Unknown ClinitCheckRequirement: " << static_cast<int>(rhs);
      UNREACHABLE();
  }
}

bool HLoadClass::InstructionDataEquals(const HInstruction* other) const {
  const HLoadClass* other_load_class = other->AsLoadClass();
  // TODO: To allow GVN for HLoadClass from different dex files, we should compare the type
  // names rather than type indexes. However, we shall also have to re-think the hash code.
  if (type_index_ != other_load_class->type_index_ ||
      GetPackedFields() != other_load_class->GetPackedFields()) {
    return false;
  }
  switch (GetLoadKind()) {
    case LoadKind::kBootImageAddress:
    case LoadKind::kJitTableAddress: {
      // These load kinds are compared by the actual class object.
      ScopedObjectAccess soa(Thread::Current());
      return GetClass().Get() == other_load_class->GetClass().Get();
    }
    default:
      DCHECK(HasTypeReference(GetLoadKind()));
      return IsSameDexFile(GetDexFile(), other_load_class->GetDexFile());
  }
}

void HLoadClass::SetLoadKind(LoadKind load_kind) {
  SetPackedField<LoadKindField>(load_kind);

  // For load kinds other than kDexCacheViaMethod/kReferrersClass the first
  // input is not needed anymore; remove the use and clear the slot.
  if (load_kind != LoadKind::kDexCacheViaMethod &&
      load_kind != LoadKind::kReferrersClass) {
    RemoveAsUserOfInput(0u);
    SetRawInputAt(0u, nullptr);
  }

  if (!NeedsEnvironment()) {
    RemoveEnvironment();
    SetSideEffects(SideEffects::None());
  }
}

std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs) {
  switch (rhs) {
    case HLoadClass::LoadKind::kReferrersClass:
      return os << "ReferrersClass";
    case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      return os << "BootImageLinkTimeAddress";
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      return os << "BootImageLinkTimePcRelative";
    case HLoadClass::LoadKind::kBootImageAddress:
      return os << "BootImageAddress";
    case HLoadClass::LoadKind::kBssEntry:
      return os << "BssEntry";
    case HLoadClass::LoadKind::kJitTableAddress:
      return os << "JitTableAddress";
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      return os << "DexCacheViaMethod";
    default:
      LOG(FATAL) << "Unknown HLoadClass::LoadKind: " << static_cast<int>(rhs);
      UNREACHABLE();
  }
}

bool HLoadString::InstructionDataEquals(const HInstruction* other) const {
  const HLoadString* other_load_string = other->AsLoadString();
  // TODO: To allow GVN for HLoadString from different dex files, we should compare the strings
  // rather than their indexes. However, we shall also have to re-think the hash code.
  if (string_index_ != other_load_string->string_index_ ||
      GetPackedFields() != other_load_string->GetPackedFields()) {
    return false;
  }
  switch (GetLoadKind()) {
    case LoadKind::kBootImageAddress:
    case LoadKind::kJitTableAddress: {
      // These load kinds are compared by the actual string object.
      ScopedObjectAccess soa(Thread::Current());
      return GetString().Get() == other_load_string->GetString().Get();
    }
    default:
      return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile());
  }
}

void HLoadString::SetLoadKind(LoadKind load_kind) {
  // Once sharpened, the load kind should not be changed again.
2648 DCHECK_EQ(GetLoadKind(), LoadKind::kDexCacheViaMethod); 2649 SetPackedField<LoadKindField>(load_kind); 2650 2651 if (load_kind != LoadKind::kDexCacheViaMethod) { 2652 RemoveAsUserOfInput(0u); 2653 SetRawInputAt(0u, nullptr); 2654 } 2655 if (!NeedsEnvironment()) { 2656 RemoveEnvironment(); 2657 SetSideEffects(SideEffects::None()); 2658 } 2659 } 2660 2661 std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs) { 2662 switch (rhs) { 2663 case HLoadString::LoadKind::kBootImageLinkTimeAddress: 2664 return os << "BootImageLinkTimeAddress"; 2665 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: 2666 return os << "BootImageLinkTimePcRelative"; 2667 case HLoadString::LoadKind::kBootImageAddress: 2668 return os << "BootImageAddress"; 2669 case HLoadString::LoadKind::kBssEntry: 2670 return os << "BssEntry"; 2671 case HLoadString::LoadKind::kJitTableAddress: 2672 return os << "JitTableAddress"; 2673 case HLoadString::LoadKind::kDexCacheViaMethod: 2674 return os << "DexCacheViaMethod"; 2675 default: 2676 LOG(FATAL) << "Unknown HLoadString::LoadKind: " << static_cast<int>(rhs); 2677 UNREACHABLE(); 2678 } 2679 } 2680 2681 void HInstruction::RemoveEnvironmentUsers() { 2682 for (const HUseListNode<HEnvironment*>& use : GetEnvUses()) { 2683 HEnvironment* user = use.GetUser(); 2684 user->SetRawEnvAt(use.GetIndex(), nullptr); 2685 } 2686 env_uses_.clear(); 2687 } 2688 2689 // Returns an instruction with the opposite Boolean value from 'cond'. 2690 HInstruction* HGraph::InsertOppositeCondition(HInstruction* cond, HInstruction* cursor) { 2691 ArenaAllocator* allocator = GetArena(); 2692 2693 if (cond->IsCondition() && 2694 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType())) { 2695 // Can't reverse floating point conditions. We have to use HBooleanNot in that case. 
2696 HInstruction* lhs = cond->InputAt(0); 2697 HInstruction* rhs = cond->InputAt(1); 2698 HInstruction* replacement = nullptr; 2699 switch (cond->AsCondition()->GetOppositeCondition()) { // get *opposite* 2700 case kCondEQ: replacement = new (allocator) HEqual(lhs, rhs); break; 2701 case kCondNE: replacement = new (allocator) HNotEqual(lhs, rhs); break; 2702 case kCondLT: replacement = new (allocator) HLessThan(lhs, rhs); break; 2703 case kCondLE: replacement = new (allocator) HLessThanOrEqual(lhs, rhs); break; 2704 case kCondGT: replacement = new (allocator) HGreaterThan(lhs, rhs); break; 2705 case kCondGE: replacement = new (allocator) HGreaterThanOrEqual(lhs, rhs); break; 2706 case kCondB: replacement = new (allocator) HBelow(lhs, rhs); break; 2707 case kCondBE: replacement = new (allocator) HBelowOrEqual(lhs, rhs); break; 2708 case kCondA: replacement = new (allocator) HAbove(lhs, rhs); break; 2709 case kCondAE: replacement = new (allocator) HAboveOrEqual(lhs, rhs); break; 2710 default: 2711 LOG(FATAL) << "Unexpected condition"; 2712 UNREACHABLE(); 2713 } 2714 cursor->GetBlock()->InsertInstructionBefore(replacement, cursor); 2715 return replacement; 2716 } else if (cond->IsIntConstant()) { 2717 HIntConstant* int_const = cond->AsIntConstant(); 2718 if (int_const->IsFalse()) { 2719 return GetIntConstant(1); 2720 } else { 2721 DCHECK(int_const->IsTrue()) << int_const->GetValue(); 2722 return GetIntConstant(0); 2723 } 2724 } else { 2725 HInstruction* replacement = new (allocator) HBooleanNot(cond); 2726 cursor->GetBlock()->InsertInstructionBefore(replacement, cursor); 2727 return replacement; 2728 } 2729 } 2730 2731 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs) { 2732 os << "[" 2733 << " source=" << rhs.GetSource() 2734 << " destination=" << rhs.GetDestination() 2735 << " type=" << rhs.GetType() 2736 << " instruction="; 2737 if (rhs.GetInstruction() != nullptr) { 2738 os << rhs.GetInstruction()->DebugName() << ' ' << 
rhs.GetInstruction()->GetId(); 2739 } else { 2740 os << "null"; 2741 } 2742 os << " ]"; 2743 return os; 2744 } 2745 2746 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs) { 2747 switch (rhs) { 2748 case TypeCheckKind::kUnresolvedCheck: 2749 return os << "unresolved_check"; 2750 case TypeCheckKind::kExactCheck: 2751 return os << "exact_check"; 2752 case TypeCheckKind::kClassHierarchyCheck: 2753 return os << "class_hierarchy_check"; 2754 case TypeCheckKind::kAbstractClassCheck: 2755 return os << "abstract_class_check"; 2756 case TypeCheckKind::kInterfaceCheck: 2757 return os << "interface_check"; 2758 case TypeCheckKind::kArrayObjectCheck: 2759 return os << "array_object_check"; 2760 case TypeCheckKind::kArrayCheck: 2761 return os << "array_check"; 2762 default: 2763 LOG(FATAL) << "Unknown TypeCheckKind: " << static_cast<int>(rhs); 2764 UNREACHABLE(); 2765 } 2766 } 2767 2768 std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind) { 2769 switch (kind) { 2770 case MemBarrierKind::kAnyStore: 2771 return os << "AnyStore"; 2772 case MemBarrierKind::kLoadAny: 2773 return os << "LoadAny"; 2774 case MemBarrierKind::kStoreStore: 2775 return os << "StoreStore"; 2776 case MemBarrierKind::kAnyAny: 2777 return os << "AnyAny"; 2778 case MemBarrierKind::kNTStoreStore: 2779 return os << "NTStoreStore"; 2780 2781 default: 2782 LOG(FATAL) << "Unknown MemBarrierKind: " << static_cast<int>(kind); 2783 UNREACHABLE(); 2784 } 2785 } 2786 2787 } // namespace art 2788