1 /* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 #include "nodes.h" 17 18 #include <cfloat> 19 20 #include "art_method-inl.h" 21 #include "class_linker-inl.h" 22 #include "code_generator.h" 23 #include "common_dominator.h" 24 #include "ssa_builder.h" 25 #include "base/bit_vector-inl.h" 26 #include "base/bit_utils.h" 27 #include "base/stl_util.h" 28 #include "intrinsics.h" 29 #include "mirror/class-inl.h" 30 #include "scoped_thread_state_change-inl.h" 31 32 namespace art { 33 34 // Enable floating-point static evaluation during constant folding 35 // only if all floating-point operations and constants evaluate in the 36 // range and precision of the type used (i.e., 32-bit float, 64-bit 37 // double). 38 static constexpr bool kEnableFloatingPointStaticEvaluation = (FLT_EVAL_METHOD == 0); 39 40 void HGraph::InitializeInexactObjectRTI(VariableSizedHandleScope* handles) { 41 ScopedObjectAccess soa(Thread::Current()); 42 // Create the inexact Object reference type and store it in the HGraph. 
43 ClassLinker* linker = Runtime::Current()->GetClassLinker(); 44 inexact_object_rti_ = ReferenceTypeInfo::Create( 45 handles->NewHandle(linker->GetClassRoot(ClassLinker::kJavaLangObject)), 46 /* is_exact */ false); 47 } 48 49 void HGraph::AddBlock(HBasicBlock* block) { 50 block->SetBlockId(blocks_.size()); 51 blocks_.push_back(block); 52 } 53 54 void HGraph::FindBackEdges(ArenaBitVector* visited) { 55 // "visited" must be empty on entry, it's an output argument for all visited (i.e. live) blocks. 56 DCHECK_EQ(visited->GetHighestBitSet(), -1); 57 58 // Nodes that we're currently visiting, indexed by block id. 59 ArenaBitVector visiting(arena_, blocks_.size(), false, kArenaAllocGraphBuilder); 60 // Number of successors visited from a given node, indexed by block id. 61 ArenaVector<size_t> successors_visited(blocks_.size(), 62 0u, 63 arena_->Adapter(kArenaAllocGraphBuilder)); 64 // Stack of nodes that we're currently visiting (same as marked in "visiting" above). 65 ArenaVector<HBasicBlock*> worklist(arena_->Adapter(kArenaAllocGraphBuilder)); 66 constexpr size_t kDefaultWorklistSize = 8; 67 worklist.reserve(kDefaultWorklistSize); 68 visited->SetBit(entry_block_->GetBlockId()); 69 visiting.SetBit(entry_block_->GetBlockId()); 70 worklist.push_back(entry_block_); 71 72 while (!worklist.empty()) { 73 HBasicBlock* current = worklist.back(); 74 uint32_t current_id = current->GetBlockId(); 75 if (successors_visited[current_id] == current->GetSuccessors().size()) { 76 visiting.ClearBit(current_id); 77 worklist.pop_back(); 78 } else { 79 HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++]; 80 uint32_t successor_id = successor->GetBlockId(); 81 if (visiting.IsBitSet(successor_id)) { 82 DCHECK(ContainsElement(worklist, successor)); 83 successor->AddBackEdge(current); 84 } else if (!visited->IsBitSet(successor_id)) { 85 visited->SetBit(successor_id); 86 visiting.SetBit(successor_id); 87 worklist.push_back(successor); 88 } 89 } 90 } 91 } 92 93 
// Drops `instruction`'s uses of all values referenced from its (possibly
// nested, via GetParent()) environments.
static void RemoveEnvironmentUses(HInstruction* instruction) {
  for (HEnvironment* environment = instruction->GetEnvironment();
       environment != nullptr;
       environment = environment->GetParent()) {
    for (size_t i = 0, e = environment->Size(); i < e; ++i) {
      if (environment->GetInstructionAt(i) != nullptr) {
        environment->RemoveAsUserOfInput(i);
      }
    }
  }
}

// Unregisters `instruction` as a user of all its inputs, both regular and
// environmental.
static void RemoveAsUser(HInstruction* instruction) {
  instruction->RemoveAsUserOfAllInputs();
  RemoveEnvironmentUses(instruction);
}

// For every block not marked in `visited` (i.e. dead), unregister its
// instructions as users, so the dead blocks can later be removed safely.
void HGraph::RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const {
  for (size_t i = 0; i < blocks_.size(); ++i) {
    if (!visited.IsBitSet(i)) {
      HBasicBlock* block = blocks_[i];
      if (block == nullptr) continue;
      DCHECK(block->GetPhis().IsEmpty()) << "Phis are not inserted at this stage";
      for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
        RemoveAsUser(it.Current());
      }
    }
  }
}

// Disconnects and discards every block not marked in `visited`.
void HGraph::RemoveDeadBlocks(const ArenaBitVector& visited) {
  for (size_t i = 0; i < blocks_.size(); ++i) {
    if (!visited.IsBitSet(i)) {
      HBasicBlock* block = blocks_[i];
      if (block == nullptr) continue;
      // We only need to update the successor, which might be live.
      for (HBasicBlock* successor : block->GetSuccessors()) {
        successor->RemovePredecessor(block);
      }
      // Remove the block from the list of blocks, so that further analyses
      // never see it.
      blocks_[i] = nullptr;
      if (block->IsExitBlock()) {
        SetExitBlock(nullptr);
      }
      // Mark the block as removed. This is used by the HGraphBuilder to discard
      // the block as a branch target.
      block->SetGraph(nullptr);
    }
  }
}

GraphAnalysisResult HGraph::BuildDominatorTree() {
  ArenaBitVector visited(arena_, blocks_.size(), false, kArenaAllocGraphBuilder);

  // (1) Find the back edges in the graph doing a DFS traversal.
  FindBackEdges(&visited);

  // (2) Remove instructions and phis from blocks not visited during
  // the initial DFS as users from other instructions, so that
  // users can be safely removed before uses later.
  RemoveInstructionsAsUsersFromDeadBlocks(visited);

  // (3) Remove blocks not visited during the initial DFS.
  // Step (5) requires dead blocks to be removed from the
  // predecessors list of live blocks.
  RemoveDeadBlocks(visited);

  // (4) Simplify the CFG now, so that we don't need to recompute
  // dominators and the reverse post order.
  SimplifyCFG();

  // (5) Compute the dominance information and the reverse post order.
  ComputeDominanceInformation();

  // (6) Analyze loops discovered through back edge analysis, and
  // set the loop information on each block.
  GraphAnalysisResult result = AnalyzeLoops();
  if (result != kAnalysisSuccess) {
    return result;
  }

  // (7) Precompute per-block try membership before entering the SSA builder,
  // which needs the information to build catch block phis from values of
  // locals at throwing instructions inside try blocks.
  ComputeTryBlockInformation();

  return kAnalysisSuccess;
}

void HGraph::ClearDominanceInformation() {
  for (HBasicBlock* block : GetReversePostOrder()) {
    block->ClearDominanceInformation();
  }
  reverse_post_order_.clear();
}

void HGraph::ClearLoopInformation() {
  SetHasIrreducibleLoops(false);
  for (HBasicBlock* block : GetReversePostOrder()) {
    block->SetLoopInformation(nullptr);
  }
}

void HBasicBlock::ClearDominanceInformation() {
  dominated_blocks_.clear();
  dominator_ = nullptr;
}

// Returns the first instruction of the block, skipping over any leading
// parallel moves inserted by register allocation.
HInstruction* HBasicBlock::GetFirstInstructionDisregardMoves() const {
  HInstruction* instruction = GetFirstInstruction();
  while (instruction->IsParallelMove()) {
    instruction = instruction->GetNext();
  }
  return instruction;
}

// Recomputes `successor`'s dominator as the common dominator of its current
// dominator and `block`. Returns true if the dominator changed.
static bool UpdateDominatorOfSuccessor(HBasicBlock* block, HBasicBlock* successor) {
  DCHECK(ContainsElement(block->GetSuccessors(), successor));

  HBasicBlock* old_dominator = successor->GetDominator();
  HBasicBlock* new_dominator =
      (old_dominator == nullptr) ? block
                                 : CommonDominator::ForPair(old_dominator, block);

  if (old_dominator == new_dominator) {
    return false;
  } else {
    successor->SetDominator(new_dominator);
    return true;
  }
}

void HGraph::ComputeDominanceInformation() {
  DCHECK(reverse_post_order_.empty());
  reverse_post_order_.reserve(blocks_.size());
  reverse_post_order_.push_back(entry_block_);

  // Number of visits of a given node, indexed by block id.
  ArenaVector<size_t> visits(blocks_.size(), 0u, arena_->Adapter(kArenaAllocGraphBuilder));
  // Number of successors visited from a given node, indexed by block id.
  ArenaVector<size_t> successors_visited(blocks_.size(),
                                         0u,
                                         arena_->Adapter(kArenaAllocGraphBuilder));
  // Nodes for which we need to visit successors.
  ArenaVector<HBasicBlock*> worklist(arena_->Adapter(kArenaAllocGraphBuilder));
  constexpr size_t kDefaultWorklistSize = 8;
  worklist.reserve(kDefaultWorklistSize);
  worklist.push_back(entry_block_);

  while (!worklist.empty()) {
    HBasicBlock* current = worklist.back();
    uint32_t current_id = current->GetBlockId();
    if (successors_visited[current_id] == current->GetSuccessors().size()) {
      worklist.pop_back();
    } else {
      HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
      UpdateDominatorOfSuccessor(current, successor);

      // Once all the forward edges have been visited, we know the immediate
      // dominator of the block. We can then start visiting its successors.
      if (++visits[successor->GetBlockId()] ==
          successor->GetPredecessors().size() - successor->NumberOfBackEdges()) {
        reverse_post_order_.push_back(successor);
        worklist.push_back(successor);
      }
    }
  }

  // Check if the graph has back edges not dominated by their respective headers.
  // If so, we need to update the dominators of those headers and recursively of
  // their successors. We do that with a fix-point iteration over all blocks.
  // The algorithm is guaranteed to terminate because it loops only if the sum
  // of all dominator chains has decreased in the current iteration.
  bool must_run_fix_point = false;
  for (HBasicBlock* block : blocks_) {
    if (block != nullptr &&
        block->IsLoopHeader() &&
        block->GetLoopInformation()->HasBackEdgeNotDominatedByHeader()) {
      must_run_fix_point = true;
      break;
    }
  }
  if (must_run_fix_point) {
    bool update_occurred = true;
    while (update_occurred) {
      update_occurred = false;
      for (HBasicBlock* block : GetReversePostOrder()) {
        for (HBasicBlock* successor : block->GetSuccessors()) {
          update_occurred |= UpdateDominatorOfSuccessor(block, successor);
        }
      }
    }
  }

  // Make sure that there are no remaining blocks whose dominator information
  // needs to be updated.
  if (kIsDebugBuild) {
    for (HBasicBlock* block : GetReversePostOrder()) {
      for (HBasicBlock* successor : block->GetSuccessors()) {
        DCHECK(!UpdateDominatorOfSuccessor(block, successor));
      }
    }
  }

  // Populate `dominated_blocks_` information after computing all dominators.
  // The potential presence of irreducible loops requires to do it after.
  for (HBasicBlock* block : GetReversePostOrder()) {
    if (!block->IsEntryBlock()) {
      block->GetDominator()->AddDominatedBlock(block);
    }
  }
}

// Inserts a fresh block on the edge `block` -> `successor` and returns it.
HBasicBlock* HGraph::SplitEdge(HBasicBlock* block, HBasicBlock* successor) {
  HBasicBlock* new_block = new (arena_) HBasicBlock(this, successor->GetDexPc());
  AddBlock(new_block);
  // Use `InsertBetween` to ensure the predecessor index and successor index of
  // `block` and `successor` are preserved.
  new_block->InsertBetween(block, successor);
  return new_block;
}

void HGraph::SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor) {
  // Insert a new node between `block` and `successor` to split the
  // critical edge.
  HBasicBlock* new_block = SplitEdge(block, successor);
  new_block->AddInstruction(new (arena_) HGoto(successor->GetDexPc()));
  if (successor->IsLoopHeader()) {
    // If we split at a back edge boundary, make the new block the back edge.
    HLoopInformation* info = successor->GetLoopInformation();
    if (info->IsBackEdge(*block)) {
      info->RemoveBackEdge(block);
      info->AddBackEdge(new_block);
    }
  }
}

// Normalizes the loop headed by `header`: ensures a single pre-header and
// that the pre-header is the first predecessor.
void HGraph::SimplifyLoop(HBasicBlock* header) {
  HLoopInformation* info = header->GetLoopInformation();

  // Make sure the loop has only one pre header. This simplifies SSA building by having
  // to just look at the pre header to know which locals are initialized at entry of the
  // loop. Also, don't allow the entry block to be a pre header: this simplifies inlining
  // this graph.
  size_t number_of_incomings = header->GetPredecessors().size() - info->NumberOfBackEdges();
  if (number_of_incomings != 1 || (GetEntryBlock()->GetSingleSuccessor() == header)) {
    HBasicBlock* pre_header = new (arena_) HBasicBlock(this, header->GetDexPc());
    AddBlock(pre_header);
    pre_header->AddInstruction(new (arena_) HGoto(header->GetDexPc()));

    for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
      HBasicBlock* predecessor = header->GetPredecessors()[pred];
      if (!info->IsBackEdge(*predecessor)) {
        predecessor->ReplaceSuccessor(header, pre_header);
        // ReplaceSuccessor removed `predecessor` from the header's predecessor
        // list; re-examine the same index on the next iteration.
        pred--;
      }
    }
    pre_header->AddSuccessor(header);
  }

  // Make sure the first predecessor of a loop header is the incoming block.
  if (info->IsBackEdge(*header->GetPredecessors()[0])) {
    HBasicBlock* to_swap = header->GetPredecessors()[0];
    for (size_t pred = 1, e = header->GetPredecessors().size(); pred < e; ++pred) {
      HBasicBlock* predecessor = header->GetPredecessors()[pred];
      if (!info->IsBackEdge(*predecessor)) {
        header->predecessors_[pred] = to_swap;
        header->predecessors_[0] = predecessor;
        break;
      }
    }
  }

  HInstruction* first_instruction = header->GetFirstInstruction();
  if (first_instruction != nullptr && first_instruction->IsSuspendCheck()) {
    // Called from DeadBlockElimination. Update SuspendCheck pointer.
    info->SetSuspendCheck(first_instruction->AsSuspendCheck());
  }
}

void HGraph::ComputeTryBlockInformation() {
  // Iterate in reverse post order to propagate try membership information from
  // predecessors to their successors.
  for (HBasicBlock* block : GetReversePostOrder()) {
    if (block->IsEntryBlock() || block->IsCatchBlock()) {
      // Catch blocks after simplification have only exceptional predecessors
      // and hence are never in tries.
      continue;
    }

    // Infer try membership from the first predecessor. Having simplified loops,
    // the first predecessor can never be a back edge and therefore it must have
    // been visited already and had its try membership set.
    HBasicBlock* first_predecessor = block->GetPredecessors()[0];
    DCHECK(!block->IsLoopHeader() || !block->GetLoopInformation()->IsBackEdge(*first_predecessor));
    const HTryBoundary* try_entry = first_predecessor->ComputeTryEntryOfSuccessors();
    if (try_entry != nullptr &&
        (block->GetTryCatchInformation() == nullptr ||
         try_entry != &block->GetTryCatchInformation()->GetTryEntry())) {
      // We are either setting try block membership for the first time or it
      // has changed.
      block->SetTryCatchInformation(new (arena_) TryCatchInformation(*try_entry));
    }
  }
}

void HGraph::SimplifyCFG() {
  // Simplify the CFG for future analysis, and code generation:
  // (1): Split critical edges.
  // (2): Simplify loops by having only one preheader.
  // NOTE: We're appending new blocks inside the loop, so we need to use index because iterators
  // can be invalidated. We remember the initial size to avoid iterating over the new blocks.
  for (size_t block_id = 0u, end = blocks_.size(); block_id != end; ++block_id) {
    HBasicBlock* block = blocks_[block_id];
    if (block == nullptr) continue;
    if (block->GetSuccessors().size() > 1) {
      // Only split normal-flow edges. We cannot split exceptional edges as they
      // are synthesized (approximate real control flow), and we do not need to
      // anyway. Moves that would be inserted there are performed by the runtime.
      ArrayRef<HBasicBlock* const> normal_successors = block->GetNormalSuccessors();
      for (size_t j = 0, e = normal_successors.size(); j < e; ++j) {
        HBasicBlock* successor = normal_successors[j];
        DCHECK(!successor->IsCatchBlock());
        if (successor == exit_block_) {
          // (Throw/Return/ReturnVoid)->TryBoundary->Exit. Special case which we
          // do not want to split because Goto->Exit is not allowed.
          DCHECK(block->IsSingleTryBoundary());
        } else if (successor->GetPredecessors().size() > 1) {
          SplitCriticalEdge(block, successor);
          // SplitCriticalEdge could have invalidated the `normal_successors`
          // ArrayRef. We must re-acquire it.
          normal_successors = block->GetNormalSuccessors();
          DCHECK_EQ(normal_successors[j]->GetSingleSuccessor(), successor);
          DCHECK_EQ(e, normal_successors.size());
        }
      }
    }
    if (block->IsLoopHeader()) {
      SimplifyLoop(block);
    } else if (!block->IsEntryBlock() &&
               block->GetFirstInstruction() != nullptr &&
               block->GetFirstInstruction()->IsSuspendCheck()) {
      // We are being called by the dead code elimination pass, and what used to be
      // a loop got dismantled. Just remove the suspend check.
      block->RemoveInstruction(block->GetFirstInstruction());
    }
  }
}

GraphAnalysisResult HGraph::AnalyzeLoops() const {
  // We iterate post order to ensure we visit inner loops before outer loops.
  // `PopulateRecursive` needs this guarantee to know whether a natural loop
  // contains an irreducible loop.
  for (HBasicBlock* block : GetPostOrder()) {
    if (block->IsLoopHeader()) {
      if (block->IsCatchBlock()) {
        // TODO: Dealing with exceptional back edges could be tricky because
        //       they only approximate the real control flow. Bail out for now.
        return kAnalysisFailThrowCatchLoop;
      }
      block->GetLoopInformation()->Populate();
    }
  }
  return kAnalysisSuccess;
}

// Debug dump of the loop's header, pre-header, back edges, predecessors and
// member blocks.
void HLoopInformation::Dump(std::ostream& os) {
  os << "header: " << header_->GetBlockId() << std::endl;
  os << "pre header: " << GetPreHeader()->GetBlockId() << std::endl;
  for (HBasicBlock* block : back_edges_) {
    os << "back edge: " << block->GetBlockId() << std::endl;
  }
  for (HBasicBlock* block : header_->GetPredecessors()) {
    os << "predecessor: " << block->GetBlockId() << std::endl;
  }
  for (uint32_t idx : blocks_.Indexes()) {
    os << "  in loop: " << idx << std::endl;
  }
}

void HGraph::InsertConstant(HConstant* constant) {
  // New constants are inserted before the SuspendCheck at the bottom of the
  // entry block. Note that this method can be called from the graph builder and
  // the entry block therefore may not end with SuspendCheck->Goto yet.
  HInstruction* insert_before = nullptr;

  HInstruction* gota = entry_block_->GetLastInstruction();
  if (gota != nullptr && gota->IsGoto()) {
    HInstruction* suspend_check = gota->GetPrevious();
    if (suspend_check != nullptr && suspend_check->IsSuspendCheck()) {
      insert_before = suspend_check;
    } else {
      insert_before = gota;
    }
  }

  if (insert_before == nullptr) {
    entry_block_->AddInstruction(constant);
  } else {
    entry_block_->InsertInstructionBefore(constant, insert_before);
  }
}

HNullConstant* HGraph::GetNullConstant(uint32_t dex_pc) {
  // For simplicity, don't bother reviving the cached null constant if it is
  // not null and not in a block. Otherwise, we need to clear the instruction
  // id and/or any invariants the graph is assuming when adding new instructions.
  if ((cached_null_constant_ == nullptr) || (cached_null_constant_->GetBlock() == nullptr)) {
    cached_null_constant_ = new (arena_) HNullConstant(dex_pc);
    cached_null_constant_->SetReferenceTypeInfo(inexact_object_rti_);
    InsertConstant(cached_null_constant_);
  }
  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    DCHECK(cached_null_constant_->GetReferenceTypeInfo().IsValid());
  }
  return cached_null_constant_;
}

HCurrentMethod* HGraph::GetCurrentMethod() {
  // For simplicity, don't bother reviving the cached current method if it is
  // not null and not in a block. Otherwise, we need to clear the instruction
  // id and/or any invariants the graph is assuming when adding new instructions.
  if ((cached_current_method_ == nullptr) || (cached_current_method_->GetBlock() == nullptr)) {
    cached_current_method_ = new (arena_) HCurrentMethod(
        Is64BitInstructionSet(instruction_set_) ? Primitive::kPrimLong : Primitive::kPrimInt,
        entry_block_->GetDexPc());
    if (entry_block_->GetFirstInstruction() == nullptr) {
      entry_block_->AddInstruction(cached_current_method_);
    } else {
      entry_block_->InsertInstructionBefore(
          cached_current_method_, entry_block_->GetFirstInstruction());
    }
  }
  return cached_current_method_;
}

const char* HGraph::GetMethodName() const {
  const DexFile::MethodId& method_id = dex_file_.GetMethodId(method_idx_);
  return dex_file_.GetMethodName(method_id);
}

std::string HGraph::PrettyMethod(bool with_signature) const {
  return dex_file_.PrettyMethod(method_idx_, with_signature);
}

// Returns the cached constant of integral `type` with the given `value`,
// creating and inserting it if needed. Narrow integral types share the
// int32 constant cache.
HConstant* HGraph::GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc) {
  switch (type) {
    case Primitive::Type::kPrimBoolean:
      DCHECK(IsUint<1>(value));
      FALLTHROUGH_INTENDED;
    case Primitive::Type::kPrimByte:
    case Primitive::Type::kPrimChar:
    case Primitive::Type::kPrimShort:
    case Primitive::Type::kPrimInt:
      DCHECK(IsInt(Primitive::ComponentSize(type) * kBitsPerByte, value));
      return GetIntConstant(static_cast<int32_t>(value), dex_pc);

    case Primitive::Type::kPrimLong:
      return GetLongConstant(value, dex_pc);

    default:
      LOG(FATAL) << "Unsupported constant type";
      UNREACHABLE();
  }
}

// Caches float constants keyed by their raw bit pattern, so that +0.0f and
// -0.0f (and distinct NaN payloads) stay distinct.
void HGraph::CacheFloatConstant(HFloatConstant* constant) {
  int32_t value = bit_cast<int32_t, float>(constant->GetValue());
  DCHECK(cached_float_constants_.find(value) == cached_float_constants_.end());
  cached_float_constants_.Overwrite(value, constant);
}

// Caches double constants keyed by their raw bit pattern (see above).
void HGraph::CacheDoubleConstant(HDoubleConstant* constant) {
  int64_t value = bit_cast<int64_t, double>(constant->GetValue());
  DCHECK(cached_double_constants_.find(value) == cached_double_constants_.end());
  cached_double_constants_.Overwrite(value, constant);
}

void HLoopInformation::Add(HBasicBlock* block) {
  blocks_.SetBit(block->GetBlockId());
}

void HLoopInformation::Remove(HBasicBlock* block) {
  blocks_.ClearBit(block->GetBlockId());
}

// Walks predecessors from a back edge up to the header, adding every block
// on the way to this (reducible) loop.
void HLoopInformation::PopulateRecursive(HBasicBlock* block) {
  if (blocks_.IsBitSet(block->GetBlockId())) {
    return;
  }

  blocks_.SetBit(block->GetBlockId());
  block->SetInLoop(this);
  if (block->IsLoopHeader()) {
    // We're visiting loops in post-order, so inner loops must have been
    // populated already.
    DCHECK(block->GetLoopInformation()->IsPopulated());
    if (block->GetLoopInformation()->IsIrreducible()) {
      contains_irreducible_loop_ = true;
    }
  }
  for (HBasicBlock* predecessor : block->GetPredecessors()) {
    PopulateRecursive(predecessor);
  }
}

// Membership computation for irreducible loops: a block belongs to the loop
// once any of its predecessors (or, for a nested header, its pre-header)
// is known to belong. `finalized` marks blocks whose membership is decided.
void HLoopInformation::PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized) {
  size_t block_id = block->GetBlockId();

  // If `block` is in `finalized`, we know its membership in the loop has been
  // decided and it does not need to be revisited.
  if (finalized->IsBitSet(block_id)) {
    return;
  }

  bool is_finalized = false;
  if (block->IsLoopHeader()) {
    // If we hit a loop header in an irreducible loop, we first check if the
    // pre header of that loop belongs to the currently analyzed loop. If it does,
    // then we visit the back edges.
    // Note that we cannot use GetPreHeader, as the loop may have not been populated
    // yet.
    HBasicBlock* pre_header = block->GetPredecessors()[0];
    PopulateIrreducibleRecursive(pre_header, finalized);
    if (blocks_.IsBitSet(pre_header->GetBlockId())) {
      block->SetInLoop(this);
      blocks_.SetBit(block_id);
      finalized->SetBit(block_id);
      is_finalized = true;

      HLoopInformation* info = block->GetLoopInformation();
      for (HBasicBlock* back_edge : info->GetBackEdges()) {
        PopulateIrreducibleRecursive(back_edge, finalized);
      }
    }
  } else {
    // Visit all predecessors. If one predecessor is part of the loop, this
    // block is also part of this loop.
    for (HBasicBlock* predecessor : block->GetPredecessors()) {
      PopulateIrreducibleRecursive(predecessor, finalized);
      if (!is_finalized && blocks_.IsBitSet(predecessor->GetBlockId())) {
        block->SetInLoop(this);
        blocks_.SetBit(block_id);
        finalized->SetBit(block_id);
        is_finalized = true;
      }
    }
  }

  // All predecessors have been recursively visited. Mark finalized if not marked yet.
  if (!is_finalized) {
    finalized->SetBit(block_id);
  }
}

void HLoopInformation::Populate() {
  DCHECK_EQ(blocks_.NumSetBits(), 0u) << "Loop information has already been populated";
  // Populate this loop: starting with the back edge, recursively add predecessors
  // that are not already part of that loop. Set the header as part of the loop
  // to end the recursion.
  // This is a recursive implementation of the algorithm described in
  // "Advanced Compiler Design & Implementation" (Muchnick) p192.
  HGraph* graph = header_->GetGraph();
  blocks_.SetBit(header_->GetBlockId());
  header_->SetInLoop(this);

  bool is_irreducible_loop = HasBackEdgeNotDominatedByHeader();

  if (is_irreducible_loop) {
    ArenaBitVector visited(graph->GetArena(),
                           graph->GetBlocks().size(),
                           /* expandable */ false,
                           kArenaAllocGraphBuilder);
    // Stop marking blocks at the loop header.
    visited.SetBit(header_->GetBlockId());

    for (HBasicBlock* back_edge : GetBackEdges()) {
      PopulateIrreducibleRecursive(back_edge, &visited);
    }
  } else {
    for (HBasicBlock* back_edge : GetBackEdges()) {
      PopulateRecursive(back_edge);
    }
  }

  if (!is_irreducible_loop && graph->IsCompilingOsr()) {
    // When compiling in OSR mode, all loops in the compiled method may be entered
    // from the interpreter. We treat this OSR entry point just like an extra entry
    // to an irreducible loop, so we need to mark the method's loops as irreducible.
    // This does not apply to inlined loops which do not act as OSR entry points.
    if (suspend_check_ == nullptr) {
      // Just building the graph in OSR mode, this loop is not inlined. We never build an
      // inner graph in OSR mode as we can do OSR transition only from the outer method.
      is_irreducible_loop = true;
    } else {
      // Look at the suspend check's environment to determine if the loop was inlined.
      DCHECK(suspend_check_->HasEnvironment());
      if (!suspend_check_->GetEnvironment()->IsFromInlinedInvoke()) {
        is_irreducible_loop = true;
      }
    }
  }
  if (is_irreducible_loop) {
    irreducible_ = true;
    contains_irreducible_loop_ = true;
    graph->SetHasIrreducibleLoops(true);
  }
  graph->SetHasLoops(true);
}

HBasicBlock* HLoopInformation::GetPreHeader() const {
  HBasicBlock* block = header_->GetPredecessors()[0];
  DCHECK(irreducible_ || (block == header_->GetDominator()));
  return block;
}

bool HLoopInformation::Contains(const HBasicBlock& block) const {
  return blocks_.IsBitSet(block.GetBlockId());
}

// Returns whether this loop is nested inside (or equal to) `other`, i.e.
// whether `other` contains this loop's header.
bool HLoopInformation::IsIn(const HLoopInformation& other) const {
  return other.blocks_.IsBitSet(header_->GetBlockId());
}

bool HLoopInformation::IsDefinedOutOfTheLoop(HInstruction* instruction) const {
  return !blocks_.IsBitSet(instruction->GetBlock()->GetBlockId());
}

// Returns the largest lifetime end position over all back edges.
size_t HLoopInformation::GetLifetimeEnd() const {
  size_t last_position = 0;
  for (HBasicBlock* back_edge : GetBackEdges()) {
    last_position = std::max(back_edge->GetLifetimeEnd(), last_position);
  }
  return last_position;
}

// True iff some back edge is not dominated by the header, which makes the
// loop irreducible.
bool HLoopInformation::HasBackEdgeNotDominatedByHeader() const {
  for (HBasicBlock* back_edge : GetBackEdges()) {
    DCHECK(back_edge->GetDominator() != nullptr);
    if (!header_->Dominates(back_edge)) {
      return true;
    }
  }
  return false;
}

bool HLoopInformation::DominatesAllBackEdges(HBasicBlock* block) {
  for (HBasicBlock* back_edge : GetBackEdges()) {
    if (!block->Dominates(back_edge)) {
      return false;
    }
  }
  return true;
}


bool HLoopInformation::HasExitEdge() const {
  // Determine if this loop has at least one exit edge.
  HBlocksInLoopReversePostOrderIterator it_loop(*this);
  for (; !it_loop.Done(); it_loop.Advance()) {
    for (HBasicBlock* successor : it_loop.Current()->GetSuccessors()) {
      if (!Contains(*successor)) {
        return true;
      }
    }
  }
  return false;
}

bool HBasicBlock::Dominates(HBasicBlock* other) const {
  // Walk up the dominator tree from `other`, to find out if `this`
  // is an ancestor.
  HBasicBlock* current = other;
  while (current != nullptr) {
    if (current == this) {
      return true;
    }
    current = current->GetDominator();
  }
  return false;
}

// Registers `instruction` as a user of each of its inputs.
static void UpdateInputsUsers(HInstruction* instruction) {
  HInputsRef inputs = instruction->GetInputs();
  for (size_t i = 0; i < inputs.size(); ++i) {
    inputs[i]->AddUseAt(instruction, i);
  }
  // Environment should be created later.
  DCHECK(!instruction->HasEnvironment());
}

void HBasicBlock::ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                                  HInstruction* replacement) {
  DCHECK(initial->GetBlock() == this);
  if (initial->IsControlFlow()) {
    // We can only replace a control flow instruction with another control flow instruction.
    DCHECK(replacement->IsControlFlow());
    DCHECK_EQ(replacement->GetId(), -1);
    DCHECK_EQ(replacement->GetType(), Primitive::kPrimVoid);
    DCHECK_EQ(initial->GetBlock(), this);
    DCHECK_EQ(initial->GetType(), Primitive::kPrimVoid);
    DCHECK(initial->GetUses().empty());
    DCHECK(initial->GetEnvUses().empty());
    replacement->SetBlock(this);
    replacement->SetId(GetGraph()->GetNextInstructionId());
    instructions_.InsertInstructionBefore(replacement, initial);
    UpdateInputsUsers(replacement);
  } else {
    InsertInstructionBefore(replacement, initial);
    initial->ReplaceWith(replacement);
  }
  RemoveInstruction(initial);
}

// Common helper for appending an instruction or phi to a block's list,
// assigning its id and registering its input uses.
static void Add(HInstructionList* instruction_list,
                HBasicBlock* block,
                HInstruction* instruction) {
  DCHECK(instruction->GetBlock() == nullptr);
  DCHECK_EQ(instruction->GetId(), -1);
  instruction->SetBlock(block);
  instruction->SetId(block->GetGraph()->GetNextInstructionId());
  UpdateInputsUsers(instruction);
  instruction_list->AddInstruction(instruction);
}

void HBasicBlock::AddInstruction(HInstruction* instruction) {
  Add(&instructions_, this, instruction);
}

void HBasicBlock::AddPhi(HPhi* phi) {
  Add(&phis_, this, phi);
}

void HBasicBlock::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(!cursor->IsPhi());
  DCHECK(!instruction->IsPhi());
  DCHECK_EQ(instruction->GetId(), -1);
  DCHECK_NE(cursor->GetId(), -1);
  DCHECK_EQ(cursor->GetBlock(), this);
  DCHECK(!instruction->IsControlFlow());
  instruction->SetBlock(this);
  instruction->SetId(GetGraph()->GetNextInstructionId());
  UpdateInputsUsers(instruction);
  instructions_.InsertInstructionBefore(instruction, cursor);
}

void HBasicBlock::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(!cursor->IsPhi());
  DCHECK(!instruction->IsPhi());
  DCHECK_EQ(instruction->GetId(), -1);
  DCHECK_NE(cursor->GetId(), -1);
  DCHECK_EQ(cursor->GetBlock(), this);
  DCHECK(!instruction->IsControlFlow());
  DCHECK(!cursor->IsControlFlow());
  instruction->SetBlock(this);
  instruction->SetId(GetGraph()->GetNextInstructionId());
  UpdateInputsUsers(instruction);
  instructions_.InsertInstructionAfter(instruction, cursor);
}

void HBasicBlock::InsertPhiAfter(HPhi* phi, HPhi* cursor) {
  DCHECK_EQ(phi->GetId(), -1);
  DCHECK_NE(cursor->GetId(), -1);
  DCHECK_EQ(cursor->GetBlock(), this);
  phi->SetBlock(this);
  phi->SetId(GetGraph()->GetNextInstructionId());
  UpdateInputsUsers(phi);
  phis_.InsertInstructionAfter(phi, cursor);
}

// Common helper for detaching an instruction or phi from a block. With
// `ensure_safety`, also verifies it has no remaining uses and unregisters it
// as a user of its inputs.
static void Remove(HInstructionList* instruction_list,
                   HBasicBlock* block,
                   HInstruction* instruction,
                   bool ensure_safety) {
  DCHECK_EQ(block, instruction->GetBlock());
  instruction->SetBlock(nullptr);
  instruction_list->RemoveInstruction(instruction);
  if (ensure_safety) {
    DCHECK(instruction->GetUses().empty());
    DCHECK(instruction->GetEnvUses().empty());
    RemoveAsUser(instruction);
  }
}

void HBasicBlock::RemoveInstruction(HInstruction* instruction, bool ensure_safety) {
  DCHECK(!instruction->IsPhi());
  Remove(&instructions_, this, instruction, ensure_safety);
}

void HBasicBlock::RemovePhi(HPhi* phi, bool ensure_safety) {
  Remove(&phis_, this, phi, ensure_safety);
}

void HBasicBlock::RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety) {
  if (instruction->IsPhi()) {
    RemovePhi(instruction->AsPhi(), ensure_safety);
  } else {
    RemoveInstruction(instruction, ensure_safety);
  }
}

void HEnvironment::CopyFrom(const ArenaVector<HInstruction*>& locals) {
  for (size_t i = 0; i < locals.size(); i++) {
    HInstruction* instruction = locals[i];
    SetRawEnvAt(i, instruction);
    if (instruction != nullptr) {
      instruction->AddEnvUseAt(this, i);
    }
  }
}

void HEnvironment::CopyFrom(HEnvironment* env) {
  for (size_t i = 0; i < env->Size(); i++) {
    HInstruction* instruction = env->GetInstructionAt(i);
    SetRawEnvAt(i, instruction);
    if (instruction != nullptr) {
      instruction->AddEnvUseAt(this, i);
    }
  }
}

void HEnvironment::CopyFromWithLoopPhiAdjustment(HEnvironment* env,
                                                 HBasicBlock* loop_header) {
  DCHECK(loop_header->IsLoopHeader());
  for (size_t i = 0; i < env->Size(); i++) {
    HInstruction* instruction = env->GetInstructionAt(i);
    SetRawEnvAt(i, instruction);
    if (instruction == nullptr) {
      continue;
    }
    if (instruction->IsLoopHeaderPhi() && (instruction->GetBlock() == loop_header)) {
      // At the end of the loop pre-header, the corresponding value for instruction
      // is the first input of the phi.
      HInstruction* initial = instruction->AsPhi()->InputAt(0);
      SetRawEnvAt(i, initial);
      initial->AddEnvUseAt(this, i);
    } else {
      instruction->AddEnvUseAt(this, i);
    }
  }
}

void HEnvironment::RemoveAsUserOfInput(size_t index) const {
  const HUserRecord<HEnvironment*>& env_use = vregs_[index];
  HInstruction* user = env_use.GetInstruction();
  auto before_env_use_node = env_use.GetBeforeUseNode();
  user->env_uses_.erase_after(before_env_use_node);
  user->FixUpUserRecordsAfterEnvUseRemoval(before_env_use_node);
}

HInstruction::InstructionKind HInstruction::GetKind() const {
  return GetKindInternal();
}

// Returns the next instruction, skipping over parallel moves.
HInstruction* HInstruction::GetNextDisregardingMoves() const {
  HInstruction* next = GetNext();
  while (next != nullptr && next->IsParallelMove()) {
    next = next->GetNext();
  }
  return next;
}

// Returns the previous instruction, skipping over parallel moves.
HInstruction* HInstruction::GetPreviousDisregardingMoves() const {
  HInstruction* previous = GetPrevious();
  while (previous != nullptr && previous->IsParallelMove()) {
    previous = previous->GetPrevious();
  }
  return previous;
}

void
HInstructionList::AddInstruction(HInstruction* instruction) { 966 if (first_instruction_ == nullptr) { 967 DCHECK(last_instruction_ == nullptr); 968 first_instruction_ = last_instruction_ = instruction; 969 } else { 970 DCHECK(last_instruction_ != nullptr); 971 last_instruction_->next_ = instruction; 972 instruction->previous_ = last_instruction_; 973 last_instruction_ = instruction; 974 } 975 } 976 977 void HInstructionList::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) { 978 DCHECK(Contains(cursor)); 979 if (cursor == first_instruction_) { 980 cursor->previous_ = instruction; 981 instruction->next_ = cursor; 982 first_instruction_ = instruction; 983 } else { 984 instruction->previous_ = cursor->previous_; 985 instruction->next_ = cursor; 986 cursor->previous_ = instruction; 987 instruction->previous_->next_ = instruction; 988 } 989 } 990 991 void HInstructionList::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) { 992 DCHECK(Contains(cursor)); 993 if (cursor == last_instruction_) { 994 cursor->next_ = instruction; 995 instruction->previous_ = cursor; 996 last_instruction_ = instruction; 997 } else { 998 instruction->next_ = cursor->next_; 999 instruction->previous_ = cursor; 1000 cursor->next_ = instruction; 1001 instruction->next_->previous_ = instruction; 1002 } 1003 } 1004 1005 void HInstructionList::RemoveInstruction(HInstruction* instruction) { 1006 if (instruction->previous_ != nullptr) { 1007 instruction->previous_->next_ = instruction->next_; 1008 } 1009 if (instruction->next_ != nullptr) { 1010 instruction->next_->previous_ = instruction->previous_; 1011 } 1012 if (instruction == first_instruction_) { 1013 first_instruction_ = instruction->next_; 1014 } 1015 if (instruction == last_instruction_) { 1016 last_instruction_ = instruction->previous_; 1017 } 1018 } 1019 1020 bool HInstructionList::Contains(HInstruction* instruction) const { 1021 for (HInstructionIterator it(*this); !it.Done(); it.Advance()) { 
1022 if (it.Current() == instruction) { 1023 return true; 1024 } 1025 } 1026 return false; 1027 } 1028 1029 bool HInstructionList::FoundBefore(const HInstruction* instruction1, 1030 const HInstruction* instruction2) const { 1031 DCHECK_EQ(instruction1->GetBlock(), instruction2->GetBlock()); 1032 for (HInstructionIterator it(*this); !it.Done(); it.Advance()) { 1033 if (it.Current() == instruction1) { 1034 return true; 1035 } 1036 if (it.Current() == instruction2) { 1037 return false; 1038 } 1039 } 1040 LOG(FATAL) << "Did not find an order between two instructions of the same block."; 1041 return true; 1042 } 1043 1044 bool HInstruction::StrictlyDominates(HInstruction* other_instruction) const { 1045 if (other_instruction == this) { 1046 // An instruction does not strictly dominate itself. 1047 return false; 1048 } 1049 HBasicBlock* block = GetBlock(); 1050 HBasicBlock* other_block = other_instruction->GetBlock(); 1051 if (block != other_block) { 1052 return GetBlock()->Dominates(other_instruction->GetBlock()); 1053 } else { 1054 // If both instructions are in the same block, ensure this 1055 // instruction comes before `other_instruction`. 1056 if (IsPhi()) { 1057 if (!other_instruction->IsPhi()) { 1058 // Phis appear before non phi-instructions so this instruction 1059 // dominates `other_instruction`. 1060 return true; 1061 } else { 1062 // There is no order among phis. 1063 LOG(FATAL) << "There is no dominance between phis of a same block."; 1064 return false; 1065 } 1066 } else { 1067 // `this` is not a phi. 1068 if (other_instruction->IsPhi()) { 1069 // Phis appear before non phi-instructions so this instruction 1070 // does not dominate `other_instruction`. 1071 return false; 1072 } else { 1073 // Check whether this instruction comes before 1074 // `other_instruction` in the instruction list. 
1075 return block->GetInstructions().FoundBefore(this, other_instruction); 1076 } 1077 } 1078 } 1079 } 1080 1081 void HInstruction::RemoveEnvironment() { 1082 RemoveEnvironmentUses(this); 1083 environment_ = nullptr; 1084 } 1085 1086 void HInstruction::ReplaceWith(HInstruction* other) { 1087 DCHECK(other != nullptr); 1088 // Note: fixup_end remains valid across splice_after(). 1089 auto fixup_end = other->uses_.empty() ? other->uses_.begin() : ++other->uses_.begin(); 1090 other->uses_.splice_after(other->uses_.before_begin(), uses_); 1091 other->FixUpUserRecordsAfterUseInsertion(fixup_end); 1092 1093 // Note: env_fixup_end remains valid across splice_after(). 1094 auto env_fixup_end = 1095 other->env_uses_.empty() ? other->env_uses_.begin() : ++other->env_uses_.begin(); 1096 other->env_uses_.splice_after(other->env_uses_.before_begin(), env_uses_); 1097 other->FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end); 1098 1099 DCHECK(uses_.empty()); 1100 DCHECK(env_uses_.empty()); 1101 } 1102 1103 void HInstruction::ReplaceUsesDominatedBy(HInstruction* dominator, HInstruction* replacement) { 1104 const HUseList<HInstruction*>& uses = GetUses(); 1105 for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) { 1106 HInstruction* user = it->GetUser(); 1107 size_t index = it->GetIndex(); 1108 // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput(). 1109 ++it; 1110 if (dominator->StrictlyDominates(user)) { 1111 user->ReplaceInput(replacement, index); 1112 } 1113 } 1114 } 1115 1116 void HInstruction::ReplaceInput(HInstruction* replacement, size_t index) { 1117 HUserRecord<HInstruction*> input_use = InputRecordAt(index); 1118 if (input_use.GetInstruction() == replacement) { 1119 // Nothing to do. 1120 return; 1121 } 1122 HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode(); 1123 // Note: fixup_end remains valid across splice_after(). 1124 auto fixup_end = 1125 replacement->uses_.empty() ? 
replacement->uses_.begin() : ++replacement->uses_.begin(); 1126 replacement->uses_.splice_after(replacement->uses_.before_begin(), 1127 input_use.GetInstruction()->uses_, 1128 before_use_node); 1129 replacement->FixUpUserRecordsAfterUseInsertion(fixup_end); 1130 input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node); 1131 } 1132 1133 size_t HInstruction::EnvironmentSize() const { 1134 return HasEnvironment() ? environment_->Size() : 0; 1135 } 1136 1137 void HVariableInputSizeInstruction::AddInput(HInstruction* input) { 1138 DCHECK(input->GetBlock() != nullptr); 1139 inputs_.push_back(HUserRecord<HInstruction*>(input)); 1140 input->AddUseAt(this, inputs_.size() - 1); 1141 } 1142 1143 void HVariableInputSizeInstruction::InsertInputAt(size_t index, HInstruction* input) { 1144 inputs_.insert(inputs_.begin() + index, HUserRecord<HInstruction*>(input)); 1145 input->AddUseAt(this, index); 1146 // Update indexes in use nodes of inputs that have been pushed further back by the insert(). 1147 for (size_t i = index + 1u, e = inputs_.size(); i < e; ++i) { 1148 DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i - 1u); 1149 inputs_[i].GetUseNode()->SetIndex(i); 1150 } 1151 } 1152 1153 void HVariableInputSizeInstruction::RemoveInputAt(size_t index) { 1154 RemoveAsUserOfInput(index); 1155 inputs_.erase(inputs_.begin() + index); 1156 // Update indexes in use nodes of inputs that have been pulled forward by the erase(). 
1157 for (size_t i = index, e = inputs_.size(); i < e; ++i) { 1158 DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i + 1u); 1159 inputs_[i].GetUseNode()->SetIndex(i); 1160 } 1161 } 1162 1163 void HVariableInputSizeInstruction::RemoveAllInputs() { 1164 RemoveAsUserOfAllInputs(); 1165 DCHECK(!HasNonEnvironmentUses()); 1166 1167 inputs_.clear(); 1168 DCHECK_EQ(0u, InputCount()); 1169 } 1170 1171 void HConstructorFence::RemoveConstructorFences(HInstruction* instruction) { 1172 DCHECK(instruction->GetBlock() != nullptr); 1173 // Removing constructor fences only makes sense for instructions with an object return type. 1174 DCHECK_EQ(Primitive::kPrimNot, instruction->GetType()); 1175 1176 // Efficient implementation that simultaneously (in one pass): 1177 // * Scans the uses list for all constructor fences. 1178 // * Deletes that constructor fence from the uses list of `instruction`. 1179 // * Deletes `instruction` from the constructor fence's inputs. 1180 // * Deletes the constructor fence if it now has 0 inputs. 1181 1182 const HUseList<HInstruction*>& uses = instruction->GetUses(); 1183 // Warning: Although this is "const", we might mutate the list when calling RemoveInputAt. 1184 for (auto it = uses.begin(), end = uses.end(); it != end; ) { 1185 const HUseListNode<HInstruction*>& use_node = *it; 1186 HInstruction* const use_instruction = use_node.GetUser(); 1187 1188 // Advance the iterator immediately once we fetch the use_node. 1189 // Warning: If the input is removed, the current iterator becomes invalid. 1190 ++it; 1191 1192 if (use_instruction->IsConstructorFence()) { 1193 HConstructorFence* ctor_fence = use_instruction->AsConstructorFence(); 1194 size_t input_index = use_node.GetIndex(); 1195 1196 // Process the candidate instruction for removal 1197 // from the graph. 1198 1199 // Constructor fence instructions are never 1200 // used by other instructions. 1201 // 1202 // If we wanted to make this more generic, it 1203 // could be a runtime if statement. 
1204 DCHECK(!ctor_fence->HasUses()); 1205 1206 // A constructor fence's return type is "kPrimVoid" 1207 // and therefore it can't have any environment uses. 1208 DCHECK(!ctor_fence->HasEnvironmentUses()); 1209 1210 // Remove the inputs first, otherwise removing the instruction 1211 // will try to remove its uses while we are already removing uses 1212 // and this operation will fail. 1213 DCHECK_EQ(instruction, ctor_fence->InputAt(input_index)); 1214 1215 // Removing the input will also remove the `use_node`. 1216 // (Do not look at `use_node` after this, it will be a dangling reference). 1217 ctor_fence->RemoveInputAt(input_index); 1218 1219 // Once all inputs are removed, the fence is considered dead and 1220 // is removed. 1221 if (ctor_fence->InputCount() == 0u) { 1222 ctor_fence->GetBlock()->RemoveInstruction(ctor_fence); 1223 } 1224 } 1225 } 1226 1227 if (kIsDebugBuild) { 1228 // Post-condition checks: 1229 // * None of the uses of `instruction` are a constructor fence. 1230 // * The `instruction` itself did not get removed from a block. 1231 for (const HUseListNode<HInstruction*>& use_node : instruction->GetUses()) { 1232 CHECK(!use_node.GetUser()->IsConstructorFence()); 1233 } 1234 CHECK(instruction->GetBlock() != nullptr); 1235 } 1236 } 1237 1238 HInstruction* HConstructorFence::GetAssociatedAllocation() { 1239 HInstruction* new_instance_inst = GetPrevious(); 1240 // Check if the immediately preceding instruction is a new-instance/new-array. 1241 // Otherwise this fence is for protecting final fields. 1242 if (new_instance_inst != nullptr && 1243 (new_instance_inst->IsNewInstance() || new_instance_inst->IsNewArray())) { 1244 // TODO: Need to update this code to handle multiple inputs. 
1245 DCHECK_EQ(InputCount(), 1u); 1246 return new_instance_inst; 1247 } else { 1248 return nullptr; 1249 } 1250 } 1251 1252 #define DEFINE_ACCEPT(name, super) \ 1253 void H##name::Accept(HGraphVisitor* visitor) { \ 1254 visitor->Visit##name(this); \ 1255 } 1256 1257 FOR_EACH_CONCRETE_INSTRUCTION(DEFINE_ACCEPT) 1258 1259 #undef DEFINE_ACCEPT 1260 1261 void HGraphVisitor::VisitInsertionOrder() { 1262 const ArenaVector<HBasicBlock*>& blocks = graph_->GetBlocks(); 1263 for (HBasicBlock* block : blocks) { 1264 if (block != nullptr) { 1265 VisitBasicBlock(block); 1266 } 1267 } 1268 } 1269 1270 void HGraphVisitor::VisitReversePostOrder() { 1271 for (HBasicBlock* block : graph_->GetReversePostOrder()) { 1272 VisitBasicBlock(block); 1273 } 1274 } 1275 1276 void HGraphVisitor::VisitBasicBlock(HBasicBlock* block) { 1277 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) { 1278 it.Current()->Accept(this); 1279 } 1280 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) { 1281 it.Current()->Accept(this); 1282 } 1283 } 1284 1285 HConstant* HTypeConversion::TryStaticEvaluation() const { 1286 HGraph* graph = GetBlock()->GetGraph(); 1287 if (GetInput()->IsIntConstant()) { 1288 int32_t value = GetInput()->AsIntConstant()->GetValue(); 1289 switch (GetResultType()) { 1290 case Primitive::kPrimLong: 1291 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc()); 1292 case Primitive::kPrimFloat: 1293 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc()); 1294 case Primitive::kPrimDouble: 1295 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc()); 1296 default: 1297 return nullptr; 1298 } 1299 } else if (GetInput()->IsLongConstant()) { 1300 int64_t value = GetInput()->AsLongConstant()->GetValue(); 1301 switch (GetResultType()) { 1302 case Primitive::kPrimInt: 1303 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc()); 1304 case Primitive::kPrimFloat: 1305 return 
graph->GetFloatConstant(static_cast<float>(value), GetDexPc()); 1306 case Primitive::kPrimDouble: 1307 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc()); 1308 default: 1309 return nullptr; 1310 } 1311 } else if (GetInput()->IsFloatConstant()) { 1312 float value = GetInput()->AsFloatConstant()->GetValue(); 1313 switch (GetResultType()) { 1314 case Primitive::kPrimInt: 1315 if (std::isnan(value)) 1316 return graph->GetIntConstant(0, GetDexPc()); 1317 if (value >= kPrimIntMax) 1318 return graph->GetIntConstant(kPrimIntMax, GetDexPc()); 1319 if (value <= kPrimIntMin) 1320 return graph->GetIntConstant(kPrimIntMin, GetDexPc()); 1321 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc()); 1322 case Primitive::kPrimLong: 1323 if (std::isnan(value)) 1324 return graph->GetLongConstant(0, GetDexPc()); 1325 if (value >= kPrimLongMax) 1326 return graph->GetLongConstant(kPrimLongMax, GetDexPc()); 1327 if (value <= kPrimLongMin) 1328 return graph->GetLongConstant(kPrimLongMin, GetDexPc()); 1329 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc()); 1330 case Primitive::kPrimDouble: 1331 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc()); 1332 default: 1333 return nullptr; 1334 } 1335 } else if (GetInput()->IsDoubleConstant()) { 1336 double value = GetInput()->AsDoubleConstant()->GetValue(); 1337 switch (GetResultType()) { 1338 case Primitive::kPrimInt: 1339 if (std::isnan(value)) 1340 return graph->GetIntConstant(0, GetDexPc()); 1341 if (value >= kPrimIntMax) 1342 return graph->GetIntConstant(kPrimIntMax, GetDexPc()); 1343 if (value <= kPrimLongMin) 1344 return graph->GetIntConstant(kPrimIntMin, GetDexPc()); 1345 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc()); 1346 case Primitive::kPrimLong: 1347 if (std::isnan(value)) 1348 return graph->GetLongConstant(0, GetDexPc()); 1349 if (value >= kPrimLongMax) 1350 return graph->GetLongConstant(kPrimLongMax, GetDexPc()); 1351 if (value <= 
kPrimLongMin) 1352 return graph->GetLongConstant(kPrimLongMin, GetDexPc()); 1353 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc()); 1354 case Primitive::kPrimFloat: 1355 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc()); 1356 default: 1357 return nullptr; 1358 } 1359 } 1360 return nullptr; 1361 } 1362 1363 HConstant* HUnaryOperation::TryStaticEvaluation() const { 1364 if (GetInput()->IsIntConstant()) { 1365 return Evaluate(GetInput()->AsIntConstant()); 1366 } else if (GetInput()->IsLongConstant()) { 1367 return Evaluate(GetInput()->AsLongConstant()); 1368 } else if (kEnableFloatingPointStaticEvaluation) { 1369 if (GetInput()->IsFloatConstant()) { 1370 return Evaluate(GetInput()->AsFloatConstant()); 1371 } else if (GetInput()->IsDoubleConstant()) { 1372 return Evaluate(GetInput()->AsDoubleConstant()); 1373 } 1374 } 1375 return nullptr; 1376 } 1377 1378 HConstant* HBinaryOperation::TryStaticEvaluation() const { 1379 if (GetLeft()->IsIntConstant() && GetRight()->IsIntConstant()) { 1380 return Evaluate(GetLeft()->AsIntConstant(), GetRight()->AsIntConstant()); 1381 } else if (GetLeft()->IsLongConstant()) { 1382 if (GetRight()->IsIntConstant()) { 1383 // The binop(long, int) case is only valid for shifts and rotations. 1384 DCHECK(IsShl() || IsShr() || IsUShr() || IsRor()) << DebugName(); 1385 return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsIntConstant()); 1386 } else if (GetRight()->IsLongConstant()) { 1387 return Evaluate(GetLeft()->AsLongConstant(), GetRight()->AsLongConstant()); 1388 } 1389 } else if (GetLeft()->IsNullConstant() && GetRight()->IsNullConstant()) { 1390 // The binop(null, null) case is only valid for equal and not-equal conditions. 
1391 DCHECK(IsEqual() || IsNotEqual()) << DebugName(); 1392 return Evaluate(GetLeft()->AsNullConstant(), GetRight()->AsNullConstant()); 1393 } else if (kEnableFloatingPointStaticEvaluation) { 1394 if (GetLeft()->IsFloatConstant() && GetRight()->IsFloatConstant()) { 1395 return Evaluate(GetLeft()->AsFloatConstant(), GetRight()->AsFloatConstant()); 1396 } else if (GetLeft()->IsDoubleConstant() && GetRight()->IsDoubleConstant()) { 1397 return Evaluate(GetLeft()->AsDoubleConstant(), GetRight()->AsDoubleConstant()); 1398 } 1399 } 1400 return nullptr; 1401 } 1402 1403 HConstant* HBinaryOperation::GetConstantRight() const { 1404 if (GetRight()->IsConstant()) { 1405 return GetRight()->AsConstant(); 1406 } else if (IsCommutative() && GetLeft()->IsConstant()) { 1407 return GetLeft()->AsConstant(); 1408 } else { 1409 return nullptr; 1410 } 1411 } 1412 1413 // If `GetConstantRight()` returns one of the input, this returns the other 1414 // one. Otherwise it returns null. 1415 HInstruction* HBinaryOperation::GetLeastConstantLeft() const { 1416 HInstruction* most_constant_right = GetConstantRight(); 1417 if (most_constant_right == nullptr) { 1418 return nullptr; 1419 } else if (most_constant_right == GetLeft()) { 1420 return GetRight(); 1421 } else { 1422 return GetLeft(); 1423 } 1424 } 1425 1426 std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs) { 1427 switch (rhs) { 1428 case ComparisonBias::kNoBias: 1429 return os << "no_bias"; 1430 case ComparisonBias::kGtBias: 1431 return os << "gt_bias"; 1432 case ComparisonBias::kLtBias: 1433 return os << "lt_bias"; 1434 default: 1435 LOG(FATAL) << "Unknown ComparisonBias: " << static_cast<int>(rhs); 1436 UNREACHABLE(); 1437 } 1438 } 1439 1440 bool HCondition::IsBeforeWhenDisregardMoves(HInstruction* instruction) const { 1441 return this == instruction->GetPreviousDisregardingMoves(); 1442 } 1443 1444 bool HInstruction::Equals(const HInstruction* other) const { 1445 if (!InstructionTypeEquals(other)) return false; 1446 
DCHECK_EQ(GetKind(), other->GetKind()); 1447 if (!InstructionDataEquals(other)) return false; 1448 if (GetType() != other->GetType()) return false; 1449 HConstInputsRef inputs = GetInputs(); 1450 HConstInputsRef other_inputs = other->GetInputs(); 1451 if (inputs.size() != other_inputs.size()) return false; 1452 for (size_t i = 0; i != inputs.size(); ++i) { 1453 if (inputs[i] != other_inputs[i]) return false; 1454 } 1455 1456 DCHECK_EQ(ComputeHashCode(), other->ComputeHashCode()); 1457 return true; 1458 } 1459 1460 std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs) { 1461 #define DECLARE_CASE(type, super) case HInstruction::k##type: os << #type; break; 1462 switch (rhs) { 1463 FOR_EACH_INSTRUCTION(DECLARE_CASE) 1464 default: 1465 os << "Unknown instruction kind " << static_cast<int>(rhs); 1466 break; 1467 } 1468 #undef DECLARE_CASE 1469 return os; 1470 } 1471 1472 void HInstruction::MoveBefore(HInstruction* cursor, bool do_checks) { 1473 if (do_checks) { 1474 DCHECK(!IsPhi()); 1475 DCHECK(!IsControlFlow()); 1476 DCHECK(CanBeMoved() || 1477 // HShouldDeoptimizeFlag can only be moved by CHAGuardOptimization. 
1478 IsShouldDeoptimizeFlag()); 1479 DCHECK(!cursor->IsPhi()); 1480 } 1481 1482 next_->previous_ = previous_; 1483 if (previous_ != nullptr) { 1484 previous_->next_ = next_; 1485 } 1486 if (block_->instructions_.first_instruction_ == this) { 1487 block_->instructions_.first_instruction_ = next_; 1488 } 1489 DCHECK_NE(block_->instructions_.last_instruction_, this); 1490 1491 previous_ = cursor->previous_; 1492 if (previous_ != nullptr) { 1493 previous_->next_ = this; 1494 } 1495 next_ = cursor; 1496 cursor->previous_ = this; 1497 block_ = cursor->block_; 1498 1499 if (block_->instructions_.first_instruction_ == cursor) { 1500 block_->instructions_.first_instruction_ = this; 1501 } 1502 } 1503 1504 void HInstruction::MoveBeforeFirstUserAndOutOfLoops() { 1505 DCHECK(!CanThrow()); 1506 DCHECK(!HasSideEffects()); 1507 DCHECK(!HasEnvironmentUses()); 1508 DCHECK(HasNonEnvironmentUses()); 1509 DCHECK(!IsPhi()); // Makes no sense for Phi. 1510 DCHECK_EQ(InputCount(), 0u); 1511 1512 // Find the target block. 1513 auto uses_it = GetUses().begin(); 1514 auto uses_end = GetUses().end(); 1515 HBasicBlock* target_block = uses_it->GetUser()->GetBlock(); 1516 ++uses_it; 1517 while (uses_it != uses_end && uses_it->GetUser()->GetBlock() == target_block) { 1518 ++uses_it; 1519 } 1520 if (uses_it != uses_end) { 1521 // This instruction has uses in two or more blocks. Find the common dominator. 1522 CommonDominator finder(target_block); 1523 for (; uses_it != uses_end; ++uses_it) { 1524 finder.Update(uses_it->GetUser()->GetBlock()); 1525 } 1526 target_block = finder.Get(); 1527 DCHECK(target_block != nullptr); 1528 } 1529 // Move to the first dominator not in a loop. 1530 while (target_block->IsInLoop()) { 1531 target_block = target_block->GetDominator(); 1532 DCHECK(target_block != nullptr); 1533 } 1534 1535 // Find insertion position. 
1536 HInstruction* insert_pos = nullptr; 1537 for (const HUseListNode<HInstruction*>& use : GetUses()) { 1538 if (use.GetUser()->GetBlock() == target_block && 1539 (insert_pos == nullptr || use.GetUser()->StrictlyDominates(insert_pos))) { 1540 insert_pos = use.GetUser(); 1541 } 1542 } 1543 if (insert_pos == nullptr) { 1544 // No user in `target_block`, insert before the control flow instruction. 1545 insert_pos = target_block->GetLastInstruction(); 1546 DCHECK(insert_pos->IsControlFlow()); 1547 // Avoid splitting HCondition from HIf to prevent unnecessary materialization. 1548 if (insert_pos->IsIf()) { 1549 HInstruction* if_input = insert_pos->AsIf()->InputAt(0); 1550 if (if_input == insert_pos->GetPrevious()) { 1551 insert_pos = if_input; 1552 } 1553 } 1554 } 1555 MoveBefore(insert_pos); 1556 } 1557 1558 HBasicBlock* HBasicBlock::SplitBefore(HInstruction* cursor) { 1559 DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented."; 1560 DCHECK_EQ(cursor->GetBlock(), this); 1561 1562 HBasicBlock* new_block = new (GetGraph()->GetArena()) HBasicBlock(GetGraph(), 1563 cursor->GetDexPc()); 1564 new_block->instructions_.first_instruction_ = cursor; 1565 new_block->instructions_.last_instruction_ = instructions_.last_instruction_; 1566 instructions_.last_instruction_ = cursor->previous_; 1567 if (cursor->previous_ == nullptr) { 1568 instructions_.first_instruction_ = nullptr; 1569 } else { 1570 cursor->previous_->next_ = nullptr; 1571 cursor->previous_ = nullptr; 1572 } 1573 1574 new_block->instructions_.SetBlockOfInstructions(new_block); 1575 AddInstruction(new (GetGraph()->GetArena()) HGoto(new_block->GetDexPc())); 1576 1577 for (HBasicBlock* successor : GetSuccessors()) { 1578 successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block; 1579 } 1580 new_block->successors_.swap(successors_); 1581 DCHECK(successors_.empty()); 1582 AddSuccessor(new_block); 1583 1584 GetGraph()->AddBlock(new_block); 1585 return new_block; 1586 } 1587 1588 
HBasicBlock* HBasicBlock::CreateImmediateDominator() { 1589 DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented."; 1590 DCHECK(!IsCatchBlock()) << "Support for updating try/catch information not implemented."; 1591 1592 HBasicBlock* new_block = new (GetGraph()->GetArena()) HBasicBlock(GetGraph(), GetDexPc()); 1593 1594 for (HBasicBlock* predecessor : GetPredecessors()) { 1595 predecessor->successors_[predecessor->GetSuccessorIndexOf(this)] = new_block; 1596 } 1597 new_block->predecessors_.swap(predecessors_); 1598 DCHECK(predecessors_.empty()); 1599 AddPredecessor(new_block); 1600 1601 GetGraph()->AddBlock(new_block); 1602 return new_block; 1603 } 1604 1605 HBasicBlock* HBasicBlock::SplitBeforeForInlining(HInstruction* cursor) { 1606 DCHECK_EQ(cursor->GetBlock(), this); 1607 1608 HBasicBlock* new_block = new (GetGraph()->GetArena()) HBasicBlock(GetGraph(), 1609 cursor->GetDexPc()); 1610 new_block->instructions_.first_instruction_ = cursor; 1611 new_block->instructions_.last_instruction_ = instructions_.last_instruction_; 1612 instructions_.last_instruction_ = cursor->previous_; 1613 if (cursor->previous_ == nullptr) { 1614 instructions_.first_instruction_ = nullptr; 1615 } else { 1616 cursor->previous_->next_ = nullptr; 1617 cursor->previous_ = nullptr; 1618 } 1619 1620 new_block->instructions_.SetBlockOfInstructions(new_block); 1621 1622 for (HBasicBlock* successor : GetSuccessors()) { 1623 successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block; 1624 } 1625 new_block->successors_.swap(successors_); 1626 DCHECK(successors_.empty()); 1627 1628 for (HBasicBlock* dominated : GetDominatedBlocks()) { 1629 dominated->dominator_ = new_block; 1630 } 1631 new_block->dominated_blocks_.swap(dominated_blocks_); 1632 DCHECK(dominated_blocks_.empty()); 1633 return new_block; 1634 } 1635 1636 HBasicBlock* HBasicBlock::SplitAfterForInlining(HInstruction* cursor) { 1637 DCHECK(!cursor->IsControlFlow()); 1638 
DCHECK_NE(instructions_.last_instruction_, cursor); 1639 DCHECK_EQ(cursor->GetBlock(), this); 1640 1641 HBasicBlock* new_block = new (GetGraph()->GetArena()) HBasicBlock(GetGraph(), GetDexPc()); 1642 new_block->instructions_.first_instruction_ = cursor->GetNext(); 1643 new_block->instructions_.last_instruction_ = instructions_.last_instruction_; 1644 cursor->next_->previous_ = nullptr; 1645 cursor->next_ = nullptr; 1646 instructions_.last_instruction_ = cursor; 1647 1648 new_block->instructions_.SetBlockOfInstructions(new_block); 1649 for (HBasicBlock* successor : GetSuccessors()) { 1650 successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block; 1651 } 1652 new_block->successors_.swap(successors_); 1653 DCHECK(successors_.empty()); 1654 1655 for (HBasicBlock* dominated : GetDominatedBlocks()) { 1656 dominated->dominator_ = new_block; 1657 } 1658 new_block->dominated_blocks_.swap(dominated_blocks_); 1659 DCHECK(dominated_blocks_.empty()); 1660 return new_block; 1661 } 1662 1663 const HTryBoundary* HBasicBlock::ComputeTryEntryOfSuccessors() const { 1664 if (EndsWithTryBoundary()) { 1665 HTryBoundary* try_boundary = GetLastInstruction()->AsTryBoundary(); 1666 if (try_boundary->IsEntry()) { 1667 DCHECK(!IsTryBlock()); 1668 return try_boundary; 1669 } else { 1670 DCHECK(IsTryBlock()); 1671 DCHECK(try_catch_information_->GetTryEntry().HasSameExceptionHandlersAs(*try_boundary)); 1672 return nullptr; 1673 } 1674 } else if (IsTryBlock()) { 1675 return &try_catch_information_->GetTryEntry(); 1676 } else { 1677 return nullptr; 1678 } 1679 } 1680 1681 bool HBasicBlock::HasThrowingInstructions() const { 1682 for (HInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) { 1683 if (it.Current()->CanThrow()) { 1684 return true; 1685 } 1686 } 1687 return false; 1688 } 1689 1690 static bool HasOnlyOneInstruction(const HBasicBlock& block) { 1691 return block.GetPhis().IsEmpty() 1692 && !block.GetInstructions().IsEmpty() 1693 && 
block.GetFirstInstruction() == block.GetLastInstruction(); 1694 } 1695 1696 bool HBasicBlock::IsSingleGoto() const { 1697 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsGoto(); 1698 } 1699 1700 bool HBasicBlock::IsSingleTryBoundary() const { 1701 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsTryBoundary(); 1702 } 1703 1704 bool HBasicBlock::EndsWithControlFlowInstruction() const { 1705 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsControlFlow(); 1706 } 1707 1708 bool HBasicBlock::EndsWithIf() const { 1709 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsIf(); 1710 } 1711 1712 bool HBasicBlock::EndsWithTryBoundary() const { 1713 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsTryBoundary(); 1714 } 1715 1716 bool HBasicBlock::HasSinglePhi() const { 1717 return !GetPhis().IsEmpty() && GetFirstPhi()->GetNext() == nullptr; 1718 } 1719 1720 ArrayRef<HBasicBlock* const> HBasicBlock::GetNormalSuccessors() const { 1721 if (EndsWithTryBoundary()) { 1722 // The normal-flow successor of HTryBoundary is always stored at index zero. 1723 DCHECK_EQ(successors_[0], GetLastInstruction()->AsTryBoundary()->GetNormalFlowSuccessor()); 1724 return ArrayRef<HBasicBlock* const>(successors_).SubArray(0u, 1u); 1725 } else { 1726 // All successors of blocks not ending with TryBoundary are normal. 1727 return ArrayRef<HBasicBlock* const>(successors_); 1728 } 1729 } 1730 1731 ArrayRef<HBasicBlock* const> HBasicBlock::GetExceptionalSuccessors() const { 1732 if (EndsWithTryBoundary()) { 1733 return GetLastInstruction()->AsTryBoundary()->GetExceptionHandlers(); 1734 } else { 1735 // Blocks not ending with TryBoundary do not have exceptional successors. 
// Returns true if both boundaries list the same handlers in the same order.
// Handler order is significant: it encodes the dex catch-handler search order.
bool HTryBoundary::HasSameExceptionHandlersAs(const HTryBoundary& other) const {
  ArrayRef<HBasicBlock* const> handlers1 = GetExceptionHandlers();
  ArrayRef<HBasicBlock* const> handlers2 = other.GetExceptionHandlers();

  size_t length = handlers1.size();
  if (length != handlers2.size()) {
    return false;
  }

  // Exception handlers need to be stored in the same order.
  for (size_t i = 0; i < length; ++i) {
    if (handlers1[i] != handlers2[i]) {
      return false;
    }
  }
  return true;
}

// Counts instructions by walking the list; O(n), intended for checks/statistics.
size_t HInstructionList::CountSize() const {
  size_t size = 0;
  HInstruction* current = first_instruction_;
  for (; current != nullptr; current = current->GetNext()) {
    size++;
  }
  return size;
}

// Re-parents every instruction in this list to `block`. Used after splicing a
// whole list from one block into another.
void HInstructionList::SetBlockOfInstructions(HBasicBlock* block) const {
  for (HInstruction* current = first_instruction_;
       current != nullptr;
       current = current->GetNext()) {
    current->SetBlock(block);
  }
}

// Splices all of `instruction_list` into this list immediately after `cursor`.
// The statement order below is significant: `cursor->next_` is consumed before
// it is overwritten. `instruction_list` is not cleared; the caller owns that.
void HInstructionList::AddAfter(HInstruction* cursor, const HInstructionList& instruction_list) {
  DCHECK(Contains(cursor));
  if (!instruction_list.IsEmpty()) {
    if (cursor == last_instruction_) {
      // Appending at the end: the spliced list's tail becomes the new tail.
      last_instruction_ = instruction_list.last_instruction_;
    } else {
      // Link the old successor of `cursor` back to the spliced list's tail.
      cursor->next_->previous_ = instruction_list.last_instruction_;
    }
    instruction_list.last_instruction_->next_ = cursor->next_;
    cursor->next_ = instruction_list.first_instruction_;
    instruction_list.first_instruction_->previous_ = cursor;
  }
}

// Splices all of `instruction_list` into this list immediately before `cursor`.
// Mirror image of AddAfter(); `cursor->previous_` is consumed before being
// overwritten, so do not reorder the assignments.
void HInstructionList::AddBefore(HInstruction* cursor, const HInstructionList& instruction_list) {
  DCHECK(Contains(cursor));
  if (!instruction_list.IsEmpty()) {
    if (cursor == first_instruction_) {
      // Prepending at the head: the spliced list's head becomes the new head.
      first_instruction_ = instruction_list.first_instruction_;
    } else {
      cursor->previous_->next_ = instruction_list.first_instruction_;
    }
    instruction_list.last_instruction_->next_ = cursor;
    instruction_list.first_instruction_->previous_ = cursor->previous_;
    cursor->previous_ = instruction_list.last_instruction_;
  }
}
// Appends `instruction_list` to this list (no-op splice target when empty).
void HInstructionList::Add(const HInstructionList& instruction_list) {
  if (IsEmpty()) {
    first_instruction_ = instruction_list.first_instruction_;
    last_instruction_ = instruction_list.last_instruction_;
  } else {
    AddAfter(last_instruction_, instruction_list);
  }
}

// Should be called on instructions in a dead block in post order. This method
// assumes `insn` has been removed from all users with the exception of catch
// phis because of missing exceptional edges in the graph. It removes the
// instruction from catch phi uses, together with inputs of other catch phis in
// the catch block at the same index, as these must be dead too.
static void RemoveUsesOfDeadInstruction(HInstruction* insn) {
  DCHECK(!insn->HasEnvironmentUses());
  while (insn->HasNonEnvironmentUses()) {
    const HUseListNode<HInstruction*>& use = insn->GetUses().front();
    size_t use_index = use.GetIndex();
    HBasicBlock* user_block = use.GetUser()->GetBlock();
    // Only catch phis may still reference a dead instruction at this point.
    DCHECK(use.GetUser()->IsPhi() && user_block->IsCatchBlock());
    // Drop the same input index from every phi in the catch block so the phis
    // stay mutually consistent.
    for (HInstructionIterator phi_it(user_block->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
      phi_it.Current()->AsPhi()->RemoveInputAt(use_index);
    }
  }
}

// Fully unlinks this (dead) block from the graph: successors, own
// instructions/phis, predecessors, loops, dominator, then the graph itself.
// The step order below is a contract (see b/27683071); do not reorder.
void HBasicBlock::DisconnectAndDelete() {
  // Dominators must be removed after all the blocks they dominate. This way
  // a loop header is removed last, a requirement for correct loop information
  // iteration.
  DCHECK(dominated_blocks_.empty());

  // The following steps gradually remove the block from all its dependants in
  // post order (b/27683071).

  // (1) Store a basic block that we'll use in step (5) to find loops to be updated.
  // We need to do this before step (4) which destroys the predecessor list.
  HBasicBlock* loop_update_start = this;
  if (IsLoopHeader()) {
    HLoopInformation* loop_info = GetLoopInformation();
    // All other blocks in this loop should have been removed because the header
    // was their dominator.
    // Note that we do not remove `this` from `loop_info` as it is unreachable.
    DCHECK(!loop_info->IsIrreducible());
    DCHECK_EQ(loop_info->GetBlocks().NumSetBits(), 1u);
    DCHECK_EQ(static_cast<uint32_t>(loop_info->GetBlocks().GetHighestBitSet()), GetBlockId());
    loop_update_start = loop_info->GetPreHeader();
  }

  // (2) Disconnect the block from its successors and update their phis.
  for (HBasicBlock* successor : successors_) {
    // Delete this block from the list of predecessors.
    size_t this_index = successor->GetPredecessorIndexOf(this);
    successor->predecessors_.erase(successor->predecessors_.begin() + this_index);

    // Check that `successor` has other predecessors, otherwise `this` is the
    // dominator of `successor` which violates the order DCHECKed at the top.
    DCHECK(!successor->predecessors_.empty());

    // Remove this block's entries in the successor's phis. Skip exceptional
    // successors because catch phi inputs do not correspond to predecessor
    // blocks but throwing instructions. The inputs of the catch phis will be
    // updated in step (3).
    if (!successor->IsCatchBlock()) {
      if (successor->predecessors_.size() == 1u) {
        // The successor has just one predecessor left. Replace phis with the only
        // remaining input. `1 - this_index` is valid because the phi had exactly
        // two inputs before this block's entry was erased above.
        for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
          HPhi* phi = phi_it.Current()->AsPhi();
          phi->ReplaceWith(phi->InputAt(1 - this_index));
          successor->RemovePhi(phi);
        }
      } else {
        for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
          phi_it.Current()->AsPhi()->RemoveInputAt(this_index);
        }
      }
    }
  }
  successors_.clear();

  // (3) Remove instructions and phis. Instructions should have no remaining uses
  // except in catch phis. If an instruction is used by a catch phi at `index`,
  // remove `index`-th input of all phis in the catch block since they are
  // guaranteed dead. Note that we may miss dead inputs this way but the
  // graph will always remain consistent.
  for (HBackwardInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
    HInstruction* insn = it.Current();
    RemoveUsesOfDeadInstruction(insn);
    RemoveInstruction(insn);
  }
  for (HInstructionIterator it(GetPhis()); !it.Done(); it.Advance()) {
    HPhi* insn = it.Current()->AsPhi();
    RemoveUsesOfDeadInstruction(insn);
    RemovePhi(insn);
  }

  // (4) Disconnect the block from its predecessors and update their
  // control-flow instructions.
  for (HBasicBlock* predecessor : predecessors_) {
    // We should not see any back edges as they would have been removed by step (3).
    DCHECK(!IsInLoop() || !GetLoopInformation()->IsBackEdge(*predecessor));

    HInstruction* last_instruction = predecessor->GetLastInstruction();
    if (last_instruction->IsTryBoundary() && !IsCatchBlock()) {
      // This block is the only normal-flow successor of the TryBoundary which
      // makes `predecessor` dead. Since DCE removes blocks in post order,
      // exception handlers of this TryBoundary were already visited and any
      // remaining handlers therefore must be live. We remove `predecessor` from
      // their list of predecessors.
      DCHECK_EQ(last_instruction->AsTryBoundary()->GetNormalFlowSuccessor(), this);
      while (predecessor->GetSuccessors().size() > 1) {
        HBasicBlock* handler = predecessor->GetSuccessors()[1];
        DCHECK(handler->IsCatchBlock());
        predecessor->RemoveSuccessor(handler);
        handler->RemovePredecessor(predecessor);
      }
    }

    predecessor->RemoveSuccessor(this);
    uint32_t num_pred_successors = predecessor->GetSuccessors().size();
    if (num_pred_successors == 1u) {
      // If we have one successor after removing one, then we must have
      // had an HIf, HPackedSwitch or HTryBoundary, as they have more than one
      // successor. Replace those with a HGoto.
      DCHECK(last_instruction->IsIf() ||
             last_instruction->IsPackedSwitch() ||
             (last_instruction->IsTryBoundary() && IsCatchBlock()));
      predecessor->RemoveInstruction(last_instruction);
      predecessor->AddInstruction(new (graph_->GetArena()) HGoto(last_instruction->GetDexPc()));
    } else if (num_pred_successors == 0u) {
      // The predecessor has no remaining successors and therefore must be dead.
      // We deliberately leave it without a control-flow instruction so that the
      // GraphChecker fails unless it is not removed during the pass too.
      predecessor->RemoveInstruction(last_instruction);
    } else {
      // There are multiple successors left. The removed block might be a successor
      // of a PackedSwitch which will be completely removed (perhaps replaced with
      // a Goto), or we are deleting a catch block from a TryBoundary. In either
      // case, leave `last_instruction` as is for now.
      DCHECK(last_instruction->IsPackedSwitch() ||
             (last_instruction->IsTryBoundary() && IsCatchBlock()));
    }
  }
  predecessors_.clear();

  // (5) Remove the block from all loops it is included in. Skip the inner-most
  // loop if this is the loop header (see definition of `loop_update_start`)
  // because the loop header's predecessor list has been destroyed in step (4).
  for (HLoopInformationOutwardIterator it(*loop_update_start); !it.Done(); it.Advance()) {
    HLoopInformation* loop_info = it.Current();
    loop_info->Remove(this);
    if (loop_info->IsBackEdge(*this)) {
      // If this was the last back edge of the loop, we deliberately leave the
      // loop in an inconsistent state and will fail GraphChecker unless the
      // entire loop is removed during the pass.
      loop_info->RemoveBackEdge(this);
    }
  }

  // (6) Disconnect from the dominator.
  dominator_->RemoveDominatedBlock(this);
  SetDominator(nullptr);

  // (7) Delete from the graph, update reverse post order.
  graph_->DeleteDeadEmptyBlock(this);
  SetGraph(nullptr);
}

// Replaces this block's terminator with `other`'s instructions, re-parenting
// them to this block and leaving `other`'s list empty.
void HBasicBlock::MergeInstructionsWith(HBasicBlock* other) {
  DCHECK(EndsWithControlFlowInstruction());
  RemoveInstruction(GetLastInstruction());
  instructions_.Add(other->GetInstructions());
  other->instructions_.SetBlockOfInstructions(this);
  other->instructions_.Clear();
}

// Merges `other` (this block's single successor within the same graph) into
// this block, then deletes `other` from the graph.
void HBasicBlock::MergeWith(HBasicBlock* other) {
  DCHECK_EQ(GetGraph(), other->GetGraph());
  DCHECK(ContainsElement(dominated_blocks_, other));
  DCHECK_EQ(GetSingleSuccessor(), other);
  DCHECK_EQ(other->GetSinglePredecessor(), this);
  DCHECK(other->GetPhis().IsEmpty());

  // Move instructions from `other` to `this`.
  MergeInstructionsWith(other);

  // Remove `other` from the loops it is included in.
  for (HLoopInformationOutwardIterator it(*other); !it.Done(); it.Advance()) {
    HLoopInformation* loop_info = it.Current();
    loop_info->Remove(other);
    if (loop_info->IsBackEdge(*other)) {
      loop_info->ReplaceBackEdge(other, this);
    }
  }

  // Update links to the successors of `other`.
  successors_.clear();
  for (HBasicBlock* successor : other->GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
  }
  successors_.swap(other->successors_);
  DCHECK(other->successors_.empty());

  // Update the dominator tree.
  RemoveDominatedBlock(other);
  for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
    dominated->SetDominator(this);
  }
  dominated_blocks_.insert(
      dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
  other->dominated_blocks_.clear();
  other->dominator_ = nullptr;

  // Clear the list of predecessors of `other` in preparation of deleting it.
  other->predecessors_.clear();

  // Delete `other` from the graph. The function updates reverse post order.
  graph_->DeleteDeadEmptyBlock(other);
  other->SetGraph(nullptr);
}
// Merges `other` (the first body block of an inlined callee graph, hence a
// DIFFERENT graph) into this block. Unlike MergeWith(), `other` is not deleted
// here; the caller finishes dismantling the callee graph.
void HBasicBlock::MergeWithInlined(HBasicBlock* other) {
  DCHECK_NE(GetGraph(), other->GetGraph());
  DCHECK(GetDominatedBlocks().empty());
  DCHECK(GetSuccessors().empty());
  DCHECK(!EndsWithControlFlowInstruction());
  DCHECK(other->GetSinglePredecessor()->IsEntryBlock());
  DCHECK(other->GetPhis().IsEmpty());
  DCHECK(!other->IsInLoop());

  // Move instructions from `other` to `this`.
  instructions_.Add(other->GetInstructions());
  other->instructions_.SetBlockOfInstructions(this);

  // Update links to the successors of `other`.
  successors_.clear();
  for (HBasicBlock* successor : other->GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
  }
  successors_.swap(other->successors_);
  DCHECK(other->successors_.empty());

  // Update the dominator tree.
  for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
    dominated->SetDominator(this);
  }
  dominated_blocks_.insert(
      dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
  other->dominated_blocks_.clear();
  other->dominator_ = nullptr;
  other->graph_ = nullptr;
}

// Redirects every CFG and dominator-tree edge touching `this` to `other`,
// leaving `this` fully detached (used when replacing the callee's exit block).
void HBasicBlock::ReplaceWith(HBasicBlock* other) {
  // ReplaceSuccessor/ReplacePredecessor shrink the lists we're draining, so
  // loop on emptiness rather than iterating.
  while (!GetPredecessors().empty()) {
    HBasicBlock* predecessor = GetPredecessors()[0];
    predecessor->ReplaceSuccessor(this, other);
  }
  while (!GetSuccessors().empty()) {
    HBasicBlock* successor = GetSuccessors()[0];
    successor->ReplacePredecessor(this, other);
  }
  for (HBasicBlock* dominated : GetDominatedBlocks()) {
    other->AddDominatedBlock(dominated);
  }
  GetDominator()->ReplaceDominatedBlock(this, other);
  other->SetDominator(GetDominator());
  dominator_ = nullptr;
  graph_ = nullptr;
}

// Removes an already fully disconnected, empty block from the graph's block
// table and reverse post order. The slot in `blocks_` is nulled, not erased,
// so other block ids remain stable.
void HGraph::DeleteDeadEmptyBlock(HBasicBlock* block) {
  DCHECK_EQ(block->GetGraph(), this);
  DCHECK(block->GetSuccessors().empty());
  DCHECK(block->GetPredecessors().empty());
  DCHECK(block->GetDominatedBlocks().empty());
  DCHECK(block->GetDominator() == nullptr);
  DCHECK(block->GetInstructions().IsEmpty());
  DCHECK(block->GetPhis().IsEmpty());

  if (block->IsExitBlock()) {
    SetExitBlock(nullptr);
  }

  RemoveElement(reverse_post_order_, block);
  blocks_[block->GetBlockId()] = nullptr;
  block->SetGraph(nullptr);
}

// Fixes up loop membership and try/catch information for `block`, a block
// newly added to this graph, using `reference` as the template block whose
// position `block` takes over.
void HGraph::UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                                   HBasicBlock* reference,
                                                   bool replace_if_back_edge) {
  if (block->IsLoopHeader()) {
    // Clear the information of which blocks are contained in that loop. Since the
    // information is stored as a bit vector based on block ids, we have to update
    // it, as those block ids were specific to the callee graph and we are now adding
    // these blocks to the caller graph.
    block->GetLoopInformation()->ClearAllBlocks();
  }

  // If not already in a loop, update the loop information.
  if (!block->IsInLoop()) {
    block->SetLoopInformation(reference->GetLoopInformation());
  }

  // If the block is in a loop, update all its outward loops.
  HLoopInformation* loop_info = block->GetLoopInformation();
  if (loop_info != nullptr) {
    for (HLoopInformationOutwardIterator loop_it(*block);
         !loop_it.Done();
         loop_it.Advance()) {
      loop_it.Current()->Add(block);
    }
    if (replace_if_back_edge && loop_info->IsBackEdge(*reference)) {
      loop_info->ReplaceBackEdge(reference, block);
    }
  }

  // Copy TryCatchInformation if `reference` is a try block, not if it is a catch block.
  TryCatchInformation* try_catch_info = reference->IsTryBlock()
      ? reference->GetTryCatchInformation()
      : nullptr;
  block->SetTryCatchInformation(try_catch_info);
}
// Splices this (callee) graph into `outer_graph` in place of `invoke`.
// Returns the instruction producing the call's return value in the outer
// graph (possibly a new phi when there are multiple returns), or nullptr for
// void. After this call, the callee graph's blocks belong to `outer_graph`.
HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
  DCHECK(HasExitBlock()) << "Unimplemented scenario";
  // Update the environments in this graph to have the invoke's environment
  // as parent.
  {
    // Skip the entry block, we do not need to update the entry's suspend check.
    for (HBasicBlock* block : GetReversePostOrderSkipEntryBlock()) {
      for (HInstructionIterator instr_it(block->GetInstructions());
           !instr_it.Done();
           instr_it.Advance()) {
        HInstruction* current = instr_it.Current();
        if (current->NeedsEnvironment()) {
          DCHECK(current->HasEnvironment());
          current->GetEnvironment()->SetAndCopyParentChain(
              outer_graph->GetArena(), invoke->GetEnvironment());
        }
      }
    }
  }
  outer_graph->UpdateMaximumNumberOfOutVRegs(GetMaximumNumberOfOutVRegs());

  // Propagate graph-level flags from callee to caller.
  if (HasBoundsChecks()) {
    outer_graph->SetHasBoundsChecks(true);
  }
  if (HasLoops()) {
    outer_graph->SetHasLoops(true);
  }
  if (HasIrreducibleLoops()) {
    outer_graph->SetHasIrreducibleLoops(true);
  }
  if (HasTryCatch()) {
    outer_graph->SetHasTryCatch(true);
  }
  if (HasSIMD()) {
    outer_graph->SetHasSIMD(true);
  }

  HInstruction* return_value = nullptr;
  if (GetBlocks().size() == 3) {
    // Inliner already made sure we don't inline methods that always throw.
    DCHECK(!GetBlocks()[1]->GetLastInstruction()->IsThrow());
    // Simple case of an entry block, a body block, and an exit block.
    // Put the body block's instruction into `invoke`'s block.
    HBasicBlock* body = GetBlocks()[1];
    DCHECK(GetBlocks()[0]->IsEntryBlock());
    DCHECK(GetBlocks()[2]->IsExitBlock());
    DCHECK(!body->IsExitBlock());
    DCHECK(!body->IsInLoop());
    HInstruction* last = body->GetLastInstruction();

    // Note that we add instructions before the invoke only to simplify polymorphic inlining.
    invoke->GetBlock()->instructions_.AddBefore(invoke, body->GetInstructions());
    body->GetInstructions().SetBlockOfInstructions(invoke->GetBlock());

    // Replace the invoke with the return value of the inlined graph.
    if (last->IsReturn()) {
      return_value = last->InputAt(0);
    } else {
      DCHECK(last->IsReturnVoid());
    }

    invoke->GetBlock()->RemoveInstruction(last);
  } else {
    // Need to inline multiple blocks. We split `invoke`'s block
    // into two blocks, merge the first block of the inlined graph into
    // the first half, and replace the exit block of the inlined graph
    // with the second half.
    ArenaAllocator* allocator = outer_graph->GetArena();
    HBasicBlock* at = invoke->GetBlock();
    // Note that we split before the invoke only to simplify polymorphic inlining.
    HBasicBlock* to = at->SplitBeforeForInlining(invoke);

    HBasicBlock* first = entry_block_->GetSuccessors()[0];
    DCHECK(!first->IsInLoop());
    at->MergeWithInlined(first);
    exit_block_->ReplaceWith(to);

    // Update the meta information surrounding blocks:
    // (1) the graph they are now in,
    // (2) the reverse post order of that graph,
    // (3) their potential loop information, inner and outer,
    // (4) try block membership.
    // Note that we do not need to update catch phi inputs because they
    // correspond to the register file of the outer method which the inlinee
    // cannot modify.

    // We don't add the entry block, the exit block, and the first block, which
    // has been merged with `at`.
    static constexpr int kNumberOfSkippedBlocksInCallee = 3;

    // We add the `to` block.
    static constexpr int kNumberOfNewBlocksInCaller = 1;
    size_t blocks_added = (reverse_post_order_.size() - kNumberOfSkippedBlocksInCallee)
        + kNumberOfNewBlocksInCaller;

    // Find the location of `at` in the outer graph's reverse post order. The new
    // blocks will be added after it.
    size_t index_of_at = IndexOfElement(outer_graph->reverse_post_order_, at);
    MakeRoomFor(&outer_graph->reverse_post_order_, blocks_added, index_of_at);

    // Do a reverse post order of the blocks in the callee and do (1), (2), (3)
    // and (4) to the blocks that apply.
    for (HBasicBlock* current : GetReversePostOrder()) {
      if (current != exit_block_ && current != entry_block_ && current != first) {
        DCHECK(current->GetTryCatchInformation() == nullptr);
        DCHECK(current->GetGraph() == this);
        current->SetGraph(outer_graph);
        outer_graph->AddBlock(current);
        outer_graph->reverse_post_order_[++index_of_at] = current;
        UpdateLoopAndTryInformationOfNewBlock(current, at, /* replace_if_back_edge */ false);
      }
    }

    // Do (1), (2), (3) and (4) to `to`.
    to->SetGraph(outer_graph);
    outer_graph->AddBlock(to);
    outer_graph->reverse_post_order_[++index_of_at] = to;
    // Only `to` can become a back edge, as the inlined blocks
    // are predecessors of `to`.
    UpdateLoopAndTryInformationOfNewBlock(to, at, /* replace_if_back_edge */ true);

    // Update all predecessors of the exit block (now the `to` block)
    // to not `HReturn` but `HGoto` instead. Special case throwing blocks
    // to now get the outer graph exit block as successor. Note that the inliner
    // currently doesn't support inlining methods with try/catch.
    HPhi* return_value_phi = nullptr;
    bool rerun_dominance = false;
    bool rerun_loop_analysis = false;
    for (size_t pred = 0; pred < to->GetPredecessors().size(); ++pred) {
      HBasicBlock* predecessor = to->GetPredecessors()[pred];
      HInstruction* last = predecessor->GetLastInstruction();
      if (last->IsThrow()) {
        DCHECK(!at->IsTryBlock());
        predecessor->ReplaceSuccessor(to, outer_graph->GetExitBlock());
        // ReplaceSuccessor removed an entry from the predecessor list we are
        // indexing; step back so no predecessor is skipped.
        --pred;
        // We need to re-run dominance information, as the exit block now has
        // a new dominator.
        rerun_dominance = true;
        if (predecessor->GetLoopInformation() != nullptr) {
          // The exit block and blocks post dominated by the exit block do not belong
          // to any loop. Because we do not compute the post dominators, we need to re-run
          // loop analysis to get the loop information correct.
          rerun_loop_analysis = true;
        }
      } else {
        if (last->IsReturnVoid()) {
          DCHECK(return_value == nullptr);
          DCHECK(return_value_phi == nullptr);
        } else {
          DCHECK(last->IsReturn());
          if (return_value_phi != nullptr) {
            return_value_phi->AddInput(last->InputAt(0));
          } else if (return_value == nullptr) {
            return_value = last->InputAt(0);
          } else {
            // There will be multiple returns.
            return_value_phi = new (allocator) HPhi(
                allocator, kNoRegNumber, 0, HPhi::ToPhiType(invoke->GetType()), to->GetDexPc());
            to->AddPhi(return_value_phi);
            return_value_phi->AddInput(return_value);
            return_value_phi->AddInput(last->InputAt(0));
            return_value = return_value_phi;
          }
        }
        predecessor->AddInstruction(new (allocator) HGoto(last->GetDexPc()));
        predecessor->RemoveInstruction(last);
      }
    }
    if (rerun_loop_analysis) {
      DCHECK(!outer_graph->HasIrreducibleLoops())
          << "Recomputing loop information in graphs with irreducible loops "
          << "is unsupported, as it could lead to loop header changes";
      outer_graph->ClearLoopInformation();
      outer_graph->ClearDominanceInformation();
      outer_graph->BuildDominatorTree();
    } else if (rerun_dominance) {
      outer_graph->ClearDominanceInformation();
      outer_graph->ComputeDominanceInformation();
    }
  }

  // Walk over the entry block and:
  // - Move constants from the entry block to the outer_graph's entry block,
  // - Replace HParameterValue instructions with their real value.
  // - Remove suspend checks, that hold an environment.
  // We must do this after the other blocks have been inlined, otherwise ids of
  // constants could overlap with the inner graph.
  size_t parameter_index = 0;
  for (HInstructionIterator it(entry_block_->GetInstructions()); !it.Done(); it.Advance()) {
    HInstruction* current = it.Current();
    HInstruction* replacement = nullptr;
    if (current->IsNullConstant()) {
      replacement = outer_graph->GetNullConstant(current->GetDexPc());
    } else if (current->IsIntConstant()) {
      replacement = outer_graph->GetIntConstant(
          current->AsIntConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsLongConstant()) {
      replacement = outer_graph->GetLongConstant(
          current->AsLongConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsFloatConstant()) {
      replacement = outer_graph->GetFloatConstant(
          current->AsFloatConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsDoubleConstant()) {
      replacement = outer_graph->GetDoubleConstant(
          current->AsDoubleConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsParameterValue()) {
      if (kIsDebugBuild
          && invoke->IsInvokeStaticOrDirect()
          && invoke->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck()) {
        // Ensure we do not use the last input of `invoke`, as it
        // contains a clinit check which is not an actual argument.
        size_t last_input_index = invoke->InputCount() - 1;
        DCHECK(parameter_index != last_input_index);
      }
      replacement = invoke->InputAt(parameter_index++);
    } else if (current->IsCurrentMethod()) {
      replacement = outer_graph->GetCurrentMethod();
    } else {
      DCHECK(current->IsGoto() || current->IsSuspendCheck());
      entry_block_->RemoveInstruction(current);
    }
    if (replacement != nullptr) {
      current->ReplaceWith(replacement);
      // If the current is the return value then we need to update the latter.
      if (current == return_value) {
        DCHECK_EQ(entry_block_, return_value->GetBlock());
        return_value = replacement;
      }
    }
  }

  return return_value;
}

/*
 * Loop will be transformed to:
 *       old_pre_header
 *             |
 *          if_block
 *           /    \
 *  true_block   false_block
 *           \    /
 *       new_pre_header
 *             |
 *           header
 */
void HGraph::TransformLoopHeaderForBCE(HBasicBlock* header) {
  DCHECK(header->IsLoopHeader());
  HBasicBlock* old_pre_header = header->GetDominator();

  // Need extra block to avoid critical edge.
  HBasicBlock* if_block = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* true_block = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* false_block = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_pre_header = new (arena_) HBasicBlock(this, header->GetDexPc());
  AddBlock(if_block);
  AddBlock(true_block);
  AddBlock(false_block);
  AddBlock(new_pre_header);

  header->ReplacePredecessor(old_pre_header, new_pre_header);
  old_pre_header->successors_.clear();
  old_pre_header->dominated_blocks_.clear();

  old_pre_header->AddSuccessor(if_block);
  if_block->AddSuccessor(true_block);  // True successor
  if_block->AddSuccessor(false_block);  // False successor
  true_block->AddSuccessor(new_pre_header);
  false_block->AddSuccessor(new_pre_header);

  // Rebuild the dominator tree for the new diamond.
  old_pre_header->dominated_blocks_.push_back(if_block);
  if_block->SetDominator(old_pre_header);
  if_block->dominated_blocks_.push_back(true_block);
  true_block->SetDominator(if_block);
  if_block->dominated_blocks_.push_back(false_block);
  false_block->SetDominator(if_block);
  if_block->dominated_blocks_.push_back(new_pre_header);
  new_pre_header->SetDominator(if_block);
  new_pre_header->dominated_blocks_.push_back(header);
  header->SetDominator(new_pre_header);

  // Fix reverse post order.
  size_t index_of_header = IndexOfElement(reverse_post_order_, header);
  MakeRoomFor(&reverse_post_order_, 4, index_of_header - 1);
  reverse_post_order_[index_of_header++] = if_block;
  reverse_post_order_[index_of_header++] = true_block;
  reverse_post_order_[index_of_header++] = false_block;
  reverse_post_order_[index_of_header++] = new_pre_header;

  // The pre_header can never be a back edge of a loop.
  DCHECK((old_pre_header->GetLoopInformation() == nullptr) ||
         !old_pre_header->GetLoopInformation()->IsBackEdge(*old_pre_header));
  UpdateLoopAndTryInformationOfNewBlock(
      if_block, old_pre_header, /* replace_if_back_edge */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      true_block, old_pre_header, /* replace_if_back_edge */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      false_block, old_pre_header, /* replace_if_back_edge */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      new_pre_header, old_pre_header, /* replace_if_back_edge */ false);
}
// Inserts a new single-block cleanup loop (pre-header, header, body) between
// `header`'s loop and `exit`, returning the new pre-header. The caller must
// add the loop condition to the new header.
HBasicBlock* HGraph::TransformLoopForVectorization(HBasicBlock* header,
                                                   HBasicBlock* body,
                                                   HBasicBlock* exit) {
  DCHECK(header->IsLoopHeader());
  HLoopInformation* loop = header->GetLoopInformation();

  // Add new loop blocks.
  HBasicBlock* new_pre_header = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_header = new (arena_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_body = new (arena_) HBasicBlock(this, header->GetDexPc());
  AddBlock(new_pre_header);
  AddBlock(new_header);
  AddBlock(new_body);

  // Set up control flow.
  header->ReplaceSuccessor(exit, new_pre_header);
  new_pre_header->AddSuccessor(new_header);
  new_header->AddSuccessor(exit);
  new_header->AddSuccessor(new_body);
  new_body->AddSuccessor(new_header);

  // Set up dominators.
  header->ReplaceDominatedBlock(exit, new_pre_header);
  new_pre_header->SetDominator(header);
  new_pre_header->dominated_blocks_.push_back(new_header);
  new_header->SetDominator(new_pre_header);
  new_header->dominated_blocks_.push_back(new_body);
  new_body->SetDominator(new_header);
  new_header->dominated_blocks_.push_back(exit);
  exit->SetDominator(new_header);

  // Fix reverse post order.
  size_t index_of_header = IndexOfElement(reverse_post_order_, header);
  MakeRoomFor(&reverse_post_order_, 2, index_of_header);
  reverse_post_order_[++index_of_header] = new_pre_header;
  reverse_post_order_[++index_of_header] = new_header;
  size_t index_of_body = IndexOfElement(reverse_post_order_, body);
  MakeRoomFor(&reverse_post_order_, 1, index_of_body - 1);
  reverse_post_order_[index_of_body] = new_body;

  // Add gotos and suspend check (client must add conditional in header).
  new_pre_header->AddInstruction(new (arena_) HGoto());
  HSuspendCheck* suspend_check = new (arena_) HSuspendCheck(header->GetDexPc());
  new_header->AddInstruction(suspend_check);
  new_body->AddInstruction(new (arena_) HGoto());
  suspend_check->CopyEnvironmentFromWithLoopPhiAdjustment(
      loop->GetSuspendCheck()->GetEnvironment(), header);

  // Update loop information.
  new_header->AddBackEdge(new_body);
  new_header->GetLoopInformation()->SetSuspendCheck(suspend_check);
  new_header->GetLoopInformation()->Populate();
  new_pre_header->SetLoopInformation(loop->GetPreHeader()->GetLoopInformation());  // outward
  // Register the three new blocks with every enclosing (outward) loop.
  HLoopInformationOutwardIterator it(*new_header);
  for (it.Advance(); !it.Done(); it.Advance()) {
    it.Current()->Add(new_pre_header);
    it.Current()->Add(new_header);
    it.Current()->Add(new_body);
  }
  return new_pre_header;
}

// Debug-only consistency check: `rti` must be assignable to `upper_bound_rti`.
static void CheckAgainstUpperBound(ReferenceTypeInfo rti, ReferenceTypeInfo upper_bound_rti)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (rti.IsValid()) {
    DCHECK(upper_bound_rti.IsSupertypeOf(rti))
        << " upper_bound_rti: " << upper_bound_rti
        << " rti: " << rti;
    DCHECK(!upper_bound_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes() || rti.IsExact())
        << " upper_bound_rti: " << upper_bound_rti
        << " rti: " << rti;
  }
}

// Stores `rti` in this instruction's packed reference-type fields after
// debug-only validation.
void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
  if (kIsDebugBuild) {
    DCHECK_EQ(GetType(), Primitive::kPrimNot);
    ScopedObjectAccess soa(Thread::Current());
    DCHECK(rti.IsValid()) << "Invalid RTI for " << DebugName();
    if (IsBoundType()) {
      // Having the test here spares us from making the method virtual just for
      // the sake of a DCHECK.
      CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound());
    }
  }
  reference_type_handle_ = rti.GetTypeHandle();
  SetPackedFlag<kFlagReferenceTypeIsExact>(rti.IsExact());
}
2514 CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound()); 2515 } 2516 } 2517 reference_type_handle_ = rti.GetTypeHandle(); 2518 SetPackedFlag<kFlagReferenceTypeIsExact>(rti.IsExact()); 2519 } 2520 2521 void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null) { 2522 if (kIsDebugBuild) { 2523 ScopedObjectAccess soa(Thread::Current()); 2524 DCHECK(upper_bound.IsValid()); 2525 DCHECK(!upper_bound_.IsValid()) << "Upper bound should only be set once."; 2526 CheckAgainstUpperBound(GetReferenceTypeInfo(), upper_bound); 2527 } 2528 upper_bound_ = upper_bound; 2529 SetPackedFlag<kFlagUpperCanBeNull>(can_be_null); 2530 } 2531 2532 ReferenceTypeInfo ReferenceTypeInfo::Create(TypeHandle type_handle, bool is_exact) { 2533 if (kIsDebugBuild) { 2534 ScopedObjectAccess soa(Thread::Current()); 2535 DCHECK(IsValidHandle(type_handle)); 2536 if (!is_exact) { 2537 DCHECK(!type_handle->CannotBeAssignedFromOtherTypes()) 2538 << "Callers of ReferenceTypeInfo::Create should ensure is_exact is properly computed"; 2539 } 2540 } 2541 return ReferenceTypeInfo(type_handle, is_exact); 2542 } 2543 2544 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs) { 2545 ScopedObjectAccess soa(Thread::Current()); 2546 os << "[" 2547 << " is_valid=" << rhs.IsValid() 2548 << " type=" << (!rhs.IsValid() ? "?" : mirror::Class::PrettyClass(rhs.GetTypeHandle().Get())) 2549 << " is_exact=" << rhs.IsExact() 2550 << " ]"; 2551 return os; 2552 } 2553 2554 bool HInstruction::HasAnyEnvironmentUseBefore(HInstruction* other) { 2555 // For now, assume that instructions in different blocks may use the 2556 // environment. 2557 // TODO: Use the control flow to decide if this is true. 2558 if (GetBlock() != other->GetBlock()) { 2559 return true; 2560 } 2561 2562 // We know that we are in the same block. Walk from 'this' to 'other', 2563 // checking to see if there is any instruction with an environment. 
2564 HInstruction* current = this; 2565 for (; current != other && current != nullptr; current = current->GetNext()) { 2566 // This is a conservative check, as the instruction result may not be in 2567 // the referenced environment. 2568 if (current->HasEnvironment()) { 2569 return true; 2570 } 2571 } 2572 2573 // We should have been called with 'this' before 'other' in the block. 2574 // Just confirm this. 2575 DCHECK(current != nullptr); 2576 return false; 2577 } 2578 2579 void HInvoke::SetIntrinsic(Intrinsics intrinsic, 2580 IntrinsicNeedsEnvironmentOrCache needs_env_or_cache, 2581 IntrinsicSideEffects side_effects, 2582 IntrinsicExceptions exceptions) { 2583 intrinsic_ = intrinsic; 2584 IntrinsicOptimizations opt(this); 2585 2586 // Adjust method's side effects from intrinsic table. 2587 switch (side_effects) { 2588 case kNoSideEffects: SetSideEffects(SideEffects::None()); break; 2589 case kReadSideEffects: SetSideEffects(SideEffects::AllReads()); break; 2590 case kWriteSideEffects: SetSideEffects(SideEffects::AllWrites()); break; 2591 case kAllSideEffects: SetSideEffects(SideEffects::AllExceptGCDependency()); break; 2592 } 2593 2594 if (needs_env_or_cache == kNoEnvironmentOrCache) { 2595 opt.SetDoesNotNeedDexCache(); 2596 opt.SetDoesNotNeedEnvironment(); 2597 } else { 2598 // If we need an environment, that means there will be a call, which can trigger GC. 2599 SetSideEffects(GetSideEffects().Union(SideEffects::CanTriggerGC())); 2600 } 2601 // Adjust method's exception status from intrinsic table. 
2602 SetCanThrow(exceptions == kCanThrow); 2603 } 2604 2605 bool HNewInstance::IsStringAlloc() const { 2606 ScopedObjectAccess soa(Thread::Current()); 2607 return GetReferenceTypeInfo().IsStringClass(); 2608 } 2609 2610 bool HInvoke::NeedsEnvironment() const { 2611 if (!IsIntrinsic()) { 2612 return true; 2613 } 2614 IntrinsicOptimizations opt(*this); 2615 return !opt.GetDoesNotNeedEnvironment(); 2616 } 2617 2618 const DexFile& HInvokeStaticOrDirect::GetDexFileForPcRelativeDexCache() const { 2619 ArtMethod* caller = GetEnvironment()->GetMethod(); 2620 ScopedObjectAccess soa(Thread::Current()); 2621 // `caller` is null for a top-level graph representing a method whose declaring 2622 // class was not resolved. 2623 return caller == nullptr ? GetBlock()->GetGraph()->GetDexFile() : *caller->GetDexFile(); 2624 } 2625 2626 bool HInvokeStaticOrDirect::NeedsDexCacheOfDeclaringClass() const { 2627 if (GetMethodLoadKind() != MethodLoadKind::kRuntimeCall) { 2628 return false; 2629 } 2630 if (!IsIntrinsic()) { 2631 return true; 2632 } 2633 IntrinsicOptimizations opt(*this); 2634 return !opt.GetDoesNotNeedDexCache(); 2635 } 2636 2637 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs) { 2638 switch (rhs) { 2639 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: 2640 return os << "StringInit"; 2641 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive: 2642 return os << "Recursive"; 2643 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: 2644 return os << "BootImageLinkTimePcRelative"; 2645 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress: 2646 return os << "DirectAddress"; 2647 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: 2648 return os << "BssEntry"; 2649 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: 2650 return os << "RuntimeCall"; 2651 default: 2652 LOG(FATAL) << "Unknown MethodLoadKind: " << static_cast<int>(rhs); 2653 UNREACHABLE(); 2654 } 2655 } 2656 2657 std::ostream& 
operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs) { 2658 switch (rhs) { 2659 case HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit: 2660 return os << "explicit"; 2661 case HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit: 2662 return os << "implicit"; 2663 case HInvokeStaticOrDirect::ClinitCheckRequirement::kNone: 2664 return os << "none"; 2665 default: 2666 LOG(FATAL) << "Unknown ClinitCheckRequirement: " << static_cast<int>(rhs); 2667 UNREACHABLE(); 2668 } 2669 } 2670 2671 bool HLoadClass::InstructionDataEquals(const HInstruction* other) const { 2672 const HLoadClass* other_load_class = other->AsLoadClass(); 2673 // TODO: To allow GVN for HLoadClass from different dex files, we should compare the type 2674 // names rather than type indexes. However, we shall also have to re-think the hash code. 2675 if (type_index_ != other_load_class->type_index_ || 2676 GetPackedFields() != other_load_class->GetPackedFields()) { 2677 return false; 2678 } 2679 switch (GetLoadKind()) { 2680 case LoadKind::kBootImageAddress: 2681 case LoadKind::kJitTableAddress: { 2682 ScopedObjectAccess soa(Thread::Current()); 2683 return GetClass().Get() == other_load_class->GetClass().Get(); 2684 } 2685 default: 2686 DCHECK(HasTypeReference(GetLoadKind())); 2687 return IsSameDexFile(GetDexFile(), other_load_class->GetDexFile()); 2688 } 2689 } 2690 2691 void HLoadClass::SetLoadKind(LoadKind load_kind) { 2692 SetPackedField<LoadKindField>(load_kind); 2693 2694 if (load_kind != LoadKind::kRuntimeCall && 2695 load_kind != LoadKind::kReferrersClass) { 2696 RemoveAsUserOfInput(0u); 2697 SetRawInputAt(0u, nullptr); 2698 } 2699 2700 if (!NeedsEnvironment()) { 2701 RemoveEnvironment(); 2702 SetSideEffects(SideEffects::None()); 2703 } 2704 } 2705 2706 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs) { 2707 switch (rhs) { 2708 case HLoadClass::LoadKind::kReferrersClass: 2709 return os << "ReferrersClass"; 2710 case 
HLoadClass::LoadKind::kBootImageLinkTimePcRelative: 2711 return os << "BootImageLinkTimePcRelative"; 2712 case HLoadClass::LoadKind::kBootImageAddress: 2713 return os << "BootImageAddress"; 2714 case HLoadClass::LoadKind::kBssEntry: 2715 return os << "BssEntry"; 2716 case HLoadClass::LoadKind::kJitTableAddress: 2717 return os << "JitTableAddress"; 2718 case HLoadClass::LoadKind::kRuntimeCall: 2719 return os << "RuntimeCall"; 2720 default: 2721 LOG(FATAL) << "Unknown HLoadClass::LoadKind: " << static_cast<int>(rhs); 2722 UNREACHABLE(); 2723 } 2724 } 2725 2726 bool HLoadString::InstructionDataEquals(const HInstruction* other) const { 2727 const HLoadString* other_load_string = other->AsLoadString(); 2728 // TODO: To allow GVN for HLoadString from different dex files, we should compare the strings 2729 // rather than their indexes. However, we shall also have to re-think the hash code. 2730 if (string_index_ != other_load_string->string_index_ || 2731 GetPackedFields() != other_load_string->GetPackedFields()) { 2732 return false; 2733 } 2734 switch (GetLoadKind()) { 2735 case LoadKind::kBootImageAddress: 2736 case LoadKind::kJitTableAddress: { 2737 ScopedObjectAccess soa(Thread::Current()); 2738 return GetString().Get() == other_load_string->GetString().Get(); 2739 } 2740 default: 2741 return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile()); 2742 } 2743 } 2744 2745 void HLoadString::SetLoadKind(LoadKind load_kind) { 2746 // Once sharpened, the load kind should not be changed again. 
2747 DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall); 2748 SetPackedField<LoadKindField>(load_kind); 2749 2750 if (load_kind != LoadKind::kRuntimeCall) { 2751 RemoveAsUserOfInput(0u); 2752 SetRawInputAt(0u, nullptr); 2753 } 2754 if (!NeedsEnvironment()) { 2755 RemoveEnvironment(); 2756 SetSideEffects(SideEffects::None()); 2757 } 2758 } 2759 2760 std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs) { 2761 switch (rhs) { 2762 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: 2763 return os << "BootImageLinkTimePcRelative"; 2764 case HLoadString::LoadKind::kBootImageAddress: 2765 return os << "BootImageAddress"; 2766 case HLoadString::LoadKind::kBssEntry: 2767 return os << "BssEntry"; 2768 case HLoadString::LoadKind::kJitTableAddress: 2769 return os << "JitTableAddress"; 2770 case HLoadString::LoadKind::kRuntimeCall: 2771 return os << "RuntimeCall"; 2772 default: 2773 LOG(FATAL) << "Unknown HLoadString::LoadKind: " << static_cast<int>(rhs); 2774 UNREACHABLE(); 2775 } 2776 } 2777 2778 void HInstruction::RemoveEnvironmentUsers() { 2779 for (const HUseListNode<HEnvironment*>& use : GetEnvUses()) { 2780 HEnvironment* user = use.GetUser(); 2781 user->SetRawEnvAt(use.GetIndex(), nullptr); 2782 } 2783 env_uses_.clear(); 2784 } 2785 2786 // Returns an instruction with the opposite Boolean value from 'cond'. 2787 HInstruction* HGraph::InsertOppositeCondition(HInstruction* cond, HInstruction* cursor) { 2788 ArenaAllocator* allocator = GetArena(); 2789 2790 if (cond->IsCondition() && 2791 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType())) { 2792 // Can't reverse floating point conditions. We have to use HBooleanNot in that case. 
2793 HInstruction* lhs = cond->InputAt(0); 2794 HInstruction* rhs = cond->InputAt(1); 2795 HInstruction* replacement = nullptr; 2796 switch (cond->AsCondition()->GetOppositeCondition()) { // get *opposite* 2797 case kCondEQ: replacement = new (allocator) HEqual(lhs, rhs); break; 2798 case kCondNE: replacement = new (allocator) HNotEqual(lhs, rhs); break; 2799 case kCondLT: replacement = new (allocator) HLessThan(lhs, rhs); break; 2800 case kCondLE: replacement = new (allocator) HLessThanOrEqual(lhs, rhs); break; 2801 case kCondGT: replacement = new (allocator) HGreaterThan(lhs, rhs); break; 2802 case kCondGE: replacement = new (allocator) HGreaterThanOrEqual(lhs, rhs); break; 2803 case kCondB: replacement = new (allocator) HBelow(lhs, rhs); break; 2804 case kCondBE: replacement = new (allocator) HBelowOrEqual(lhs, rhs); break; 2805 case kCondA: replacement = new (allocator) HAbove(lhs, rhs); break; 2806 case kCondAE: replacement = new (allocator) HAboveOrEqual(lhs, rhs); break; 2807 default: 2808 LOG(FATAL) << "Unexpected condition"; 2809 UNREACHABLE(); 2810 } 2811 cursor->GetBlock()->InsertInstructionBefore(replacement, cursor); 2812 return replacement; 2813 } else if (cond->IsIntConstant()) { 2814 HIntConstant* int_const = cond->AsIntConstant(); 2815 if (int_const->IsFalse()) { 2816 return GetIntConstant(1); 2817 } else { 2818 DCHECK(int_const->IsTrue()) << int_const->GetValue(); 2819 return GetIntConstant(0); 2820 } 2821 } else { 2822 HInstruction* replacement = new (allocator) HBooleanNot(cond); 2823 cursor->GetBlock()->InsertInstructionBefore(replacement, cursor); 2824 return replacement; 2825 } 2826 } 2827 2828 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs) { 2829 os << "[" 2830 << " source=" << rhs.GetSource() 2831 << " destination=" << rhs.GetDestination() 2832 << " type=" << rhs.GetType() 2833 << " instruction="; 2834 if (rhs.GetInstruction() != nullptr) { 2835 os << rhs.GetInstruction()->DebugName() << ' ' << 
rhs.GetInstruction()->GetId(); 2836 } else { 2837 os << "null"; 2838 } 2839 os << " ]"; 2840 return os; 2841 } 2842 2843 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs) { 2844 switch (rhs) { 2845 case TypeCheckKind::kUnresolvedCheck: 2846 return os << "unresolved_check"; 2847 case TypeCheckKind::kExactCheck: 2848 return os << "exact_check"; 2849 case TypeCheckKind::kClassHierarchyCheck: 2850 return os << "class_hierarchy_check"; 2851 case TypeCheckKind::kAbstractClassCheck: 2852 return os << "abstract_class_check"; 2853 case TypeCheckKind::kInterfaceCheck: 2854 return os << "interface_check"; 2855 case TypeCheckKind::kArrayObjectCheck: 2856 return os << "array_object_check"; 2857 case TypeCheckKind::kArrayCheck: 2858 return os << "array_check"; 2859 default: 2860 LOG(FATAL) << "Unknown TypeCheckKind: " << static_cast<int>(rhs); 2861 UNREACHABLE(); 2862 } 2863 } 2864 2865 std::ostream& operator<<(std::ostream& os, const MemBarrierKind& kind) { 2866 switch (kind) { 2867 case MemBarrierKind::kAnyStore: 2868 return os << "AnyStore"; 2869 case MemBarrierKind::kLoadAny: 2870 return os << "LoadAny"; 2871 case MemBarrierKind::kStoreStore: 2872 return os << "StoreStore"; 2873 case MemBarrierKind::kAnyAny: 2874 return os << "AnyAny"; 2875 case MemBarrierKind::kNTStoreStore: 2876 return os << "NTStoreStore"; 2877 2878 default: 2879 LOG(FATAL) << "Unknown MemBarrierKind: " << static_cast<int>(kind); 2880 UNREACHABLE(); 2881 } 2882 } 2883 2884 } // namespace art 2885