/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "prepare_for_register_allocation.h"

#include "dex/dex_file_types.h"
#include "driver/compiler_options.h"
#include "jni/jni_internal.h"
#include "optimizing_compiler_stats.h"
#include "well_known_classes.h"

namespace art {

void PrepareForRegisterAllocation::Run() {
  // Order does not matter.
  for (HBasicBlock* block : GetGraph()->GetReversePostOrder()) {
    // No need to visit the phis.
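    // Note: an `HInstructionIteratorHandleChanges` is used because the visitors below
    // may remove the current instruction or instructions that follow it from the block.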
    for (HInstructionIteratorHandleChanges inst_it(block->GetInstructions()); !inst_it.Done();
         inst_it.Advance()) {
      inst_it.Current()->Accept(this);
    }
  }
}

void PrepareForRegisterAllocation::VisitCheckCast(HCheckCast* check_cast) {
  // Record only those bitstring type checks that make it to the codegen stage.
  if (check_cast->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) {
    MaybeRecordStat(stats_, MethodCompilationStat::kBitstringTypeCheck);
  }
}

void PrepareForRegisterAllocation::VisitInstanceOf(HInstanceOf* instance_of) {
  // Record only those bitstring type checks that make it to the codegen stage.
  if (instance_of->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) {
    MaybeRecordStat(stats_, MethodCompilationStat::kBitstringTypeCheck);
  }
}

void PrepareForRegisterAllocation::VisitNullCheck(HNullCheck* check) {
  check->ReplaceWith(check->InputAt(0));
  if (compiler_options_.GetImplicitNullChecks()) {
    HInstruction* next = check->GetNext();

    // The `PrepareForRegisterAllocation` pass removes `HBoundType` from the graph anyway,
    // so remove any that directly follow the null check now; otherwise they would hide
    // the instruction that could carry the implicit null check.
    while (next->IsBoundType()) {
      next = next->GetNext();
      VisitBoundType(next->GetPrevious()->AsBoundType());
    }
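    // If the next instruction can fault when the checked reference is null, let it carry
    // the null check: no explicit test is emitted, and the runtime's fault handler is
    // expected to turn the resulting access violation into the NullPointerException.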
    if (next->CanDoImplicitNullCheckOn(check->InputAt(0))) {
      check->MarkEmittedAtUseSite();
    }
  }
}

void PrepareForRegisterAllocation::VisitDivZeroCheck(HDivZeroCheck* check) {
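  // The check simply forwards its input, so redirect users to that input and let the
  // register allocator treat them as one value. The check instruction itself stays in
  // the graph and still generates the runtime zero test.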
  check->ReplaceWith(check->InputAt(0));
}

void PrepareForRegisterAllocation::VisitDeoptimize(HDeoptimize* deoptimize) {
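  // A guarding `HDeoptimize` produces its guarded input as its result so that users stay
  // ordered after the deopt point. No code motion happens after this pass, so those users
  // can safely be redirected back to the guarded instruction and the guard dropped.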
  if (deoptimize->GuardsAnInput()) {
    // Replace the uses with the actual guarded instruction.
    deoptimize->ReplaceWith(deoptimize->GuardedInput());
    deoptimize->RemoveGuard();
  }
}

void PrepareForRegisterAllocation::VisitBoundsCheck(HBoundsCheck* check) {
  check->ReplaceWith(check->InputAt(0));
  if (check->IsStringCharAt()) {
    // Add a fake environment for String.charAt() inline info as we want the exception
    // to appear as being thrown from there. Skip if we're compiling String.charAt() itself.
    ArtMethod* char_at_method = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt);
    if (GetGraph()->GetArtMethod() != char_at_method) {
      ArenaAllocator* allocator = GetGraph()->GetAllocator();
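      // The synthetic environment records no vreg values: naming the method is enough
      // to make the exception appear to be thrown from String.charAt().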
      HEnvironment* environment = new (allocator) HEnvironment(allocator,
                                                                /* number_of_vregs= */ 0u,
                                                                char_at_method,
                                                                /* dex_pc= */ dex::kDexNoIndex,
                                                                check);
      check->InsertRawEnvironment(environment);
    }
  }
}

void PrepareForRegisterAllocation::VisitBoundType(HBoundType* bound_type) {
  bound_type->ReplaceWith(bound_type->InputAt(0));
  bound_type->GetBlock()->RemoveInstruction(bound_type);
}

void PrepareForRegisterAllocation::VisitArraySet(HArraySet* instruction) {
  HInstruction* value = instruction->GetValue();
  // PrepareForRegisterAllocation::VisitBoundType may have replaced a
  // BoundType (as value input of this ArraySet) with a NullConstant.
  // If so, this ArraySet no longer needs a type check.
  if (value->IsNullConstant()) {
    DCHECK_EQ(value->GetType(), DataType::Type::kReference);
    if (instruction->NeedsTypeCheck()) {
      instruction->ClearNeedsTypeCheck();
    }
  }
}

void PrepareForRegisterAllocation::VisitClinitCheck(HClinitCheck* check) {
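  // A class initialization check can be taken over by a static invoke or a new-instance
  // originating from the same dex instruction, or folded into its `HLoadClass`. Only if
  // neither merge is possible does the `HClinitCheck` remain as an explicit check.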
  // Try to find a static invoke or a new-instance from which this check originated.
  HInstruction* implicit_clinit = nullptr;
  for (const HUseListNode<HInstruction*>& use : check->GetUses()) {
    HInstruction* user = use.GetUser();
    if ((user->IsInvokeStaticOrDirect() || user->IsNewInstance()) &&
        CanMoveClinitCheck(check, user)) {
      implicit_clinit = user;
      if (user->IsInvokeStaticOrDirect()) {
        DCHECK(user->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck());
        user->AsInvokeStaticOrDirect()->RemoveExplicitClinitCheck(
            HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
      } else {
        DCHECK(user->IsNewInstance());
        // We delegate the initialization duty to the allocation.
        if (user->AsNewInstance()->GetEntrypoint() == kQuickAllocObjectInitialized) {
          user->AsNewInstance()->SetEntrypoint(kQuickAllocObjectResolved);
        }
      }
      break;
    }
  }
  // If we found a static invoke or new-instance for merging, remove the check
  // from dominated static invokes.
  if (implicit_clinit != nullptr) {
    const HUseList<HInstruction*>& uses = check->GetUses();
    for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
      HInstruction* user = it->GetUser();
      // All other uses must be dominated.
      DCHECK(implicit_clinit->StrictlyDominates(user) || (implicit_clinit == user));
      ++it;  // Advance before removing the node; the reference to the next node stays valid.
      if (user->IsInvokeStaticOrDirect()) {
        user->AsInvokeStaticOrDirect()->RemoveExplicitClinitCheck(
            HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);
      }
    }
  }

  HLoadClass* load_class = check->GetLoadClass();
  bool can_merge_with_load_class = CanMoveClinitCheck(load_class, check);

  check->ReplaceWith(load_class);

  if (implicit_clinit != nullptr) {
    // Remove the check from the graph. It has been merged into the invoke or new-instance.
    check->GetBlock()->RemoveInstruction(check);
    // Check if we can merge the load class as well.
    if (can_merge_with_load_class && !load_class->HasUses()) {
      load_class->GetBlock()->RemoveInstruction(load_class);
    }
  } else if (can_merge_with_load_class &&
             load_class->GetLoadKind() != HLoadClass::LoadKind::kRuntimeCall) {
    DCHECK(!load_class->NeedsAccessCheck());
    // Pass the initialization duty to the `HLoadClass` instruction,
    // and remove the instruction from the graph.
    DCHECK(load_class->HasEnvironment());
    load_class->SetMustGenerateClinitCheck(true);
    check->GetBlock()->RemoveInstruction(check);
  }
}

bool PrepareForRegisterAllocation::CanEmitConditionAt(HCondition* condition,
                                                      HInstruction* user) const {
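  // Only fold a condition into a user that immediately follows it; codegen can then emit
  // the compare as part of the branch, deoptimization or select instead of materializing
  // a boolean value in a register.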
  if (condition->GetNext() != user) {
    return false;
  }

  if (user->IsIf() || user->IsDeoptimize()) {
    return true;
  }

  if (user->IsSelect() && user->AsSelect()->GetCondition() == condition) {
    return true;
  }

  return false;
}

void PrepareForRegisterAllocation::VisitCondition(HCondition* condition) {
  if (condition->HasOnlyOneNonEnvironmentUse()) {
    HInstruction* user = condition->GetUses().front().GetUser();
    if (CanEmitConditionAt(condition, user)) {
      condition->MarkEmittedAtUseSite();
    }
  }
}

void PrepareForRegisterAllocation::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // Trivially remove redundant HConstructorFence when it immediately follows an HNewInstance
  // to an uninitialized class. In this special case, the art_quick_alloc_object_resolved
  // will already have the 'dmb' which is strictly stronger than an HConstructorFence.
  //
  // The instruction builder always emits "x = HNewInstance; HConstructorFence(x)" so this
  // is effectively pattern-matching that particular case and undoing the redundancy the builder
  // had introduced.
  //
  // TODO: Move this to a separate pass.
  HInstruction* allocation_inst = constructor_fence->GetAssociatedAllocation();
  if (allocation_inst != nullptr && allocation_inst->IsNewInstance()) {
    HNewInstance* new_inst = allocation_inst->AsNewInstance();
    // This relies on the entrypoint already being set to the more optimized version;
    // as that happens in this pass, this redundancy removal also cannot happen any earlier.
    if (new_inst != nullptr && new_inst->GetEntrypoint() == kQuickAllocObjectResolved) {
      // If this was done in an earlier pass, we would want to verify that the allocation is
      // an input of the `constructor_fence`. However, since this pass removes the inputs to
      // the fence, we can ignore the inputs and just remove the instruction from its block.
      DCHECK_EQ(1u, constructor_fence->InputCount());
      // TODO: GetAssociatedAllocation should not care about multiple inputs
      // if we are in prepare_for_register_allocation pass only.
      constructor_fence->GetBlock()->RemoveInstruction(constructor_fence);
      MaybeRecordStat(stats_,
                      MethodCompilationStat::kConstructorFenceRemovedPFRA);
      return;
    }

    // HNewArray does not need this check because the art_quick_alloc_array does not itself
    // have a dmb in any normal situation (i.e. the array class is never exactly in the
    // "resolved" state). If the array class is not yet loaded, it will always go from
    // Unloaded->Initialized state.
  }

  // Remove all the inputs to the constructor fence;
  // they aren't used by the InstructionCodeGenerator and this lets us avoid creating a
  // LocationSummary in the LocationsBuilder.
  constructor_fence->RemoveAllInputs();
}

void PrepareForRegisterAllocation::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  if (invoke->IsStaticWithExplicitClinitCheck()) {
    HInstruction* last_input = invoke->GetInputs().back();
    DCHECK(last_input->IsLoadClass())
        << "Last input is not HLoadClass. It is " << last_input->DebugName();

    // Detach the explicit class initialization check from the invoke.
    // Keeping track of the initializing instruction is no longer required
    // at this stage (i.e., after inlining has been performed).
    invoke->RemoveExplicitClinitCheck(HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);

    // Merging with load class should have happened in VisitClinitCheck().
    DCHECK(!CanMoveClinitCheck(last_input, invoke));
  }
}

bool PrepareForRegisterAllocation::CanMoveClinitCheck(HInstruction* input,
                                                      HInstruction* user) const {
  // Determine if input and user come from the same dex instruction, so that we can move
  // the clinit check responsibility from one to the other, i.e. from HClinitCheck (user)
  // to HLoadClass (input), or from HClinitCheck (input) to HInvokeStaticOrDirect (user),
  // or from HLoadClass (input) to HNewInstance (user).

  // Start with a quick dex pc check.
  if (user->GetDexPc() != input->GetDexPc()) {
    return false;
  }

  // Now do a thorough environment check that this is really coming from the same instruction in
  // the same inlined graph. Unfortunately, we have to go through the whole environment chain.
  HEnvironment* user_environment = user->GetEnvironment();
  HEnvironment* input_environment = input->GetEnvironment();
  while (user_environment != nullptr || input_environment != nullptr) {
    if (user_environment == nullptr || input_environment == nullptr) {
      // Different environment chain length. This happens when a method is called
      // once directly and once indirectly through another inlined method.
      return false;
    }
    if (user_environment->GetDexPc() != input_environment->GetDexPc() ||
        user_environment->GetMethod() != input_environment->GetMethod()) {
      return false;
    }
    user_environment = user_environment->GetParent();
    input_environment = input_environment->GetParent();
  }

  // Check for code motion taking the input to a different block.
  if (user->GetBlock() != input->GetBlock()) {
    return false;
  }

  // In debug mode, check that we have not inserted a throwing instruction
  // or an instruction with side effects between input and user.
  if (kIsDebugBuild) {
    for (HInstruction* between = input->GetNext(); between != user; between = between->GetNext()) {
      CHECK(between != nullptr);  // User must be after input in the same block.
      CHECK(!between->CanThrow());
      CHECK(!between->HasSideEffects());
    }
  }
  return true;
}

void PrepareForRegisterAllocation::VisitTypeConversion(HTypeConversion* instruction) {
  // For simplicity, our code generators don't handle implicit type conversion, so ensure
  // there are none before hitting codegen.
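  // An implicit conversion is one that needs no code at all, e.g. converting a narrow
  // integral value to int when it is already held sign- or zero-extended in a register,
  // so the conversion can simply be removed.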
  if (instruction->IsImplicitConversion()) {
    instruction->ReplaceWith(instruction->GetInput());
    instruction->GetBlock()->RemoveInstruction(instruction);
  }
}

}  // namespace art