/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "indirect_reference_table-inl.h"

#include "base/dumpable-inl.h"
#include "base/systrace.h"
#include "java_vm_ext.h"
#include "jni_internal.h"
#include "nth_caller_visitor.h"
#include "reference_table.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"
#include "utils.h"

#include <cstdlib>

namespace art {

static constexpr bool kDumpStackOnNonLocalReference = false;
static constexpr bool kDebugIRT = false;

const char* GetIndirectRefKindString(const IndirectRefKind& kind) {
  switch (kind) {
    case kHandleScopeOrInvalid:
      return "HandleScopeOrInvalid";
    case kLocal:
      return "Local";
    case kGlobal:
      return "Global";
    case kWeakGlobal:
      return "WeakGlobal";
  }
  return "IndirectRefKind Error";
}

void IndirectReferenceTable::AbortIfNoCheckJNI(const std::string& msg) {
  // If -Xcheck:jni is on, it'll give a more detailed error before aborting.
  JavaVMExt* vm = Runtime::Current()->GetJavaVM();
  if (!vm->IsCheckJniEnabled()) {
    // Otherwise, we want to abort rather than hand back a bad reference.
    LOG(FATAL) << msg;
  } else {
    LOG(ERROR) << msg;
  }
}

IndirectReferenceTable::IndirectReferenceTable(size_t max_count,
                                               IndirectRefKind desired_kind,
                                               ResizableCapacity resizable,
                                               std::string* error_msg)
    : segment_state_(kIRTFirstSegment),
      kind_(desired_kind),
      max_entries_(max_count),
      current_num_holes_(0),
      resizable_(resizable) {
  CHECK(error_msg != nullptr);
  CHECK_NE(desired_kind, kHandleScopeOrInvalid);

  const size_t table_bytes = max_count * sizeof(IrtEntry);
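  // Anonymous mappings are zero-initialized, so every entry starts out as a null reference.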
  table_mem_map_.reset(MemMap::MapAnonymous("indirect ref table", nullptr, table_bytes,
                                            PROT_READ | PROT_WRITE, false, false, error_msg));
  if (table_mem_map_.get() == nullptr && error_msg->empty()) {
    *error_msg = "Unable to map memory for indirect ref table";
  }

  if (table_mem_map_.get() != nullptr) {
    table_ = reinterpret_cast<IrtEntry*>(table_mem_map_->Begin());
  } else {
    table_ = nullptr;
  }
  segment_state_ = kIRTFirstSegment;
  last_known_previous_state_ = kIRTFirstSegment;
}

IndirectReferenceTable::~IndirectReferenceTable() {
}

void IndirectReferenceTable::ConstexprChecks() {
  // Use this for some assertions. They can't be put into the header as C++ wants the class
  // to be complete.

  // Check kind.
  static_assert((EncodeIndirectRefKind(kLocal) & (~kKindMask)) == 0, "Kind encoding error");
  static_assert((EncodeIndirectRefKind(kGlobal) & (~kKindMask)) == 0, "Kind encoding error");
  static_assert((EncodeIndirectRefKind(kWeakGlobal) & (~kKindMask)) == 0, "Kind encoding error");
  static_assert(DecodeIndirectRefKind(EncodeIndirectRefKind(kLocal)) == kLocal,
                "Kind encoding error");
  static_assert(DecodeIndirectRefKind(EncodeIndirectRefKind(kGlobal)) == kGlobal,
                "Kind encoding error");
  static_assert(DecodeIndirectRefKind(EncodeIndirectRefKind(kWeakGlobal)) == kWeakGlobal,
                "Kind encoding error");

  // Check serial.
  static_assert(DecodeSerial(EncodeSerial(0u)) == 0u, "Serial encoding error");
  static_assert(DecodeSerial(EncodeSerial(1u)) == 1u, "Serial encoding error");
  static_assert(DecodeSerial(EncodeSerial(2u)) == 2u, "Serial encoding error");
  static_assert(DecodeSerial(EncodeSerial(3u)) == 3u, "Serial encoding error");

  // Check table index.
  static_assert(DecodeIndex(EncodeIndex(0u)) == 0u, "Index encoding error");
  static_assert(DecodeIndex(EncodeIndex(1u)) == 1u, "Index encoding error");
  static_assert(DecodeIndex(EncodeIndex(2u)) == 2u, "Index encoding error");
  static_assert(DecodeIndex(EncodeIndex(3u)) == 3u, "Index encoding error");
}

bool IndirectReferenceTable::IsValid() const {
  return table_mem_map_.get() != nullptr;
}

// Holes:
//
// To keep the IRT compact, we want to fill "holes" created by non-stack-discipline Add & Remove
// operation sequences. For simplicity and lower memory overhead, we do not use a free list or
// similar. Instead, we scan for holes, with the expectation that we will find holes fast as they
// are usually near the end of the table (see the header, TODO: verify this assumption). To avoid
// scans when there are no holes, the number of known holes should be tracked.
//
// A previous implementation stored the top index and the number of holes as the segment state.
// That constrains the maximum number of references to 16 bits. We want to relax this, as it
// is easy to require more references (e.g., to list all classes in large applications). Thus,
// the implicitly stack-stored state, the IRTSegmentState, is only the top index.
//
// The hole count is therefore a local property of the current segment, and needs to be recovered
// when (or after) a frame is pushed or popped. To keep JNI transitions simple (and inlineable),
// we cannot do work when the segment changes. Thus, Add and Remove need to ensure that the
// current hole count is correct.
//
// To be able to detect segment changes, we require an additional local field that describes
// the known segment. This is last_known_previous_state_. The requirement becomes clear with
// the following (partly non-trivial) cases that have to be supported:
//
// 1) Segment with holes (current_num_holes_ > 0), push new segment, add/remove reference
// 2) Segment with holes (current_num_holes_ > 0), pop segment, add/remove reference
// 3) Segment with holes (current_num_holes_ > 0), push new segment, pop segment, add/remove
//    reference
// 4) Empty segment, push new segment, create a hole, pop a segment, add/remove a reference
// 5) Base segment, push new segment, create a hole, pop a segment, push new segment, add/remove
//    reference
//
// Storing the last known *previous* state (bottom index) allows conservatively detecting all the
// segment changes above. The condition is simply that the last known state is greater than or
// equal to the current previous state, and smaller than the current state (top index). The
// condition is conservative as it adds O(1) overhead to operations on an empty segment.
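//
// Illustrative example (case 1 above, with made-up numbers): suppose the base segment covers
// indices [0, 10) and currently has two holes (current_num_holes_ == 2,
// last_known_previous_state_.top_index == 0). A new local frame is pushed, so subsequent
// operations pass previous_state with top_index == 10. On the next Add, RecoverHoles sees that
// last_known_previous_state_.top_index (0) is below the new previous state (10), recounts the
// null entries in [10, top_index) -- zero in a freshly pushed segment -- and then records
// previous_state as the new last known previous state.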

static size_t CountNullEntries(const IrtEntry* table, size_t from, size_t to) {
  size_t count = 0;
  for (size_t index = from; index != to; ++index) {
    if (table[index].GetReference()->IsNull()) {
      count++;
    }
  }
  return count;
}

void IndirectReferenceTable::RecoverHoles(IRTSegmentState prev_state) {
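  // The cached hole count is only valid while last_known_previous_state_ lies within the
  // current segment, i.e. in [prev_state.top_index, segment_state_.top_index). Otherwise the
  // segment has changed since the count was taken and it must be recomputed.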
  if (last_known_previous_state_.top_index >= segment_state_.top_index ||
      last_known_previous_state_.top_index < prev_state.top_index) {
    const size_t top_index = segment_state_.top_index;
    size_t count = CountNullEntries(table_, prev_state.top_index, top_index);

    if (kDebugIRT) {
      LOG(INFO) << "+++ Recovered holes: "
                << " Current prev=" << prev_state.top_index
                << " Current top_index=" << top_index
                << " Old num_holes=" << current_num_holes_
                << " New num_holes=" << count;
    }

    current_num_holes_ = count;
    last_known_previous_state_ = prev_state;
  } else if (kDebugIRT) {
    LOG(INFO) << "No need to recover holes";
  }
}

ALWAYS_INLINE
static inline void CheckHoleCount(IrtEntry* table,
                                  size_t exp_num_holes,
                                  IRTSegmentState prev_state,
                                  IRTSegmentState cur_state) {
  if (kIsDebugBuild) {
    size_t count = CountNullEntries(table, prev_state.top_index, cur_state.top_index);
    CHECK_EQ(exp_num_holes, count) << "prevState=" << prev_state.top_index
                                   << " topIndex=" << cur_state.top_index;
  }
}

bool IndirectReferenceTable::Resize(size_t new_size, std::string* error_msg) {
  CHECK_GT(new_size, max_entries_);

  const size_t table_bytes = new_size * sizeof(IrtEntry);
  std::unique_ptr<MemMap> new_map(MemMap::MapAnonymous("indirect ref table",
                                                       nullptr,
                                                       table_bytes,
                                                       PROT_READ | PROT_WRITE,
                                                       false,
                                                       false,
                                                       error_msg));
  if (new_map == nullptr) {
    return false;
  }

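  // Copy the existing entries into the larger mapping; replacing table_mem_map_ below
  // releases (unmaps) the old table.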
  memcpy(new_map->Begin(), table_mem_map_->Begin(), table_mem_map_->Size());
  table_mem_map_ = std::move(new_map);
  table_ = reinterpret_cast<IrtEntry*>(table_mem_map_->Begin());
  max_entries_ = new_size;

  return true;
}

IndirectRef IndirectReferenceTable::Add(IRTSegmentState previous_state,
                                        ObjPtr<mirror::Object> obj) {
  if (kDebugIRT) {
    LOG(INFO) << "+++ Add: previous_state=" << previous_state.top_index
              << " top_index=" << segment_state_.top_index
              << " last_known_prev_top_index=" << last_known_previous_state_.top_index
              << " holes=" << current_num_holes_;
  }

  size_t top_index = segment_state_.top_index;

  CHECK(obj != nullptr);
  VerifyObject(obj);
  DCHECK(table_ != nullptr);

  if (top_index == max_entries_) {
    if (resizable_ == ResizableCapacity::kNo) {
      LOG(FATAL) << "JNI ERROR (app bug): " << kind_ << " table overflow "
                 << "(max=" << max_entries_ << ")\n"
                 << MutatorLockedDumpable<IndirectReferenceTable>(*this);
      UNREACHABLE();
    }

    // Try to double the table's capacity.
    std::string error_msg;
    if (!Resize(max_entries_ * 2, &error_msg)) {
      LOG(FATAL) << "JNI ERROR (app bug): " << kind_ << " table overflow "
                 << "(max=" << max_entries_ << ")" << std::endl
                 << MutatorLockedDumpable<IndirectReferenceTable>(*this)
                 << " Resizing failed: " << error_msg;
      UNREACHABLE();
    }
  }

  RecoverHoles(previous_state);
  CheckHoleCount(table_, current_num_holes_, previous_state, segment_state_);

  // We know there's enough room in the table.  Now we just need to find
  // the right spot.  If there's a hole, find it and fill it; otherwise,
  // add to the end of the list.
  IndirectRef result;
  size_t index;
  if (current_num_holes_ > 0) {
    DCHECK_GT(top_index, 1U);
    // Find the first hole; likely to be near the end of the list.
    IrtEntry* p_scan = &table_[top_index - 1];
    DCHECK(!p_scan->GetReference()->IsNull());
    --p_scan;
    while (!p_scan->GetReference()->IsNull()) {
      DCHECK_GE(p_scan, table_ + previous_state.top_index);
      --p_scan;
    }
    index = p_scan - table_;
    current_num_holes_--;
  } else {
    // Add to the end.
    index = top_index++;
    segment_state_.top_index = top_index;
  }
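  // Publish the object in the chosen slot and construct the opaque indirect reference for it.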
  table_[index].Add(obj);
  result = ToIndirectRef(index);
  if (kDebugIRT) {
    LOG(INFO) << "+++ added at " << ExtractIndex(result) << " top=" << segment_state_.top_index
              << " holes=" << current_num_holes_;
  }

  DCHECK(result != nullptr);
  return result;
}

void IndirectReferenceTable::AssertEmpty() {
  for (size_t i = 0; i < Capacity(); ++i) {
    if (!table_[i].GetReference()->IsNull()) {
      LOG(FATAL) << "Internal Error: non-empty local reference table\n"
                 << MutatorLockedDumpable<IndirectReferenceTable>(*this);
      UNREACHABLE();
    }
  }
}

// Removes an object. We extract the table offset bits from "iref"
// and zap the corresponding entry, leaving a hole if it's not at the top.
// If the entry is not between the current top index and the bottom index
// specified by the cookie, we don't remove anything. This is the behavior
// required by JNI's DeleteLocalRef function.
// This method is not called when a local frame is popped; this is only used
// for explicit single removals.
// Returns "false" if nothing was removed.
bool IndirectReferenceTable::Remove(IRTSegmentState previous_state, IndirectRef iref) {
  if (kDebugIRT) {
    LOG(INFO) << "+++ Remove: previous_state=" << previous_state.top_index
              << " top_index=" << segment_state_.top_index
              << " last_known_prev_top_index=" << last_known_previous_state_.top_index
              << " holes=" << current_num_holes_;
  }

  const uint32_t top_index = segment_state_.top_index;
  const uint32_t bottom_index = previous_state.top_index;

  DCHECK(table_ != nullptr);

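  // A kHandleScopeOrInvalid reference is not an IRT entry at all. If it points into the
  // thread's handle scope (e.g. a direct argument handed to DeleteLocalRef), there is nothing
  // to remove here: warn under CheckJNI and report success.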
  if (GetIndirectRefKind(iref) == kHandleScopeOrInvalid) {
    auto* self = Thread::Current();
    if (self->HandleScopeContains(reinterpret_cast<jobject>(iref))) {
      auto* env = self->GetJniEnv();
      DCHECK(env != nullptr);
      if (env->check_jni) {
        ScopedObjectAccess soa(self);
        LOG(WARNING) << "Attempt to remove non-JNI local reference, dumping thread";
        if (kDumpStackOnNonLocalReference) {
          self->Dump(LOG_STREAM(WARNING));
        }
      }
      return true;
    }
  }
  const uint32_t idx = ExtractIndex(iref);
  if (idx < bottom_index) {
    // Wrong segment.
    LOG(WARNING) << "Attempt to remove index outside index area (" << idx
                 << " vs " << bottom_index << "-" << top_index << ")";
    return false;
  }
  if (idx >= top_index) {
    // Bad --- stale reference?
    LOG(WARNING) << "Attempt to remove invalid index " << idx
                 << " (bottom=" << bottom_index << " top=" << top_index << ")";
    return false;
  }

  RecoverHoles(previous_state);
  CheckHoleCount(table_, current_num_holes_, previous_state, segment_state_);

  if (idx == top_index - 1) {
    // Top-most entry.  Scan up and consume holes.

    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    if (current_num_holes_ != 0) {
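      // Walk down from the old top index, consuming any trailing holes, so that the new top
      // index ends up just past the highest live entry (or at the segment bottom).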
      uint32_t collapse_top_index = top_index;
      while (--collapse_top_index > bottom_index && current_num_holes_ != 0) {
        if (kDebugIRT) {
          ScopedObjectAccess soa(Thread::Current());
          LOG(INFO) << "+++ checking for hole at " << collapse_top_index - 1
                    << " (previous_state=" << bottom_index << ") val="
                    << table_[collapse_top_index - 1].GetReference()->Read<kWithoutReadBarrier>();
        }
        if (!table_[collapse_top_index - 1].GetReference()->IsNull()) {
          break;
        }
        if (kDebugIRT) {
          LOG(INFO) << "+++ ate hole at " << (collapse_top_index - 1);
        }
        current_num_holes_--;
      }
      segment_state_.top_index = collapse_top_index;

      CheckHoleCount(table_, current_num_holes_, previous_state, segment_state_);
    } else {
      segment_state_.top_index = top_index - 1;
      if (kDebugIRT) {
        LOG(INFO) << "+++ ate last entry " << top_index - 1;
      }
    }
  } else {
    // Not the top-most entry.  This creates a hole.  We null out the entry to prevent somebody
    // from deleting it twice and screwing up the hole count.
    if (table_[idx].GetReference()->IsNull()) {
      LOG(INFO) << "--- WEIRD: removing null entry " << idx;
      return false;
    }
    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    current_num_holes_++;
    CheckHoleCount(table_, current_num_holes_, previous_state, segment_state_);
    if (kDebugIRT) {
      LOG(INFO) << "+++ left hole at " << idx << ", holes=" << current_num_holes_;
    }
  }

  return true;
}

void IndirectReferenceTable::Trim() {
  ScopedTrace trace(__PRETTY_FUNCTION__);
  const size_t top_index = Capacity();
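  // Return the pages past the live entries to the kernel; the mapping itself stays reserved,
  // so the table can grow into the released range again later.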
  auto* release_start = AlignUp(reinterpret_cast<uint8_t*>(&table_[top_index]), kPageSize);
  uint8_t* release_end = table_mem_map_->End();
  madvise(release_start, release_end - release_start, MADV_DONTNEED);
}

void IndirectReferenceTable::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) {
  BufferedRootVisitor<kDefaultBufferedRootCount> root_visitor(visitor, root_info);
  for (auto ref : *this) {
    if (!ref->IsNull()) {
      root_visitor.VisitRoot(*ref);
      DCHECK(!ref->IsNull());
    }
  }
}

void IndirectReferenceTable::Dump(std::ostream& os) const {
  os << kind_ << " table dump:\n";
  ReferenceTable::Table entries;
  for (size_t i = 0; i < Capacity(); ++i) {
    ObjPtr<mirror::Object> obj = table_[i].GetReference()->Read<kWithoutReadBarrier>();
    if (obj != nullptr) {
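      // Entry is live: re-read it, this time through the read barrier, before recording it.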
      obj = table_[i].GetReference()->Read();
      entries.push_back(GcRoot<mirror::Object>(obj));
    }
  }
  ReferenceTable::Dump(os, entries);
}

void IndirectReferenceTable::SetSegmentState(IRTSegmentState new_state) {
  if (kDebugIRT) {
    LOG(INFO) << "Setting segment state: "
              << segment_state_.top_index
              << " -> "
              << new_state.top_index;
  }
  segment_state_ = new_state;
}

}  // namespace art