/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INTERN_TABLE_H_
#define ART_RUNTIME_INTERN_TABLE_H_

#include <unordered_set>

#include "atomic.h"
#include "base/allocator.h"
#include "base/hash_set.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "gc/weak_root_state.h"
#include "object_callbacks.h"

namespace art {

namespace gc {
namespace space {
class ImageSpace;
}  // namespace space
}  // namespace gc

enum VisitRootFlags : uint8_t;

namespace mirror {
class String;
}  // namespace mirror
class Transaction;

/**
 * Used to intern strings.
 *
 * There are actually two tables: one that holds strong references to its strings, and one that
 * holds weak references. The former is used for string literals, for which there is an effective
 * reference from the constant pool. The latter is used for strings interned at runtime via
 * String.intern. Some code (XML parsers being a prime example) relies on being able to intern
 * arbitrarily many strings for the duration of a parse without permanently increasing the memory
 * footprint.
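 *
 * Illustrative usage (a minimal sketch, not code from this file; it assumes the caller is a
 * runtime thread holding the mutator lock via the usual scoped helpers, and assumes
 * Runtime::Current()->GetInternTable() as the access path):
 *
 *   InternTable* intern_table = Runtime::Current()->GetInternTable();
 *   // Literal-style intern: kept alive by the strong table.
 *   ObjPtr<mirror::String> literal = intern_table->InternStrong("example");
 *   // String.intern-style intern: held weakly, collectible once otherwise unreferenced.
 *   ObjPtr<mirror::String> interned = intern_table->InternWeak(some_string);  // some_string: hypothetical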
 */
class InternTable {
 public:
  InternTable();

  // Interns a potentially new string in the 'strong' table. May cause thread suspension.
  ObjPtr<mirror::String> InternStrong(int32_t utf16_length, const char* utf8_data)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);

  // Only used by the image writer. Special version that does not cause thread suspension, since
  // the GC cannot be running while we are writing the image. May be called while holding a
  // lock, since there will not be thread suspension.
  ObjPtr<mirror::String> InternStrongImageString(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Interns a potentially new string in the 'strong' table. May cause thread suspension.
  ObjPtr<mirror::String> InternStrong(const char* utf8_data) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Roles::uninterruptible_);

  // Interns a potentially new string in the 'strong' table. May cause thread suspension.
  ObjPtr<mirror::String> InternStrong(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Roles::uninterruptible_);

  // Interns a potentially new string in the 'weak' table. May cause thread suspension.
  ObjPtr<mirror::String> InternWeak(ObjPtr<mirror::String> s) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Roles::uninterruptible_);

  void SweepInternTableWeaks(IsMarkedVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::intern_table_lock_);

  bool ContainsWeak(ObjPtr<mirror::String> s) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::intern_table_lock_);

  // Lookup a strong intern, returns null if not found.
  ObjPtr<mirror::String> LookupStrong(Thread* self, ObjPtr<mirror::String> s)
      REQUIRES(!Locks::intern_table_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ObjPtr<mirror::String> LookupStrong(Thread* self, uint32_t utf16_length, const char* utf8_data)
      REQUIRES(!Locks::intern_table_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lookup a weak intern, returns null if not found.
  ObjPtr<mirror::String> LookupWeak(Thread* self, ObjPtr<mirror::String> s)
      REQUIRES(!Locks::intern_table_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Total number of interned strings.
  size_t Size() const REQUIRES(!Locks::intern_table_lock_);

  // Total number of strongly live interned strings.
  size_t StrongSize() const REQUIRES(!Locks::intern_table_lock_);

  // Total number of weakly live interned strings.
  size_t WeakSize() const REQUIRES(!Locks::intern_table_lock_);

  void VisitRoots(RootVisitor* visitor, VisitRootFlags flags)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::intern_table_lock_);

  void DumpForSigQuit(std::ostream& os) const REQUIRES(!Locks::intern_table_lock_);

  void BroadcastForNewInterns();

  // Adds all of the resolved image strings from the image spaces into the intern table. This
  // avoids expensive DexFile::FindStringId calls. Sets images_added_to_intern_table_ to true.
  void AddImagesStringsToTable(const std::vector<gc::space::ImageSpace*>& image_spaces)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::intern_table_lock_);

  // Add a new intern table to insert into; previous intern tables are still present but are no
  // longer inserted into and are ideally unmodified. This is done to prevent dirty pages.
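  // A sketch of the intended call pattern (hedged; the exact call site around zygote creation
  // is an assumption here, see the note on tables_ below):
  //
  //   // In the zygote, just before forking app processes:
  //   Runtime::Current()->GetInternTable()->AddNewTable();
  //   // Strings interned afterwards go into the new back table, so the earlier tables stay
  //   // unmodified and their pages remain clean and shareable.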
  void AddNewTable()
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::intern_table_lock_);

  // Read the intern table from memory. The elements aren't copied; the intern hash set data
  // will point to somewhere within ptr. Only reads the strong interns.
  size_t AddTableFromMemory(const uint8_t* ptr) REQUIRES(!Locks::intern_table_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Write the post-zygote intern table to a pointer. Only writes the strong interns, since no
  // weak interns are expected to exist when this is called from the image writer.
  size_t WriteToMemory(uint8_t* ptr) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::intern_table_lock_);
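  // Round-trip sketch for the two methods above (hedged; image_ptr and its required size come
  // from the image writer / image loading machinery and are assumed here):
  //
  //   size_t written = intern_table->WriteToMemory(image_ptr);      // image writer side
  //   ...
  //   size_t read = intern_table->AddTableFromMemory(image_ptr);    // runtime side, at image load
  //   // The loaded hash set is not copied; it points directly into the memory at image_ptr.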

  // Change the weak root state. May broadcast to waiters.
  void ChangeWeakRootState(gc::WeakRootState new_state)
      REQUIRES(!Locks::intern_table_lock_);

 private:
  // Modified UTF-8-encoded string treated as UTF16.
  class Utf8String {
   public:
    Utf8String(uint32_t utf16_length, const char* utf8_data, int32_t hash)
        : hash_(hash), utf16_length_(utf16_length), utf8_data_(utf8_data) { }

    int32_t GetHash() const { return hash_; }
    uint32_t GetUtf16Length() const { return utf16_length_; }
    const char* GetUtf8Data() const { return utf8_data_; }

   private:
    int32_t hash_;
    uint32_t utf16_length_;
    const char* utf8_data_;
  };

  class StringHashEquals {
   public:
    std::size_t operator()(const GcRoot<mirror::String>& root) const NO_THREAD_SAFETY_ANALYSIS;
    bool operator()(const GcRoot<mirror::String>& a, const GcRoot<mirror::String>& b) const
        NO_THREAD_SAFETY_ANALYSIS;

    // Utf8String can be used for lookup.
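    // For example (a sketch; CountModifiedUtf8Chars and ComputeUtf16HashFromModifiedUtf8 from
    // ART's utf.h are assumed to be the helpers used to build the key):
    //
    //   size_t utf16_length = CountModifiedUtf8Chars(utf8_data);
    //   uint32_t hash = ComputeUtf16HashFromModifiedUtf8(utf8_data, utf16_length);
    //   Utf8String key(utf16_length, utf8_data, static_cast<int32_t>(hash));
    //   // The table can then be probed with key directly, with no mirror::String allocation.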
    std::size_t operator()(const Utf8String& key) const {
      // A cast to prevent undesired sign extension.
      return static_cast<uint32_t>(key.GetHash());
    }

    bool operator()(const GcRoot<mirror::String>& a, const Utf8String& b) const
        NO_THREAD_SAFETY_ANALYSIS;
  };
  class GcRootEmptyFn {
   public:
    void MakeEmpty(GcRoot<mirror::String>& item) const {
      item = GcRoot<mirror::String>();
    }
    bool IsEmpty(const GcRoot<mirror::String>& item) const {
      return item.IsNull();
    }
  };

  // Table which holds pre-zygote and post-zygote interned strings. There is one instance each
  // for the weak interns and the strong interns.
  class Table {
   public:
    Table();
    ObjPtr<mirror::String> Find(ObjPtr<mirror::String> s) REQUIRES_SHARED(Locks::mutator_lock_)
        REQUIRES(Locks::intern_table_lock_);
    ObjPtr<mirror::String> Find(const Utf8String& string) REQUIRES_SHARED(Locks::mutator_lock_)
        REQUIRES(Locks::intern_table_lock_);
    void Insert(ObjPtr<mirror::String> s) REQUIRES_SHARED(Locks::mutator_lock_)
        REQUIRES(Locks::intern_table_lock_);
    void Remove(ObjPtr<mirror::String> s)
        REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
    void VisitRoots(RootVisitor* visitor)
        REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
    void SweepWeaks(IsMarkedVisitor* visitor)
        REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
    // Add a new intern table that will only be inserted into from now on.
    void AddNewTable() REQUIRES(Locks::intern_table_lock_);
    size_t Size() const REQUIRES(Locks::intern_table_lock_);
    // Read and add an intern table from ptr.
    // Tables read are inserted at the front of the table array. Only checks for conflicts in
    // debug builds. Returns how many bytes were read.
    size_t AddTableFromMemory(const uint8_t* ptr)
        REQUIRES(Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
    // Write the intern tables to ptr; if there are multiple tables, they are combined into a
    // single one. Returns how many bytes were written.
    size_t WriteToMemory(uint8_t* ptr)
        REQUIRES(Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

   private:
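    // The backing container: ART's HashSet with GcRootEmptyFn marking empty slots,
    // StringHashEquals serving as both hash functor and equality predicate (including the
    // Utf8String overloads for allocation-free lookup), and a TrackingAllocator tagged with
    // kAllocatorTagInternTable for allocation accounting.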
    typedef HashSet<GcRoot<mirror::String>, GcRootEmptyFn, StringHashEquals, StringHashEquals,
        TrackingAllocator<GcRoot<mirror::String>, kAllocatorTagInternTable>> UnorderedSet;

    void SweepWeaks(UnorderedSet* set, IsMarkedVisitor* visitor)
        REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);

    // We call AddNewTable when we create the zygote to reduce the private dirty pages caused by
    // modifying the zygote intern table. The back of tables_ is modified when strings are
    // interned.
    std::vector<UnorderedSet> tables_;

    ART_FRIEND_TEST(InternTableTest, CrossHash);
  };

  // Insert if non null, otherwise return null. Must be called holding the mutator lock.
  // If holding_locks is true, we may also be holding other locks, and we require that the GC
  // is not running, since it is not safe to wait while holding locks.
  ObjPtr<mirror::String> Insert(ObjPtr<mirror::String> s, bool is_strong, bool holding_locks)
      REQUIRES(!Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::String> LookupStrongLocked(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  ObjPtr<mirror::String> LookupWeakLocked(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  ObjPtr<mirror::String> InsertStrong(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  ObjPtr<mirror::String> InsertWeak(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  void RemoveStrong(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  void RemoveWeak(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);

  // Transaction rollback access.
  ObjPtr<mirror::String> InsertStrongFromTransaction(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  ObjPtr<mirror::String> InsertWeakFromTransaction(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  void RemoveStrongFromTransaction(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  void RemoveWeakFromTransaction(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);

  size_t AddTableFromMemoryLocked(const uint8_t* ptr)
      REQUIRES(Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  // Change the weak root state. May broadcast to waiters.
  void ChangeWeakRootStateLocked(gc::WeakRootState new_state)
      REQUIRES(Locks::intern_table_lock_);

  // Wait until we can read weak roots.
  void WaitUntilAccessible(Thread* self)
      REQUIRES(Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  bool log_new_roots_ GUARDED_BY(Locks::intern_table_lock_);
  ConditionVariable weak_intern_condition_ GUARDED_BY(Locks::intern_table_lock_);
  // Since this contains (strong) roots, they need a read barrier to
  // enable concurrent intern table (strong) root scan. Do not
  // directly access the strings in it. Use functions that contain
  // read barriers.
  Table strong_interns_ GUARDED_BY(Locks::intern_table_lock_);
  std::vector<GcRoot<mirror::String>> new_strong_intern_roots_
      GUARDED_BY(Locks::intern_table_lock_);
  // Since this contains (weak) roots, they need a read barrier. Do
  // not directly access the strings in it. Use functions that contain
  // read barriers.
  Table weak_interns_ GUARDED_BY(Locks::intern_table_lock_);
  // Weak root state, used for concurrent system weak processing and more.
  gc::WeakRootState weak_root_state_ GUARDED_BY(Locks::intern_table_lock_);

  friend class Transaction;
  ART_FRIEND_TEST(InternTableTest, CrossHash);
  DISALLOW_COPY_AND_ASSIGN(InternTable);
};

}  // namespace art

#endif  // ART_RUNTIME_INTERN_TABLE_H_