/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INTERN_TABLE_H_
#define ART_RUNTIME_INTERN_TABLE_H_

#include <unordered_set>

#include "atomic.h"
#include "base/allocator.h"
#include "base/hash_set.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "gc/weak_root_state.h"

namespace art {

class IsMarkedVisitor;

namespace gc {
namespace space {
class ImageSpace;
}  // namespace space
}  // namespace gc

enum VisitRootFlags : uint8_t;

namespace mirror {
class String;
}  // namespace mirror
class Transaction;

/**
 * Used to intern strings.
 *
 * There are actually two tables: one that holds strong references to its strings, and one that
 * holds weak references. The former is used for string literals, for which there is an effective
 * reference from the constant pool. The latter is used for strings interned at runtime via
 * String.intern. Some code (XML parsers being a prime example) relies on being able to intern
 * arbitrarily many strings for the duration of a parse without permanently increasing the memory
 * footprint.
 */
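
// Illustrative usage sketch (not part of the original header): callers typically obtain the table
// from the runtime and pick the strong or weak variant depending on whether the table itself
// should keep the string alive. Runtime::Current()->GetInternTable() is assumed here as the usual
// accessor; the Intern* methods are the ones declared below, and `some_string` is a placeholder.
//
//   InternTable* intern_table = Runtime::Current()->GetInternTable();
//   // String literal: goes into the strong table and is kept alive by it.
//   ObjPtr<mirror::String> literal = intern_table->InternStrong("hello");
//   // Runtime String.intern(): goes into the weak table and may be collected once unreferenced.
//   ObjPtr<mirror::String> interned = intern_table->InternWeak(some_string);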
class InternTable {
 public:
  InternTable();

  // Interns a potentially new string in the 'strong' table. May cause thread suspension.
  ObjPtr<mirror::String> InternStrong(int32_t utf16_length, const char* utf8_data)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);

  // Only used by image writer. Special version that may not cause thread suspension since the GC
  // cannot be running while we are doing image writing. May be called while holding a lock since
  // there will not be thread suspension.
  ObjPtr<mirror::String> InternStrongImageString(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Interns a potentially new string in the 'strong' table. May cause thread suspension.
  ObjPtr<mirror::String> InternStrong(const char* utf8_data) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Roles::uninterruptible_);

  // Interns a potentially new string in the 'strong' table. May cause thread suspension.
  ObjPtr<mirror::String> InternStrong(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Roles::uninterruptible_);

  // Interns a potentially new string in the 'weak' table. May cause thread suspension.
  ObjPtr<mirror::String> InternWeak(ObjPtr<mirror::String> s) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Roles::uninterruptible_);

  void SweepInternTableWeaks(IsMarkedVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::intern_table_lock_);

  bool ContainsWeak(ObjPtr<mirror::String> s) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::intern_table_lock_);

  // Looks up a strong intern; returns null if not found.
  ObjPtr<mirror::String> LookupStrong(Thread* self, ObjPtr<mirror::String> s)
      REQUIRES(!Locks::intern_table_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ObjPtr<mirror::String> LookupStrong(Thread* self, uint32_t utf16_length, const char* utf8_data)
      REQUIRES(!Locks::intern_table_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Looks up a weak intern; returns null if not found.
  ObjPtr<mirror::String> LookupWeak(Thread* self, ObjPtr<mirror::String> s)
      REQUIRES(!Locks::intern_table_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Total number of interned strings.
  size_t Size() const REQUIRES(!Locks::intern_table_lock_);

  // Total number of strongly live interned strings.
  size_t StrongSize() const REQUIRES(!Locks::intern_table_lock_);

  // Total number of weakly live interned strings.
  size_t WeakSize() const REQUIRES(!Locks::intern_table_lock_);

  void VisitRoots(RootVisitor* visitor, VisitRootFlags flags)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::intern_table_lock_);

  void DumpForSigQuit(std::ostream& os) const REQUIRES(!Locks::intern_table_lock_);

  void BroadcastForNewInterns();

  // Adds all of the resolved image strings from the image spaces into the intern table. The
  // advantage of doing this is that it avoids expensive DexFile::FindStringId calls.
  void AddImagesStringsToTable(const std::vector<gc::space::ImageSpace*>& image_spaces)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::intern_table_lock_);

  // Add a new intern table to insert into. Previous intern tables are still there but are no
  // longer inserted into and are ideally unmodified. This is done to prevent dirty pages.
  void AddNewTable()
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::intern_table_lock_);

  // Read the intern table from memory. The elements aren't copied; the intern hash set data will
  // point to somewhere within ptr. Only reads the strong interns.
  size_t AddTableFromMemory(const uint8_t* ptr) REQUIRES(!Locks::intern_table_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Write the post-zygote intern table to a pointer. Only writes the strong interns, since there
  // are expected to be no weak interns when this is called from the image writer.
  size_t WriteToMemory(uint8_t* ptr) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::intern_table_lock_);

  // Change the weak root state. May broadcast to waiters.
  void ChangeWeakRootState(gc::WeakRootState new_state)
      REQUIRES(!Locks::intern_table_lock_);

 private:
  // Modified UTF-8-encoded string treated as UTF16.
  class Utf8String {
   public:
    Utf8String(uint32_t utf16_length, const char* utf8_data, int32_t hash)
        : hash_(hash), utf16_length_(utf16_length), utf8_data_(utf8_data) { }

    int32_t GetHash() const { return hash_; }
    uint32_t GetUtf16Length() const { return utf16_length_; }
    const char* GetUtf8Data() const { return utf8_data_; }

   private:
    int32_t hash_;
    uint32_t utf16_length_;
    const char* utf8_data_;
  };

  class StringHashEquals {
   public:
    std::size_t operator()(const GcRoot<mirror::String>& root) const NO_THREAD_SAFETY_ANALYSIS;
    bool operator()(const GcRoot<mirror::String>& a, const GcRoot<mirror::String>& b) const
        NO_THREAD_SAFETY_ANALYSIS;

    // Utf8String can be used for lookup.
    std::size_t operator()(const Utf8String& key) const {
      // A cast to prevent undesired sign extension.
      return static_cast<uint32_t>(key.GetHash());
    }

    bool operator()(const GcRoot<mirror::String>& a, const Utf8String& b) const
        NO_THREAD_SAFETY_ANALYSIS;
  };
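
  // Illustrative note (an assumption, not text from the original header): the Utf8String overloads
  // in StringHashEquals let the intern table be probed directly with modified-UTF-8 data plus a
  // precomputed hash, without first allocating a mirror::String. This is the path behind the
  // LookupStrong(Thread*, uint32_t, const char*) overload declared above; a hypothetical call site
  // might look like:
  //
  //   // utf8_data and utf16_length would typically come from a dex file string_id.
  //   ObjPtr<mirror::String> existing = intern_table->LookupStrong(self, utf16_length, utf8_data);
  //   if (existing == nullptr) {
  //     // Not interned yet; the caller may allocate the string and InternStrong() it.
  //   }
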
  class GcRootEmptyFn {
   public:
    void MakeEmpty(GcRoot<mirror::String>& item) const {
      item = GcRoot<mirror::String>();
    }
    bool IsEmpty(const GcRoot<mirror::String>& item) const {
      return item.IsNull();
    }
  };

  // Table which holds pre-zygote and post-zygote interned strings. There is one instance for the
  // strong interns and one for the weak interns.
  class Table {
   public:
    Table();
    ObjPtr<mirror::String> Find(ObjPtr<mirror::String> s) REQUIRES_SHARED(Locks::mutator_lock_)
        REQUIRES(Locks::intern_table_lock_);
    ObjPtr<mirror::String> Find(const Utf8String& string) REQUIRES_SHARED(Locks::mutator_lock_)
        REQUIRES(Locks::intern_table_lock_);
    void Insert(ObjPtr<mirror::String> s) REQUIRES_SHARED(Locks::mutator_lock_)
        REQUIRES(Locks::intern_table_lock_);
    void Remove(ObjPtr<mirror::String> s)
        REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
    void VisitRoots(RootVisitor* visitor)
        REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
    void SweepWeaks(IsMarkedVisitor* visitor)
        REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
    // Add a new intern table that will only be inserted into from now on.
    void AddNewTable() REQUIRES(Locks::intern_table_lock_);
    size_t Size() const REQUIRES(Locks::intern_table_lock_);
    // Read and add an intern table from ptr.
    // Tables read are inserted at the front of the table array. Only checks for conflicts in
    // debug builds. Returns how many bytes were read.
    size_t AddTableFromMemory(const uint8_t* ptr)
        REQUIRES(Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
    // Write the intern tables to ptr; if there are multiple tables, they are combined into a
    // single one. Returns how many bytes were written.
    size_t WriteToMemory(uint8_t* ptr)
        REQUIRES(Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

   private:
    typedef HashSet<GcRoot<mirror::String>, GcRootEmptyFn, StringHashEquals, StringHashEquals,
        TrackingAllocator<GcRoot<mirror::String>, kAllocatorTagInternTable>> UnorderedSet;

    void SweepWeaks(UnorderedSet* set, IsMarkedVisitor* visitor)
        REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);

    // We call AddNewTable when we create the zygote to reduce private dirty pages caused by
    // modifying the zygote intern table. The back of tables_ is modified when strings are interned.
    std::vector<UnorderedSet> tables_;

    ART_FRIEND_TEST(InternTableTest, CrossHash);
  };
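
  // Sketch of the intended access pattern for tables_ (an assumption drawn from the comments
  // above, not a copy of the real implementation in intern_table.cc): lookups probe every
  // UnorderedSet in tables_, while insertions only ever touch tables_.back(), so the earlier
  // sets (e.g. ones read from an image or created before the zygote fork) are never modified
  // and their memory pages stay clean.
  //
  //   // Hypothetical shape of a lookup over the table stack:
  //   for (UnorderedSet& set : tables_) {
  //     auto it = set.Find(GcRoot<mirror::String>(s));
  //     if (it != set.end()) {
  //       return it->Read();
  //     }
  //   }
  //   return nullptr;
  //   // ...while an insert would only do: tables_.back().Insert(GcRoot<mirror::String>(s));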

  // Insert if non-null, otherwise return null. Must be called holding the mutator lock.
  // If holding_locks is true, then we may also hold other locks; in that case we require that the
  // GC is not running, since it is not safe to wait while holding locks.
  ObjPtr<mirror::String> Insert(ObjPtr<mirror::String> s, bool is_strong, bool holding_locks)
      REQUIRES(!Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::String> LookupStrongLocked(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  ObjPtr<mirror::String> LookupWeakLocked(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  ObjPtr<mirror::String> InsertStrong(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  ObjPtr<mirror::String> InsertWeak(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  void RemoveStrong(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  void RemoveWeak(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);

  // Transaction rollback access.
  ObjPtr<mirror::String> InsertStrongFromTransaction(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  ObjPtr<mirror::String> InsertWeakFromTransaction(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  void RemoveStrongFromTransaction(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);
  void RemoveWeakFromTransaction(ObjPtr<mirror::String> s)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::intern_table_lock_);

  size_t AddTableFromMemoryLocked(const uint8_t* ptr)
      REQUIRES(Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  // Change the weak root state. May broadcast to waiters.
  void ChangeWeakRootStateLocked(gc::WeakRootState new_state)
      REQUIRES(Locks::intern_table_lock_);

  // Wait until we can read weak roots.
  void WaitUntilAccessible(Thread* self)
      REQUIRES(Locks::intern_table_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  bool log_new_roots_ GUARDED_BY(Locks::intern_table_lock_);
  ConditionVariable weak_intern_condition_ GUARDED_BY(Locks::intern_table_lock_);
  // Since this contains (strong) roots, they need a read barrier to
  // enable concurrent intern table (strong) root scan. Do not
  // directly access the strings in it. Use functions that contain
  // read barriers.
  Table strong_interns_ GUARDED_BY(Locks::intern_table_lock_);
  std::vector<GcRoot<mirror::String>> new_strong_intern_roots_
      GUARDED_BY(Locks::intern_table_lock_);
  // Since this contains (weak) roots, they need a read barrier. Do
  // not directly access the strings in it. Use functions that contain
  // read barriers.
  Table weak_interns_ GUARDED_BY(Locks::intern_table_lock_);
  // Weak root state, used for concurrent system weak processing and more.
  gc::WeakRootState weak_root_state_ GUARDED_BY(Locks::intern_table_lock_);

  friend class Transaction;
  ART_FRIEND_TEST(InternTableTest, CrossHash);
  DISALLOW_COPY_AND_ASSIGN(InternTable);
};

}  // namespace art

#endif  // ART_RUNTIME_INTERN_TABLE_H_