Home | History | Annotate | Download | only in blink_gc_plugin
      1 // Copyright 2014 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "Config.h"
      6 #include "RecordInfo.h"
      7 
      8 using namespace clang;
      9 using std::string;
     10 
// Constructs a RecordInfo for |record|. All analysis is deferred: each
// cached property starts in its "not yet computed" state and is filled
// in lazily by the corresponding query method.
RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
    : cache_(cache),
      record_(record),
      name_(record->getName()),
      fields_need_tracing_(TracingStatus::Unknown()),
      bases_(0),   // Lazily built by GetBases().
      fields_(0),  // Lazily built by GetFields().
      is_stack_allocated_(kNotComputed),
      is_non_newable_(kNotComputed),
      is_only_placement_newable_(kNotComputed),
      does_need_finalization_(kNotComputed),
      determined_trace_methods_(false),
      trace_method_(0),
      trace_dispatch_method_(0),
      finalize_dispatch_method_(0),
      is_gc_derived_(false),
      // A non-null |base_paths_| marks IsGCDerived() as already computed.
      base_paths_(0) {}
     28 
// Releases the lazily computed collections; any of these may still be
// null if the corresponding query was never made (delete on null is a
// no-op).
RecordInfo::~RecordInfo() {
  delete fields_;
  delete bases_;
  delete base_paths_;
}
     34 
     35 // Get |count| number of template arguments. Returns false if there
     36 // are fewer than |count| arguments or any of the arguments are not
     37 // of a valid Type structure. If |count| is non-positive, all
     38 // arguments are collected.
     39 bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
     40   ClassTemplateSpecializationDecl* tmpl =
     41       dyn_cast<ClassTemplateSpecializationDecl>(record_);
     42   if (!tmpl)
     43     return false;
     44   const TemplateArgumentList& args = tmpl->getTemplateArgs();
     45   if (args.size() < count)
     46     return false;
     47   if (count <= 0)
     48     count = args.size();
     49   for (unsigned i = 0; i < count; ++i) {
     50     TemplateArgument arg = args[i];
     51     if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
     52       output_args->push_back(arg.getAsType().getTypePtr());
     53     } else {
     54       return false;
     55     }
     56   }
     57   return true;
     58 }
     59 
     60 // Test if a record is a HeapAllocated collection.
     61 bool RecordInfo::IsHeapAllocatedCollection() {
     62   if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
     63     return false;
     64 
     65   TemplateArgs args;
     66   if (GetTemplateArgs(0, &args)) {
     67     for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
     68       if (CXXRecordDecl* decl = (*it)->getAsCXXRecordDecl())
     69         if (decl->getName() == kHeapAllocatorName)
     70           return true;
     71     }
     72   }
     73 
     74   return Config::IsGCCollection(name_);
     75 }
     76 
     77 static bool IsGCBaseCallback(const CXXBaseSpecifier* specifier,
     78                              CXXBasePath& path,
     79                              void* data) {
     80   if (CXXRecordDecl* record = specifier->getType()->getAsCXXRecordDecl())
     81     return Config::IsGCBase(record->getName());
     82   return false;
     83 }
     84 
// Test if a record is derived from a garbage collected base.
bool RecordInfo::IsGCDerived() {
  // If already computed, return the known result.
  // A non-null |base_paths_| doubles as the "already computed" flag.
  if (base_paths_)
    return is_gc_derived_;

  // (FindAmbiguities, RecordPaths, DetectVirtual) — paths are recorded
  // so IsGCFinalized()/IsGCMixin() can inspect them later.
  base_paths_ = new CXXBasePaths(true, true, false);

  if (!record_->hasDefinition())
    return false;  // |is_gc_derived_| stays false, memoized above.

  // The base classes are not themselves considered garbage collected objects.
  if (Config::IsGCBase(name_))
    return false;

  // Walk the inheritance tree to find GC base classes.
  is_gc_derived_ = record_->lookupInBases(IsGCBaseCallback, 0, *base_paths_);
  return is_gc_derived_;
}
    104 
    105 bool RecordInfo::IsGCFinalized() {
    106   if (!IsGCDerived())
    107     return false;
    108   for (CXXBasePaths::paths_iterator it = base_paths_->begin();
    109        it != base_paths_->end();
    110        ++it) {
    111     const CXXBasePathElement& elem = (*it)[it->size() - 1];
    112     CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
    113     if (Config::IsGCFinalizedBase(base->getName()))
    114       return true;
    115   }
    116   return false;
    117 }
    118 
    119 // A GC mixin is a class that inherits from a GC mixin base and has
    120 // not yet been "mixed in" with another GC base class.
    121 bool RecordInfo::IsGCMixin() {
    122   if (!IsGCDerived() || base_paths_->begin() == base_paths_->end())
    123     return false;
    124   for (CXXBasePaths::paths_iterator it = base_paths_->begin();
    125        it != base_paths_->end();
    126        ++it) {
    127       // Get the last element of the path.
    128       const CXXBasePathElement& elem = (*it)[it->size() - 1];
    129       CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
    130       // If it is not a mixin base we are done.
    131       if (!Config::IsGCMixinBase(base->getName()))
    132           return false;
    133   }
    134   // This is a mixin if all GC bases are mixins.
    135   return true;
    136 }
    137 
// Test if a record is allocated on the managed heap: either it derives
// from a garbage collected base or it is a heap allocated collection.
bool RecordInfo::IsGCAllocated() {
  return IsGCDerived() || IsHeapAllocatedCollection();
}
    142 
    143 RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
    144   // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
    145   if (!record || Config::IsIgnoreAnnotated(record))
    146     return 0;
    147   Cache::iterator it = cache_.find(record);
    148   if (it != cache_.end())
    149     return &it->second;
    150   return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
    151               .first->second;
    152 }
    153 
    154 bool RecordInfo::IsStackAllocated() {
    155   if (is_stack_allocated_ == kNotComputed) {
    156     is_stack_allocated_ = kFalse;
    157     for (Bases::iterator it = GetBases().begin();
    158          it != GetBases().end();
    159          ++it) {
    160       if (it->second.info()->IsStackAllocated()) {
    161         is_stack_allocated_ = kTrue;
    162         return is_stack_allocated_;
    163       }
    164     }
    165     for (CXXRecordDecl::method_iterator it = record_->method_begin();
    166          it != record_->method_end();
    167          ++it) {
    168       if (it->getNameAsString() == kNewOperatorName &&
    169           it->isDeleted() &&
    170           Config::IsStackAnnotated(*it)) {
    171         is_stack_allocated_ = kTrue;
    172         return is_stack_allocated_;
    173       }
    174     }
    175   }
    176   return is_stack_allocated_;
    177 }
    178 
// A record is non-newable if it declares at least one operator new and
// every declared operator new is deleted. The result is memoized.
bool RecordInfo::IsNonNewable() {
  if (is_non_newable_ == kNotComputed) {
    // |deleted| holds the status of the last operator new seen; since
    // |all_deleted| is the conjunction over all of them, the final
    // (deleted && all_deleted) is true exactly when at least one
    // operator new was found and all were deleted.
    bool deleted = false;
    bool all_deleted = true;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        deleted = it->isDeleted();
        all_deleted = all_deleted && deleted;
      }
    }
    is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
  }
  return is_non_newable_;
}
    195 
// A record is only placement-newable if its ordinary (one-parameter)
// operator new is deleted while a two-parameter operator new is
// available. The result is memoized.
bool RecordInfo::IsOnlyPlacementNewable() {
  if (is_only_placement_newable_ == kNotComputed) {
    bool placement = false;
    bool new_deleted = false;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        if (it->getNumParams() == 1) {
          // Ordinary operator new (size only).
          new_deleted = it->isDeleted();
        } else if (it->getNumParams() == 2) {
          // Two-parameter form — presumably placement new (size, void*);
          // TODO confirm the second parameter type is not checked on purpose.
          placement = !it->isDeleted();
        }
      }
    }
    is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
  }
  return is_only_placement_newable_;
}
    215 
    216 CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
    217   for (CXXRecordDecl::method_iterator it = record_->method_begin();
    218        it != record_->method_end();
    219        ++it) {
    220     if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
    221       return *it;
    222   }
    223   return 0;
    224 }
    225 
// An object requires a tracing method if it has any fields that need tracing
// or if it inherits from multiple bases that need tracing.
bool RecordInfo::RequiresTraceMethod() {
  if (IsStackAllocated())
    return false;  // Stack allocated objects are never traced.
  unsigned bases_with_trace = 0;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.NeedsTracing().IsNeeded())
      ++bases_with_trace;
  }
  // A single traced base can be delegated to directly; more than one
  // requires a trace method of our own.
  if (bases_with_trace > 1)
    return true;
  // Called for its side effect: the first call computes
  // |fields_need_tracing_|.
  GetFields();
  return fields_need_tracing_.IsNeeded();
}
    241 
// Get the actual tracing method (ie, can be traceAfterDispatch if there is a
// dispatch method). Lazily computed by DetermineTracingMethods();
// returns 0 if the record has no trace method.
CXXMethodDecl* RecordInfo::GetTraceMethod() {
  DetermineTracingMethods();
  return trace_method_;
}
    248 
// Get the static trace dispatch method (possibly inherited from a
// base); 0 if none. Lazily computed by DetermineTracingMethods().
CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
  DetermineTracingMethods();
  return trace_dispatch_method_;
}
    254 
// Get the finalize dispatch method (possibly inherited from a base);
// 0 if none. Lazily computed by DetermineTracingMethods().
CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
  DetermineTracingMethods();
  return finalize_dispatch_method_;
}
    259 
// Returns the lazily computed map of this record's direct bases.
RecordInfo::Bases& RecordInfo::GetBases() {
  if (!bases_)
    bases_ = CollectBases();
  return *bases_;
}
    265 
    266 bool RecordInfo::InheritsTrace() {
    267   if (GetTraceMethod())
    268     return true;
    269   for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    270     if (it->second.info()->InheritsTrace())
    271       return true;
    272   }
    273   return false;
    274 }
    275 
    276 CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
    277   if (CXXMethodDecl* trace = GetTraceMethod())
    278     return trace->isVirtual() ? 0 : trace;
    279   for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    280     if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
    281       return trace;
    282   }
    283   return 0;
    284 }
    285 
    286 // A (non-virtual) class is considered abstract in Blink if it has
    287 // no public constructors and no create methods.
    288 bool RecordInfo::IsConsideredAbstract() {
    289   for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
    290        it != record_->ctor_end();
    291        ++it) {
    292     if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
    293       return false;
    294   }
    295   for (CXXRecordDecl::method_iterator it = record_->method_begin();
    296        it != record_->method_end();
    297        ++it) {
    298     if (it->getNameAsString() == kCreateName)
    299       return false;
    300   }
    301   return true;
    302 }
    303 
    304 RecordInfo::Bases* RecordInfo::CollectBases() {
    305   // Compute the collection locally to avoid inconsistent states.
    306   Bases* bases = new Bases;
    307   if (!record_->hasDefinition())
    308     return bases;
    309   for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
    310        it != record_->bases_end();
    311        ++it) {
    312     const CXXBaseSpecifier& spec = *it;
    313     RecordInfo* info = cache_->Lookup(spec.getType());
    314     if (!info)
    315       continue;
    316     CXXRecordDecl* base = info->record();
    317     TracingStatus status = info->InheritsTrace()
    318                                ? TracingStatus::Needed()
    319                                : TracingStatus::Unneeded();
    320     bases->insert(std::make_pair(base, BasePoint(spec, info, status)));
    321   }
    322   return bases;
    323 }
    324 
// Returns the lazily computed map of this record's fields. As a side
// effect, the first call also computes |fields_need_tracing_|.
RecordInfo::Fields& RecordInfo::GetFields() {
  if (!fields_)
    fields_ = CollectFields();
  return *fields_;
}
    330 
// Builds the map from field declarations to their tracing edges, and
// updates |fields_need_tracing_| to the least upper bound (LUB) of the
// tracing requirements of all non-ignored fields.
RecordInfo::Fields* RecordInfo::CollectFields() {
  // Compute the collection locally to avoid inconsistent states.
  Fields* fields = new Fields;
  if (!record_->hasDefinition())
    return fields;
  TracingStatus fields_status = TracingStatus::Unneeded();
  for (RecordDecl::field_iterator it = record_->field_begin();
       it != record_->field_end();
       ++it) {
    FieldDecl* field = *it;
    // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
    if (Config::IsIgnoreAnnotated(field))
      continue;
    // Fields for which no edge can be created (CreateEdge returns 0,
    // eg for non-pointer, non-record types) are not recorded.
    if (Edge* edge = CreateEdge(field->getType().getTypePtrOrNull())) {
      fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
      fields->insert(std::make_pair(field, FieldPoint(field, edge)));
    }
  }
  fields_need_tracing_ = fields_status;
  return fields;
}
    352 
// Determines |trace_method_|, |trace_dispatch_method_| and
// |finalize_dispatch_method_| (memoized). A class defining
// traceAfterDispatch uses it as the actual trace method, with the
// plain trace acting as the static dispatcher. Dispatch methods not
// defined locally are inherited from base classes.
void RecordInfo::DetermineTracingMethods() {
  if (determined_trace_methods_)
    return;
  determined_trace_methods_ = true;
  // The GC base classes themselves are not analyzed.
  if (Config::IsGCBase(name_))
    return;
  CXXMethodDecl* trace = 0;
  CXXMethodDecl* traceAfterDispatch = 0;
  bool isTraceAfterDispatch;
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (Config::IsTraceMethod(*it, &isTraceAfterDispatch)) {
      if (isTraceAfterDispatch) {
        traceAfterDispatch = *it;
      } else {
        trace = *it;
      }
    } else if (it->getNameAsString() == kFinalizeName) {
      // A locally defined finalization dispatcher.
      finalize_dispatch_method_ = *it;
    }
  }
  if (traceAfterDispatch) {
    trace_method_ = traceAfterDispatch;
    trace_dispatch_method_ = trace;
  } else {
    // TODO: Can we never have a dispatch method called trace without the same
    // class defining a traceAfterDispatch method?
    trace_method_ = trace;
    trace_dispatch_method_ = 0;
  }
  if (trace_dispatch_method_ && finalize_dispatch_method_)
    return;
  // If this class does not define dispatching methods inherit them.
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    // TODO: Does it make sense to inherit multiple dispatch methods?
    if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
      assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
      trace_dispatch_method_ = dispatch;
    }
    if (CXXMethodDecl* dispatch =
            it->second.info()->GetFinalizeDispatchMethod()) {
      assert(!finalize_dispatch_method_ &&
             "Multiple finalize dispatching methods");
      finalize_dispatch_method_ = dispatch;
    }
  }
}
    401 
// TODO: Add classes with a finalize() method that specialize FinalizerTrait.
// A class needs finalization when its destructor is non-trivial for a
// reason other than safely-ignorable base destructors. The tri-state
// result is memoized in |does_need_finalization_|.
bool RecordInfo::NeedsFinalization() {
  if (does_need_finalization_ == kNotComputed) {
    // Rely on hasNonTrivialDestructor(), but if the only
    // identifiable reason for it being true is the presence
    // of a safely ignorable class as a direct base,
    // or we're processing such an 'ignorable' class, then it does
    // not need finalization.
    does_need_finalization_ =
        record_->hasNonTrivialDestructor() ? kTrue : kFalse;
    if (!does_need_finalization_)
      return does_need_finalization_;

    // Processing a class with a safely-ignorable destructor.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(record_->getDeclContext());
    if (ns && Config::HasIgnorableDestructor(ns->getName(), name_)) {
      does_need_finalization_ = kFalse;
      return does_need_finalization_;
    }

    // From here on |does_need_finalization_| is kTrue; each early
    // return below confirms finalization is needed.
    // A user-provided destructor always needs to run.
    CXXDestructorDecl* dtor = record_->getDestructor();
    if (dtor && dtor->isUserProvided())
      return does_need_finalization_;
    // So does any field needing finalization.
    for (Fields::iterator it = GetFields().begin();
         it != GetFields().end();
         ++it) {
      if (it->second.edge()->NeedsFinalization())
        return does_need_finalization_;
    }

    // Likewise any base needing finalization.
    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->NeedsFinalization())
        return does_need_finalization_;
    }
    // Destructor was non-trivial due to bases with destructors that
    // can be safely ignored. Hence, no need for finalization.
    does_need_finalization_ = kFalse;
  }
  return does_need_finalization_;
}
    445 
// A class needs tracing if:
// - it is allocated on the managed heap,
// - it is derived from a class that needs tracing, or
// - it contains fields that need tracing.
// TODO: Defining NeedsTracing based on whether a class defines a trace method
// (of the proper signature) over approximates too much. The use of transition
// types causes some classes to have trace methods without them needing to be
// traced.
TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
  if (IsGCAllocated())
    return TracingStatus::Needed();

  if (IsStackAllocated())
    return TracingStatus::Unneeded();

  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->NeedsTracing(option).IsNeeded())
      return TracingStatus::Needed();
  }

  // For a recursive query, force the field collection so that
  // |fields_need_tracing_| is computed before being returned. (For a
  // non-recursive query it may still be Unknown.)
  if (option == Edge::kRecursive)
    GetFields();

  return fields_need_tracing_;
}
    471 
// Creates the points-to edge for a field of type |type|, or 0 when no
// edge applies (null type, or a type that is neither a pointer nor a
// cached C++ record). Known smart-pointer and collection templates
// produce dedicated edge kinds wrapping the edges of their template
// arguments; any other record becomes a plain Value edge.
Edge* RecordInfo::CreateEdge(const Type* type) {
  if (!type) {
    return 0;
  }

  // Raw pointer fields wrap the pointee's edge.
  if (type->isPointerType()) {
    if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
      return new RawPtr(ptr, false);
    return 0;
  }

  RecordInfo* info = cache_->Lookup(type);

  // If the type is neither a pointer or a C++ record we ignore it.
  if (!info) {
    return 0;
  }

  TemplateArgs args;

  // RawPtr<T> behaves like T* but is marked as the template form.
  if (Config::IsRawPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RawPtr(ptr, true);
    return 0;
  }

  if (Config::IsRefPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RefPtr(ptr);
    return 0;
  }

  if (Config::IsOwnPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new OwnPtr(ptr);
    return 0;
  }

  if (Config::IsMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new Member(ptr);
    return 0;
  }

  if (Config::IsWeakMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new WeakMember(ptr);
    return 0;
  }

  if (Config::IsPersistent(info->name())) {
    // Persistent might refer to v8::Persistent, so check the name space.
    // TODO: Consider using a more canonical identification than names.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    if (!ns || ns->getName() != "blink")
      return 0;
    if (!info->GetTemplateArgs(1, &args))
      return 0;
    if (Edge* ptr = CreateEdge(args[0]))
      return new Persistent(ptr);
    return 0;
  }

  if (Config::IsGCCollection(info->name()) ||
      Config::IsWTFCollection(info->name())) {
    bool is_root = Config::IsPersistentGCCollection(info->name());
    bool on_heap = is_root || info->IsHeapAllocatedCollection();
    // A collection wraps one member edge per value-type argument; the
    // number of such arguments depends on the collection kind.
    size_t count = Config::CollectionDimension(info->name());
    if (!info->GetTemplateArgs(count, &args))
      return 0;
    Collection* edge = new Collection(info, on_heap, is_root);
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (Edge* member = CreateEdge(*it)) {
        edge->members().push_back(member);
      }
      // TODO: Handle the case where we fail to create an edge (eg, if the
      // argument is a primitive type or just not fully known yet).
    }
    return edge;
  }

  // Any other record type is handled by value.
  return new Value(info);
}
    556