// Extraction artifact from a code-search UI (navigation header); kept as a comment.
      1 // Copyright 2014 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "Config.h"
      6 #include "RecordInfo.h"
      7 #include "clang/Sema/Sema.h"
      8 
      9 using namespace clang;
     10 using std::string;
     11 
     12 RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
     13     : cache_(cache),
     14       record_(record),
     15       name_(record->getName()),
     16       fields_need_tracing_(TracingStatus::Unknown()),
     17       bases_(0),
     18       fields_(0),
     19       is_stack_allocated_(kNotComputed),
     20       is_non_newable_(kNotComputed),
     21       is_only_placement_newable_(kNotComputed),
     22       does_need_finalization_(kNotComputed),
     23       has_gc_mixin_methods_(kNotComputed),
     24       is_declaring_local_trace_(kNotComputed),
     25       is_eagerly_finalized_(kNotComputed),
     26       determined_trace_methods_(false),
     27       trace_method_(0),
     28       trace_dispatch_method_(0),
     29       finalize_dispatch_method_(0),
     30       is_gc_derived_(false) {}
     31 
// RecordInfo owns the collections lazily allocated by CollectFields()
// and CollectBases(); release them here.
RecordInfo::~RecordInfo() {
  delete fields_;
  delete bases_;
}
     36 
     37 // Get |count| number of template arguments. Returns false if there
     38 // are fewer than |count| arguments or any of the arguments are not
     39 // of a valid Type structure. If |count| is non-positive, all
     40 // arguments are collected.
     41 bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
     42   ClassTemplateSpecializationDecl* tmpl =
     43       dyn_cast<ClassTemplateSpecializationDecl>(record_);
     44   if (!tmpl)
     45     return false;
     46   const TemplateArgumentList& args = tmpl->getTemplateArgs();
     47   if (args.size() < count)
     48     return false;
     49   if (count <= 0)
     50     count = args.size();
     51   for (unsigned i = 0; i < count; ++i) {
     52     TemplateArgument arg = args[i];
     53     if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
     54       output_args->push_back(arg.getAsType().getTypePtr());
     55     } else {
     56       return false;
     57     }
     58   }
     59   return true;
     60 }
     61 
     62 // Test if a record is a HeapAllocated collection.
     63 bool RecordInfo::IsHeapAllocatedCollection() {
     64   if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
     65     return false;
     66 
     67   TemplateArgs args;
     68   if (GetTemplateArgs(0, &args)) {
     69     for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
     70       if (CXXRecordDecl* decl = (*it)->getAsCXXRecordDecl())
     71         if (decl->getName() == kHeapAllocatorName)
     72           return true;
     73     }
     74   }
     75 
     76   return Config::IsGCCollection(name_);
     77 }
     78 
// Test if a record is derived from a garbage collected base.
bool RecordInfo::IsGCDerived() {
  // If already computed, return the known result.
  // (walkBases() populates |gc_base_names_|, so a non-empty list doubles
  // as the "already computed" marker.)
  if (gc_base_names_.size())
    return is_gc_derived_;

  // Without a definition the base list is unknown; treat as not derived.
  if (!record_->hasDefinition())
    return false;

  // The base classes are not themselves considered garbage collected objects.
  if (Config::IsGCBase(name_))
    return false;

  // Walk the inheritance tree to find GC base classes.
  walkBases();
  return is_gc_derived_;
}
     96 
     97 CXXRecordDecl* RecordInfo::GetDependentTemplatedDecl(const Type& type) {
     98   const TemplateSpecializationType* tmpl_type =
     99       type.getAs<TemplateSpecializationType>();
    100   if (!tmpl_type)
    101     return 0;
    102 
    103   TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
    104   if (!tmpl_decl)
    105     return 0;
    106 
    107   return dyn_cast_or_null<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
    108 }
    109 
// Depth-first walk over the transitive base classes, recording the name
// of every base recognized by Config::IsGCBase() into |gc_base_names_|
// and setting |is_gc_derived_| when at least one is found.
void RecordInfo::walkBases() {
  // This traversal is akin to CXXRecordDecl::forallBases()'s,
  // but without stepping over dependent bases -- these might also
  // have a "GC base name", so are to be included and considered.
  SmallVector<const CXXRecordDecl*, 8> queue;

  const CXXRecordDecl* base_record = record();
  while (true) {
    for (const auto& it : base_record->bases()) {
      const RecordType* type = it.getType()->getAs<RecordType>();
      CXXRecordDecl* base;
      if (!type)
        // Dependent base (e.g. "class T : public GarbageCollected<T>"):
        // resolve the templated declaration so its name can be checked,
        // but do not recurse into it.
        base = GetDependentTemplatedDecl(*it.getType());
      else {
        base = cast_or_null<CXXRecordDecl>(type->getDecl()->getDefinition());
        // Only bases with a visible definition can be walked further.
        if (base)
          queue.push_back(base);
      }
      if (!base)
        continue;

      const std::string& name = base->getName();
      if (Config::IsGCBase(name)) {
        gc_base_names_.push_back(name);
        is_gc_derived_ = true;
      }
    }

    if (queue.empty())
      break;
    base_record = queue.pop_back_val(); // not actually a queue.
  }
}
    143 
    144 bool RecordInfo::IsGCFinalized() {
    145   if (!IsGCDerived())
    146     return false;
    147   for (const auto& gc_base : gc_base_names_) {
    148     if (Config::IsGCFinalizedBase(gc_base))
    149       return true;
    150   }
    151   return false;
    152 }
    153 
    154 // A GC mixin is a class that inherits from a GC mixin base and has
    155 // not yet been "mixed in" with another GC base class.
    156 bool RecordInfo::IsGCMixin() {
    157   if (!IsGCDerived() || !gc_base_names_.size())
    158     return false;
    159   for (const auto& gc_base : gc_base_names_) {
    160       // If it is not a mixin base we are done.
    161       if (!Config::IsGCMixinBase(gc_base))
    162           return false;
    163   }
    164   // This is a mixin if all GC bases are mixins.
    165   return true;
    166 }
    167 
    168 // Test if a record is allocated on the managed heap.
    169 bool RecordInfo::IsGCAllocated() {
    170   return IsGCDerived() || IsHeapAllocatedCollection();
    171 }
    172 
    173 bool RecordInfo::IsEagerlyFinalized() {
    174   if (is_eagerly_finalized_ != kNotComputed)
    175     return is_eagerly_finalized_;
    176 
    177   is_eagerly_finalized_ = kFalse;
    178   if (!IsGCFinalized())
    179     return is_eagerly_finalized_;
    180 
    181   for (Decl* decl : record_->decls()) {
    182     if (TypedefDecl* typedef_decl = dyn_cast<TypedefDecl>(decl)) {
    183       if (typedef_decl->getNameAsString() != kIsEagerlyFinalizedName)
    184         continue;
    185       is_eagerly_finalized_ = kTrue;
    186       break;
    187     }
    188   }
    189   return is_eagerly_finalized_;
    190 }
    191 
// Thin forwarder: whether the underlying CXXRecordDecl has a definition
// (as opposed to being only forward-declared).
bool RecordInfo::HasDefinition() {
  return record_->hasDefinition();
}
    195 
    196 RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
    197   // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
    198   if (!record || Config::IsIgnoreAnnotated(record))
    199     return 0;
    200   Cache::iterator it = cache_.find(record);
    201   if (it != cache_.end())
    202     return &it->second;
    203   return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
    204               .first->second;
    205 }
    206 
    207 bool RecordInfo::IsStackAllocated() {
    208   if (is_stack_allocated_ == kNotComputed) {
    209     is_stack_allocated_ = kFalse;
    210     for (Bases::iterator it = GetBases().begin();
    211          it != GetBases().end();
    212          ++it) {
    213       if (it->second.info()->IsStackAllocated()) {
    214         is_stack_allocated_ = kTrue;
    215         return is_stack_allocated_;
    216       }
    217     }
    218     for (CXXRecordDecl::method_iterator it = record_->method_begin();
    219          it != record_->method_end();
    220          ++it) {
    221       if (it->getNameAsString() == kNewOperatorName &&
    222           it->isDeleted() &&
    223           Config::IsStackAnnotated(*it)) {
    224         is_stack_allocated_ = kTrue;
    225         return is_stack_allocated_;
    226       }
    227     }
    228   }
    229   return is_stack_allocated_;
    230 }
    231 
    232 bool RecordInfo::IsNonNewable() {
    233   if (is_non_newable_ == kNotComputed) {
    234     bool deleted = false;
    235     bool all_deleted = true;
    236     for (CXXRecordDecl::method_iterator it = record_->method_begin();
    237          it != record_->method_end();
    238          ++it) {
    239       if (it->getNameAsString() == kNewOperatorName) {
    240         deleted = it->isDeleted();
    241         all_deleted = all_deleted && deleted;
    242       }
    243     }
    244     is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
    245   }
    246   return is_non_newable_;
    247 }
    248 
    249 bool RecordInfo::IsOnlyPlacementNewable() {
    250   if (is_only_placement_newable_ == kNotComputed) {
    251     bool placement = false;
    252     bool new_deleted = false;
    253     for (CXXRecordDecl::method_iterator it = record_->method_begin();
    254          it != record_->method_end();
    255          ++it) {
    256       if (it->getNameAsString() == kNewOperatorName) {
    257         if (it->getNumParams() == 1) {
    258           new_deleted = it->isDeleted();
    259         } else if (it->getNumParams() == 2) {
    260           placement = !it->isDeleted();
    261         }
    262       }
    263     }
    264     is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
    265   }
    266   return is_only_placement_newable_;
    267 }
    268 
    269 CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
    270   for (CXXRecordDecl::method_iterator it = record_->method_begin();
    271        it != record_->method_end();
    272        ++it) {
    273     if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
    274       return *it;
    275   }
    276   return 0;
    277 }
    278 
    279 // An object requires a tracing method if it has any fields that need tracing
    280 // or if it inherits from multiple bases that need tracing.
    281 bool RecordInfo::RequiresTraceMethod() {
    282   if (IsStackAllocated())
    283     return false;
    284   unsigned bases_with_trace = 0;
    285   for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    286     if (it->second.NeedsTracing().IsNeeded())
    287       ++bases_with_trace;
    288   }
    289   if (bases_with_trace > 1)
    290     return true;
    291   GetFields();
    292   return fields_need_tracing_.IsNeeded();
    293 }
    294 
// Get the actual tracing method (ie, can be traceAfterDispatch if there is a
// dispatch method). Returns null when the class declares no trace method.
CXXMethodDecl* RecordInfo::GetTraceMethod() {
  DetermineTracingMethods();
  return trace_method_;
}
    301 
// Get the static trace dispatch method (possibly inherited from a base);
// null when neither this class nor any base provides one.
CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
  DetermineTracingMethods();
  return trace_dispatch_method_;
}
    307 
// Get the finalization dispatch method (possibly inherited from a base);
// null when neither this class nor any base provides one.
CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
  DetermineTracingMethods();
  return finalize_dispatch_method_;
}
    312 
// Return the base-class collection, building it on first access.
RecordInfo::Bases& RecordInfo::GetBases() {
  if (!bases_)
    bases_ = CollectBases();
  return *bases_;
}
    318 
    319 bool RecordInfo::InheritsTrace() {
    320   if (GetTraceMethod())
    321     return true;
    322   for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    323     if (it->second.info()->InheritsTrace())
    324       return true;
    325   }
    326   return false;
    327 }
    328 
    329 CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
    330   if (CXXMethodDecl* trace = GetTraceMethod())
    331     return trace->isVirtual() ? 0 : trace;
    332   for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    333     if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
    334       return trace;
    335   }
    336   return 0;
    337 }
    338 
// Whether this class declares both GCMixin methods (adjustAndMark and
// isHeapObjectAlive); computed by DetermineTracingMethods().
bool RecordInfo::DeclaresGCMixinMethods() {
  DetermineTracingMethods();
  return has_gc_mixin_methods_;
}
    343 
// Whether this class itself declares the (effective) trace method.
// Cached in |is_declaring_local_trace_|.
bool RecordInfo::DeclaresLocalTraceMethod() {
  if (is_declaring_local_trace_ != kNotComputed)
    return is_declaring_local_trace_;
  DetermineTracingMethods();
  is_declaring_local_trace_ = trace_method_ ? kTrue : kFalse;
  if (is_declaring_local_trace_) {
    // NOTE(review): this loop only re-affirms kTrue when |trace_method_|
    // is found among the record's own methods; it never resets the flag
    // to kFalse when the method is absent (e.g. declared only via a
    // function template, which method_begin() does not enumerate).
    // Presumably intentional, since DetermineTracingMethods() only
    // searches this record's own decls -- confirm against callers.
    for (auto it = record_->method_begin();
         it != record_->method_end(); ++it) {
      if (*it == trace_method_) {
        is_declaring_local_trace_ = kTrue;
        break;
      }
    }
  }
  return is_declaring_local_trace_;
}
    360 
    361 // A (non-virtual) class is considered abstract in Blink if it has
    362 // no public constructors and no create methods.
    363 bool RecordInfo::IsConsideredAbstract() {
    364   for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
    365        it != record_->ctor_end();
    366        ++it) {
    367     if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
    368       return false;
    369   }
    370   for (CXXRecordDecl::method_iterator it = record_->method_begin();
    371        it != record_->method_end();
    372        ++it) {
    373     if (it->getNameAsString() == kCreateName)
    374       return false;
    375   }
    376   return true;
    377 }
    378 
    379 RecordInfo::Bases* RecordInfo::CollectBases() {
    380   // Compute the collection locally to avoid inconsistent states.
    381   Bases* bases = new Bases;
    382   if (!record_->hasDefinition())
    383     return bases;
    384   for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
    385        it != record_->bases_end();
    386        ++it) {
    387     const CXXBaseSpecifier& spec = *it;
    388     RecordInfo* info = cache_->Lookup(spec.getType());
    389     if (!info)
    390       continue;
    391     CXXRecordDecl* base = info->record();
    392     TracingStatus status = info->InheritsTrace()
    393                                ? TracingStatus::Needed()
    394                                : TracingStatus::Unneeded();
    395     bases->push_back(std::make_pair(base, BasePoint(spec, info, status)));
    396   }
    397   return bases;
    398 }
    399 
// Return the field collection, building it on first access. Building
// also computes |fields_need_tracing_| (see CollectFields()).
RecordInfo::Fields& RecordInfo::GetFields() {
  if (!fields_)
    fields_ = CollectFields();
  return *fields_;
}
    405 
// Build the field collection, computing the aggregate tracing status of
// the fields into |fields_need_tracing_| as a side effect. Fields that
// are plugin-ignored or whose type yields no edge are omitted.
RecordInfo::Fields* RecordInfo::CollectFields() {
  // Compute the collection locally to avoid inconsistent states.
  Fields* fields = new Fields;
  if (!record_->hasDefinition())
    return fields;
  TracingStatus fields_status = TracingStatus::Unneeded();
  for (RecordDecl::field_iterator it = record_->field_begin();
       it != record_->field_end();
       ++it) {
    FieldDecl* field = *it;
    // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
    if (Config::IsIgnoreAnnotated(field))
      continue;
    // Check if the unexpanded type should be recorded; needed
    // to track iterator aliases only
    const Type* unexpandedType = field->getType().getSplitUnqualifiedType().Ty;
    Edge* edge = CreateEdgeFromOriginalType(unexpandedType);
    if (!edge)
      edge = CreateEdge(field->getType().getTypePtrOrNull());
    if (edge) {
      // Fold this field's requirement into the aggregate status via
      // least-upper-bound.
      fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
      fields->insert(std::make_pair(field, FieldPoint(field, edge)));
    }
  }
  fields_need_tracing_ = fields_status;
  return fields;
}
    433 
// Scan this record's declarations (once; results are cached) to find its
// trace method, trace dispatch method, finalize dispatch method, and the
// GCMixin methods. Dispatch methods not declared locally are inherited
// from the base classes.
void RecordInfo::DetermineTracingMethods() {
  if (determined_trace_methods_)
    return;
  determined_trace_methods_ = true;
  // The GC base classes themselves are not analyzed.
  if (Config::IsGCBase(name_))
    return;
  CXXMethodDecl* trace = nullptr;
  CXXMethodDecl* trace_impl = nullptr;
  CXXMethodDecl* trace_after_dispatch = nullptr;
  bool has_adjust_and_mark = false;
  bool has_is_heap_object_alive = false;
  for (Decl* decl : record_->decls()) {
    CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl);
    if (!method) {
      // Trace methods declared as function templates are found through
      // the template's underlying (templated) declaration.
      if (FunctionTemplateDecl* func_template =
          dyn_cast<FunctionTemplateDecl>(decl))
        method = dyn_cast<CXXMethodDecl>(func_template->getTemplatedDecl());
    }
    if (!method)
      continue;

    switch (Config::GetTraceMethodType(method)) {
      case Config::TRACE_METHOD:
        trace = method;
        break;
      case Config::TRACE_AFTER_DISPATCH_METHOD:
        trace_after_dispatch = method;
        break;
      case Config::TRACE_IMPL_METHOD:
        trace_impl = method;
        break;
      case Config::TRACE_AFTER_DISPATCH_IMPL_METHOD:
        // Intentionally ignored; only the non-impl variant matters here.
        break;
      case Config::NOT_TRACE_METHOD:
        // Not a trace method, but may still be a finalizer or one of the
        // two GCMixin methods.
        if (method->getNameAsString() == kFinalizeName) {
          finalize_dispatch_method_ = method;
        } else if (method->getNameAsString() == kAdjustAndMarkName) {
          has_adjust_and_mark = true;
        } else if (method->getNameAsString() == kIsHeapObjectAliveName) {
          has_is_heap_object_alive = true;
        }
        break;
    }
  }

  // Record if class defines the two GCMixin methods.
  has_gc_mixin_methods_ =
      has_adjust_and_mark && has_is_heap_object_alive ? kTrue : kFalse;
  if (trace_after_dispatch) {
    // With dispatching, traceAfterDispatch is the effective trace method
    // and trace (or traceImpl) becomes the dispatcher.
    trace_method_ = trace_after_dispatch;
    trace_dispatch_method_ = trace_impl ? trace_impl : trace;
  } else {
    // TODO: Can we never have a dispatch method called trace without the same
    // class defining a traceAfterDispatch method?
    trace_method_ = trace;
    trace_dispatch_method_ = nullptr;
  }
  if (trace_dispatch_method_ && finalize_dispatch_method_)
    return;
  // If this class does not define dispatching methods inherit them.
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    // TODO: Does it make sense to inherit multiple dispatch methods?
    if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
      assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
      trace_dispatch_method_ = dispatch;
    }
    if (CXXMethodDecl* dispatch =
            it->second.info()->GetFinalizeDispatchMethod()) {
      assert(!finalize_dispatch_method_ &&
             "Multiple finalize dispatching methods");
      finalize_dispatch_method_ = dispatch;
    }
  }
}
    508 
// TODO: Add classes with a finalize() method that specialize FinalizerTrait.
// Whether the class needs a finalizer to run at sweep time. Result is
// cached in |does_need_finalization_|; note the early returns below all
// return the current cached value, not a literal.
bool RecordInfo::NeedsFinalization() {
  if (does_need_finalization_ == kNotComputed) {
    // Rely on hasNonTrivialDestructor(), but if the only
    // identifiable reason for it being true is the presence
    // of a safely ignorable class as a direct base,
    // or we're processing such an 'ignorable' class, then it does
    // not need finalization.
    does_need_finalization_ =
        record_->hasNonTrivialDestructor() ? kTrue : kFalse;
    if (!does_need_finalization_)
      return does_need_finalization_;

    // A user-provided destructor always needs to run.
    CXXDestructorDecl* dtor = record_->getDestructor();
    if (dtor && dtor->isUserProvided())
      return does_need_finalization_;
    // Likewise if any field requires finalization.
    for (Fields::iterator it = GetFields().begin();
         it != GetFields().end();
         ++it) {
      if (it->second.edge()->NeedsFinalization())
        return does_need_finalization_;
    }

    // Or any base class.
    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->NeedsFinalization())
        return does_need_finalization_;
    }
    // Destructor was non-trivial due to bases with destructors that
    // can be safely ignored. Hence, no need for finalization.
    does_need_finalization_ = kFalse;
  }
  return does_need_finalization_;
}
    544 
    545 // A class needs tracing if:
    546 // - it is allocated on the managed heap,
    547 // - it is derived from a class that needs tracing, or
    548 // - it contains fields that need tracing.
    549 //
    550 TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
    551   if (IsGCAllocated())
    552     return TracingStatus::Needed();
    553 
    554   if (IsStackAllocated())
    555     return TracingStatus::Unneeded();
    556 
    557   for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    558     if (it->second.info()->NeedsTracing(option).IsNeeded())
    559       return TracingStatus::Needed();
    560   }
    561 
    562   if (option == Edge::kRecursive)
    563     GetFields();
    564 
    565   return fields_need_tracing_;
    566 }
    567 
    568 static bool isInStdNamespace(clang::Sema& sema, NamespaceDecl* ns)
    569 {
    570   while (ns) {
    571     if (sema.getStdNamespace()->InEnclosingNamespaceSetOf(ns))
    572       return true;
    573     ns = dyn_cast<NamespaceDecl>(ns->getParent());
    574   }
    575   return false;
    576 }
    577 
    578 Edge* RecordInfo::CreateEdgeFromOriginalType(const Type* type) {
    579   if (!type)
    580     return nullptr;
    581 
    582   // look for "typedef ... iterator;"
    583   if (!isa<ElaboratedType>(type))
    584     return nullptr;
    585   const ElaboratedType* elaboratedType = cast<ElaboratedType>(type);
    586   if (!isa<TypedefType>(elaboratedType->getNamedType()))
    587     return nullptr;
    588   const TypedefType* typedefType =
    589       cast<TypedefType>(elaboratedType->getNamedType());
    590   std::string typeName = typedefType->getDecl()->getNameAsString();
    591   if (!Config::IsIterator(typeName))
    592     return nullptr;
    593   RecordInfo* info =
    594       cache_->Lookup(elaboratedType->getQualifier()->getAsType());
    595 
    596   bool on_heap = false;
    597   bool is_unsafe = false;
    598   // Silently handle unknown types; the on-heap collection types will
    599   // have to be in scope for the declaration to compile, though.
    600   if (info) {
    601     is_unsafe = Config::IsGCCollectionWithUnsafeIterator(info->name());
    602     // Don't mark iterator as being on the heap if it is not supported.
    603     on_heap = !is_unsafe && Config::IsGCCollection(info->name());
    604   }
    605   return new Iterator(info, on_heap, is_unsafe);
    606 }
    607 
// Classify |type| into the plugin's edge hierarchy (RawPtr, RefPtr,
// OwnPtr, UniquePtr, Member, WeakMember, Persistent, Collection, Value)
// by recursing through pointer/smart-pointer wrappers. Returns null for
// types that are neither pointers nor known C++ records. The order of
// the name checks below is significant: each smart-pointer check also
// requires a matching template-argument count.
Edge* RecordInfo::CreateEdge(const Type* type) {
  if (!type) {
    return 0;
  }

  // Raw pointers and references wrap an edge to the pointee.
  if (type->isPointerType() || type->isReferenceType()) {
    if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
      return new RawPtr(ptr, type->isReferenceType());
    return 0;
  }

  RecordInfo* info = cache_->Lookup(type);

  // If the type is neither a pointer or a C++ record we ignore it.
  if (!info) {
    return 0;
  }

  TemplateArgs args;

  if (Config::IsRefPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RefPtr(ptr);
    return 0;
  }

  if (Config::IsOwnPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new OwnPtr(ptr);
    return 0;
  }

  if (Config::IsUniquePtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    // Check that this is std::unique_ptr
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    clang::Sema& sema = cache_->instance().getSema();
    if (!isInStdNamespace(sema, ns))
      return 0;
    if (Edge* ptr = CreateEdge(args[0]))
      return new UniquePtr(ptr);
    return 0;
  }

  if (Config::IsMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new Member(ptr);
    return 0;
  }

  if (Config::IsWeakMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new WeakMember(ptr);
    return 0;
  }

  bool is_persistent = Config::IsPersistent(info->name());
  if (is_persistent || Config::IsCrossThreadPersistent(info->name())) {
    // Persistent might refer to v8::Persistent, so check the name space.
    // TODO: Consider using a more canonical identification than names.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    if (!ns || ns->getName() != "blink")
      return 0;
    if (!info->GetTemplateArgs(1, &args))
      return 0;
    if (Edge* ptr = CreateEdge(args[0])) {
      if (is_persistent)
        return new Persistent(ptr);
      else
        return new CrossThreadPersistent(ptr);
    }
    return 0;
  }

  if (Config::IsGCCollection(info->name()) ||
      Config::IsWTFCollection(info->name())) {
    bool is_root = Config::IsPersistentGCCollection(info->name());
    bool on_heap = is_root || info->IsHeapAllocatedCollection();
    // Collections take as many edge-relevant arguments as their
    // dimension (e.g. two for maps).
    size_t count = Config::CollectionDimension(info->name());
    if (!info->GetTemplateArgs(count, &args))
      return 0;
    Collection* edge = new Collection(info, on_heap, is_root);
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (Edge* member = CreateEdge(*it)) {
        edge->members().push_back(member);
      }
      // TODO: Handle the case where we fail to create an edge (eg, if the
      // argument is a primitive type or just not fully known yet).
    }
    return edge;
  }

  // Any other known record is a plain value edge.
  return new Value(info);
}
    703