// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "Config.h"
#include "RecordInfo.h"
#include "clang/Sema/Sema.h"

using namespace clang;
using std::string;

RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
    : cache_(cache),
      record_(record),
      name_(record->getName()),
      fields_need_tracing_(TracingStatus::Unknown()),
      bases_(nullptr),
      fields_(nullptr),
      is_stack_allocated_(kNotComputed),
      is_non_newable_(kNotComputed),
      is_only_placement_newable_(kNotComputed),
      does_need_finalization_(kNotComputed),
      has_gc_mixin_methods_(kNotComputed),
      is_declaring_local_trace_(kNotComputed),
      is_eagerly_finalized_(kNotComputed),
      determined_trace_methods_(false),
      trace_method_(nullptr),
      trace_dispatch_method_(nullptr),
      finalize_dispatch_method_(nullptr),
      is_gc_derived_(false) {}

RecordInfo::~RecordInfo() {
  delete fields_;
  delete bases_;
}

// Collects |count| template arguments into |output_args|. Returns false if
// there are fewer than |count| arguments or if any argument is not of a
// valid Type structure. If |count| is zero, all arguments are collected.
bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
  ClassTemplateSpecializationDecl* tmpl =
      dyn_cast<ClassTemplateSpecializationDecl>(record_);
  if (!tmpl)
    return false;
  const TemplateArgumentList& args = tmpl->getTemplateArgs();
  if (args.size() < count)
    return false;
  if (count == 0)
    count = args.size();
  for (unsigned i = 0; i < count; ++i) {
    TemplateArgument arg = args[i];
    if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
      output_args->push_back(arg.getAsType().getTypePtr());
    } else {
      return false;
    }
  }
  return true;
}

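// Illustrative sketch (not part of the plugin): for a field whose type is a
// specialization such as
//
//   HeapHashMap<Member<A>, Member<B>> map_;
//
// GetTemplateArgs(2, &args) fills |args| with the Type pointers for
// Member<A> and Member<B>, while GetTemplateArgs(0, &args) also picks up
// any trailing defaulted arguments (e.g. hash and traits types).
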
// Test if a record is a HeapAllocated collection.
bool RecordInfo::IsHeapAllocatedCollection() {
  if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
    return false;

  TemplateArgs args;
  if (GetTemplateArgs(0, &args)) {
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (CXXRecordDecl* decl = (*it)->getAsCXXRecordDecl())
        if (decl->getName() == kHeapAllocatorName)
          return true;
    }
  }

  return Config::IsGCCollection(name_);
}

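// Sketch of the two ways a collection is recognized as heap allocated
// (assumed Blink/WTF declarations, shown for illustration only):
//
//   HeapVector<Member<T>> v1;               // name is a GC collection.
//   Vector<Member<T>, 0, HeapAllocator> v2; // WTF collection instantiated
//                                           // with the heap allocator.
//
// The first case is accepted by the final IsGCCollection check, the second
// by the HeapAllocator template-argument scan above.
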
bool RecordInfo::HasOptionalFinalizer() {
  if (!IsHeapAllocatedCollection())
    return false;
  // Heap collections may have a finalizer but it is optional (i.e. may be
  // delayed until FinalizeGarbageCollectedObject() gets called), unless there
  // is an inline buffer. Vector, Deque, and ListHashSet can have an inline
  // buffer.
  if (name_ != "Vector" && name_ != "Deque" && name_ != "HeapVector" &&
      name_ != "HeapDeque")
    return true;
  ClassTemplateSpecializationDecl* tmpl =
      dyn_cast<ClassTemplateSpecializationDecl>(record_);
  // These collections require template specialization so tmpl should always be
  // non-null for valid code.
  if (!tmpl)
    return false;
  const TemplateArgumentList& args = tmpl->getTemplateArgs();
  if (args.size() < 2)
    return true;
  TemplateArgument arg = args[1];
  // The second template argument must be void or 0 so there is no inline
  // buffer.
  return (arg.getKind() == TemplateArgument::Type &&
          arg.getAsType()->isVoidType()) ||
         (arg.getKind() == TemplateArgument::Integral &&
          arg.getAsIntegral().getExtValue() == 0);
}

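// Illustration (assumed WTF signatures, where the second template parameter
// is the inline capacity):
//
//   HeapVector<Member<T>> v1;     // capacity 0: finalizer stays optional.
//   HeapVector<Member<T>, 4> v2;  // inline buffer of 4: not optional.
//
// An inline buffer keeps elements inside the collection object itself, so
// its destruction cannot be delayed.
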
// Test if a record is derived from a garbage collected base.
bool RecordInfo::IsGCDerived() {
  // If already computed, return the known result.
  if (gc_base_names_.size())
    return is_gc_derived_;

  if (!record_->hasDefinition())
    return false;

  // The base classes are not themselves considered garbage collected objects.
  if (Config::IsGCBase(name_))
    return false;

  // Walk the inheritance tree to find GC base classes.
  walkBases();
  return is_gc_derived_;
}

CXXRecordDecl* RecordInfo::GetDependentTemplatedDecl(const Type& type) {
  const TemplateSpecializationType* tmpl_type =
      type.getAs<TemplateSpecializationType>();
  if (!tmpl_type)
    return nullptr;

  TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
  if (!tmpl_decl)
    return nullptr;

  return dyn_cast_or_null<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
}

void RecordInfo::walkBases() {
  // This traversal is akin to CXXRecordDecl::forallBases()'s,
  // but without stepping over dependent bases -- these might also
  // have a "GC base name", so are to be included and considered.
  SmallVector<const CXXRecordDecl*, 8> queue;

  const CXXRecordDecl* base_record = record();
  while (true) {
    for (const auto& it : base_record->bases()) {
      const RecordType* type = it.getType()->getAs<RecordType>();
      CXXRecordDecl* base;
      if (!type) {
        base = GetDependentTemplatedDecl(*it.getType());
      } else {
        base = cast_or_null<CXXRecordDecl>(type->getDecl()->getDefinition());
        if (base)
          queue.push_back(base);
      }
      if (!base)
        continue;

      const std::string& name = base->getName();
      if (Config::IsGCBase(name)) {
        gc_base_names_.push_back(name);
        is_gc_derived_ = true;
      }
    }

    if (queue.empty())
      break;
    // Not actually a FIFO queue; the traversal order does not matter here.
    base_record = queue.pop_back_val();
  }
}

bool RecordInfo::IsGCFinalized() {
  if (!IsGCDerived())
    return false;
  for (const auto& gc_base : gc_base_names_) {
    if (Config::IsGCFinalizedBase(gc_base))
      return true;
  }
  return false;
}

// A GC mixin is a class that inherits from a GC mixin base and has
// not yet been "mixed in" with another GC base class.
bool RecordInfo::IsGCMixin() {
  if (!IsGCDerived() || !gc_base_names_.size())
    return false;
  for (const auto& gc_base : gc_base_names_) {
    // If it is not a mixin base we are done.
    if (!Config::IsGCMixinBase(gc_base))
      return false;
  }
  // This is a mixin if all GC bases are mixins.
  return true;
}

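// Sketch of the distinction (assumed Blink base class names):
//
//   class A : public GarbageCollectedMixin {};          // a mixin
//   class B : public GarbageCollected<B>, public A {};  // not a mixin
//
// B's walked GC bases are GarbageCollected and GarbageCollectedMixin, and
// since not all of them are mixin bases, B has been "mixed in".
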
// Test if a record is allocated on the managed heap.
bool RecordInfo::IsGCAllocated() {
  return IsGCDerived() || IsHeapAllocatedCollection();
}

bool RecordInfo::IsEagerlyFinalized() {
  if (is_eagerly_finalized_ != kNotComputed)
    return is_eagerly_finalized_;

  is_eagerly_finalized_ = kFalse;
  if (!IsGCFinalized())
    return is_eagerly_finalized_;

  for (Decl* decl : record_->decls()) {
    if (TypedefDecl* typedef_decl = dyn_cast<TypedefDecl>(decl)) {
      if (typedef_decl->getNameAsString() != kIsEagerlyFinalizedName)
        continue;
      is_eagerly_finalized_ = kTrue;
      break;
    }
  }
  return is_eagerly_finalized_;
}

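// Illustration: eager finalization is opted into with a marker typedef,
// assuming Blink's EAGERLY_FINALIZE() macro declares a typedef whose name
// matches kIsEagerlyFinalizedName:
//
//   class Resource : public GarbageCollectedFinalized<Resource> {
//    public:
//     EAGERLY_FINALIZE();  // declares the marker typedef scanned for above.
//   };
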
bool RecordInfo::HasDefinition() {
  return record_->hasDefinition();
}

RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
  // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
  if (!record || Config::IsIgnoreAnnotated(record))
    return nullptr;
  Cache::iterator it = cache_.find(record);
  if (it != cache_.end())
    return &it->second;
  return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
              .first->second;
}

bool RecordInfo::IsStackAllocated() {
  if (is_stack_allocated_ == kNotComputed) {
    is_stack_allocated_ = kFalse;
    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->IsStackAllocated()) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName &&
          it->isDeleted() &&
          Config::IsStackAnnotated(*it)) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
  }
  return is_stack_allocated_;
}

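// Sketch of what the method scan above matches (assumed shape of Blink's
// STACK_ALLOCATED() macro; shown for illustration only):
//
//   class Scope {
//    private:
//     __attribute__((annotate("blink_stack_allocated")))
//     void* operator new(size_t) = delete;  // deleted, annotated new.
//   };
//
// A class is also stack allocated if any of its bases is.
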
bool RecordInfo::IsNonNewable() {
  if (is_non_newable_ == kNotComputed) {
    bool deleted = false;
    bool all_deleted = true;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        deleted = it->isDeleted();
        all_deleted = all_deleted && deleted;
      }
    }
    is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
  }
  return is_non_newable_;
}

bool RecordInfo::IsOnlyPlacementNewable() {
  if (is_only_placement_newable_ == kNotComputed) {
    bool placement = false;
    bool new_deleted = false;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        if (it->getNumParams() == 1) {
          new_deleted = it->isDeleted();
        } else if (it->getNumParams() == 2) {
          placement = !it->isDeleted();
        }
      }
    }
    is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
  }
  return is_only_placement_newable_;
}

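// Illustration of the pattern detected above (hypothetical class; the
// size_t parameter counts, so placement new has two parameters):
//
//   class OnlyPlaced {
//    public:
//     void* operator new(size_t) = delete;          // 1 param: usual new.
//     void* operator new(size_t, void* location) {  // 2 params: placement.
//       return location;
//     }
//   };
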
CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
      return *it;
  }
  return nullptr;
}

// An object requires a tracing method if it has any fields that need tracing
// or if it inherits from multiple bases that need tracing.
bool RecordInfo::RequiresTraceMethod() {
  if (IsStackAllocated())
    return false;
  unsigned bases_with_trace = 0;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.NeedsTracing().IsNeeded())
      ++bases_with_trace;
  }
  if (bases_with_trace > 1)
    return true;
  // Collecting the fields computes fields_need_tracing_ as a side effect.
  GetFields();
  return fields_need_tracing_.IsNeeded();
}

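// Example of the multiple-base rule above (hypothetical classes):
//
//   class MixinA : public GarbageCollectedMixin { /* traced fields */ };
//   class MixinB : public GarbageCollectedMixin { /* traced fields */ };
//   class C : public GarbageCollected<C>, public MixinA, public MixinB {};
//
// C must declare its own trace() to dispatch to both traced bases, even if
// it adds no traced fields of its own.
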
// Get the actual tracing method (i.e., can be traceAfterDispatch if there is
// a dispatch method).
CXXMethodDecl* RecordInfo::GetTraceMethod() {
  DetermineTracingMethods();
  return trace_method_;
}

// Get the static trace dispatch method.
CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
  DetermineTracingMethods();
  return trace_dispatch_method_;
}

CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
  DetermineTracingMethods();
  return finalize_dispatch_method_;
}

RecordInfo::Bases& RecordInfo::GetBases() {
  if (!bases_)
    bases_ = CollectBases();
  return *bases_;
}

bool RecordInfo::InheritsTrace() {
  if (GetTraceMethod())
    return true;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->InheritsTrace())
      return true;
  }
  return false;
}

CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
  if (CXXMethodDecl* trace = GetTraceMethod())
    return trace->isVirtual() ? nullptr : trace;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
      return trace;
  }
  return nullptr;
}

bool RecordInfo::DeclaresGCMixinMethods() {
  DetermineTracingMethods();
  return has_gc_mixin_methods_;
}

bool RecordInfo::DeclaresLocalTraceMethod() {
  if (is_declaring_local_trace_ != kNotComputed)
    return is_declaring_local_trace_;
  DetermineTracingMethods();
  is_declaring_local_trace_ = trace_method_ ? kTrue : kFalse;
  if (is_declaring_local_trace_) {
    for (auto it = record_->method_begin();
         it != record_->method_end(); ++it) {
      if (*it == trace_method_) {
        is_declaring_local_trace_ = kTrue;
        break;
      }
    }
  }
  return is_declaring_local_trace_;
}

// A (non-virtual) class is considered abstract in Blink if it has
// no public constructors and no create methods.
bool RecordInfo::IsConsideredAbstract() {
  for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
       it != record_->ctor_end();
       ++it) {
    if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
      return false;
  }
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kCreateName)
      return false;
  }
  return true;
}

RecordInfo::Bases* RecordInfo::CollectBases() {
  // Compute the collection locally to avoid inconsistent states.
  Bases* bases = new Bases;
  if (!record_->hasDefinition())
    return bases;
  for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
       it != record_->bases_end();
       ++it) {
    const CXXBaseSpecifier& spec = *it;
    RecordInfo* info = cache_->Lookup(spec.getType());
    if (!info)
      continue;
    CXXRecordDecl* base = info->record();
    TracingStatus status = info->InheritsTrace()
                               ? TracingStatus::Needed()
                               : TracingStatus::Unneeded();
    bases->push_back(std::make_pair(base, BasePoint(spec, info, status)));
  }
  return bases;
}

RecordInfo::Fields& RecordInfo::GetFields() {
  if (!fields_)
    fields_ = CollectFields();
  return *fields_;
}

RecordInfo::Fields* RecordInfo::CollectFields() {
  // Compute the collection locally to avoid inconsistent states.
  Fields* fields = new Fields;
  if (!record_->hasDefinition())
    return fields;
  TracingStatus fields_status = TracingStatus::Unneeded();
  for (RecordDecl::field_iterator it = record_->field_begin();
       it != record_->field_end();
       ++it) {
    FieldDecl* field = *it;
    // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
    if (Config::IsIgnoreAnnotated(field))
      continue;
    // Check if the unexpanded type should be recorded; this is needed only
    // to track iterator aliases.
    const Type* unexpandedType = field->getType().getSplitUnqualifiedType().Ty;
    Edge* edge = CreateEdgeFromOriginalType(unexpandedType);
    if (!edge)
      edge = CreateEdge(field->getType().getTypePtrOrNull());
    if (edge) {
      fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
      fields->insert(std::make_pair(field, FieldPoint(field, edge)));
    }
  }
  fields_need_tracing_ = fields_status;
  return fields;
}

void RecordInfo::DetermineTracingMethods() {
  if (determined_trace_methods_)
    return;
  determined_trace_methods_ = true;
  if (Config::IsGCBase(name_))
    return;
  CXXMethodDecl* trace = nullptr;
  CXXMethodDecl* trace_after_dispatch = nullptr;
  bool has_adjust_and_mark = false;
  bool has_is_heap_object_alive = false;
  for (Decl* decl : record_->decls()) {
    CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl);
    if (!method) {
      if (FunctionTemplateDecl* func_template =
              dyn_cast<FunctionTemplateDecl>(decl))
        method = dyn_cast<CXXMethodDecl>(func_template->getTemplatedDecl());
    }
    if (!method)
      continue;

    switch (Config::GetTraceMethodType(method)) {
      case Config::TRACE_METHOD:
        trace = method;
        break;
      case Config::TRACE_AFTER_DISPATCH_METHOD:
        trace_after_dispatch = method;
        break;
      case Config::NOT_TRACE_METHOD:
        if (method->getNameAsString() == kFinalizeName) {
          finalize_dispatch_method_ = method;
        } else if (method->getNameAsString() == kAdjustAndMarkName) {
          has_adjust_and_mark = true;
        } else if (method->getNameAsString() == kIsHeapObjectAliveName) {
          has_is_heap_object_alive = true;
        }
        break;
    }
  }

  // Record if the class defines the two GCMixin methods.
  has_gc_mixin_methods_ =
      has_adjust_and_mark && has_is_heap_object_alive ? kTrue : kFalse;
  if (trace_after_dispatch) {
    trace_method_ = trace_after_dispatch;
    trace_dispatch_method_ = trace;
  } else {
    // TODO: Can we never have a dispatch method called trace without the same
    // class defining a traceAfterDispatch method?
    trace_method_ = trace;
    trace_dispatch_method_ = nullptr;
  }
  if (trace_dispatch_method_ && finalize_dispatch_method_)
    return;
  // If this class does not define dispatching methods, inherit them.
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    // TODO: Does it make sense to inherit multiple dispatch methods?
    if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
      assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
      trace_dispatch_method_ = dispatch;
    }
    if (CXXMethodDecl* dispatch =
            it->second.info()->GetFinalizeDispatchMethod()) {
      assert(!finalize_dispatch_method_ &&
             "Multiple finalize dispatching methods");
      finalize_dispatch_method_ = dispatch;
    }
  }
}

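// Sketch of the manual dispatch pattern the above supports (hypothetical
// hierarchy; method classification comes from Config::GetTraceMethodType()):
//
//   class Shape : public GarbageCollected<Shape> {
//    public:
//     void trace(Visitor*);               // static dispatch method
//     void traceAfterDispatch(Visitor*);  // actual tracing for Shape
//   };
//   class Circle : public Shape {
//    public:
//     void traceAfterDispatch(Visitor*);  // traces Circle, then Shape
//   };
//
// For Shape, trace_dispatch_method_ is trace and trace_method_ is
// traceAfterDispatch; Circle inherits the dispatch method from Shape.
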
// TODO: Add classes with a finalize() method that specialize FinalizerTrait.
bool RecordInfo::NeedsFinalization() {
  if (does_need_finalization_ == kNotComputed) {
    if (HasOptionalFinalizer()) {
      does_need_finalization_ = kFalse;
      return does_need_finalization_;
    }

    // Rely on hasNonTrivialDestructor(), but if the only identifiable
    // reason it is true is the presence of a safely ignorable class as a
    // direct base, or we are processing such an "ignorable" class itself,
    // then no finalization is needed.
    does_need_finalization_ =
        record_->hasNonTrivialDestructor() ? kTrue : kFalse;
    if (!does_need_finalization_)
      return does_need_finalization_;

    CXXDestructorDecl* dtor = record_->getDestructor();
    if (dtor && dtor->isUserProvided())
      return does_need_finalization_;
    for (Fields::iterator it = GetFields().begin();
         it != GetFields().end();
         ++it) {
      if (it->second.edge()->NeedsFinalization())
        return does_need_finalization_;
    }

    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->NeedsFinalization())
        return does_need_finalization_;
    }
    // The destructor was non-trivial only due to bases with destructors that
    // can be safely ignored. Hence, no need for finalization.
    does_need_finalization_ = kFalse;
  }
  return does_need_finalization_;
}

// A class needs tracing if:
// - it is allocated on the managed heap,
// - it is derived from a class that needs tracing, or
// - it contains fields that need tracing.
//
TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
  if (IsGCAllocated())
    return TracingStatus::Needed();

  if (IsStackAllocated())
    return TracingStatus::Unneeded();

  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->NeedsTracing(option).IsNeeded())
      return TracingStatus::Needed();
  }

  if (option == Edge::kRecursive)
    GetFields();

  return fields_need_tracing_;
}

static bool isInStdNamespace(clang::Sema& sema, NamespaceDecl* ns) {
  while (ns) {
    if (sema.getStdNamespace()->InEnclosingNamespaceSetOf(ns))
      return true;
    ns = dyn_cast<NamespaceDecl>(ns->getParent());
  }
  return false;
}

Edge* RecordInfo::CreateEdgeFromOriginalType(const Type* type) {
  if (!type)
    return nullptr;

  // Look for "typedef ... iterator;".
  if (!isa<ElaboratedType>(type))
    return nullptr;
  const ElaboratedType* elaboratedType = cast<ElaboratedType>(type);
  if (!isa<TypedefType>(elaboratedType->getNamedType()))
    return nullptr;
  const TypedefType* typedefType =
      cast<TypedefType>(elaboratedType->getNamedType());
  std::string typeName = typedefType->getDecl()->getNameAsString();
  if (!Config::IsIterator(typeName))
    return nullptr;
  RecordInfo* info =
      cache_->Lookup(elaboratedType->getQualifier()->getAsType());

  bool on_heap = false;
  bool is_unsafe = false;
  // Silently handle unknown types; the on-heap collection types will
  // have to be in scope for the declaration to compile, though.
  if (info) {
    is_unsafe = Config::IsGCCollectionWithUnsafeIterator(info->name());
    // Don't mark the iterator as being on the heap if it is not supported.
    on_heap = !is_unsafe && Config::IsGCCollection(info->name());
  }
  return new Iterator(info, on_heap, is_unsafe);
}

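// Illustration of the field shape this recognizes (hypothetical class):
//
//   class Walker : public GarbageCollected<Walker> {
//     HeapVector<Member<Node>>::iterator position_;
//   };
//
// The field's unexpanded type is the elaborated "HeapVector<...>::iterator";
// the qualifier identifies the collection, and an Iterator edge is created
// from it rather than from the iterator's underlying implementation type.
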
Edge* RecordInfo::CreateEdge(const Type* type) {
  if (!type) {
    return nullptr;
  }

  if (type->isPointerType() || type->isReferenceType()) {
    if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
      return new RawPtr(ptr, type->isReferenceType());
    return nullptr;
  }

  RecordInfo* info = cache_->Lookup(type);

  // If the type is neither a pointer nor a C++ record we ignore it.
  if (!info) {
    return nullptr;
  }

  TemplateArgs args;

  if (Config::IsRefPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RefPtr(ptr);
    return nullptr;
  }

  if (Config::IsUniquePtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    // Check that this is std::unique_ptr.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    clang::Sema& sema = cache_->instance().getSema();
    if (!isInStdNamespace(sema, ns))
      return nullptr;
    if (Edge* ptr = CreateEdge(args[0]))
      return new UniquePtr(ptr);
    return nullptr;
  }

  if (Config::IsMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new Member(ptr);
    return nullptr;
  }

  if (Config::IsWeakMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new WeakMember(ptr);
    return nullptr;
  }

  bool is_persistent = Config::IsPersistent(info->name());
  if (is_persistent || Config::IsCrossThreadPersistent(info->name())) {
    // Persistent might refer to v8::Persistent, so check the namespace.
    // TODO: Consider using a more canonical identification than names.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    if (!ns || ns->getName() != "blink")
      return nullptr;
    if (!info->GetTemplateArgs(1, &args))
      return nullptr;
    if (Edge* ptr = CreateEdge(args[0])) {
      if (is_persistent)
        return new Persistent(ptr);
      else
        return new CrossThreadPersistent(ptr);
    }
    return nullptr;
  }

  if (Config::IsGCCollection(info->name()) ||
      Config::IsWTFCollection(info->name())) {
    bool is_root = Config::IsPersistentGCCollection(info->name());
    bool on_heap = is_root || info->IsHeapAllocatedCollection();
    size_t count = Config::CollectionDimension(info->name());
    if (!info->GetTemplateArgs(count, &args))
      return nullptr;
    Collection* edge = new Collection(info, on_heap, is_root);
    for (TemplateArgs::iterator it = args.begin(); it != args.end(); ++it) {
      if (Edge* member = CreateEdge(*it)) {
        edge->members().push_back(member);
      }
      // TODO: Handle the case where we fail to create an edge (e.g., if the
      // argument is a primitive type or just not fully known yet).
    }
    return edge;
  }

  return new Value(info);
}
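
// Rough mapping from field types to the edges built above (illustrative
// field declarations; the edge names are the classes constructed here):
//
//   Node* raw_;                      // RawPtr(Value(Node))
//   Member<Node> strong_;            // Member(Value(Node))
//   WeakMember<Node> weak_;          // WeakMember(Value(Node))
//   Persistent<Node> root_;          // Persistent(Value(Node))
//   std::unique_ptr<Node> owned_;    // UniquePtr(Value(Node))
//   HeapVector<Member<Node>> list_;  // Collection{Member(Value(Node))}
//   Node plain_;                     // Value(Node)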