// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "Config.h"
#include "RecordInfo.h"
#include "clang/Sema/Sema.h"

using namespace clang;
using std::string;

RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache)
    : cache_(cache),
      record_(record),
      name_(record->getName()),
      fields_need_tracing_(TracingStatus::Unknown()),
      bases_(nullptr),
      fields_(nullptr),
      is_stack_allocated_(kNotComputed),
      is_non_newable_(kNotComputed),
      is_only_placement_newable_(kNotComputed),
      does_need_finalization_(kNotComputed),
      has_gc_mixin_methods_(kNotComputed),
      is_declaring_local_trace_(kNotComputed),
      is_eagerly_finalized_(kNotComputed),
      determined_trace_methods_(false),
      trace_method_(nullptr),
      trace_dispatch_method_(nullptr),
      finalize_dispatch_method_(nullptr),
      is_gc_derived_(false) {}

RecordInfo::~RecordInfo() {
  delete fields_;
  delete bases_;
}

// Get the first |count| template arguments. Returns false if there
// are fewer than |count| arguments or any of the arguments are not
// type template arguments. If |count| is zero, all arguments are
// collected.
bool RecordInfo::GetTemplateArgs(size_t count, TemplateArgs* output_args) {
  ClassTemplateSpecializationDecl* tmpl =
      dyn_cast<ClassTemplateSpecializationDecl>(record_);
  if (!tmpl)
    return false;
  const TemplateArgumentList& args = tmpl->getTemplateArgs();
  if (args.size() < count)
    return false;
  if (count == 0)
    count = args.size();
  for (size_t i = 0; i < count; ++i) {
    TemplateArgument arg = args[i];
    if (arg.getKind() == TemplateArgument::Type && !arg.getAsType().isNull()) {
      output_args->push_back(arg.getAsType().getTypePtr());
    } else {
      return false;
    }
  }
  return true;
}
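
// Usage sketch (illustrative only, not part of the plugin): for a
// specialization such as Member<Node>, the wrapped type can be pulled out
// of the single template argument; |info| is a hypothetical RecordInfo*.
//
//   TemplateArgs args;
//   if (info->GetTemplateArgs(1, &args)) {
//     const Type* wrapped = args[0];  // e.g. the Node in Member<Node>.
//   }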

// Test if a record is a HeapAllocated collection.
bool RecordInfo::IsHeapAllocatedCollection() {
  if (!Config::IsGCCollection(name_) && !Config::IsWTFCollection(name_))
    return false;

  TemplateArgs args;
  if (GetTemplateArgs(0, &args)) {
    for (const Type* arg : args) {
      if (CXXRecordDecl* decl = arg->getAsCXXRecordDecl())
        if (decl->getName() == kHeapAllocatorName)
          return true;
    }
  }

  return Config::IsGCCollection(name_);
}
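
// Illustrative example (Blink-era collection names, assumed for exposition):
// HeapVector<Member<Node>> is a WTF::Vector specialization whose allocator
// template argument is HeapAllocator, so the loop above matches
// kHeapAllocatorName; a plain WTF::Vector<int> uses a non-heap allocator
// and does not match.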

// Test if a record is derived from a garbage collected base.
bool RecordInfo::IsGCDerived() {
  // If already computed, return the cached result. (Only positive results
  // are cached: gc_base_names_ stays empty when no GC base was found.)
  if (gc_base_names_.size())
    return is_gc_derived_;

  if (!record_->hasDefinition())
    return false;

  // The base classes are not themselves considered garbage collected objects.
  if (Config::IsGCBase(name_))
    return false;

  // Walk the inheritance tree to find GC base classes.
  walkBases();
  return is_gc_derived_;
}

CXXRecordDecl* RecordInfo::GetDependentTemplatedDecl(const Type& type) {
  const TemplateSpecializationType* tmpl_type =
      type.getAs<TemplateSpecializationType>();
  if (!tmpl_type)
    return nullptr;

  TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
  if (!tmpl_decl)
    return nullptr;

  return dyn_cast_or_null<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
}

void RecordInfo::walkBases() {
  // This traversal is akin to CXXRecordDecl::forallBases()'s,
  // but without stepping over dependent bases -- these might also
  // have a "GC base name", so are to be included and considered.
  SmallVector<const CXXRecordDecl*, 8> queue;

  const CXXRecordDecl* base_record = record();
  while (true) {
    for (const auto& it : base_record->bases()) {
      const RecordType* type = it.getType()->getAs<RecordType>();
      CXXRecordDecl* base;
      if (!type)
        base = GetDependentTemplatedDecl(*it.getType());
      else {
        base = cast_or_null<CXXRecordDecl>(type->getDecl()->getDefinition());
        if (base)
          queue.push_back(base);
      }
      if (!base)
        continue;

      const std::string& name = base->getName();
      if (Config::IsGCBase(name)) {
        gc_base_names_.push_back(name);
        is_gc_derived_ = true;
      }
    }

    if (queue.empty())
      break;
    base_record = queue.pop_back_val();  // |queue| is used as a stack.
  }
}
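
// Illustrative sketch of why dependent bases matter (the class name is
// hypothetical; GarbageCollected is the Blink GC base): in
//
//   template <typename T>
//   class TypedNode : public GarbageCollected<TypedNode<T>> {};
//
// the base GarbageCollected<TypedNode<T>> is a dependent type with no
// RecordType, so forallBases() would step over it; GetDependentTemplatedDecl()
// still recovers the templated declaration and its "GC base name".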

bool RecordInfo::IsGCFinalized() {
  if (!IsGCDerived())
    return false;
  for (const auto& gc_base : gc_base_names_) {
    if (Config::IsGCFinalizedBase(gc_base))
      return true;
  }
  return false;
}

// A GC mixin is a class that inherits from a GC mixin base and has
// not yet been "mixed in" with another GC base class.
bool RecordInfo::IsGCMixin() {
  if (!IsGCDerived() || !gc_base_names_.size())
    return false;
  for (const auto& gc_base : gc_base_names_) {
    // If it is not a mixin base we are done.
    if (!Config::IsGCMixinBase(gc_base))
      return false;
  }
  // This is a mixin if all GC bases are mixins.
  return true;
}
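
// Illustrative example (Blink-style names, assumed for exposition):
//
//   class MixinUser : public GarbageCollectedMixin {};    // still a mixin
//   class Concrete : public GarbageCollected<Concrete>,
//                    public GarbageCollectedMixin {};     // already mixed in
//
// MixinUser's only GC base is the mixin base, so IsGCMixin() is true;
// Concrete also derives from GarbageCollected, so it is not a mixin.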

// Test if a record is allocated on the managed heap.
bool RecordInfo::IsGCAllocated() {
  return IsGCDerived() || IsHeapAllocatedCollection();
}

bool RecordInfo::IsEagerlyFinalized() {
  if (is_eagerly_finalized_ == kNotComputed) {
    is_eagerly_finalized_ = kFalse;
    if (IsGCFinalized()) {
      for (Decl* decl : record_->decls()) {
        if (TypedefDecl* typedef_decl = dyn_cast<TypedefDecl>(decl)) {
          if (typedef_decl->getNameAsString() == kIsEagerlyFinalizedName) {
            is_eagerly_finalized_ = kTrue;
            break;
          }
        }
      }
    }
  }
  return is_eagerly_finalized_;
}
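
// Sketch of the marker this scans for (assuming kIsEagerlyFinalizedName names
// a typedef injected by an EAGERLY_FINALIZE()-style macro; the exact macro
// expansion and typedef name are assumptions here, not taken from this file):
//
//   class Widget : public GarbageCollectedFinalized<Widget> {
//    public:
//     typedef int IsEagerlyFinalizedMarker;  // hypothetical marker typedef
//     ~Widget();
//   };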

bool RecordInfo::HasDefinition() {
  return record_->hasDefinition();
}

RecordInfo* RecordCache::Lookup(CXXRecordDecl* record) {
  // Ignore classes annotated with the GC_PLUGIN_IGNORE macro.
  if (!record || Config::IsIgnoreAnnotated(record))
    return nullptr;
  Cache::iterator it = cache_.find(record);
  if (it != cache_.end())
    return &it->second;
  return &cache_.insert(std::make_pair(record, RecordInfo(record, this)))
              .first->second;
}

bool RecordInfo::IsStackAllocated() {
  if (is_stack_allocated_ == kNotComputed) {
    is_stack_allocated_ = kFalse;
    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->IsStackAllocated()) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName &&
          it->isDeleted() &&
          Config::IsStackAnnotated(*it)) {
        is_stack_allocated_ = kTrue;
        return is_stack_allocated_;
      }
    }
  }
  return is_stack_allocated_;
}
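
// The pattern detected above, sketched with the annotation spelled out
// (Blink's STACK_ALLOCATED() macro expands to something of this shape;
// treat the exact annotation string as an assumption):
//
//   class Scope {
//    public:
//     __attribute__((annotate("blink_stack_allocated")))
//     void* operator new(size_t) = delete;  // deleted and stack-annotated
//   };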

bool RecordInfo::IsNonNewable() {
  if (is_non_newable_ == kNotComputed) {
    // |deleted| doubles as "saw at least one operator new": a class with no
    // operator new declarations at all is not considered non-newable.
    bool deleted = false;
    bool all_deleted = true;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        deleted = it->isDeleted();
        all_deleted = all_deleted && deleted;
      }
    }
    is_non_newable_ = (deleted && all_deleted) ? kTrue : kFalse;
  }
  return is_non_newable_;
}

bool RecordInfo::IsOnlyPlacementNewable() {
  if (is_only_placement_newable_ == kNotComputed) {
    bool placement = false;
    bool new_deleted = false;
    for (CXXRecordDecl::method_iterator it = record_->method_begin();
         it != record_->method_end();
         ++it) {
      if (it->getNameAsString() == kNewOperatorName) {
        if (it->getNumParams() == 1) {
          // Ordinary operator new(size_t) must be deleted...
          new_deleted = it->isDeleted();
        } else if (it->getNumParams() == 2) {
          // ...while placement operator new(size_t, void*) must not be.
          placement = !it->isDeleted();
        }
      }
    }
    is_only_placement_newable_ = (placement && new_deleted) ? kTrue : kFalse;
  }
  return is_only_placement_newable_;
}
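
// Illustrative class satisfying IsOnlyPlacementNewable() (a generic C++
// sketch, not a declaration taken from Blink):
//
//   class InPlaceOnly {
//    public:
//     void* operator new(size_t) = delete;       // 1 param, deleted
//     void* operator new(size_t, void* where) {  // 2 params, usable
//       return where;
//     }
//   };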

CXXMethodDecl* RecordInfo::DeclaresNewOperator() {
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kNewOperatorName && it->getNumParams() == 1)
      return *it;
  }
  return nullptr;
}

// An object requires a tracing method if it has any fields that need tracing
// or if it inherits from multiple bases that need tracing.
bool RecordInfo::RequiresTraceMethod() {
  if (IsStackAllocated())
    return false;
  unsigned bases_with_trace = 0;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.NeedsTracing().IsNeeded())
      ++bases_with_trace;
  }
  if (bases_with_trace > 1)
    return true;
  // Collecting the fields computes fields_need_tracing_ as a side effect.
  GetFields();
  return fields_need_tracing_.IsNeeded();
}

// Get the actual tracing method (i.e., it can be traceAfterDispatch if there
// is a dispatch method).
CXXMethodDecl* RecordInfo::GetTraceMethod() {
  DetermineTracingMethods();
  return trace_method_;
}

// Get the static trace dispatch method.
CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() {
  DetermineTracingMethods();
  return trace_dispatch_method_;
}

CXXMethodDecl* RecordInfo::GetFinalizeDispatchMethod() {
  DetermineTracingMethods();
  return finalize_dispatch_method_;
}

RecordInfo::Bases& RecordInfo::GetBases() {
  if (!bases_)
    bases_ = CollectBases();
  return *bases_;
}

bool RecordInfo::InheritsTrace() {
  if (GetTraceMethod())
    return true;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->InheritsTrace())
      return true;
  }
  return false;
}

CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() {
  if (CXXMethodDecl* trace = GetTraceMethod())
    return trace->isVirtual() ? nullptr : trace;
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (CXXMethodDecl* trace = it->second.info()->InheritsNonVirtualTrace())
      return trace;
  }
  return nullptr;
}

bool RecordInfo::DeclaresGCMixinMethods() {
  DetermineTracingMethods();
  return has_gc_mixin_methods_;
}

bool RecordInfo::DeclaresLocalTraceMethod() {
  if (is_declaring_local_trace_ != kNotComputed)
    return is_declaring_local_trace_;
  DetermineTracingMethods();
  is_declaring_local_trace_ = trace_method_ ? kTrue : kFalse;
  if (is_declaring_local_trace_) {
    // Verify that the trace method is declared directly on this record:
    // assume it is not, and only keep kTrue if it is actually found here.
    is_declaring_local_trace_ = kFalse;
    for (auto it = record_->method_begin();
         it != record_->method_end(); ++it) {
      if (*it == trace_method_) {
        is_declaring_local_trace_ = kTrue;
        break;
      }
    }
  }
  return is_declaring_local_trace_;
}

// A (non-virtual) class is considered abstract in Blink if it has
// no public constructors and no create methods.
bool RecordInfo::IsConsideredAbstract() {
  for (CXXRecordDecl::ctor_iterator it = record_->ctor_begin();
       it != record_->ctor_end();
       ++it) {
    if (!it->isCopyOrMoveConstructor() && it->getAccess() == AS_public)
      return false;
  }
  for (CXXRecordDecl::method_iterator it = record_->method_begin();
       it != record_->method_end();
       ++it) {
    if (it->getNameAsString() == kCreateName)
      return false;
  }
  return true;
}

RecordInfo::Bases* RecordInfo::CollectBases() {
  // Compute the collection locally to avoid inconsistent states.
  Bases* bases = new Bases;
  if (!record_->hasDefinition())
    return bases;
  for (CXXRecordDecl::base_class_iterator it = record_->bases_begin();
       it != record_->bases_end();
       ++it) {
    const CXXBaseSpecifier& spec = *it;
    RecordInfo* info = cache_->Lookup(spec.getType());
    if (!info)
      continue;
    CXXRecordDecl* base = info->record();
    TracingStatus status = info->InheritsTrace()
                               ? TracingStatus::Needed()
                               : TracingStatus::Unneeded();
    bases->push_back(std::make_pair(base, BasePoint(spec, info, status)));
  }
  return bases;
}

RecordInfo::Fields& RecordInfo::GetFields() {
  if (!fields_)
    fields_ = CollectFields();
  return *fields_;
}

RecordInfo::Fields* RecordInfo::CollectFields() {
  // Compute the collection locally to avoid inconsistent states.
  Fields* fields = new Fields;
  if (!record_->hasDefinition())
    return fields;
  TracingStatus fields_status = TracingStatus::Unneeded();
  for (RecordDecl::field_iterator it = record_->field_begin();
       it != record_->field_end();
       ++it) {
    FieldDecl* field = *it;
    // Ignore fields annotated with the GC_PLUGIN_IGNORE macro.
    if (Config::IsIgnoreAnnotated(field))
      continue;
    if (Edge* edge = CreateEdge(field->getType().getTypePtrOrNull())) {
      // Combine statuses with a least upper bound, so a single field that
      // needs tracing makes the whole record need tracing.
      fields_status = fields_status.LUB(edge->NeedsTracing(Edge::kRecursive));
      fields->insert(std::make_pair(field, FieldPoint(field, edge)));
    }
  }
  fields_need_tracing_ = fields_status;
  return fields;
}

void RecordInfo::DetermineTracingMethods() {
  if (determined_trace_methods_)
    return;
  determined_trace_methods_ = true;
  if (Config::IsGCBase(name_))
    return;
  CXXMethodDecl* trace = nullptr;
  CXXMethodDecl* trace_impl = nullptr;
  CXXMethodDecl* trace_after_dispatch = nullptr;
  bool has_adjust_and_mark = false;
  bool has_is_heap_object_alive = false;
  for (Decl* decl : record_->decls()) {
    CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl);
    if (!method) {
      if (FunctionTemplateDecl* func_template =
          dyn_cast<FunctionTemplateDecl>(decl))
        method = dyn_cast<CXXMethodDecl>(func_template->getTemplatedDecl());
    }
    if (!method)
      continue;

    switch (Config::GetTraceMethodType(method)) {
      case Config::TRACE_METHOD:
        trace = method;
        break;
      case Config::TRACE_AFTER_DISPATCH_METHOD:
        trace_after_dispatch = method;
        break;
      case Config::TRACE_IMPL_METHOD:
        trace_impl = method;
        break;
      case Config::TRACE_AFTER_DISPATCH_IMPL_METHOD:
        break;
      case Config::NOT_TRACE_METHOD:
        if (method->getNameAsString() == kFinalizeName) {
          finalize_dispatch_method_ = method;
        } else if (method->getNameAsString() == kAdjustAndMarkName) {
          has_adjust_and_mark = true;
        } else if (method->getNameAsString() == kIsHeapObjectAliveName) {
          has_is_heap_object_alive = true;
        }
        break;
    }
  }

  // Record whether the class defines the two GCMixin methods.
  has_gc_mixin_methods_ =
      has_adjust_and_mark && has_is_heap_object_alive ? kTrue : kFalse;
  if (trace_after_dispatch) {
    trace_method_ = trace_after_dispatch;
    trace_dispatch_method_ = trace_impl ? trace_impl : trace;
  } else {
    // TODO: Can we never have a dispatch method called trace without the same
    // class defining a traceAfterDispatch method?
    trace_method_ = trace;
    trace_dispatch_method_ = nullptr;
  }
  if (trace_dispatch_method_ && finalize_dispatch_method_)
    return;
  // If this class does not define dispatching methods, inherit them.
  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    // TODO: Does it make sense to inherit multiple dispatch methods?
    if (CXXMethodDecl* dispatch = it->second.info()->GetTraceDispatchMethod()) {
      assert(!trace_dispatch_method_ && "Multiple trace dispatching methods");
      trace_dispatch_method_ = dispatch;
    }
    if (CXXMethodDecl* dispatch =
            it->second.info()->GetFinalizeDispatchMethod()) {
      assert(!finalize_dispatch_method_ &&
             "Multiple finalize dispatching methods");
      finalize_dispatch_method_ = dispatch;
    }
  }
}
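
// The manual dispatch pattern these methods look for, sketched with
// hypothetical class names (trace/traceAfterDispatch are the method names
// Config recognizes above; the m_kind switch is illustrative):
//
//   class Shape : public GarbageCollected<Shape> {
//    public:
//     void trace(Visitor* visitor) {       // static dispatch method
//       if (m_kind == kCircle)
//         static_cast<Circle*>(this)->traceAfterDispatch(visitor);
//     }
//     void traceAfterDispatch(Visitor*);   // traces Shape's own fields
//   };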

// TODO: Add classes with a finalize() method that specialize FinalizerTrait.
bool RecordInfo::NeedsFinalization() {
  if (does_need_finalization_ == kNotComputed) {
    // Start from hasNonTrivialDestructor(). If that is true only because a
    // safely ignorable class shows up as a direct base (or because this is
    // itself such an 'ignorable' class), the record still does not need
    // finalization.
    does_need_finalization_ =
        record_->hasNonTrivialDestructor() ? kTrue : kFalse;
    if (!does_need_finalization_)
      return does_need_finalization_;

    CXXDestructorDecl* dtor = record_->getDestructor();
    if (dtor && dtor->isUserProvided())
      return does_need_finalization_;
    for (Fields::iterator it = GetFields().begin();
         it != GetFields().end();
         ++it) {
      if (it->second.edge()->NeedsFinalization())
        return does_need_finalization_;
    }

    for (Bases::iterator it = GetBases().begin();
         it != GetBases().end();
         ++it) {
      if (it->second.info()->NeedsFinalization())
        return does_need_finalization_;
    }
    // The destructor was non-trivial only due to bases (or fields) whose
    // destructors can be safely ignored. Hence, no need for finalization.
    does_need_finalization_ = kFalse;
  }
  return does_need_finalization_;
}

// A class needs tracing if:
// - it is allocated on the managed heap,
// - it is derived from a class that needs tracing, or
// - it contains fields that need tracing.
TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) {
  if (IsGCAllocated())
    return TracingStatus::Needed();

  if (IsStackAllocated())
    return TracingStatus::Unneeded();

  for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) {
    if (it->second.info()->NeedsTracing(option).IsNeeded())
      return TracingStatus::Needed();
  }

  if (option == Edge::kRecursive)
    GetFields();

  return fields_need_tracing_;
}

static bool isInStdNamespace(clang::Sema& sema, NamespaceDecl* ns) {
  while (ns) {
    if (sema.getStdNamespace()->InEnclosingNamespaceSetOf(ns))
      return true;
    ns = dyn_cast<NamespaceDecl>(ns->getParent());
  }
  return false;
}

Edge* RecordInfo::CreateEdge(const Type* type) {
  if (!type) {
    return nullptr;
  }

  if (type->isPointerType() || type->isReferenceType()) {
    if (Edge* ptr = CreateEdge(type->getPointeeType().getTypePtrOrNull()))
      return new RawPtr(ptr, type->isReferenceType());
    return nullptr;
  }

  RecordInfo* info = cache_->Lookup(type);

  // If the type is neither a pointer nor a C++ record we ignore it.
  if (!info) {
    return nullptr;
  }

  TemplateArgs args;

  if (Config::IsRefPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new RefPtr(ptr);
    return nullptr;
  }

  if (Config::IsOwnPtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new OwnPtr(ptr);
    return nullptr;
  }

  if (Config::IsUniquePtr(info->name()) && info->GetTemplateArgs(1, &args)) {
    // Check that this is std::unique_ptr.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    clang::Sema& sema = cache_->instance().getSema();
    if (!isInStdNamespace(sema, ns))
      return nullptr;
    if (Edge* ptr = CreateEdge(args[0]))
      return new UniquePtr(ptr);
    return nullptr;
  }

  if (Config::IsMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new Member(ptr);
    return nullptr;
  }

  if (Config::IsWeakMember(info->name()) && info->GetTemplateArgs(1, &args)) {
    if (Edge* ptr = CreateEdge(args[0]))
      return new WeakMember(ptr);
    return nullptr;
  }

  bool is_persistent = Config::IsPersistent(info->name());
  if (is_persistent || Config::IsCrossThreadPersistent(info->name())) {
    // Persistent might refer to v8::Persistent, so check the namespace.
    // TODO: Consider using a more canonical identification than names.
    NamespaceDecl* ns =
        dyn_cast<NamespaceDecl>(info->record()->getDeclContext());
    if (!ns || ns->getName() != "blink")
      return nullptr;
    if (!info->GetTemplateArgs(1, &args))
      return nullptr;
    if (Edge* ptr = CreateEdge(args[0])) {
      if (is_persistent)
        return new Persistent(ptr);
      else
        return new CrossThreadPersistent(ptr);
    }
    return nullptr;
  }

  if (Config::IsGCCollection(info->name()) ||
      Config::IsWTFCollection(info->name())) {
    bool is_root = Config::IsPersistentGCCollection(info->name());
    bool on_heap = is_root || info->IsHeapAllocatedCollection();
    size_t count = Config::CollectionDimension(info->name());
    if (!info->GetTemplateArgs(count, &args))
      return nullptr;
    Collection* edge = new Collection(info, on_heap, is_root);
    for (const Type* arg : args) {
      if (Edge* member = CreateEdge(arg)) {
        edge->members().push_back(member);
      }
      // TODO: Handle the case where we fail to create an edge (e.g., if the
      // argument is a primitive type or just not fully known yet).
    }
    return edge;
  }

  return new Value(info);
}
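
// Worked example (the field is hypothetical; the edge classes are the ones
// constructed above): for a field declared as
//
//   Member<Node> m_node;
//
// CreateEdge() matches Config::IsMember("Member"), recurses on the template
// argument Node to build a Value edge, and wraps it in a Member edge; the
// resulting edge reports that the field needs tracing.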
    665