//===--- CGDecl.cpp - Emit LLVM Code for declarations ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Decl nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGDebugInfo.h"
#include "CGOpenCLRuntime.h"
#include "CodeGenModule.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclObjC.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Type.h"
using namespace clang;
using namespace CodeGen;


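/// EmitDecl - Emit code for a single declaration reached from a statement
/// context (i.e. a DeclStmt); declarations that cannot appear in statements
/// are rejected below.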
void CodeGenFunction::EmitDecl(const Decl &D) {
  switch (D.getKind()) {
  case Decl::TranslationUnit:
  case Decl::Namespace:
  case Decl::UnresolvedUsingTypename:
  case Decl::ClassTemplateSpecialization:
  case Decl::ClassTemplatePartialSpecialization:
  case Decl::TemplateTypeParm:
  case Decl::UnresolvedUsingValue:
  case Decl::NonTypeTemplateParm:
  case Decl::CXXMethod:
  case Decl::CXXConstructor:
  case Decl::CXXDestructor:
  case Decl::CXXConversion:
  case Decl::Field:
  case Decl::IndirectField:
  case Decl::ObjCIvar:
  case Decl::ObjCAtDefsField:
  case Decl::ParmVar:
  case Decl::ImplicitParam:
  case Decl::ClassTemplate:
  case Decl::FunctionTemplate:
  case Decl::TypeAliasTemplate:
  case Decl::TemplateTemplateParm:
  case Decl::ObjCMethod:
  case Decl::ObjCCategory:
  case Decl::ObjCProtocol:
  case Decl::ObjCInterface:
  case Decl::ObjCCategoryImpl:
  case Decl::ObjCImplementation:
  case Decl::ObjCProperty:
  case Decl::ObjCCompatibleAlias:
  case Decl::AccessSpec:
  case Decl::LinkageSpec:
  case Decl::ObjCPropertyImpl:
  case Decl::FileScopeAsm:
  case Decl::Friend:
  case Decl::FriendTemplate:
  case Decl::Block:
  case Decl::ClassScopeFunctionSpecialization:
    llvm_unreachable("Declaration should not be in declstmts!");
  case Decl::Function:  // void X();
  case Decl::Record:    // struct/union/class X;
  case Decl::Enum:      // enum X;
  case Decl::EnumConstant: // enum ? { X = ? }
  case Decl::CXXRecord: // struct/union/class X; [C++]
  case Decl::Using:          // using X; [C++]
  case Decl::UsingShadow:
  case Decl::UsingDirective: // using namespace X; [C++]
  case Decl::NamespaceAlias:
  case Decl::StaticAssert: // static_assert(X, ""); [C++0x]
  case Decl::Label:        // __label__ x;
  case Decl::Import:
  case Decl::Empty:
    // None of these decls require codegen support.
    return;

  case Decl::Var: {
    const VarDecl &VD = cast<VarDecl>(D);
    assert(VD.isLocalVarDecl() &&
           "Should not see file-scope variables inside a function!");
    return EmitVarDecl(VD);
  }

  case Decl::Typedef:      // typedef int X;
  case Decl::TypeAlias: {  // using X = int; [C++0x]
    const TypedefNameDecl &TD = cast<TypedefNameDecl>(D);
    QualType Ty = TD.getUnderlyingType();

    if (Ty->isVariablyModifiedType())
      EmitVariablyModifiedType(Ty);
  }
  }
}

/// EmitVarDecl - This method handles emission of any variable declaration
/// inside a function, including static vars etc.
void CodeGenFunction::EmitVarDecl(const VarDecl &D) {
  switch (D.getStorageClassAsWritten()) {
  case SC_None:
  case SC_Auto:
  case SC_Register:
    return EmitAutoVarDecl(D);
  case SC_Static: {
    llvm::GlobalValue::LinkageTypes Linkage =
      llvm::GlobalValue::InternalLinkage;

    // If the function definition has some sort of weak linkage, its
    // static variables should also be weak so that they get properly
    // uniqued.  We can't do this in C, though, because there's no
    // standard way to agree on which variables are the same (i.e.
    // there's no mangling).
    if (getLangOpts().CPlusPlus)
      if (llvm::GlobalValue::isWeakForLinker(CurFn->getLinkage()))
        Linkage = CurFn->getLinkage();

    return EmitStaticVarDecl(D, Linkage);
  }
  case SC_Extern:
  case SC_PrivateExtern:
    // Don't emit it now, allow it to be emitted lazily on its first use.
    return;
  case SC_OpenCLWorkGroupLocal:
    return CGM.getOpenCLRuntime().EmitWorkGroupLocalVarDecl(*this, D);
  }

  llvm_unreachable("Unknown storage class");
}

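/// GetStaticDeclName - Compute the name to use for a function-local static
/// variable when C++ mangling is not available: the name of the enclosing
/// function, block, or ObjC method joined to the variable's own name by
/// Separator.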
static std::string GetStaticDeclName(CodeGenFunction &CGF, const VarDecl &D,
                                     const char *Separator) {
  CodeGenModule &CGM = CGF.CGM;
  if (CGF.getLangOpts().CPlusPlus) {
    StringRef Name = CGM.getMangledName(&D);
    return Name.str();
  }

  std::string ContextName;
  if (!CGF.CurFuncDecl) {
    // Better be in a block declared in global scope.
    const NamedDecl *ND = cast<NamedDecl>(&D);
    const DeclContext *DC = ND->getDeclContext();
    if (const BlockDecl *BD = dyn_cast<BlockDecl>(DC)) {
      MangleBuffer Name;
      CGM.getBlockMangledName(GlobalDecl(), Name, BD);
      ContextName = Name.getString();
    }
    else
      llvm_unreachable("Unknown context for block static var decl");
  } else if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CGF.CurFuncDecl)) {
    StringRef Name = CGM.getMangledName(FD);
    ContextName = Name.str();
  } else if (isa<ObjCMethodDecl>(CGF.CurFuncDecl))
    ContextName = CGF.CurFn->getName();
  else
    llvm_unreachable("Unknown context for static var decl");

  return ContextName + Separator + D.getNameAsString();
}

llvm::GlobalVariable *
CodeGenFunction::CreateStaticVarDecl(const VarDecl &D,
                                     const char *Separator,
                                     llvm::GlobalValue::LinkageTypes Linkage) {
  QualType Ty = D.getType();
  assert(Ty->isConstantSizeType() && "VLAs can't be static");

  // Use the label if the variable is renamed with the asm-label extension.
  std::string Name;
  if (D.hasAttr<AsmLabelAttr>())
    Name = CGM.getMangledName(&D);
  else
    Name = GetStaticDeclName(*this, D, Separator);

  llvm::Type *LTy = CGM.getTypes().ConvertTypeForMem(Ty);
  unsigned AddrSpace =
   CGM.GetGlobalVarAddressSpace(&D, CGM.getContext().getTargetAddressSpace(Ty));
  llvm::GlobalVariable *GV =
    new llvm::GlobalVariable(CGM.getModule(), LTy,
                             Ty.isConstant(getContext()), Linkage,
                             CGM.EmitNullConstant(D.getType()), Name, 0,
                             llvm::GlobalVariable::NotThreadLocal,
                             AddrSpace);
  GV->setAlignment(getContext().getDeclAlign(&D).getQuantity());
  if (Linkage != llvm::GlobalValue::InternalLinkage)
    GV->setVisibility(CurFn->getVisibility());

  if (D.isThreadSpecified())
    CGM.setTLSMode(GV, D);

  return GV;
}

/// hasNontrivialDestruction - Determine whether a type's destruction is
/// non-trivial. If so, and the variable uses static initialization, we must
/// register its destructor to run on exit.
static bool hasNontrivialDestruction(QualType T) {
  CXXRecordDecl *RD = T->getBaseElementTypeUnsafe()->getAsCXXRecordDecl();
  return RD && !RD->hasTrivialDestructor();
}

/// AddInitializerToStaticVarDecl - Add the initializer for 'D' to the
/// global variable that has already been created for it.  If the initializer
/// has a different type than GV does, this may free GV and return a different
/// one.  Otherwise it just returns GV.
llvm::GlobalVariable *
CodeGenFunction::AddInitializerToStaticVarDecl(const VarDecl &D,
                                               llvm::GlobalVariable *GV) {
  llvm::Constant *Init = CGM.EmitConstantInit(D, this);

  // If constant emission failed, then this should be a C++ static
  // initializer.
  if (!Init) {
    if (!getLangOpts().CPlusPlus)
      CGM.ErrorUnsupported(D.getInit(), "constant l-value expression");
    else if (Builder.GetInsertBlock()) {
      // Since we have a static initializer, this global variable can't
      // be constant.
      GV->setConstant(false);

      EmitCXXGuardedInit(D, GV, /*PerformInit*/true);
    }
    return GV;
  }

  // The initializer may differ in type from the global. Rewrite
  // the global to match the initializer.  (We have to do this
  // because some types, like unions, can't be completely represented
  // in the LLVM type system.)
  if (GV->getType()->getElementType() != Init->getType()) {
    llvm::GlobalVariable *OldGV = GV;

    GV = new llvm::GlobalVariable(CGM.getModule(), Init->getType(),
                                  OldGV->isConstant(),
                                  OldGV->getLinkage(), Init, "",
                                  /*InsertBefore*/ OldGV,
                                  OldGV->getThreadLocalMode(),
                           CGM.getContext().getTargetAddressSpace(D.getType()));
    GV->setVisibility(OldGV->getVisibility());

    // Steal the name of the old global
    GV->takeName(OldGV);

    // Replace all uses of the old global with the new global
    llvm::Constant *NewPtrForOldDecl =
    llvm::ConstantExpr::getBitCast(GV, OldGV->getType());
    OldGV->replaceAllUsesWith(NewPtrForOldDecl);

    // Erase the old global, since it is no longer used.
    OldGV->eraseFromParent();
  }

  GV->setConstant(CGM.isTypeConstant(D.getType(), true));
  GV->setInitializer(Init);

  if (hasNontrivialDestruction(D.getType())) {
    // We have a constant initializer, but a nontrivial destructor. We still
    // need to perform a guarded "initialization" in order to register the
    // destructor.
    EmitCXXGuardedInit(D, GV, /*PerformInit*/false);
  }

  return GV;
}

void CodeGenFunction::EmitStaticVarDecl(const VarDecl &D,
                                      llvm::GlobalValue::LinkageTypes Linkage) {
  llvm::Value *&DMEntry = LocalDeclMap[&D];
  assert(DMEntry == 0 && "Decl already exists in localdeclmap!");

  // Check to see if we already have a global variable for this
  // declaration.  This can happen when double-emitting function
  // bodies, e.g. with complete and base constructors.
  llvm::Constant *addr =
    CGM.getStaticLocalDeclAddress(&D);

  llvm::GlobalVariable *var;
  if (addr) {
    var = cast<llvm::GlobalVariable>(addr->stripPointerCasts());
  } else {
    addr = var = CreateStaticVarDecl(D, ".", Linkage);
  }

  // Store into LocalDeclMap before generating initializer to handle
  // circular references.
  DMEntry = addr;
  CGM.setStaticLocalDeclAddress(&D, addr);

  // We can't have a VLA here, but we can have a pointer to a VLA,
  // even though that doesn't really make any sense.
  // Make sure to evaluate VLA bounds now so that we have them for later.
  if (D.getType()->isVariablyModifiedType())
    EmitVariablyModifiedType(D.getType());

  // Save the type in case adding the initializer forces a type change.
  llvm::Type *expectedType = addr->getType();

  // If this value has an initializer, emit it.
  if (D.getInit())
    var = AddInitializerToStaticVarDecl(D, var);

  var->setAlignment(getContext().getDeclAlign(&D).getQuantity());

  if (D.hasAttr<AnnotateAttr>())
    CGM.AddGlobalAnnotations(&D, var);

  if (const SectionAttr *SA = D.getAttr<SectionAttr>())
    var->setSection(SA->getName());

  if (D.hasAttr<UsedAttr>())
    CGM.AddUsedGlobal(var);

  // We may have to cast the constant because of the initializer
  // mismatch above.
  //
  // FIXME: It is really dangerous to store this in the map; if anyone
  // RAUW's the GV, uses of this constant will be invalid.
  llvm::Constant *castedAddr = llvm::ConstantExpr::getBitCast(var, expectedType);
  DMEntry = castedAddr;
  CGM.setStaticLocalDeclAddress(&D, castedAddr);

  // Emit global variable debug descriptor for static vars.
  CGDebugInfo *DI = getDebugInfo();
  if (DI &&
      CGM.getCodeGenOpts().getDebugInfo() >= CodeGenOptions::LimitedDebugInfo) {
    DI->setLocation(D.getLocation());
    DI->EmitGlobalVariable(var, &D);
  }
}

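// Cleanup objects pushed onto the EHScopeStack by the code below.  Each Emit
// callback runs when its enclosing cleanup scope is popped, on the normal
// exit path, the exceptional path, or both.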
namespace {
  struct DestroyObject : EHScopeStack::Cleanup {
    DestroyObject(llvm::Value *addr, QualType type,
                  CodeGenFunction::Destroyer *destroyer,
                  bool useEHCleanupForArray)
      : addr(addr), type(type), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

    llvm::Value *addr;
    QualType type;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Don't use an EH cleanup recursively from an EH cleanup.
      bool useEHCleanupForArray =
        flags.isForNormalCleanup() && this->useEHCleanupForArray;

      CGF.emitDestroy(addr, type, destroyer, useEHCleanupForArray);
    }
  };

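  // Destroys an NRVO variable.  On the normal path the destructor call is
  // skipped when the NRVO flag records that the object was constructed
  // directly in the return slot; on the EH path the destructor always runs.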
  struct DestroyNRVOVariable : EHScopeStack::Cleanup {
    DestroyNRVOVariable(llvm::Value *addr,
                        const CXXDestructorDecl *Dtor,
                        llvm::Value *NRVOFlag)
      : Dtor(Dtor), NRVOFlag(NRVOFlag), Loc(addr) {}

    const CXXDestructorDecl *Dtor;
    llvm::Value *NRVOFlag;
    llvm::Value *Loc;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Along the exceptions path we always execute the dtor.
      bool NRVO = flags.isForNormalCleanup() && NRVOFlag;

      llvm::BasicBlock *SkipDtorBB = 0;
      if (NRVO) {
        // If we exited via NRVO, we skip the destructor call.
        llvm::BasicBlock *RunDtorBB = CGF.createBasicBlock("nrvo.unused");
        SkipDtorBB = CGF.createBasicBlock("nrvo.skipdtor");
        llvm::Value *DidNRVO = CGF.Builder.CreateLoad(NRVOFlag, "nrvo.val");
        CGF.Builder.CreateCondBr(DidNRVO, SkipDtorBB, RunDtorBB);
        CGF.EmitBlock(RunDtorBB);
      }

      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false,
                                /*Delegating=*/false,
                                Loc);

      if (NRVO) CGF.EmitBlock(SkipDtorBB);
    }
  };

  struct CallStackRestore : EHScopeStack::Cleanup {
    llvm::Value *Stack;
    CallStackRestore(llvm::Value *Stack) : Stack(Stack) {}
    void Emit(CodeGenFunction &CGF, Flags flags) {
      llvm::Value *V = CGF.Builder.CreateLoad(Stack);
      llvm::Value *F = CGF.CGM.getIntrinsic(llvm::Intrinsic::stackrestore);
      CGF.Builder.CreateCall(F, V);
    }
  };

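  // Extends the GC lifetime of a local to the end of its scope; pushed for
  // objc_precise_lifetime variables when compiling in GC mode.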
  struct ExtendGCLifetime : EHScopeStack::Cleanup {
    const VarDecl &Var;
    ExtendGCLifetime(const VarDecl *var) : Var(*var) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Compute the address of the local variable, in case it's a
      // byref or something.
      DeclRefExpr DRE(const_cast<VarDecl*>(&Var), false,
                      Var.getType(), VK_LValue, SourceLocation());
      llvm::Value *value = CGF.EmitLoadOfScalar(CGF.EmitDeclRefLValue(&DRE));
      CGF.EmitExtendGCLifetime(value);
    }
  };

  struct CallCleanupFunction : EHScopeStack::Cleanup {
    llvm::Constant *CleanupFn;
    const CGFunctionInfo &FnInfo;
    const VarDecl &Var;

    CallCleanupFunction(llvm::Constant *CleanupFn, const CGFunctionInfo *Info,
                        const VarDecl *Var)
      : CleanupFn(CleanupFn), FnInfo(*Info), Var(*Var) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      DeclRefExpr DRE(const_cast<VarDecl*>(&Var), false,
                      Var.getType(), VK_LValue, SourceLocation());
      // Compute the address of the local variable, in case it's a byref
      // or something.
      llvm::Value *Addr = CGF.EmitDeclRefLValue(&DRE).getAddress();

      // In some cases, the type of the function argument will be different from
      // the type of the pointer. An example of this is
      // void f(void* arg);
      // __attribute__((cleanup(f))) void *g;
      //
      // To fix this we insert a bitcast here.
      QualType ArgTy = FnInfo.arg_begin()->type;
      llvm::Value *Arg =
        CGF.Builder.CreateBitCast(Addr, CGF.ConvertType(ArgTy));

      CallArgList Args;
      Args.add(RValue::get(Arg),
               CGF.getContext().getPointerType(Var.getType()));
      CGF.EmitCall(FnInfo, CleanupFn, ReturnValueSlot(), Args);
    }
  };
}

/// EmitAutoVarWithLifetime - Does the setup required for an automatic
/// variable with lifetime.
static void EmitAutoVarWithLifetime(CodeGenFunction &CGF, const VarDecl &var,
                                    llvm::Value *addr,
                                    Qualifiers::ObjCLifetime lifetime) {
  switch (lifetime) {
  case Qualifiers::OCL_None:
    llvm_unreachable("present but none");

  case Qualifiers::OCL_ExplicitNone:
    // nothing to do
    break;

  case Qualifiers::OCL_Strong: {
    CodeGenFunction::Destroyer *destroyer =
      (var.hasAttr<ObjCPreciseLifetimeAttr>()
       ? CodeGenFunction::destroyARCStrongPrecise
       : CodeGenFunction::destroyARCStrongImprecise);

    CleanupKind cleanupKind = CGF.getARCCleanupKind();
    CGF.pushDestroy(cleanupKind, addr, var.getType(), destroyer,
                    cleanupKind & EHCleanup);
    break;
  }
  case Qualifiers::OCL_Autoreleasing:
    // nothing to do
    break;

  case Qualifiers::OCL_Weak:
    // __weak objects always get EH cleanups; otherwise, exceptions
    // could cause really nasty crashes instead of mere leaks.
    CGF.pushDestroy(NormalAndEHCleanup, addr, var.getType(),
                    CodeGenFunction::destroyARCWeak,
                    /*useEHCleanup*/ true);
    break;
  }
}

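/// isAccessedBy - Conservatively determine whether the statement 's' reads or
/// writes the variable 'var', either directly or by capturing it in a block.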
static bool isAccessedBy(const VarDecl &var, const Stmt *s) {
  if (const Expr *e = dyn_cast<Expr>(s)) {
    // Skip the most common kinds of expressions that make
    // hierarchy-walking expensive.
    s = e = e->IgnoreParenCasts();

    if (const DeclRefExpr *ref = dyn_cast<DeclRefExpr>(e))
      return (ref->getDecl() == &var);
    if (const BlockExpr *be = dyn_cast<BlockExpr>(e)) {
      const BlockDecl *block = be->getBlockDecl();
      for (BlockDecl::capture_const_iterator i = block->capture_begin(),
           e = block->capture_end(); i != e; ++i) {
        if (i->getVariable() == &var)
          return true;
      }
    }
  }

  for (Stmt::const_child_range children = s->children(); children; ++children)
    // Children might be null, e.g. a missing decl or the conditional of an
    // if-stmt.
    if ((*children) && isAccessedBy(var, *children))
      return true;

  return false;
}

static bool isAccessedBy(const ValueDecl *decl, const Expr *e) {
  if (!decl) return false;
  if (!isa<VarDecl>(decl)) return false;
  const VarDecl *var = cast<VarDecl>(decl);
  return isAccessedBy(*var, e);
}

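/// drillIntoBlockVariable - Rewrite the lvalue so that it addresses the given
/// __block variable through its byref structure, since the object may already
/// have been moved to the heap.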
static void drillIntoBlockVariable(CodeGenFunction &CGF,
                                   LValue &lvalue,
                                   const VarDecl *var) {
  lvalue.setAddress(CGF.BuildBlockByrefAddress(lvalue.getAddress(), var));
}

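/// EmitScalarInit - Emit the scalar initializer 'init' into the given lvalue,
/// honoring any Objective-C ownership qualifier on the destination and the
/// possibility that the variable is accessed or captured by its own
/// initializer.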
void CodeGenFunction::EmitScalarInit(const Expr *init,
                                     const ValueDecl *D,
                                     LValue lvalue,
                                     bool capturedByInit) {
  Qualifiers::ObjCLifetime lifetime = lvalue.getObjCLifetime();
  if (!lifetime) {
    llvm::Value *value = EmitScalarExpr(init);
    if (capturedByInit)
      drillIntoBlockVariable(*this, lvalue, cast<VarDecl>(D));
    EmitStoreThroughLValue(RValue::get(value), lvalue, true);
    return;
  }

  // If we're emitting a value with lifetime, we have to do the
  // initialization *before* we leave the cleanup scopes.
  if (const ExprWithCleanups *ewc = dyn_cast<ExprWithCleanups>(init)) {
    enterFullExpression(ewc);
    init = ewc->getSubExpr();
  }
  CodeGenFunction::RunCleanupsScope Scope(*this);

  // We have to maintain the illusion that the variable is
  // zero-initialized.  If the variable might be accessed in its
  // initializer, zero-initialize before running the initializer, then
  // actually perform the initialization with an assign.
  bool accessedByInit = false;
  if (lifetime != Qualifiers::OCL_ExplicitNone)
    accessedByInit = (capturedByInit || isAccessedBy(D, init));
  if (accessedByInit) {
    LValue tempLV = lvalue;
    // Drill down to the __block object if necessary.
    if (capturedByInit) {
      // We can use a simple GEP for this because it can't have been
      // moved yet.
      tempLV.setAddress(Builder.CreateStructGEP(tempLV.getAddress(),
                                   getByRefValueLLVMField(cast<VarDecl>(D))));
    }

    llvm::PointerType *ty
      = cast<llvm::PointerType>(tempLV.getAddress()->getType());
    ty = cast<llvm::PointerType>(ty->getElementType());

    llvm::Value *zero = llvm::ConstantPointerNull::get(ty);

    // If __weak, we want to use a barrier under certain conditions.
    if (lifetime == Qualifiers::OCL_Weak)
      EmitARCInitWeak(tempLV.getAddress(), zero);

    // Otherwise just do a simple store.
    else
      EmitStoreOfScalar(zero, tempLV, /* isInitialization */ true);
  }

  // Emit the initializer.
  llvm::Value *value = 0;

  switch (lifetime) {
  case Qualifiers::OCL_None:
    llvm_unreachable("present but none");

  case Qualifiers::OCL_ExplicitNone:
    // nothing to do
    value = EmitScalarExpr(init);
    break;

  case Qualifiers::OCL_Strong: {
    value = EmitARCRetainScalarExpr(init);
    break;
  }

  case Qualifiers::OCL_Weak: {
    // No way to optimize a producing initializer into this.  It's not
    // worth optimizing for, because the value will immediately
    // disappear in the common case.
    value = EmitScalarExpr(init);

    if (capturedByInit) drillIntoBlockVariable(*this, lvalue, cast<VarDecl>(D));
    if (accessedByInit)
      EmitARCStoreWeak(lvalue.getAddress(), value, /*ignored*/ true);
    else
      EmitARCInitWeak(lvalue.getAddress(), value);
    return;
  }

  case Qualifiers::OCL_Autoreleasing:
    value = EmitARCRetainAutoreleaseScalarExpr(init);
    break;
  }

  if (capturedByInit) drillIntoBlockVariable(*this, lvalue, cast<VarDecl>(D));

  // If the variable might have been accessed by its initializer, we
  // might have to initialize with a barrier.  We have to do this for
  // both __weak and __strong, but __weak got filtered out above.
  if (accessedByInit && lifetime == Qualifiers::OCL_Strong) {
    llvm::Value *oldValue = EmitLoadOfScalar(lvalue);
    EmitStoreOfScalar(value, lvalue, /* isInitialization */ true);
    EmitARCRelease(oldValue, ARCImpreciseLifetime);
    return;
  }

  EmitStoreOfScalar(value, lvalue, /* isInitialization */ true);
}

/// EmitScalarInit - Initialize the given lvalue with the given object.
void CodeGenFunction::EmitScalarInit(llvm::Value *init, LValue lvalue) {
  Qualifiers::ObjCLifetime lifetime = lvalue.getObjCLifetime();
  if (!lifetime)
    return EmitStoreThroughLValue(RValue::get(init), lvalue, true);

  switch (lifetime) {
  case Qualifiers::OCL_None:
    llvm_unreachable("present but none");

  case Qualifiers::OCL_ExplicitNone:
    // nothing to do
    break;

  case Qualifiers::OCL_Strong:
    init = EmitARCRetain(lvalue.getType(), init);
    break;

  case Qualifiers::OCL_Weak:
    // Initialize and then skip the primitive store.
    EmitARCInitWeak(lvalue.getAddress(), init);
    return;

  case Qualifiers::OCL_Autoreleasing:
    init = EmitARCRetainAutorelease(lvalue.getType(), init);
    break;
  }

  EmitStoreOfScalar(init, lvalue, /* isInitialization */ true);
}

/// canEmitInitWithFewStoresAfterMemset - Decide whether we can emit the
/// non-zero parts of the specified initializer with equal or fewer than
/// NumStores scalar stores.
static bool canEmitInitWithFewStoresAfterMemset(llvm::Constant *Init,
                                                unsigned &NumStores) {
  // Zero and Undef never require any extra stores.
  if (isa<llvm::ConstantAggregateZero>(Init) ||
      isa<llvm::ConstantPointerNull>(Init) ||
      isa<llvm::UndefValue>(Init))
    return true;
  if (isa<llvm::ConstantInt>(Init) || isa<llvm::ConstantFP>(Init) ||
      isa<llvm::ConstantVector>(Init) || isa<llvm::BlockAddress>(Init) ||
      isa<llvm::ConstantExpr>(Init))
    return Init->isNullValue() || NumStores--;

  // See if we can emit each element.
  if (isa<llvm::ConstantArray>(Init) || isa<llvm::ConstantStruct>(Init)) {
    for (unsigned i = 0, e = Init->getNumOperands(); i != e; ++i) {
      llvm::Constant *Elt = cast<llvm::Constant>(Init->getOperand(i));
      if (!canEmitInitWithFewStoresAfterMemset(Elt, NumStores))
        return false;
    }
    return true;
  }

  if (llvm::ConstantDataSequential *CDS =
        dyn_cast<llvm::ConstantDataSequential>(Init)) {
    for (unsigned i = 0, e = CDS->getNumElements(); i != e; ++i) {
      llvm::Constant *Elt = CDS->getElementAsConstant(i);
      if (!canEmitInitWithFewStoresAfterMemset(Elt, NumStores))
        return false;
    }
    return true;
  }

  // Anything else is hard and scary.
  return false;
}

/// emitStoresForInitAfterMemset - For inits that
/// canEmitInitWithFewStoresAfterMemset returned true for, emit the scalar
/// stores that would be required.
static void emitStoresForInitAfterMemset(llvm::Constant *Init, llvm::Value *Loc,
                                         bool isVolatile, CGBuilderTy &Builder) {
  assert(!Init->isNullValue() && !isa<llvm::UndefValue>(Init) &&
         "called emitStoresForInitAfterMemset for zero or undef value.");

  if (isa<llvm::ConstantInt>(Init) || isa<llvm::ConstantFP>(Init) ||
      isa<llvm::ConstantVector>(Init) || isa<llvm::BlockAddress>(Init) ||
      isa<llvm::ConstantExpr>(Init)) {
    Builder.CreateStore(Init, Loc, isVolatile);
    return;
  }

  if (llvm::ConstantDataSequential *CDS =
        dyn_cast<llvm::ConstantDataSequential>(Init)) {
    for (unsigned i = 0, e = CDS->getNumElements(); i != e; ++i) {
      llvm::Constant *Elt = CDS->getElementAsConstant(i);

      // If necessary, get a pointer to the element and emit it.
      if (!Elt->isNullValue() && !isa<llvm::UndefValue>(Elt))
        emitStoresForInitAfterMemset(Elt, Builder.CreateConstGEP2_32(Loc, 0, i),
                                     isVolatile, Builder);
    }
    return;
  }

  assert((isa<llvm::ConstantStruct>(Init) || isa<llvm::ConstantArray>(Init)) &&
         "Unknown value type!");

  for (unsigned i = 0, e = Init->getNumOperands(); i != e; ++i) {
    llvm::Constant *Elt = cast<llvm::Constant>(Init->getOperand(i));

    // If necessary, get a pointer to the element and emit it.
    if (!Elt->isNullValue() && !isa<llvm::UndefValue>(Elt))
      emitStoresForInitAfterMemset(Elt, Builder.CreateConstGEP2_32(Loc, 0, i),
                                   isVolatile, Builder);
  }
}


/// shouldUseMemSetPlusStoresToInitialize - Decide whether we should use memset
/// plus some stores to initialize a local variable instead of using a memcpy
/// from a constant global.  It is beneficial to use memset if the global is all
/// zeros, or mostly zeros and large.
static bool shouldUseMemSetPlusStoresToInitialize(llvm::Constant *Init,
                                                  uint64_t GlobalSize) {
  // If a global is all zeros, always use a memset.
  if (isa<llvm::ConstantAggregateZero>(Init)) return true;


  // If a non-zero global is <= 32 bytes, always use a memcpy.  If it is large,
  // do it if it will require 6 or fewer scalar stores.
  // TODO: Should the budget depend on the size?  Avoiding a large global
  // warrants plopping in more stores.
  unsigned StoreBudget = 6;
  uint64_t SizeLimit = 32;

  return GlobalSize > SizeLimit &&
         canEmitInitWithFewStoresAfterMemset(Init, StoreBudget);
}


/// EmitAutoVarDecl - Emit code and set up an entry in LocalDeclMap for a
/// variable declaration with auto, register, or no storage class specifier.
/// These turn into simple stack objects, or GlobalValues depending on target.
void CodeGenFunction::EmitAutoVarDecl(const VarDecl &D) {
  AutoVarEmission emission = EmitAutoVarAlloca(D);
  EmitAutoVarInit(emission);
  EmitAutoVarCleanups(emission);
}

/// EmitAutoVarAlloca - Emit the alloca and debug information for a
/// local variable.  Does not emit initialization or destruction.
CodeGenFunction::AutoVarEmission
CodeGenFunction::EmitAutoVarAlloca(const VarDecl &D) {
  QualType Ty = D.getType();

  AutoVarEmission emission(D);

  bool isByRef = D.hasAttr<BlocksAttr>();
  emission.IsByRef = isByRef;

  CharUnits alignment = getContext().getDeclAlign(&D);
  emission.Alignment = alignment;

  // If the type is variably-modified, emit all the VLA sizes for it.
  if (Ty->isVariablyModifiedType())
    EmitVariablyModifiedType(Ty);

  llvm::Value *DeclPtr;
  if (Ty->isConstantSizeType()) {
    if (!Target.useGlobalsForAutomaticVariables()) {
      bool NRVO = getLangOpts().ElideConstructors &&
                  D.isNRVOVariable();

      // If this value is a POD array or struct with a statically
      // determinable constant initializer, there are optimizations we can do.
      //
      // TODO: We should constant-evaluate the initializer of any variable,
      // as long as it is initialized by a constant expression. Currently,
      // isConstantInitializer produces wrong answers for structs with
      // reference or bitfield members, and a few other cases, and checking
      // for POD-ness protects us from some of these.
      if (D.getInit() &&
          (Ty->isArrayType() || Ty->isRecordType()) &&
          (Ty.isPODType(getContext()) ||
           getContext().getBaseElementType(Ty)->isObjCObjectPointerType()) &&
          D.getInit()->isConstantInitializer(getContext(), false)) {

        // If the variable's a const type, and it's neither an NRVO
        // candidate nor a __block variable and has no mutable members,
        // emit it as a global instead.
        if (CGM.getCodeGenOpts().MergeAllConstants && !NRVO && !isByRef &&
            CGM.isTypeConstant(Ty, true)) {
          EmitStaticVarDecl(D, llvm::GlobalValue::InternalLinkage);

          emission.Address = 0; // signal this condition to later callbacks
          assert(emission.wasEmittedAsGlobal());
          return emission;
        }

        // Otherwise, tell the initialization code that we're in this case.
        emission.IsConstantAggregate = true;
      }

      // A normal fixed sized variable becomes an alloca in the entry block,
      // unless it's an NRVO variable.
      llvm::Type *LTy = ConvertTypeForMem(Ty);

      if (NRVO) {
        // The named return value optimization: allocate this variable in the
        // return slot, so that we can elide the copy when returning this
        // variable (C++0x [class.copy]p34).
        DeclPtr = ReturnValue;

        if (const RecordType *RecordTy = Ty->getAs<RecordType>()) {
          if (!cast<CXXRecordDecl>(RecordTy->getDecl())->hasTrivialDestructor()) {
            // Create a flag that is used to indicate when the NRVO was applied
            // to this variable. Set it to zero to indicate that NRVO was not
            // applied.
            llvm::Value *Zero = Builder.getFalse();
            llvm::Value *NRVOFlag = CreateTempAlloca(Zero->getType(), "nrvo");
            EnsureInsertPoint();
            Builder.CreateStore(Zero, NRVOFlag);

            // Record the NRVO flag for this variable.
            NRVOFlags[&D] = NRVOFlag;
            emission.NRVOFlag = NRVOFlag;
          }
        }
      } else {
        if (isByRef)
          LTy = BuildByRefType(&D);

        llvm::AllocaInst *Alloc = CreateTempAlloca(LTy);
        Alloc->setName(D.getName());

        CharUnits allocaAlignment = alignment;
        if (isByRef)
          allocaAlignment = std::max(allocaAlignment,
              getContext().toCharUnitsFromBits(Target.getPointerAlign(0)));
        Alloc->setAlignment(allocaAlignment.getQuantity());
        DeclPtr = Alloc;
      }
    } else {
      // Targets that don't support recursion emit locals as globals.
      const char *Class =
        D.getStorageClass() == SC_Register ? ".reg." : ".auto.";
      DeclPtr = CreateStaticVarDecl(D, Class,
                                    llvm::GlobalValue::InternalLinkage);
    }
  } else {
    EnsureInsertPoint();

    if (!DidCallStackSave) {
      // Save the stack.
      llvm::Value *Stack = CreateTempAlloca(Int8PtrTy, "saved_stack");

      llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::stacksave);
      llvm::Value *V = Builder.CreateCall(F);

      Builder.CreateStore(V, Stack);

      DidCallStackSave = true;

      // Push a cleanup block and restore the stack there.
      // FIXME: in general circumstances, this should be an EH cleanup.
      EHStack.pushCleanup<CallStackRestore>(NormalCleanup, Stack);
    }

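    // This is a VLA: compute the runtime element count, then allocate the
    // storage with a dynamic alloca.  The stack pointer saved above is
    // restored when the enclosing scope ends.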
    llvm::Value *elementCount;
    QualType elementType;
    llvm::tie(elementCount, elementType) = getVLASize(Ty);

    llvm::Type *llvmTy = ConvertTypeForMem(elementType);

    // Allocate memory for the array.
    llvm::AllocaInst *vla = Builder.CreateAlloca(llvmTy, elementCount, "vla");
    vla->setAlignment(alignment.getQuantity());

    DeclPtr = vla;
  }

  llvm::Value *&DMEntry = LocalDeclMap[&D];
  assert(DMEntry == 0 && "Decl already exists in localdeclmap!");
  DMEntry = DeclPtr;
  emission.Address = DeclPtr;

  // Emit debug info for local var declaration.
  if (HaveInsertPoint())
    if (CGDebugInfo *DI = getDebugInfo()) {
      if (CGM.getCodeGenOpts().getDebugInfo()
            >= CodeGenOptions::LimitedDebugInfo) {
        DI->setLocation(D.getLocation());
        if (Target.useGlobalsForAutomaticVariables()) {
          DI->EmitGlobalVariable(static_cast<llvm::GlobalVariable *>(DeclPtr),
                                 &D);
        } else
          DI->EmitDeclareOfAutoVariable(&D, DeclPtr, Builder);
      }
    }

  if (D.hasAttr<AnnotateAttr>())
    EmitVarAnnotations(&D, emission.Address);

  return emission;
}

/// Determines whether the given __block variable is potentially
/// captured by the given expression.
static bool isCapturedBy(const VarDecl &var, const Expr *e) {
  // Skip the most common kinds of expressions that make
  // hierarchy-walking expensive.
  e = e->IgnoreParenCasts();

  if (const BlockExpr *be = dyn_cast<BlockExpr>(e)) {
    const BlockDecl *block = be->getBlockDecl();
    for (BlockDecl::capture_const_iterator i = block->capture_begin(),
           e = block->capture_end(); i != e; ++i) {
      if (i->getVariable() == &var)
        return true;
    }

    // No need to walk into the subexpressions.
    return false;
  }

  if (const StmtExpr *SE = dyn_cast<StmtExpr>(e)) {
    const CompoundStmt *CS = SE->getSubStmt();
    for (CompoundStmt::const_body_iterator BI = CS->body_begin(),
           BE = CS->body_end(); BI != BE; ++BI) {
      if (Expr *E = dyn_cast<Expr>((*BI))) {
        if (isCapturedBy(var, E))
          return true;
      } else if (DeclStmt *DS = dyn_cast<DeclStmt>((*BI))) {
        // Special-case declarations: check each initializer.
        for (DeclStmt::decl_iterator I = DS->decl_begin(), E = DS->decl_end();
             I != E; ++I) {
          if (VarDecl *VD = dyn_cast<VarDecl>((*I))) {
            Expr *Init = VD->getInit();
            if (Init && isCapturedBy(var, Init))
              return true;
          }
        }
      } else {
        // FIXME: Make the safe assumption that arbitrary statements cause
        // capturing.  Later, provide code to poke into statements for capture
        // analysis.
        return true;
      }
    }
    return false;
  }

  for (Stmt::const_child_range children = e->children(); children; ++children)
    if (isCapturedBy(var, cast<Expr>(*children)))
      return true;

  return false;
}

/// \brief Determine whether the given initializer is trivial in the sense
/// that it requires no code to be generated.
static bool isTrivialInitializer(const Expr *Init) {
  if (!Init)
    return true;

  if (const CXXConstructExpr *Construct = dyn_cast<CXXConstructExpr>(Init))
    if (CXXConstructorDecl *Constructor = Construct->getConstructor())
      if (Constructor->isTrivial() &&
          Constructor->isDefaultConstructor() &&
          !Construct->requiresZeroInitialization())
        return true;

  return false;
}
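
/// EmitAutoVarInit - Emit the initializer for an automatic variable, using a
/// memset/memcpy plus scalar stores for suitable constant aggregates and a
/// general expression emission otherwise.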
void CodeGenFunction::EmitAutoVarInit(const AutoVarEmission &emission) {
  assert(emission.Variable && "emission was not valid!");

  // If this was emitted as a global constant, we're done.
  if (emission.wasEmittedAsGlobal()) return;

  const VarDecl &D = *emission.Variable;
  QualType type = D.getType();

  // If this local has an initializer, emit it now.
  const Expr *Init = D.getInit();

  // If we are at an unreachable point, we don't need to emit the initializer
  // unless it contains a label.
  if (!HaveInsertPoint()) {
    if (!Init || !ContainsLabel(Init)) return;
    EnsureInsertPoint();
  }

  // Initialize the structure of a __block variable.
  if (emission.IsByRef)
    emitByrefStructureInit(emission);

  if (isTrivialInitializer(Init))
    return;

  CharUnits alignment = emission.Alignment;

  // Check whether this is a byref variable that's potentially
  // captured and moved by its own initializer.  If so, we'll need to
  // emit the initializer first, then copy into the variable.
  bool capturedByInit = emission.IsByRef && isCapturedBy(D, Init);

  llvm::Value *Loc =
    capturedByInit ? emission.Address : emission.getObjectAddress(*this);

  llvm::Constant *constant = 0;
  if (emission.IsConstantAggregate) {
    assert(!capturedByInit && "constant init contains a capturing block?");
    constant = CGM.EmitConstantInit(D, this);
  }

  if (!constant) {
    LValue lv = MakeAddrLValue(Loc, type, alignment);
    lv.setNonGC(true);
    return EmitExprAsInit(Init, &D, lv, capturedByInit);
  }

  // If this is a simple aggregate initialization, we can optimize it
  // in various ways.
  bool isVolatile = type.isVolatileQualified();

  llvm::Value *SizeVal =
    llvm::ConstantInt::get(IntPtrTy,
                           getContext().getTypeSizeInChars(type).getQuantity());

  llvm::Type *BP = Int8PtrTy;
  if (Loc->getType() != BP)
    Loc = Builder.CreateBitCast(Loc, BP);

  // If the initializer is all or mostly zeros, codegen with memset then do
  // a few stores afterward.
  if (shouldUseMemSetPlusStoresToInitialize(constant,
                CGM.getDataLayout().getTypeAllocSize(constant->getType()))) {
    Builder.CreateMemSet(Loc, llvm::ConstantInt::get(Int8Ty, 0), SizeVal,
                         alignment.getQuantity(), isVolatile);
    // Zero and undef don't require any stores.
    if (!constant->isNullValue() && !isa<llvm::UndefValue>(constant)) {
      Loc = Builder.CreateBitCast(Loc, constant->getType()->getPointerTo());
      emitStoresForInitAfterMemset(constant, Loc, isVolatile, Builder);
    }
  } else {
    // Otherwise, create a temporary global with the initializer then
    // memcpy from the global to the alloca.
    std::string Name = GetStaticDeclName(*this, D, ".");
    llvm::GlobalVariable *GV =
      new llvm::GlobalVariable(CGM.getModule(), constant->getType(), true,
                               llvm::GlobalValue::PrivateLinkage,
                               constant, Name);
    GV->setAlignment(alignment.getQuantity());
    GV->setUnnamedAddr(true);

    llvm::Value *SrcPtr = GV;
    if (SrcPtr->getType() != BP)
      SrcPtr = Builder.CreateBitCast(SrcPtr, BP);

    Builder.CreateMemCpy(Loc, SrcPtr, SizeVal, alignment.getQuantity(),
                         isVolatile);
  }
}

/// Emit an expression as an initializer for a variable at the given
/// location.  The expression is not necessarily the normal
/// initializer for the variable, and the address is not necessarily
/// its normal location.
///
/// \param init the initializing expression
/// \param D the variable to act as if we're initializing
/// \param lvalue the lvalue to initialize; its address is a pointer
///   to the LLVM mapping of the variable's type
/// \param capturedByInit true if the variable is a __block variable
///   whose address is potentially changed by the initializer
void CodeGenFunction::EmitExprAsInit(const Expr *init,
                                     const ValueDecl *D,
                                     LValue lvalue,
                                     bool capturedByInit) {
  QualType type = D->getType();

  if (type->isReferenceType()) {
    RValue rvalue = EmitReferenceBindingToExpr(init, D);
    if (capturedByInit)
      drillIntoBlockVariable(*this, lvalue, cast<VarDecl>(D));
    EmitStoreThroughLValue(rvalue, lvalue, true);
    return;
  }
  switch (getEvaluationKind(type)) {
  case TEK_Scalar:
    EmitScalarInit(init, D, lvalue, capturedByInit);
    return;
  case TEK_Complex: {
    ComplexPairTy complex = EmitComplexExpr(init);
    if (capturedByInit)
      drillIntoBlockVariable(*this, lvalue, cast<VarDecl>(D));
    EmitStoreOfComplex(complex, lvalue, /*init*/ true);
    return;
  }
  case TEK_Aggregate:
    if (type->isAtomicType()) {
      EmitAtomicInit(const_cast<Expr*>(init), lvalue);
    } else {
      // TODO: how can we delay here if D is captured by its initializer?
      EmitAggExpr(init, AggValueSlot::forLValue(lvalue,
                                              AggValueSlot::IsDestructed,
                                         AggValueSlot::DoesNotNeedGCBarriers,
                                              AggValueSlot::IsNotAliased));
    }
    MaybeEmitStdInitializerListCleanup(lvalue.getAddress(), init);
    return;
  }
  llvm_unreachable("bad evaluation kind");
}

/// Enter a destroy cleanup for the given local variable.
void CodeGenFunction::emitAutoVarTypeCleanup(
                            const CodeGenFunction::AutoVarEmission &emission,
                            QualType::DestructionKind dtorKind) {
  assert(dtorKind != QualType::DK_none);

  // Note that for __block variables, we want to destroy the
  // original stack object, not the possibly forwarded object.
  llvm::Value *addr = emission.getObjectAddress(*this);

  const VarDecl *var = emission.Variable;
  QualType type = var->getType();

  CleanupKind cleanupKind = NormalAndEHCleanup;
  CodeGenFunction::Destroyer *destroyer = 0;

  switch (dtorKind) {
  case QualType::DK_none:
    llvm_unreachable("no cleanup for trivially-destructible variable");

  case QualType::DK_cxx_destructor:
    // If there's an NRVO flag on the emission, we need a different
    // cleanup.
    if (emission.NRVOFlag) {
      assert(!type->isArrayType());
      CXXDestructorDecl *dtor = type->getAsCXXRecordDecl()->getDestructor();
      EHStack.pushCleanup<DestroyNRVOVariable>(cleanupKind, addr, dtor,
                                               emission.NRVOFlag);
      return;
    }
    break;

  case QualType::DK_objc_strong_lifetime:
    // Suppress cleanups for pseudo-strong variables.
    if (var->isARCPseudoStrong()) return;

    // Otherwise, consider whether to use an EH cleanup or not.
    cleanupKind = getARCCleanupKind();

    // Use the imprecise destroyer by default.
    if (!var->hasAttr<ObjCPreciseLifetimeAttr>())
      destroyer = CodeGenFunction::destroyARCStrongImprecise;
    break;

  case QualType::DK_objc_weak_lifetime:
    break;
  }

  // If we haven't chosen a more specific destroyer, use the default.
  if (!destroyer) destroyer = getDestroyer(dtorKind);

  // Use an EH cleanup in array destructors iff the destructor itself
  // is being pushed as an EH cleanup.
  bool useEHCleanup = (cleanupKind & EHCleanup);
  EHStack.pushCleanup<DestroyObject>(cleanupKind, addr, type, destroyer,
                                     useEHCleanup);
}

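/// EmitAutoVarCleanups - Push the cleanups required for the given automatic
/// variable: its destructor, GC lifetime extension, any cleanup attribute,
/// and __block teardown.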
void CodeGenFunction::EmitAutoVarCleanups(const AutoVarEmission &emission) {
  assert(emission.Variable && "emission was not valid!");

  // If this was emitted as a global constant, we're done.
  if (emission.wasEmittedAsGlobal()) return;

  // If we don't have an insertion point, we're done.  Sema prevents
  // us from jumping into any of these scopes anyway.
  if (!HaveInsertPoint()) return;

  const VarDecl &D = *emission.Variable;

  // Check the type for a cleanup.
  if (QualType::DestructionKind dtorKind = D.getType().isDestructedType())
    emitAutoVarTypeCleanup(emission, dtorKind);

  // In GC mode, honor objc_precise_lifetime.
  if (getLangOpts().getGC() != LangOptions::NonGC &&
      D.hasAttr<ObjCPreciseLifetimeAttr>()) {
    EHStack.pushCleanup<ExtendGCLifetime>(NormalCleanup, &D);
  }

  // Handle the cleanup attribute.
  if (const CleanupAttr *CA = D.getAttr<CleanupAttr>()) {
    const FunctionDecl *FD = CA->getFunctionDecl();

    llvm::Constant *F = CGM.GetAddrOfFunction(FD);
    assert(F && "Could not find function!");

    const CGFunctionInfo &Info = CGM.getTypes().arrangeFunctionDeclaration(FD);
    EHStack.pushCleanup<CallCleanupFunction>(NormalAndEHCleanup, F, &Info, &D);
  }

  // If this is a block variable, call _Block_object_destroy
  // (on the unforwarded address).
  if (emission.IsByRef)
    enterByrefCleanup(emission);
}

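/// getDestroyer - Return the default destroyer function for the given kind of
/// destruction.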
   1244 CodeGenFunction::Destroyer *
   1245 CodeGenFunction::getDestroyer(QualType::DestructionKind kind) {
   1246   switch (kind) {
   1247   case QualType::DK_none: llvm_unreachable("no destroyer for trivial dtor");
   1248   case QualType::DK_cxx_destructor:
   1249     return destroyCXXObject;
   1250   case QualType::DK_objc_strong_lifetime:
   1251     return destroyARCStrongPrecise;
   1252   case QualType::DK_objc_weak_lifetime:
   1253     return destroyARCWeak;
   1254   }
   1255   llvm_unreachable("Unknown DestructionKind");
   1256 }
   1257 
   1258 /// pushEHDestroy - Push the standard destructor for the given type as
   1259 /// an EH-only cleanup.
   1260 void CodeGenFunction::pushEHDestroy(QualType::DestructionKind dtorKind,
   1261                                   llvm::Value *addr, QualType type) {
   1262   assert(dtorKind && "cannot push destructor for trivial type");
   1263   assert(needsEHCleanup(dtorKind));
   1264 
   1265   pushDestroy(EHCleanup, addr, type, getDestroyer(dtorKind), true);
   1266 }
   1267 
   1268 /// pushDestroy - Push the standard destructor for the given type as
   1269 /// at least a normal cleanup.
   1270 void CodeGenFunction::pushDestroy(QualType::DestructionKind dtorKind,
   1271                                   llvm::Value *addr, QualType type) {
   1272   assert(dtorKind && "cannot push destructor for trivial type");
   1273 
   1274   CleanupKind cleanupKind = getCleanupKind(dtorKind);
   1275   pushDestroy(cleanupKind, addr, type, getDestroyer(dtorKind),
   1276               cleanupKind & EHCleanup);
   1277 }
   1278 
   1279 void CodeGenFunction::pushDestroy(CleanupKind cleanupKind, llvm::Value *addr,
   1280                                   QualType type, Destroyer *destroyer,
   1281                                   bool useEHCleanupForArray) {
   1282   pushFullExprCleanup<DestroyObject>(cleanupKind, addr, type,
   1283                                      destroyer, useEHCleanupForArray);
   1284 }
   1285 
   1286 /// emitDestroy - Immediately perform the destruction of the given
   1287 /// object.
   1288 ///
   1289 /// \param addr - the address of the object; a type*
   1290 /// \param type - the type of the object; if an array type, all
   1291 ///   objects are destroyed in reverse order
   1292 /// \param destroyer - the function to call to destroy individual
   1293 ///   elements
   1294 /// \param useEHCleanupForArray - whether an EH cleanup should be
   1295 ///   used when destroying array elements, in case one of the
   1296 ///   destructions throws an exception
   1297 void CodeGenFunction::emitDestroy(llvm::Value *addr, QualType type,
   1298                                   Destroyer *destroyer,
   1299                                   bool useEHCleanupForArray) {
   1300   const ArrayType *arrayType = getContext().getAsArrayType(type);
   1301   if (!arrayType)
   1302     return destroyer(*this, addr, type);
   1303 
   1304   llvm::Value *begin = addr;
   1305   llvm::Value *length = emitArrayLength(arrayType, type, begin);
   1306 
   1307   // Normally we have to check whether the array is zero-length.
   1308   bool checkZeroLength = true;
   1309 
   1310   // But if the array length is constant, we can suppress that.
   1311   if (llvm::ConstantInt *constLength = dyn_cast<llvm::ConstantInt>(length)) {
   1312     // ...and if it's constant zero, we can just skip the entire thing.
   1313     if (constLength->isZero()) return;
   1314     checkZeroLength = false;
   1315   }
   1316 
   1317   llvm::Value *end = Builder.CreateInBoundsGEP(begin, length);
   1318   emitArrayDestroy(begin, end, type, destroyer,
   1319                    checkZeroLength, useEHCleanupForArray);
   1320 }
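
        // Illustrative cases for the dispatch above, assuming a class X with a
        // non-trivial destructor:
        //
        //   X obj;               // not an array: the destroyer runs directly.
        //   X arr[8];            // constant length: the zero-length check is
        //                        // dropped, end = begin + 8, and the elements
        //                        // are destroyed in reverse by emitArrayDestroy.
        //   __strong id ids[n];  // (ARC) VLA: the length is a runtime value, so
        //                        // checkZeroLength stays true and an empty array
        //                        // branches straight past the destroy loop.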
   1321 
   1322 /// emitArrayDestroy - Destroys all the elements of the given array from
   1323 /// last to first.  The array cannot be empty unless checkZeroLength is set.
   1324 ///
   1325 /// \param begin - a type* denoting the first element of the array
   1326 /// \param end - a type* denoting one past the end of the array
   1327 /// \param type - the element type of the array
   1328 /// \param destroyer - the function to call to destroy elements
   1329 /// \param useEHCleanup - whether to push an EH cleanup to destroy
   1330 ///   the remaining elements in case the destruction of a single
   1331 ///   element throws
   1332 void CodeGenFunction::emitArrayDestroy(llvm::Value *begin,
   1333                                        llvm::Value *end,
   1334                                        QualType type,
   1335                                        Destroyer *destroyer,
   1336                                        bool checkZeroLength,
   1337                                        bool useEHCleanup) {
   1338   assert(!type->isArrayType());
   1339 
   1340   // The basic structure here is a do-while loop, because we usually
   1341   // don't need to check for the zero-element case.
   1342   llvm::BasicBlock *bodyBB = createBasicBlock("arraydestroy.body");
   1343   llvm::BasicBlock *doneBB = createBasicBlock("arraydestroy.done");
   1344 
   1345   if (checkZeroLength) {
   1346     llvm::Value *isEmpty = Builder.CreateICmpEQ(begin, end,
   1347                                                 "arraydestroy.isempty");
   1348     Builder.CreateCondBr(isEmpty, doneBB, bodyBB);
   1349   }
   1350 
   1351   // Enter the loop body, making that address the current address.
   1352   llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
   1353   EmitBlock(bodyBB);
   1354   llvm::PHINode *elementPast =
   1355     Builder.CreatePHI(begin->getType(), 2, "arraydestroy.elementPast");
   1356   elementPast->addIncoming(end, entryBB);
   1357 
   1358   // Shift the address back by one element.
   1359   llvm::Value *negativeOne = llvm::ConstantInt::get(SizeTy, -1, true);
   1360   llvm::Value *element = Builder.CreateInBoundsGEP(elementPast, negativeOne,
   1361                                                    "arraydestroy.element");
   1362 
   1363   if (useEHCleanup)
   1364     pushRegularPartialArrayCleanup(begin, element, type, destroyer);
   1365 
   1366   // Perform the actual destruction there.
   1367   destroyer(*this, element, type);
   1368 
   1369   if (useEHCleanup)
   1370     PopCleanupBlock();
   1371 
   1372   // Check whether we've reached the end.
   1373   llvm::Value *done = Builder.CreateICmpEQ(element, begin, "arraydestroy.done");
   1374   Builder.CreateCondBr(done, doneBB, bodyBB);
   1375   elementPast->addIncoming(element, Builder.GetInsertBlock());
   1376 
   1377   // Done.
   1378   EmitBlock(doneBB);
   1379 }
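
        // Roughly the IR this emits when the zero-length check is kept (a
        // sketch in the typed-pointer syntax of this era; %T stands for the
        // lowered element type, %pred for the predecessor block, and the
        // destroyer's own instructions are elided):
        //
        //     %isempty = icmp eq %T* %begin, %end
        //     br i1 %isempty, label %arraydestroy.done, label %arraydestroy.body
        //   arraydestroy.body:
        //     %elementPast = phi %T* [ %end, %pred ], [ %element, %arraydestroy.body ]
        //     %element = getelementptr inbounds %T* %elementPast, i64 -1
        //     ; ... destroy the object at %element ...
        //     %done = icmp eq %T* %element, %begin
        //     br i1 %done, label %arraydestroy.done, label %arraydestroy.body
        //   arraydestroy.done: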
   1380 
   1381 /// Perform partial array destruction as if in an EH cleanup.  Unlike
   1382 /// emitArrayDestroy, the element type here may still be an array type.
   1383 static void emitPartialArrayDestroy(CodeGenFunction &CGF,
   1384                                     llvm::Value *begin, llvm::Value *end,
   1385                                     QualType type,
   1386                                     CodeGenFunction::Destroyer *destroyer) {
   1387   // If the element type is itself an array, drill down.
   1388   unsigned arrayDepth = 0;
   1389   while (const ArrayType *arrayType = CGF.getContext().getAsArrayType(type)) {
   1390     // VLAs don't require a GEP index to walk into.
   1391     if (!isa<VariableArrayType>(arrayType))
   1392       arrayDepth++;
   1393     type = arrayType->getElementType();
   1394   }
   1395 
   1396   if (arrayDepth) {
   1397     llvm::Value *zero = llvm::ConstantInt::get(CGF.SizeTy, 0);
   1398 
   1399     SmallVector<llvm::Value*,4> gepIndices(arrayDepth+1, zero);
   1400     begin = CGF.Builder.CreateInBoundsGEP(begin, gepIndices, "pad.arraybegin");
   1401     end = CGF.Builder.CreateInBoundsGEP(end, gepIndices, "pad.arrayend");
   1402   }
   1403 
   1404   // Destroy the array.  We don't ever need an EH cleanup because we
   1405   // assume that we're in an EH cleanup ourselves, so a throwing
   1406   // destructor causes an immediate terminate.
   1407   CGF.emitArrayDestroy(begin, end, type, destroyer,
   1408                        /*checkZeroLength*/ true, /*useEHCleanup*/ false);
   1409 }
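
        // For example (a sketch): if the outer array's element type is X[4],
        // begin and end arrive here as [4 x %struct.X]* values and arrayDepth
        // ends up as 1, so the GEP carries two zero indices,
        //
        //   %pad.arraybegin = getelementptr inbounds [4 x %struct.X]* %begin, i64 0, i64 0
        //
        // yielding %struct.X* pointers that emitArrayDestroy can step through
        // one object at a time.  A VariableArrayType level changes 'type' but
        // contributes no index, per the comment in the loop above.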
   1410 
   1411 namespace {
   1412   /// RegularPartialArrayDestroy - a cleanup which performs a partial
   1413   /// array destroy where the end pointer is regularly determined and
   1414   /// does not need to be loaded from a local.
   1415   class RegularPartialArrayDestroy : public EHScopeStack::Cleanup {
   1416     llvm::Value *ArrayBegin;
   1417     llvm::Value *ArrayEnd;
   1418     QualType ElementType;
   1419     CodeGenFunction::Destroyer *Destroyer;
   1420   public:
   1421     RegularPartialArrayDestroy(llvm::Value *arrayBegin, llvm::Value *arrayEnd,
   1422                                QualType elementType,
   1423                                CodeGenFunction::Destroyer *destroyer)
   1424       : ArrayBegin(arrayBegin), ArrayEnd(arrayEnd),
   1425         ElementType(elementType), Destroyer(destroyer) {}
   1426 
   1427     void Emit(CodeGenFunction &CGF, Flags flags) {
   1428       emitPartialArrayDestroy(CGF, ArrayBegin, ArrayEnd,
   1429                               ElementType, Destroyer);
   1430     }
   1431   };
   1432 
   1433   /// IrregularPartialArrayDestroy - a cleanup which performs a
   1434   /// partial array destroy where the end pointer is irregularly
   1435   /// determined and must be loaded from a local.
   1436   class IrregularPartialArrayDestroy : public EHScopeStack::Cleanup {
   1437     llvm::Value *ArrayBegin;
   1438     llvm::Value *ArrayEndPointer;
   1439     QualType ElementType;
   1440     CodeGenFunction::Destroyer *Destroyer;
   1441   public:
   1442     IrregularPartialArrayDestroy(llvm::Value *arrayBegin,
   1443                                  llvm::Value *arrayEndPointer,
   1444                                  QualType elementType,
   1445                                  CodeGenFunction::Destroyer *destroyer)
   1446       : ArrayBegin(arrayBegin), ArrayEndPointer(arrayEndPointer),
   1447         ElementType(elementType), Destroyer(destroyer) {}
   1448 
   1449     void Emit(CodeGenFunction &CGF, Flags flags) {
   1450       llvm::Value *arrayEnd = CGF.Builder.CreateLoad(ArrayEndPointer);
   1451       emitPartialArrayDestroy(CGF, ArrayBegin, arrayEnd,
   1452                               ElementType, Destroyer);
   1453     }
   1454   };
   1455 }
   1456 
   1457 /// pushIrregularPartialArrayCleanup - Push an EH cleanup to destroy
   1458 /// already-constructed elements of the given array.  The cleanup
   1459 /// may be popped with DeactivateCleanupBlock or PopCleanupBlock.
   1460 ///
   1461 /// \param elementType - the immediate element type of the array;
   1462 ///   possibly still an array type
   1463 void CodeGenFunction::pushIrregularPartialArrayCleanup(llvm::Value *arrayBegin,
   1464                                                  llvm::Value *arrayEndPointer,
   1465                                                        QualType elementType,
   1466                                                        Destroyer *destroyer) {
   1467   pushFullExprCleanup<IrregularPartialArrayDestroy>(EHCleanup,
   1468                                                     arrayBegin, arrayEndPointer,
   1469                                                     elementType, destroyer);
   1470 }
   1471 
   1472 /// pushRegularPartialArrayCleanup - Push an EH cleanup to destroy
   1473 /// already-constructed elements of the given array.  The cleanup
   1474 /// may be popped with DeactivateCleanupBlock or PopCleanupBlock.
   1475 ///
   1476 /// \param elementType - the immediate element type of the array;
   1477 ///   possibly still an array type
   1478 void CodeGenFunction::pushRegularPartialArrayCleanup(llvm::Value *arrayBegin,
   1479                                                      llvm::Value *arrayEnd,
   1480                                                      QualType elementType,
   1481                                                      Destroyer *destroyer) {
   1482   pushFullExprCleanup<RegularPartialArrayDestroy>(EHCleanup,
   1483                                                   arrayBegin, arrayEnd,
   1484                                                   elementType, destroyer);
   1485 }
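
        // The two variants differ only in where the end pointer comes from.
        // emitArrayDestroy above uses the regular form: when destroying an
        // element it already holds that element's address as an SSA value, so
        // the cleanup covering the not-yet-destroyed prefix needs nothing
        // loaded from memory.  The irregular form suits initialization loops
        // that keep a "next element" pointer in an alloca and advance it as
        // elements are constructed; a sketch (names are illustrative, not from
        // this file):
        //
        //   llvm::Value *endOfInit = CreateTempAlloca(begin->getType(),
        //                                             "arrayinit.endOfInit");
        //   Builder.CreateStore(begin, endOfInit);
        //   pushIrregularPartialArrayCleanup(begin, endOfInit, elementType,
        //                                    getDestroyer(dtorKind));
        //   // ... emit each element, storing the advanced pointer back into
        //   // endOfInit so the cleanup knows how far construction got ...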
   1486 
   1487 namespace {
   1488   /// A cleanup to perform a release of an object at the end of a
   1489   /// function.  This is used to balance out the incoming +1 of an
   1490   /// ns_consumed argument when we can't reasonably do that just by
   1491   /// not doing the initial retain for a __block argument.
   1492   struct ConsumeARCParameter : EHScopeStack::Cleanup {
   1493     ConsumeARCParameter(llvm::Value *param,
   1494                         ARCPreciseLifetime_t precise)
   1495       : Param(param), Precise(precise) {}
   1496 
   1497     llvm::Value *Param;
   1498     ARCPreciseLifetime_t Precise;
   1499 
   1500     void Emit(CodeGenFunction &CGF, Flags flags) {
   1501       CGF.EmitARCRelease(Param, Precise);
   1502     }
   1503   };
   1504 }
   1505 
   1506 /// Emit an alloca (or GlobalValue depending on target)
   1507 /// for the specified parameter and set up LocalDeclMap.
   1508 void CodeGenFunction::EmitParmDecl(const VarDecl &D, llvm::Value *Arg,
   1509                                    unsigned ArgNo) {
   1510   // FIXME: Why isn't ImplicitParamDecl a ParmVarDecl?
   1511   assert((isa<ParmVarDecl>(D) || isa<ImplicitParamDecl>(D)) &&
   1512          "Invalid argument to EmitParmDecl");
   1513 
   1514   Arg->setName(D.getName());
   1515 
   1516   QualType Ty = D.getType();
   1517 
   1518   // Use better IR generation for certain implicit parameters.
   1519   if (isa<ImplicitParamDecl>(D)) {
   1520     // The only implicit argument a block has is its literal.
   1521     if (BlockInfo) {
   1522       LocalDeclMap[&D] = Arg;
   1523       llvm::Value *LocalAddr = 0;
   1524       if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
   1525         // Allocate a stack slot to let debug info survive the RA.
   1526         llvm::AllocaInst *Alloc = CreateTempAlloca(ConvertTypeForMem(Ty),
   1527                                                    D.getName() + ".addr");
   1528         Alloc->setAlignment(getContext().getDeclAlign(&D).getQuantity());
   1529         LValue lv = MakeAddrLValue(Alloc, Ty, getContext().getDeclAlign(&D));
   1530         EmitStoreOfScalar(Arg, lv, /* isInitialization */ true);
   1531         LocalAddr = Builder.CreateLoad(Alloc);
   1532       }
   1533 
   1534       if (CGDebugInfo *DI = getDebugInfo()) {
   1535         if (CGM.getCodeGenOpts().getDebugInfo()
   1536               >= CodeGenOptions::LimitedDebugInfo) {
   1537           DI->setLocation(D.getLocation());
   1538           DI->EmitDeclareOfBlockLiteralArgVariable(*BlockInfo, Arg, LocalAddr, Builder);
   1539         }
   1540       }
   1541 
   1542       return;
   1543     }
   1544   }
   1545 
   1546   llvm::Value *DeclPtr;
   1547   // If this is an aggregate or variable sized value, reuse the input pointer.
   1548   if (!Ty->isConstantSizeType() ||
   1549       !CodeGenFunction::hasScalarEvaluationKind(Ty)) {
   1550     DeclPtr = Arg;
   1551   } else {
   1552     // Otherwise, create a temporary to hold the value.
   1553     llvm::AllocaInst *Alloc = CreateTempAlloca(ConvertTypeForMem(Ty),
   1554                                                D.getName() + ".addr");
   1555     CharUnits Align = getContext().getDeclAlign(&D);
   1556     Alloc->setAlignment(Align.getQuantity());
   1557     DeclPtr = Alloc;
   1558 
   1559     bool doStore = true;
   1560 
   1561     Qualifiers qs = Ty.getQualifiers();
   1562     LValue lv = MakeAddrLValue(DeclPtr, Ty, Align);
   1563     if (Qualifiers::ObjCLifetime lt = qs.getObjCLifetime()) {
   1564       // We honor __attribute__((ns_consumed)) for types with lifetime.
   1565       // For __strong, it's handled by just skipping the initial retain;
   1566       // otherwise we have to balance out the initial +1 with an extra
   1567       // cleanup to do the release at the end of the function.
   1568       bool isConsumed = D.hasAttr<NSConsumedAttr>();
   1569 
   1570       // 'self' is always formally __strong, but if this is not an
   1571       // init method then we don't want to retain it.
   1572       if (D.isARCPseudoStrong()) {
   1573         const ObjCMethodDecl *method = cast<ObjCMethodDecl>(CurCodeDecl);
   1574         assert(&D == method->getSelfDecl());
   1575         assert(lt == Qualifiers::OCL_Strong);
   1576         assert(qs.hasConst());
   1577         assert(method->getMethodFamily() != OMF_init);
   1578         (void) method;
   1579         lt = Qualifiers::OCL_ExplicitNone;
   1580       }
   1581 
   1582       if (lt == Qualifiers::OCL_Strong) {
   1583         if (!isConsumed) {
   1584           if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
   1585             // Use objc_storeStrong(&dest, value) for retaining the
   1586             // object. But first, store a null into 'dest' because
   1587             // objc_storeStrong attempts to release its old value.
   1588             llvm::Value * Null = CGM.EmitNullConstant(D.getType());
   1589             EmitStoreOfScalar(Null, lv, /* isInitialization */ true);
   1590             EmitARCStoreStrongCall(lv.getAddress(), Arg, true);
   1591             doStore = false;
   1592           }
   1593           else
   1594             // Don't use objc_retainBlock for block pointers, because we
   1595             // don't want to Block_copy something just because we got it
   1596             // as a parameter.
   1597             Arg = EmitARCRetainNonBlock(Arg);
   1598         }
   1599       } else {
   1600         // Push the cleanup for a consumed parameter.
   1601         if (isConsumed) {
   1602           ARCPreciseLifetime_t precise = (D.hasAttr<ObjCPreciseLifetimeAttr>()
   1603                                 ? ARCPreciseLifetime : ARCImpreciseLifetime);
   1604           EHStack.pushCleanup<ConsumeARCParameter>(getARCCleanupKind(), Arg,
   1605                                                    precise);
   1606         }
   1607 
   1608         if (lt == Qualifiers::OCL_Weak) {
   1609           EmitARCInitWeak(DeclPtr, Arg);
   1610           doStore = false; // The weak init is a store, no need to do two.
   1611         }
   1612       }
   1613 
   1614       // Enter the cleanup scope.
   1615       EmitAutoVarWithLifetime(*this, D, DeclPtr, lt);
   1616     }
   1617 
   1618     // Store the initial value into the alloca.
   1619     if (doStore)
   1620       EmitStoreOfScalar(Arg, lv, /* isInitialization */ true);
   1621   }
   1622 
   1623   llvm::Value *&DMEntry = LocalDeclMap[&D];
   1624   assert(DMEntry == 0 && "Decl already exists in localdeclmap!");
   1625   DMEntry = DeclPtr;
   1626 
   1627   // Emit debug info for param declaration.
   1628   if (CGDebugInfo *DI = getDebugInfo()) {
   1629     if (CGM.getCodeGenOpts().getDebugInfo()
   1630           >= CodeGenOptions::LimitedDebugInfo) {
   1631       DI->EmitDeclareOfArgVariable(&D, DeclPtr, ArgNo, Builder);
   1632     }
   1633   }
   1634 
   1635   if (D.hasAttr<AnnotateAttr>())
   1636     EmitVarAnnotations(&D, DeclPtr);
   1637 }
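
        // A few illustrative parameter declarations and the paths they take in
        // the ObjC lifetime handling above (under ARC; the exact IR also
        // depends on the optimization level):
        //
        //   - (void)setName:(NSString *)name;
        //       // __strong, not consumed: retained on entry, via
        //       // EmitARCRetainNonBlock or, at -O0, objc_storeStrong into the
        //       // freshly null-initialized alloca.
        //   - (void)adoptObject:(__attribute__((ns_consumed)) id)obj;
        //       // __strong and consumed: the caller's +1 is kept, so the
        //       // initial retain is simply skipped.
        //   - (void)observeTarget:(__weak id)target;
        //       // __weak: EmitARCInitWeak initializes the slot and the plain
        //       // store is skipped (doStore = false).
        //
        // A consumed parameter that is not __strong instead gets a
        // ConsumeARCParameter cleanup so its +1 is released on function exit.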
   1638