//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef CLANG_CODEGEN_CGCLEANUP_H
#define CLANG_CODEGEN_CGCLEANUP_H

#include "EHScopeStack.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"

namespace llvm {
class BasicBlock;
class Value;
class ConstantInt;
class AllocaInst;
}

namespace clang {
namespace CodeGen {

/// A protected scope for zero-cost EH handling.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;
  llvm::BasicBlock *CachedEHDispatchBlock;

  EHScopeStack::stable_iterator EnclosingEHScope;

  class CommonBitFields {
    friend class EHScope;
    unsigned Kind : 2;
  };
  enum { NumCommonBits = 2 };

protected:
  class CatchBitFields {
    friend class EHCatchScope;
    unsigned : NumCommonBits;

    unsigned NumHandlers : 32 - NumCommonBits;
  };

  class CleanupBitFields {
    friend class EHCleanupScope;
    unsigned : NumCommonBits;

    /// Whether this cleanup needs to be run along normal edges.
    unsigned IsNormalCleanup : 1;

    /// Whether this cleanup needs to be run along exception edges.
    unsigned IsEHCleanup : 1;

    /// Whether this cleanup is currently active.
    unsigned IsActive : 1;

    /// Whether the normal cleanup should test the activation flag.
    unsigned TestFlagInNormalCleanup : 1;

    /// Whether the EH cleanup should test the activation flag.
    unsigned TestFlagInEHCleanup : 1;

    /// The amount of extra storage needed by the Cleanup.
    /// Always a multiple of the scope-stack alignment.
    unsigned CleanupSize : 12;

    /// The number of fixups required by enclosing scopes (not including
    /// this one).  If this is the top cleanup scope, all the fixups
    /// from this index onwards belong to this scope.
    unsigned FixupDepth : 32 - 17 - NumCommonBits; // currently 13
  };
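  // Note on the bit budget above: 2 kind bits (NumCommonBits) + 5 one-bit
  // flags + 12 CleanupSize bits + 13 FixupDepth bits == 32, so the whole
  // CleanupBitFields record fits in a single 32-bit word.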

  class FilterBitFields {
    friend class EHFilterScope;
    unsigned : NumCommonBits;

    unsigned NumFilters : 32 - NumCommonBits;
  };

  union {
    CommonBitFields CommonBits;
    CatchBitFields CatchBits;
    CleanupBitFields CleanupBits;
    FilterBitFields FilterBits;
  };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter };

  EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
    : CachedLandingPad(0), CachedEHDispatchBlock(0),
      EnclosingEHScope(enclosingEHScope) {
    CommonBits.Kind = kind;
  }

  Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *block) {
    CachedLandingPad = block;
  }

  llvm::BasicBlock *getCachedEHDispatchBlock() const {
    return CachedEHDispatchBlock;
  }

  void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
    CachedEHDispatchBlock = block;
  }

  bool hasEHBranches() const {
    if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
      return !block->use_empty();
    return false;
  }

  EHScopeStack::stable_iterator getEnclosingEHScope() const {
    return EnclosingEHScope;
  }
};
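
// Illustrative sketch (not part of the original header): the Kind bits and
// the classof() hooks on the subclasses below let scopes be inspected with
// the usual LLVM casting utilities.  'EHStack' here stands in for a
// CodeGenFunction's EHScopeStack member:
//
//   EHScope &scope = *EHStack.begin();
//   if (EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&scope))
//     (void) cleanup->isActive();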

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C \@finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    llvm::Value *Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    bool isCatchAll() const { return Type == 0; }
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned numHandlers,
               EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Catch, enclosingEHScope) {
    CatchBits.NumHandlers = numHandlers;
  }

  unsigned getNumHandlers() const {
    return CatchBits.NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, /*catchall*/ 0, Block);
  }

  void setHandler(unsigned I, llvm::Value *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};
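
// Illustrative sketch (not part of the original header) of how a catch scope
// and its trailing Handler array are laid out when pushed.  'allocate',
// 'innermostEHScope', 'typeInfoValue', 'handlerBlock' and 'catchAllBlock' are
// placeholders for whatever the EHScopeStack push path actually uses:
//
//   char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(2));
//   EHCatchScope *scope = new (buffer) EHCatchScope(2, innermostEHScope);
//   scope->setHandler(0, typeInfoValue, handlerBlock); // catch (T)
//   scope->setCatchAllHandler(1, catchAllBlock);       // catch (...)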

/// A cleanup scope which generates the cleanup blocks lazily.
class EHCleanupScope : public EHScope {
  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge.  This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  llvm::AllocaInst *ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them.  This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
  }

  EHCleanupScope(bool isNormal, bool isEH, bool isActive,
                 unsigned cleanupSize, unsigned fixupDepth,
                 EHScopeStack::stable_iterator enclosingNormal,
                 EHScopeStack::stable_iterator enclosingEH)
    : EHScope(EHScope::Cleanup, enclosingEH), EnclosingNormal(enclosingNormal),
      NormalBlock(0), ActiveFlag(0), ExtInfo(0) {
    CleanupBits.IsNormalCleanup = isNormal;
    CleanupBits.IsEHCleanup = isEH;
    CleanupBits.IsActive = isActive;
    CleanupBits.TestFlagInNormalCleanup = false;
    CleanupBits.TestFlagInEHCleanup = false;
    CleanupBits.CleanupSize = cleanupSize;
    CleanupBits.FixupDepth = fixupDepth;

    assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
  }

  ~EHCleanupScope() {
    delete ExtInfo;
  }

  bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }
  llvm::BasicBlock *getEHBlock() const { return getCachedEHDispatchBlock(); }
  void setEHBlock(llvm::BasicBlock *BB) { setCachedEHDispatchBlock(BB); }

  bool isActive() const { return CleanupBits.IsActive; }
  void setActive(bool A) { CleanupBits.IsActive = A; }

  llvm::AllocaInst *getActiveFlag() const { return ActiveFlag; }
  void setActiveFlag(llvm::AllocaInst *Var) { ActiveFlag = Var; }

  void setTestFlagInNormalCleanup() {
    CleanupBits.TestFlagInNormalCleanup = true;
  }
  bool shouldTestFlagInNormalCleanup() const {
    return CleanupBits.TestFlagInNormalCleanup;
  }

  void setTestFlagInEHCleanup() {
    CleanupBits.TestFlagInEHCleanup = true;
  }
  bool shouldTestFlagInEHCleanup() const {
    return CleanupBits.TestFlagInEHCleanup;
  }

  unsigned getFixupDepth() const { return CleanupBits.FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }

  size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }
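
  // Illustrative sketch (not part of the original header): the concrete
  // EHScopeStack::Cleanup object lives in the trailing buffer returned by
  // getCleanupBuffer(), so pushing a cleanup amounts to a placement-new into
  // that space.  'allocate', 'size', 'fixupDepth', 'enclosingNormal',
  // 'enclosingEH' and 'MyCleanup' are placeholders:
  //
  //   char *buffer = allocate(EHCleanupScope::getSizeForCleanupSize(size));
  //   EHCleanupScope *scope = new (buffer)
  //     EHCleanupScope(/*isNormal*/true, /*isEH*/true, /*isActive*/true,
  //                    size, fixupDepth, enclosingNormal, enclosingEH);
  //   new (scope->getCleanupBuffer()) MyCleanup(/*args*/);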

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope.  A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it.  Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block))
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }
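
  // Illustrative sketch (not part of the original header) of the normal-edge
  // IR this sets up once the cleanup is popped, assuming two branch-afters
  // with indices 1 and 2 (block and slot names are placeholders):
  //
  //   cleanup:                                   ; the NormalBlock
  //     ...run the cleanup...
  //     %dest = load i32* %cleanup.dest.slot
  //     switch i32 %dest, label %fallthrough [ i32 1, label %break.out
  //                                            i32 2, label %continue.out ]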

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope.  A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through.  Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block);
  }

  /// Determines if this cleanup scope has any branch-throughs.  Branches
  /// holds the destinations of both branch-afters and branch-throughs, while
  /// BranchAfters holds only the former, so a size difference implies at
  /// least one branch-through.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};

/// An exception scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
class EHFilterScope : public EHScope {
  // Essentially ends in a flexible array member:
  // llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned numFilters)
    : EHScope(Filter, EHScopeStack::stable_end()) {
    FilterBits.NumFilters = numFilters;
  }

  static size_t getSizeForNumFilters(unsigned numFilters) {
    return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return FilterBits.NumFilters; }

  void setFilter(unsigned i, llvm::Value *filterValue) {
    assert(i < getNumFilters());
    getFilters()[i] = filterValue;
  }

  llvm::Value *getFilter(unsigned i) const {
    assert(i < getNumFilters());
    return getFilters()[i];
  }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Filter;
  }
};
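
// Illustrative example (not part of the original header): a dynamic exception
// specification such as
//
//   void f() throw(A, B);
//
// is implemented by pushing an EHFilterScope with one filter per listed type.
// 'buffer', 'typeInfoForA' and 'typeInfoForB' below are placeholders for the
// allocation and the RTTI values:
//
//   EHFilterScope *filter =
//     new (buffer) EHFilterScope(2); // buffer: getSizeForNumFilters(2) bytes
//   filter->setFilter(0, typeInfoForA);
//   filter->setFilter(1, typeInfoForB);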

/// An exception scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Terminate, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Terminate;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(0) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    switch (get()->getKind()) {
    case EHScope::Catch:
      Ptr += EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope*>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Ptr += EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope*>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Ptr += static_cast<const EHCleanupScope*>(get())
        ->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Ptr += EHTerminateScope::getSize();
      break;
    }

    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};
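
// Illustrative sketch (not part of the original header): walking every scope
// on the stack, innermost first, with the sized-step iterator above
// ('EHStack' stands in for a CodeGenFunction's EHScopeStack member):
//
//   for (EHScopeStack::iterator it = EHStack.begin(), end = EHStack.end();
//        it != end; ++it)
//     if (isa<EHCleanupScope>(*it) && cast<EHCleanupScope>(*it).isActive())
//       /* ... */;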

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when not empty");

  EHCatchScope &scope = cast<EHCatchScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  StartOfData += EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers());
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when not empty");

  EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  StartOfData += EHTerminateScope::getSize();
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}
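
// Illustrative sketch (not part of the original header): a stable_iterator is
// just an offset from the end of the buffer, so it stays valid across pushes
// that would invalidate a raw iterator; find() converts it back:
//
//   EHScopeStack::stable_iterator saved = EHStack.stabilize(EHStack.begin());
//   ... push (and later pop) more scopes ...
//   EHScopeStack::iterator it = EHStack.find(saved);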

} // end namespace CodeGen
} // end namespace clang

#endif // CLANG_CODEGEN_CGCLEANUP_H