//===-- EHScopeStack.h - Stack for cleanup IR generation --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes should be the minimum interface required for other parts of
// CodeGen to emit cleanups.  The implementation is in CGCleanup.cpp and other
// implementation details that are not widely needed are in CGCleanup.h.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H
#define LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H

#include "clang/Basic/LLVM.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Value.h"

namespace clang {
namespace CodeGen {

class CodeGenFunction;

/// A branch fixup.  These are required when emitting a goto to a
/// label which hasn't been emitted yet.  The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup.  When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, InitialBranch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The destination index value.
  unsigned DestinationIndex;

  /// The initial branch of the fixup.
  llvm::BranchInst *InitialBranch;
};

template <class T> struct InvariantValue {
  typedef T type;
  typedef T saved_type;
  static bool needsSaving(type value) { return false; }
  static saved_type save(CodeGenFunction &CGF, type value) { return value; }
  static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
};

/// A metaprogramming class for ensuring that a value will dominate an
/// arbitrary position in a function.
template <class T> struct DominatingValue : InvariantValue<T> {};

template <class T, bool mightBeInstruction =
            std::is_base_of<llvm::Value, T>::value &&
            !std::is_base_of<llvm::Constant, T>::value &&
            !std::is_base_of<llvm::BasicBlock, T>::value>
struct DominatingPointer;
template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
// template <class T> struct DominatingPointer<T,true> at end of file

template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};

enum CleanupKind : unsigned {
  /// Denotes a cleanup that should run when a scope is exited using exceptional
  /// control flow (e.g., a throw statement leading to stack unwinding).
  EHCleanup = 0x1,

  /// Denotes a cleanup that should run when a scope is exited using normal
  /// control flow (falling off the end of the scope, return, goto, ...).
  NormalCleanup = 0x2,

  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  InactiveCleanup = 0x4,
  InactiveEHCleanup = EHCleanup | InactiveCleanup,
  InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
  InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup
};

/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
class EHScopeStack {
public:
  /// A saved depth on the scope stack.  This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };
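
  // A minimal usage sketch for stable_iterator: save the current depth before
  // pushing scopes, then compare against it (or pop back to it) later.  The
  // PopCleanupBlocks call is declared on CodeGenFunction, not in this header,
  // and is shown only for illustration.
  //
  //   EHScopeStack::stable_iterator Depth = CGF.EHStack.stable_begin();
  //   // ... push cleanups ...
  //   assert(Depth.encloses(CGF.EHStack.stable_begin()));
  //   CGF.PopCleanupBlocks(Depth); // pops everything pushed since Depth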

  /// Information for lazily generating a cleanup.  Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
  class Cleanup {
    // Anchor the construction vtable.
    virtual void anchor();
  public:
    /// Generation flags.
    class Flags {
      enum {
        F_IsForEH             = 0x1,
        F_IsNormalCleanupKind = 0x2,
        F_IsEHCleanupKind     = 0x4
      };
      unsigned flags;

    public:
      Flags() : flags(0) {}

      /// isForEHCleanup - true if the current emission is for an EH cleanup.
      bool isForEHCleanup() const { return flags & F_IsForEH; }
      bool isForNormalCleanup() const { return !isForEHCleanup(); }
      void setIsForEHCleanup() { flags |= F_IsForEH; }

      bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
      void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }

      /// isEHCleanupKind - true if the cleanup was pushed as an EH
      /// cleanup.
      bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
      void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }
    };

    // Provide a virtual destructor to suppress a very common warning
    // that unfortunately cannot be suppressed without this.  Cleanups
    // should not rely on this destructor ever being called.
    virtual ~Cleanup() {}

    /// Emit the cleanup.  For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope.  For
    /// EH cleanups, this is run in a terminate context.
    ///
    /// \param flags cleanup kind.
    virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
  };
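
  // A minimal sketch of a concrete cleanup, assuming a hypothetical
  // CallObjectDtor that tears down an object at a given address.  Real
  // cleanups are declared in anonymous namespaces in the CodeGen .cpp files;
  // this one exists only to illustrate the Emit contract.
  //
  //   namespace {
  //   struct CallObjectDtor final : EHScopeStack::Cleanup {
  //     llvm::Value *Addr;
  //     CallObjectDtor(llvm::Value *Addr) : Addr(Addr) {}
  //     void Emit(CodeGenFunction &CGF, Flags flags) override {
  //       // Emit the IR that destroys the object at Addr.  The same code is
  //       // run for normal and EH exits; query flags to distinguish them.
  //     }
  //   };
  //   }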

  /// ConditionalCleanup stores the saved form of its parameters,
  /// then restores them and performs the cleanup.
  template <class T, class... As> class ConditionalCleanup : public Cleanup {
    typedef std::tuple<typename DominatingValue<As>::saved_type...> SavedTuple;
    SavedTuple Saved;

    template <std::size_t... Is>
    T restore(CodeGenFunction &CGF, llvm::index_sequence<Is...>) {
      // It's important that the restores are emitted in order. The braced init
      // list guarantees that.
      return T{DominatingValue<As>::restore(CGF, std::get<Is>(Saved))...};
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      restore(CGF, llvm::index_sequence_for<As...>()).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup(typename DominatingValue<As>::saved_type... A)
        : Saved(A...) {}

    ConditionalCleanup(SavedTuple Tuple) : Saved(std::move(Tuple)) {}
  };
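
  // A sketch of how ConditionalCleanup composes with an ordinary cleanup
  // class, reusing the hypothetical CallObjectDtor from the sketch above.
  // SavedArgs stands for a tuple of already-saved (dominating) values.
  //
  //   typedef EHScopeStack::ConditionalCleanup<CallObjectDtor, llvm::Value *>
  //       ConditionalCallObjectDtor;
  //   // At emission time the saved llvm::Value* is restored and
  //   // CallObjectDtor{Addr}.Emit(CGF, flags) runs.
  //   CGF.EHStack.pushCleanupTuple<ConditionalCallObjectDtor>(
  //       NormalAndEHCleanup, SavedArgs);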

private:
  // The implementation for this class is in CGCleanup.h and
  // CGCleanup.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer.  All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH scope on the stack.
  stable_iterator InnermostEHScope;

  /// The current set of branch fixups.  A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth.  Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement.  The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations;  e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  SmallVector<BranchFixup, 8> BranchFixups;

  char *allocate(size_t Size);

  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack() : StartOfBuffer(nullptr), EndOfBuffer(nullptr),
                   StartOfData(nullptr), InnermostNormalCleanup(stable_end()),
                   InnermostEHScope(stable_end()) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class... As> void pushCleanup(CleanupKind Kind, As... A) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(A...);
    (void) Obj;
  }
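
  // Typical use, with the hypothetical CallObjectDtor from the sketch above
  // standing in for any cleanup class and CGF for the enclosing
  // CodeGenFunction, whose EHStack member is an EHScopeStack:
  //
  //   CGF.EHStack.pushCleanup<CallObjectDtor>(NormalAndEHCleanup, Addr);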

  /// Push a lazily-created cleanup on the stack. Tuple version.
  template <class T, class... As>
  void pushCleanupTuple(CleanupKind Kind, std::tuple<As...> A) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(std::move(A));
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack.  The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
  template <class T, class... As>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, As... A) {
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, A...);
  }
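
  // A sketch of a cleanup with trailing variable-size storage, as required by
  // pushCleanupWithExtra.  The name and layout here are illustrative only.
  //
  //   struct DestroyTrailingObjects final : EHScopeStack::Cleanup {
  //     size_t NumAddrs;
  //     DestroyTrailingObjects(size_t NumAddrs) : NumAddrs(NumAddrs) {}
  //     static size_t getExtraSize(size_t N) { return N * sizeof(llvm::Value *); }
  //     llvm::Value **getAddrs() {
  //       // The extra storage follows the object itself in the allocation.
  //       return reinterpret_cast<llvm::Value **>(this + 1);
  //     }
  //     void Emit(CodeGenFunction &CGF, Flags flags) override { /* ... */ }
  //   };
  //
  //   DestroyTrailingObjects *C =
  //       CGF.EHStack.pushCleanupWithExtra<DestroyTrailingObjects>(NormalCleanup, N);
  //   // Fill C->getAddrs()[0..N) before the stack is modified again.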

  void pushCopyOfCleanup(CleanupKind Kind, const void *Cleanup, size_t Size) {
    void *Buffer = pushCleanup(Kind, Size);
    std::memcpy(Buffer, Cleanup, Size);
  }

  /// Pops a cleanup scope off the stack.  This is private to CGCleanup.cpp.
  void popCleanup();

  /// Push a set of catch handlers on the stack.  The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack.  This is private to CGException.cpp.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  bool requiresLandingPad() const {
    return InnermostEHScope != stable_end();
  }

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const;

  stable_iterator getInnermostEHScope() const {
    return InnermostEHScope;
  }

  stable_iterator getInnermostActiveEHScope() const;

  /// An unstable reference to a scope-stack depth.  Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack.  The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Turn a stable reference to a scope depth into an unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Removes the cleanup pointed to by the given stable_iterator.
  void removeCleanup(stable_iterator save);

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list.  This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list.  This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }
};

} // namespace CodeGen
} // namespace clang

#endif