//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H
#define LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H

#include "EHScopeStack.h"

#include "Address.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"

namespace llvm {
class BasicBlock;
class Value;
class ConstantInt;
class AllocaInst;
}

namespace clang {
class FunctionDecl;
namespace CodeGen {
class CodeGenModule;
class CodeGenFunction;

/// The MS C++ ABI needs a pointer to RTTI data plus some flags to describe the
/// type of a catch handler, so we use this wrapper.
struct CatchTypeInfo {
  llvm::Constant *RTTI;
  unsigned Flags;
};
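
// For example, a catch-all handler is encoded as a null RTTI pointer with
// zero flags; this is exactly what EHCatchScope::setCatchAllHandler below
// builds:
//
//   CatchTypeInfo CatchAll = {/*RTTI=*/nullptr, /*Flags=*/0};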

/// A protected scope for zero-cost EH handling.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;
  llvm::BasicBlock *CachedEHDispatchBlock;

  EHScopeStack::stable_iterator EnclosingEHScope;

  class CommonBitFields {
    friend class EHScope;
    unsigned Kind : 3;
  };
  enum { NumCommonBits = 3 };

protected:
  class CatchBitFields {
    friend class EHCatchScope;
    unsigned : NumCommonBits;

    unsigned NumHandlers : 32 - NumCommonBits;
  };

  class CleanupBitFields {
    friend class EHCleanupScope;
    unsigned : NumCommonBits;

    /// Whether this cleanup needs to be run along normal edges.
    unsigned IsNormalCleanup : 1;

    /// Whether this cleanup needs to be run along exception edges.
    unsigned IsEHCleanup : 1;

    /// Whether this cleanup is currently active.
    unsigned IsActive : 1;

    /// Whether this cleanup is a lifetime marker.
    unsigned IsLifetimeMarker : 1;

    /// Whether the normal cleanup should test the activation flag.
    unsigned TestFlagInNormalCleanup : 1;

    /// Whether the EH cleanup should test the activation flag.
    unsigned TestFlagInEHCleanup : 1;

    /// The amount of extra storage needed by the Cleanup.
    /// Always a multiple of the scope-stack alignment.
    unsigned CleanupSize : 12;

    /// The number of fixups required by enclosing scopes (not including
    /// this one).  If this is the top cleanup scope, all the fixups
    /// from this index onwards belong to this scope.
    unsigned FixupDepth : 32 - 18 - NumCommonBits; // currently 11
  };
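
  // A quick check of the bit budget above: Kind uses NumCommonBits (3),
  // the six boolean flags use 6, CleanupSize uses 12, and FixupDepth gets
  // the remaining 32 - 18 - 3 = 11 bits, for 32 bits in total.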

  class FilterBitFields {
    friend class EHFilterScope;
    unsigned : NumCommonBits;

    unsigned NumFilters : 32 - NumCommonBits;
  };

  union {
    CommonBitFields CommonBits;
    CatchBitFields CatchBits;
    CleanupBitFields CleanupBits;
    FilterBitFields FilterBits;
  };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter, PadEnd };

  EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
    : CachedLandingPad(nullptr), CachedEHDispatchBlock(nullptr),
      EnclosingEHScope(enclosingEHScope) {
    CommonBits.Kind = kind;
  }

  Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *block) {
    CachedLandingPad = block;
  }

  llvm::BasicBlock *getCachedEHDispatchBlock() const {
    return CachedEHDispatchBlock;
  }

  void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
    CachedEHDispatchBlock = block;
  }

  bool hasEHBranches() const {
    if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
      return !block->use_empty();
    return false;
  }

  EHScopeStack::stable_iterator getEnclosingEHScope() const {
    return EnclosingEHScope;
  }
};
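
// Each concrete scope below implements classof(), so scopes pulled off an
// EHScopeStack can be inspected with the usual LLVM casting machinery.  A
// minimal sketch (assuming an EHScopeStack named EHStack, as in
// CodeGenFunction):
//
//   EHScope &innermost = *EHStack.begin();
//   if (auto *catchScope = dyn_cast<EHCatchScope>(&innermost))
//     unsigned N = catchScope->getNumHandlers();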

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C \@finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    CatchTypeInfo Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    bool isCatchAll() const { return Type.RTTI == nullptr; }
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned numHandlers,
               EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Catch, enclosingEHScope) {
    CatchBits.NumHandlers = numHandlers;
  }

  unsigned getNumHandlers() const {
    return CatchBits.NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, CatchTypeInfo{nullptr, 0}, Block);
  }

  void setHandler(unsigned I, llvm::Constant *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = CatchTypeInfo{Type, 0};
    getHandlers()[I].Block = Block;
  }

  void setHandler(unsigned I, CatchTypeInfo Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  // Clear all handler blocks.
  // FIXME: it's better to always call clearHandlerBlocks in DTOR and have a
  // 'takeHandler' or some such function which removes ownership from the
  // EHCatchScope object if the handlers should live longer than EHCatchScope.
  void clearHandlerBlocks() {
    for (unsigned I = 0, N = getNumHandlers(); I != N; ++I)
      delete getHandler(I).Block;
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};
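
// A rough sketch of how a catch scope is typically populated (assuming the
// pushCatch entry point declared in EHScopeStack.h; the RTTI constant and
// basic blocks here are placeholders):
//
//   EHCatchScope *catchScope = EHStack.pushCatch(/*NumHandlers=*/2);
//   catchScope->setHandler(0, FooRTTI, FooHandlerBlock);
//   catchScope->setCatchAllHandler(1, CatchAllBlock);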

/// A cleanup scope which generates the cleanup blocks lazily.
class LLVM_ALIGNAS(/*alignof(uint64_t)*/ 8) EHCleanupScope : public EHScope {
  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge.  This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  llvm::AllocaInst *ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them.  This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
  }

  EHCleanupScope(bool isNormal, bool isEH, bool isActive,
                 unsigned cleanupSize, unsigned fixupDepth,
                 EHScopeStack::stable_iterator enclosingNormal,
                 EHScopeStack::stable_iterator enclosingEH)
    : EHScope(EHScope::Cleanup, enclosingEH), EnclosingNormal(enclosingNormal),
      NormalBlock(nullptr), ActiveFlag(nullptr), ExtInfo(nullptr) {
    CleanupBits.IsNormalCleanup = isNormal;
    CleanupBits.IsEHCleanup = isEH;
    CleanupBits.IsActive = isActive;
    CleanupBits.IsLifetimeMarker = false;
    CleanupBits.TestFlagInNormalCleanup = false;
    CleanupBits.TestFlagInEHCleanup = false;
    CleanupBits.CleanupSize = cleanupSize;
    CleanupBits.FixupDepth = fixupDepth;

    assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
  }

  void Destroy() {
    delete ExtInfo;
  }
  // Objects of EHCleanupScope are not destructed. Use Destroy().
  ~EHCleanupScope() = delete;

  bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }

  bool isActive() const { return CleanupBits.IsActive; }
  void setActive(bool A) { CleanupBits.IsActive = A; }

  bool isLifetimeMarker() const { return CleanupBits.IsLifetimeMarker; }
  void setLifetimeMarker() { CleanupBits.IsLifetimeMarker = true; }

  bool hasActiveFlag() const { return ActiveFlag != nullptr; }
  Address getActiveFlag() const {
    return Address(ActiveFlag, CharUnits::One());
  }
  void setActiveFlag(Address Var) {
    assert(Var.getAlignment().isOne());
    ActiveFlag = cast<llvm::AllocaInst>(Var.getPointer());
  }

  void setTestFlagInNormalCleanup() {
    CleanupBits.TestFlagInNormalCleanup = true;
  }
  bool shouldTestFlagInNormalCleanup() const {
    return CleanupBits.TestFlagInNormalCleanup;
  }

  void setTestFlagInEHCleanup() {
    CleanupBits.TestFlagInEHCleanup = true;
  }
  bool shouldTestFlagInEHCleanup() const {
    return CleanupBits.TestFlagInEHCleanup;
  }

  unsigned getFixupDepth() const { return CleanupBits.FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }

  size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope.  A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it.  Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block).second)
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope.  A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through.  Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block).second;
  }

  /// Determines if this cleanup scope has any branch throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};
// NOTE: there's a bunch of different data classes tacked on after an
// EHCleanupScope. It is asserted (in EHScopeStack::pushCleanup*) that
// they don't require greater alignment than ScopeStackAlignment. So,
// EHCleanupScope ought to have alignment equal to that -- not more
// (would be misaligned by the stack allocator), and not less (would
// break the appended classes).
static_assert(llvm::AlignOf<EHCleanupScope>::Alignment ==
                  EHScopeStack::ScopeStackAlignment,
              "EHCleanupScope expected alignment");
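
// A minimal sketch of how a cleanup ends up in the trailing buffer of an
// EHCleanupScope (assuming the EHScopeStack::Cleanup interface and the
// pushCleanup template declared in EHScopeStack.h; the cleanup class itself
// is illustrative):
//
//   struct ExampleCleanup final : EHScopeStack::Cleanup {
//     void Emit(CodeGenFunction &CGF, Flags flags) override {
//       // emit the cleanup's IR here
//     }
//   };
//   // Allocates an EHCleanupScope whose CleanupSize covers the cleanup object.
//   EHStack.pushCleanup<ExampleCleanup>(NormalAndEHCleanup);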

/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
class EHFilterScope : public EHScope {
  // Essentially ends in a flexible array member:
  // llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned numFilters)
    : EHScope(Filter, EHScopeStack::stable_end()) {
    FilterBits.NumFilters = numFilters;
  }

  static size_t getSizeForNumFilters(unsigned numFilters) {
    return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return FilterBits.NumFilters; }

  void setFilter(unsigned i, llvm::Value *filterValue) {
    assert(i < getNumFilters());
    getFilters()[i] = filterValue;
  }

  llvm::Value *getFilter(unsigned i) const {
    assert(i < getNumFilters());
    return getFilters()[i];
  }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Filter;
  }
};
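
// A rough sketch of how an exception-specification filter is filled in
// (assuming the pushFilter entry point declared in EHScopeStack.h; the
// RTTI values stand in for the types the function is allowed to throw):
//
//   EHFilterScope *filterScope = EHStack.pushFilter(/*NumFilters=*/2);
//   filterScope->setFilter(0, FooRTTI);
//   filterScope->setFilter(1, BarRTTI);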

/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Terminate, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Terminate;
  }
};

class EHPadEndScope : public EHScope {
public:
  EHPadEndScope(EHScopeStack::stable_iterator enclosingEHScope)
      : EHScope(PadEnd, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHPadEndScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == PadEnd;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(nullptr) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    size_t Size;
    switch (get()->getKind()) {
    case EHScope::Catch:
      Size = EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope *>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Size = EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope *>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Size = static_cast<const EHCleanupScope *>(get())->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Size = EHTerminateScope::getSize();
      break;

    case EHScope::PadEnd:
      Size = EHPadEndScope::getSize();
      break;
    }
    Ptr += llvm::RoundUpToAlignment(Size, ScopeStackAlignment);
    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};
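
// Because operator++ advances by each scope's allocated size, walking the
// whole stack is a plain begin()/end() loop.  A minimal sketch (assuming an
// EHScopeStack named EHStack):
//
//   unsigned NumCleanups = 0;
//   for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.end();
//        I != E; ++I)
//     if (isa<EHCleanupScope>(*I))
//       ++NumCleanups;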

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when empty");

  EHCatchScope &scope = cast<EHCatchScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers()));
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when empty");

  EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHTerminateScope::getSize());
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}
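
// find() and stabilize() are inverses of each other: a stable_iterator
// records an offset from the end of the buffer, so it stays valid across
// later pushes, while a plain iterator is just a pointer into the current
// buffer.  For example (assuming the savepoint has not been popped):
//
//   EHScopeStack::stable_iterator save = EHStack.stable_begin();
//   // ... push more scopes ...
//   EHScopeStack::iterator it = EHStack.find(save);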

/// The exceptions personality for a function.
struct EHPersonality {
  const char *PersonalityFn;

  // If this is non-null, this personality requires a non-standard
  // function for rethrowing an exception after a catchall cleanup.
  // This function must have prototype void(void*).
  const char *CatchallRethrowFn;

  static const EHPersonality &get(CodeGenModule &CGM, const FunctionDecl *FD);
  static const EHPersonality &get(CodeGenFunction &CGF);

  static const EHPersonality GNU_C;
  static const EHPersonality GNU_C_SJLJ;
  static const EHPersonality GNU_C_SEH;
  static const EHPersonality GNU_ObjC;
  static const EHPersonality GNUstep_ObjC;
  static const EHPersonality GNU_ObjCXX;
  static const EHPersonality NeXT_ObjC;
  static const EHPersonality GNU_CPlusPlus;
  static const EHPersonality GNU_CPlusPlus_SJLJ;
  static const EHPersonality GNU_CPlusPlus_SEH;
  static const EHPersonality MSVC_except_handler;
  static const EHPersonality MSVC_C_specific_handler;
  static const EHPersonality MSVC_CxxFrameHandler3;

  /// Does this personality use the family of pad instructions designed to
  /// form funclets (rather than landingpads)?
  bool usesFuncletPads() const { return isMSVCPersonality(); }

  bool isMSVCPersonality() const {
    return this == &MSVC_except_handler || this == &MSVC_C_specific_handler ||
           this == &MSVC_CxxFrameHandler3;
  }

  bool isMSVCXXPersonality() const { return this == &MSVC_CxxFrameHandler3; }
};
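
// A minimal sketch of how the personality is consulted during IR generation
// (assuming a CodeGenFunction reference named CGF):
//
//   const EHPersonality &Personality = EHPersonality::get(CGF);
//   if (Personality.usesFuncletPads()) {
//     // emit catchpad/cleanuppad-style EH rather than landingpads
//   }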
} // end namespace CodeGen
} // end namespace clang

#endif